Mirror of https://github.com/prowler-cloud/prowler.git
Synced 2026-01-25 02:08:11 +00:00

Compare commits: PRWLR-4669...4.3.2 (7 commits)
Commits:

- 8df7f3ab72
- 8adc72ad57
- 9addf86aa5
- 2913d50a52
- c6c06b3354
- 8242fa883e
- 6646bae26c
.backportrc.json
@@ -1,14 +0,0 @@
{
  "repoOwner": "prowler-cloud",
  "repoName": "prowler",
  "targetPRLabels": [
    "backport"
  ],
  "sourcePRLabels": [
    "was-backported"
  ],
  "copySourcePRLabels": false,
  "copySourcePRReviewers": true,
  "prTitle": "{{sourcePullRequest.title}}",
  "commitConflicts": true
}
.env
@@ -1,89 +0,0 @@
#### Important Note ####
# This file is used to store environment variables for the Prowler App.
# For production, it is recommended to use a secure method to store these variables and change the default secret keys.

#### Prowler UI Configuration ####
PROWLER_UI_VERSION="latest"
SITE_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
AUTH_TRUST_HOST=true
UI_PORT=3000
# openssl rand -base64 32
AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="

#### Prowler API Configuration ####
PROWLER_API_VERSION="latest"
# PostgreSQL settings
# If running Django and celery on host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=postgres-db
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler_admin
POSTGRES_ADMIN_PASSWORD=postgres
POSTGRES_USER=prowler
POSTGRES_PASSWORD=postgres
POSTGRES_DB=prowler_db

# Valkey settings
# If running Valkey and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=valkey
VALKEY_PORT=6379
VALKEY_DB=0

# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api
DJANGO_BIND_ADDRESS=0.0.0.0
DJANGO_PORT=8080
DJANGO_DEBUG=False
DJANGO_SETTINGS_MODULE=config.django.production
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER=human_readable
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
DJANGO_WORKERS=4 # Defaults to the maximum available based on CPU cores if not set.
DJANGO_ACCESS_TOKEN_LIFETIME=30 # Token lifetime is in minutes
DJANGO_REFRESH_TOKEN_LIFETIME=1440 # Token lifetime is in minutes
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_MANAGE_DB_PARTITIONS=True
# openssl genrsa -out private.pem 2048
DJANGO_TOKEN_SIGNING_KEY="-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDs4e+kt7SnUJek
6V5r9zMGzXCoU5qnChfPiqu+BgANyawz+MyVZPs6RCRfeo6tlCknPQtOziyXYM2I
7X+qckmuzsjqp8+u+o1mw3VvUuJew5k2SQLPYwsiTzuFNVJEOgRo3hywGiGwS2iv
/5nh2QAl7fq2qLqZEXQa5+/xJlQggS1CYxOJgggvLyra50QZlBvPve/AxKJ/EV/Q
irWTZU5lLNI8sH2iZR05vQeBsxZ0dCnGMT+vGl+cGkqrvzQzKsYbDmabMcfTYhYi
78fpv6A4uharJFHayypYBjE39PwhMyyeycrNXlpm1jpq+03HgmDuDMHydk1tNwuT
nEC7m7iNAgMBAAECggEAA2m48nJcJbn9SVi8bclMwKkWmbJErOnyEGEy2sTK3Of+
NWx9BB0FmqAPNxn0ss8K7cANKOhDD7ZLF9E2MO4/HgfoMKtUzHRbM7MWvtEepldi
nnvcUMEgULD8Dk4HnqiIVjt3BdmGiTv46OpBnRWrkSBV56pUL+7msZmMZTjUZvh2
ZWv0+I3gtDIjo2Zo/FiwDV7CfwRjJarRpYUj/0YyuSA4FuOUYl41WAX1I301FKMH
xo3jiAYi1s7IneJ16OtPpOA34Wg5F6ebm/UO0uNe+iD4kCXKaZmxYQPh5tfB0Qa3
qj1T7GNpFNyvtG7VVdauhkb8iu8X/wl6PCwbg0RCKQKBgQD9HfpnpH0lDlHMRw9K
X7Vby/1fSYy1BQtlXFEIPTN/btJ/asGxLmAVwJ2HAPXWlrfSjVAH7CtVmzN7v8oj
HeIHfeSgoWEu1syvnv2AMaYSo03UjFFlfc/GUxF7DUScRIhcJUPCP8jkAROz9nFv
DByNjUL17Q9r43DmDiRsy0IFqQKBgQDvlJ9Uhl+Sp7gRgKYwa/IG0+I4AduAM+Gz
Dxbm52QrMGMTjaJFLmLHBUZ/ot+pge7tZZGws8YR8ufpyMJbMqPjxhIvRRa/p1Tf
E3TQPW93FMsHUvxAgY3MV5MzXFPhlNAKb+akP/RcXUhetGAuZKLubtDCWa55ZQuL
wj2OS+niRQKBgE7K8zUqNi6/22S8xhy/2GPgB1qPObbsABUofK0U6CAGLo6te+gc
6Jo84IyzFtQbDNQFW2Fr+j1m18rw9AqkdcUhQndiZS9AfG07D+zFB86LeWHt4DS4
ymIRX8Kvaak/iDcu/n3Mf0vCrhB6aetImObTj4GgrwlFvtJOmrYnO8EpAoGAIXXP
Xt25gWD9OyyNiVu6HKwA/zN7NYeJcRmdaDhO7B1A6R0x2Zml4AfjlbXoqOLlvLAf
zd79vcoAC82nH1eOPiSOq51plPDI0LMF8IN0CtyTkn1Lj7LIXA6rF1RAvtOqzppc
SvpHpZK9pcRpXnFdtBE0BMDDtl6fYzCIqlP94UUCgYEAnhXbAQMF7LQifEm34Dx8
BizRMOKcqJGPvbO2+Iyt50O5X6onU2ITzSV1QHtOvAazu+B1aG9pEuBFDQ+ASxEu
L9ruJElkOkb/o45TSF6KCsHd55ReTZ8AqnRjf5R+lyzPqTZCXXb8KTcRvWT4zQa3
VxyT2PnaSqEcexWUy4+UXoQ=
-----END PRIVATE KEY-----"
# openssl rsa -in private.pem -pubout -out public.pem
DJANGO_TOKEN_VERIFYING_KEY="-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA7OHvpLe0p1CXpOlea/cz
Bs1wqFOapwoXz4qrvgYADcmsM/jMlWT7OkQkX3qOrZQpJz0LTs4sl2DNiO1/qnJJ
rs7I6qfPrvqNZsN1b1LiXsOZNkkCz2MLIk87hTVSRDoEaN4csBohsEtor/+Z4dkA
Je36tqi6mRF0Gufv8SZUIIEtQmMTiYIILy8q2udEGZQbz73vwMSifxFf0Iq1k2VO
ZSzSPLB9omUdOb0HgbMWdHQpxjE/rxpfnBpKq780MyrGGw5mmzHH02IWIu/H6b+g
OLoWqyRR2ssqWAYxN/T8ITMsnsnKzV5aZtY6avtNx4Jg7gzB8nZNbTcLk5xAu5u4
jQIDAQAB
-----END PUBLIC KEY-----"
# openssl rand -base64 32
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
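The inline comments in this file give the exact commands for regenerating each of these default secrets. As a minimal sketch (output filenames are illustrative), rotating all three before a real deployment looks like:

``` console
# New 32-byte random secrets for AUTH_SECRET and DJANGO_SECRETS_ENCRYPTION_KEY
openssl rand -base64 32

# New RSA keypair for DJANGO_TOKEN_SIGNING_KEY / DJANGO_TOKEN_VERIFYING_KEY
openssl genrsa -out private.pem 2048
openssl rsa -in private.pem -pubout -out public.pem
```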
.github/CODEOWNERS
@@ -1,6 +1,5 @@
/* @prowler-cloud/sdk
* @prowler-cloud/sdk @prowler-cloud/detection-and-remediation

# To protect a repository fully against unauthorized changes, you also need to define an owner for the CODEOWNERS file itself.
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-and-branch-protection
/.github/ @prowler-cloud/sdk
prowler @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
tests @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
api @prowler-cloud/api
ui @prowler-cloud/ui
.github/codeql/api-codeql-config.yml
@@ -1,3 +0,0 @@
name: "API - CodeQL Config"
paths:
  - "api/"

.github/codeql/sdk-codeql-config.yml
@@ -1,4 +0,0 @@
name: "SDK - CodeQL Config"
paths-ignore:
  - "api/"
  - "ui/"

.github/codeql/ui-codeql-config.yml
@@ -1,3 +0,0 @@
name: "UI - CodeQL Config"
paths:
  - "ui/"
.github/dependabot.yml
@@ -5,7 +5,6 @@

version: 2
updates:
  # v5
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
@@ -15,90 +14,29 @@ updates:
    labels:
      - "dependencies"
      - "pip"

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 10
    target-branch: master
    labels:
      - "dependencies"
      - "github_actions"

  - package-ecosystem: "npm"
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "daily"
    open-pull-requests-limit: 10
    target-branch: master
    labels:
      - "dependencies"
      - "npm"

  - package-ecosystem: "docker"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: master
    labels:
      - "dependencies"
      - "docker"

  # v4.6
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: v4.6
    labels:
      - "dependencies"
      - "pip"
      - "v4"

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: v4.6
    labels:
      - "dependencies"
      - "github_actions"
      - "v4"

  - package-ecosystem: "docker"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: v4.6
    labels:
      - "dependencies"
      - "docker"
      - "v4"

  # v3
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "monthly"
    open-pull-requests-limit: 10
    target-branch: v3
    labels:
      - "dependencies"
      - "pip"
      - "v3"

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "monthly"
      interval: "daily"
    open-pull-requests-limit: 10
    target-branch: v3
    labels:
      - "dependencies"
      - "github_actions"
      - "v3"
.github/labeler.yml
@@ -22,11 +22,6 @@ provider/kubernetes:
      - any-glob-to-any-file: "prowler/providers/kubernetes/**"
      - any-glob-to-any-file: "tests/providers/kubernetes/**"

provider/github:
  - changed-files:
      - any-glob-to-any-file: "prowler/providers/github/**"
      - any-glob-to-any-file: "tests/providers/github/**"

github_actions:
  - changed-files:
      - any-glob-to-any-file: ".github/workflows/*"

@@ -84,11 +79,3 @@ output/csv:
  - changed-files:
      - any-glob-to-any-file: "prowler/lib/outputs/csv/**"
      - any-glob-to-any-file: "tests/lib/outputs/csv/**"

component/api:
  - changed-files:
      - any-glob-to-any-file: "api/**"

component/ui:
  - changed-files:
      - any-glob-to-any-file: "ui/**"
.github/pull_request_template.md
@@ -14,8 +14,7 @@ Please include a summary of the change and which issue is fixed. List any depend
- If so, do we need to update permissions for the provider? Please review this carefully.
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.

### License

By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
.github/workflows/api-build-lint-push-containers.yml
@@ -1,81 +0,0 @@
name: API - Build and Push containers

on:
  push:
    branches:
      - "master"
    paths:
      - "api/**"
      - ".github/workflows/api-build-lint-push-containers.yml"

  # Uncomment the code below to test this action on PRs
  # pull_request:
  #   branches:
  #     - "master"
  #   paths:
  #     - "api/**"
  #     - ".github/workflows/api-build-lint-push-containers.yml"

  release:
    types: [published]

env:
  # Tags
  LATEST_TAG: latest
  RELEASE_TAG: ${{ github.event.release.tag_name }}

  WORKING_DIRECTORY: ./api

  # Container Registries
  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api

jobs:
  # Build Prowler OSS container
  container-build-push:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ${{ env.WORKING_DIRECTORY }}

    steps:
      - name: Repository check
        working-directory: /tmp
        run: |
          [[ ${{ github.repository }} != "prowler-cloud/prowler" ]] && echo "This action only runs for prowler-cloud/prowler"; exit 0

      - name: Checkout
        uses: actions/checkout@v4

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push container image (latest)
        # Comment the following line for testing
        if: github.event_name == 'push'
        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          # Set push: false for testing
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Build and push container image (release)
        if: github.event_name == 'release'
        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
.github/workflows/api-pull-request.yml
@@ -1,171 +0,0 @@
name: API - Pull Request

on:
  push:
    branches:
      - "master"
    paths:
      - "api/**"
  pull_request:
    branches:
      - "master"
    paths:
      - "api/**"

env:
  POSTGRES_HOST: localhost
  POSTGRES_PORT: 5432
  POSTGRES_ADMIN_USER: prowler
  POSTGRES_ADMIN_PASSWORD: S3cret
  POSTGRES_USER: prowler_user
  POSTGRES_PASSWORD: prowler
  POSTGRES_DB: postgres-db
  VALKEY_HOST: localhost
  VALKEY_PORT: 6379
  VALKEY_DB: 0

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.12"]

    # Service containers to run with `test`
    services:
      # Label used to access the service container
      postgres:
        image: postgres
        env:
          POSTGRES_HOST: ${{ env.POSTGRES_HOST }}
          POSTGRES_PORT: ${{ env.POSTGRES_PORT }}
          POSTGRES_USER: ${{ env.POSTGRES_USER }}
          POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
          POSTGRES_DB: ${{ env.POSTGRES_DB }}
        # Set health checks to wait until postgres has started
        ports:
          - 5432:5432
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
      valkey:
        image: valkey/valkey:7-alpine3.19
        env:
          VALKEY_HOST: ${{ env.VALKEY_HOST }}
          VALKEY_PORT: ${{ env.VALKEY_PORT }}
          VALKEY_DB: ${{ env.VALKEY_DB }}
        # Set health checks to wait until postgres has started
        ports:
          - 6379:6379
        options: >-
          --health-cmd "valkey-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    steps:
      - uses: actions/checkout@v4

      - name: Test if changes are in not ignored paths
        id: are-non-ignored-files-changed
        uses: tj-actions/changed-files@v45
        with:
          files: api/**
          files_ignore: |
            api/.github/**
            api/docs/**
            api/permissions/**
            api/README.md
            api/mkdocs.yml

      - name: Install poetry
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          python -m pip install --upgrade pip
          pipx install poetry

      - name: Set up Python ${{ matrix.python-version }}
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "poetry"

      - name: Install dependencies
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry install
          poetry run pip list
          VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
            grep '"tag_name":' | \
            sed -E 's/.*"v([^"]+)".*/\1/' \
            ) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
            && chmod +x /tmp/hadolint

      - name: Poetry check
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry lock --check

      - name: Lint with ruff
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run ruff check . --exclude contrib

      - name: Check Format with ruff
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run ruff format --check . --exclude contrib

      - name: Lint with pylint
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/

      - name: Bandit
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .

      - name: Safety
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run safety check --ignore 70612,66963

      - name: Vulture
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .

      - name: Hadolint
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          /tmp/hadolint Dockerfile --ignore=DL3013

      - name: Test with pytest
        working-directory: ./api
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pytest --cov=./src/backend --cov-report=xml src/backend

      - name: Upload coverage reports to Codecov
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        uses: codecov/codecov-action@v5
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        with:
          flags: api
.github/workflows/backport.yml
@@ -1,47 +0,0 @@
name: Prowler - Automatic Backport

on:
  pull_request_target:
    branches: ['master']
    types: ['labeled', 'closed']

env:
  # The prefix of the label that triggers the backport must not contain the branch name
  # so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
  BACKPORT_LABEL_PREFIX: backport-to-
  BACKPORT_LABEL_IGNORE: was-backported

jobs:
  backport:
    name: Backport PR
    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      pull-requests: write
      contents: write
    steps:
      - name: Check labels
        id: preview_label_check
        uses: docker://agilepathway/pull-request-label-checker:v1.6.55
        with:
          allow_failure: true
          prefix_mode: true
          any_of: ${{ env.BACKPORT_LABEL_PREFIX }}
          none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
          repo_token: ${{ secrets.GITHUB_TOKEN }}

      - name: Backport Action
        if: steps.preview_label_check.outputs.label_check == 'success'
        uses: sorenlouv/backport-github-action@v9.5.1
        with:
          github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}

      - name: Info log
        if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
        run: cat ~/.backport/backport.info.log

      - name: Debug log
        if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
        run: cat ~/.backport/backport.debug.log
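As the env comments in this workflow describe, the trigger is a label of the form `backport-to-<branch>` on a merged PR, while PRs already carrying `backport` or `was-backported` are skipped. A hypothetical example with the GitHub CLI (PR number and target branch are illustrative):

``` console
# Queue merged PR #1234 for an automatic backport to the v4.6 branch
gh pr edit 1234 --add-label "backport-to-v4.6"
```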
@@ -1,4 +1,4 @@
name: Prowler - Pull Request Documentation Link
name: Pull Request Documentation Link

on:
  pull_request:
@@ -16,9 +16,9 @@ jobs:
    name: Documentation Link
    runs-on: ubuntu-latest
    steps:
      - name: Leave PR comment with the Prowler Documentation URI
      - name: Leave PR comment with the SaaS Documentation URI
        uses: peter-evans/create-or-update-comment@v4
        with:
          issue-number: ${{ env.PR_NUMBER }}
          body: |
            You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
            You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
@@ -1,20 +1,14 @@
name: SDK - Build and Push containers
name: build-lint-push-containers

on:
  push:
    branches:
      # For `v3-latest`
      - "v3"
      # For `v4-latest`
      - "v4.6"
      # For `latest`
      - "master"
    paths-ignore:
      - ".github/**"
      - "README.md"
      - "docs/**"
      - "ui/**"
      - "api/**"

  release:
    types: [published]
@@ -42,10 +36,6 @@ env:
  # Python configuration
  PYTHON_VERSION: 3.12

  # Container Registries
  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler

jobs:
  # Build Prowler OSS container
  container-build-push:
@@ -53,7 +43,7 @@ jobs:
    runs-on: ubuntu-latest
    outputs:
      prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
      prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
      prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
    env:
      POETRY_VIRTUALENVS_CREATE: "false"
@@ -75,8 +65,6 @@ jobs:
        id: get-prowler-version
        run: |
          PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"

          # Store prowler version major just for the release
          PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
@@ -89,13 +77,7 @@ jobs:
              echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
              ;;

            4)
              echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
              echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
              ;;

            5)
              echo "LATEST_TAG=latest" >> "${GITHUB_ENV}"
              echo "STABLE_TAG=stable" >> "${GITHUB_ENV}"
              ;;
@@ -107,6 +89,15 @@ jobs:
              ;;
          esac

      - name: Update Prowler version (release)
        id: update-prowler-version
        if: github.event_name == 'release'
        run: |
          PROWLER_VERSION="${{ github.event.release.tag_name }}"
          poetry version "${PROWLER_VERSION}"
          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
@@ -133,7 +124,6 @@ jobs:
          tags: |
            ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
            ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
          file: ${{ env.DOCKERFILE_PATH }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
@@ -151,8 +141,6 @@ jobs:
            ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
            ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.PROWLER_VERSION }}
            ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.STABLE_TAG }}
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.PROWLER_VERSION }}
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
          file: ${{ env.DOCKERFILE_PATH }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
@@ -172,7 +160,7 @@ jobs:
        run: |
          curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
          -H "Accept: application/vnd.github+json" \
          -H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
          -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
          -H "X-GitHub-Api-Version: 2022-11-28" \
          --data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'

@@ -181,6 +169,6 @@ jobs:
        run: |
          curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
          -H "Accept: application/vnd.github+json" \
          -H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
          -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
          -H "X-GitHub-Api-Version: 2022-11-28" \
          --data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'
@@ -9,25 +9,14 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: API - CodeQL
name: "CodeQL"

on:
  push:
    branches:
      - "master"
      - "v3"
      - "v4.*"
      - "v5.*"
    paths:
      - "api/**"
    branches: [ "master", "v3" ]
  pull_request:
    branches:
      - "master"
      - "v3"
      - "v4.*"
      - "v5.*"
    paths:
      - "api/**"
    # The branches below must be a subset of the branches above
    branches: [ "master", "v3" ]
  schedule:
    - cron: '00 12 * * *'

@@ -55,7 +44,12 @@ jobs:
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/api-codeql-config.yml
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.

          # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
.github/workflows/find-secrets.yml
@@ -1,4 +1,4 @@
name: Prowler - Find secrets
name: find-secrets

on: pull_request

@@ -11,9 +11,9 @@ jobs:
        with:
          fetch-depth: 0
      - name: TruffleHog OSS
        uses: trufflesecurity/trufflehog@v3.86.1
        uses: trufflesecurity/trufflehog@3.80.4
        with:
          path: ./
          base: ${{ github.event.repository.default_branch }}
          head: HEAD
          extra_args: --only-verified
          extra_args: --only-verified
.github/workflows/labeler.yml
@@ -1,11 +1,10 @@
name: Prowler - PR Labeler
name: "Pull Request Labeler"

on:
  pull_request_target:
    branches:
      - "master"
      - "v3"
      - "v4.*"

jobs:
  labeler:
@@ -1,18 +1,14 @@
name: SDK - Pull Request
name: pr-lint-test

on:
  push:
    branches:
      - "master"
      - "v3"
      - "v4.*"
      - "v5.*"
  pull_request:
    branches:
      - "master"
      - "v3"
      - "v4.*"
      - "v5.*"
jobs:
  build:
    runs-on: ubuntu-latest
@@ -22,35 +18,28 @@ jobs:

    steps:
      - uses: actions/checkout@v4

      - name: Test if changes are in not ignored paths
        id: are-non-ignored-files-changed
        uses: tj-actions/changed-files@v45
        uses: tj-actions/changed-files@v44
        with:
          files: ./**
          files_ignore: |
            .github/**
            README.md
            docs/**
            permissions/**
            api/**
            ui/**
            README.md
            mkdocs.yml
            .backportrc.json

      - name: Install poetry
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          python -m pip install --upgrade pip
          pipx install poetry

      - name: Set up Python ${{ matrix.python-version }}
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
          cache: "poetry"

      - name: Install dependencies
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
@@ -61,56 +50,44 @@ jobs:
            sed -E 's/.*"v([^"]+)".*/\1/' \
            ) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
            && chmod +x /tmp/hadolint

      - name: Poetry check
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry lock --check

      - name: Lint with flake8
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api

          poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib
      - name: Checking format with black
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run black --exclude api ui --check .

          poetry run black --check .
      - name: Lint with pylint
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/

      - name: Bandit
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .

          poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
      - name: Safety
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run safety check --ignore 70612 -r pyproject.toml

          poetry run safety check --ignore 70612
      - name: Vulture
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .

          poetry run vulture --exclude "contrib" --min-confidence 100 .
      - name: Hadolint
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          /tmp/hadolint Dockerfile --ignore=DL3013

      - name: Test with pytest
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        run: |
          poetry run pytest -n auto --cov=./prowler --cov-report=xml tests

      - name: Upload coverage reports to Codecov
        if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
        uses: codecov/codecov-action@v5
        uses: codecov/codecov-action@v4
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        with:
          flags: prowler
@@ -1,4 +1,4 @@
name: SDK - PyPI release
name: pypi-release

on:
  release:
@@ -8,6 +8,8 @@ env:
  RELEASE_TAG: ${{ github.event.release.tag_name }}
  PYTHON_VERSION: 3.11
  CACHE: "poetry"
  # TODO: create a bot user for this kind of tasks, like prowler-bot
  GIT_COMMITTER_EMAIL: "sergio@prowler.com"

jobs:
  release-prowler-job:
@@ -27,9 +29,6 @@ jobs:
            4)
              echo "Releasing Prowler v4 with tag ${PROWLER_VERSION}"
              ;;
            5)
              echo "Releasing Prowler v5 with tag ${PROWLER_VERSION}"
              ;;
            *)
              echo "Releasing another Prowler major version, aborting..."
              exit 1
@@ -41,6 +40,7 @@ jobs:
      - name: Install dependencies
        run: |
          pipx install poetry
          pipx inject poetry poetry-bumpversion

      - name: Setup Python
        uses: actions/setup-python@v5
@@ -48,6 +48,34 @@ jobs:
          python-version: ${{ env.PYTHON_VERSION }}
          cache: ${{ env.CACHE }}

      - name: Update Poetry and config version
        run: |
          poetry version ${{ env.RELEASE_TAG }}

      - name: Import GPG key
        uses: crazy-max/ghaction-import-gpg@v6
        with:
          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
          passphrase: ${{ secrets.GPG_PASSPHRASE }}
          git_user_signingkey: true
          git_commit_gpgsign: true

      - name: Push updated version to the release tag
        run: |
          # Configure Git
          git config user.name "github-actions"
          git config user.email "${{ env.GIT_COMMITTER_EMAIL }}"

          # Add the files with the version changed
          git add prowler/config/config.py pyproject.toml
          git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify -S

          # Replace the tag with the version updated
          git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}" --sign

          # Push the tag
          git push -f origin ${{ env.RELEASE_TAG }}

      - name: Build Prowler package
        run: |
          poetry build
@@ -1,6 +1,6 @@
# This is a basic workflow to help you get started with Actions

name: SDK - Refresh AWS services' regions
name: Refresh regions of AWS services

on:
  schedule:
@@ -50,13 +50,13 @@ jobs:

      # Create pull request
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          commit-message: "feat(regions_update): Update regions for AWS services"
          token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
          commit-message: "feat(regions_update): Update regions for AWS services."
          branch: "aws-services-regions-updated-${{ github.sha }}"
          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
          title: "chore(regions_update): Changes in regions for AWS services"
          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
          title: "chore(regions_update): Changes in regions for AWS services."
          body: |
            ### Description
.github/workflows/sdk-codeql.yml
@@ -1,63 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: SDK - CodeQL

on:
  push:
    branches:
      - "master"
      - "v3"
      - "v4.*"
    paths-ignore:
      - 'ui/**'
      - 'api/**'
  pull_request:
    branches:
      - "master"
      - "v3"
      - "v4.*"
    paths-ignore:
      - 'ui/**'
      - 'api/**'
  schedule:
    - cron: '00 12 * * *'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/sdk-codeql-config.yml

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/ui-build-lint-push-containers.yml
@@ -1,81 +0,0 @@
name: UI - Build and Push containers

on:
  push:
    branches:
      - "master"
    paths:
      - "ui/**"
      - ".github/workflows/ui-build-lint-push-containers.yml"

  # Uncomment the below code to test this action on PRs
  # pull_request:
  #   branches:
  #     - "master"
  #   paths:
  #     - "ui/**"
  #     - ".github/workflows/ui-build-lint-push-containers.yml"

  release:
    types: [published]

env:
  # Tags
  LATEST_TAG: latest
  RELEASE_TAG: ${{ github.event.release.tag_name }}

  WORKING_DIRECTORY: ./ui

  # Container Registries
  PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
  PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui

jobs:
  # Build Prowler OSS container
  container-build-push:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ${{ env.WORKING_DIRECTORY }}

    steps:
      - name: Repository check
        working-directory: /tmp
        run: |
          [[ ${{ github.repository }} != "prowler-cloud/prowler" ]] && echo "This action only runs for prowler-cloud/prowler"; exit 0

      - name: Checkout
        uses: actions/checkout@v4

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push container image (latest)
        # Comment the following line for testing
        if: github.event_name == 'push'
        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          # Set push: false for testing
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      - name: Build and push container image (release)
        if: github.event_name == 'release'
        uses: docker/build-push-action@v6
        with:
          context: ${{ env.WORKING_DIRECTORY }}
          push: true
          tags: |
            ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
.github/workflows/ui-codeql.yml
@@ -1,61 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: UI - CodeQL

on:
  push:
    branches:
      - "master"
      - "v4.*"
      - "v5.*"
    paths:
      - "ui/**"
  pull_request:
    branches:
      - "master"
      - "v4.*"
      - "v5.*"
    paths:
      - "ui/**"
  schedule:
    - cron: "00 12 * * *"

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: ["javascript"]
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          config-file: ./.github/codeql/ui-codeql-config.yml

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
.github/workflows/ui-pull-request.yml
@@ -1,34 +0,0 @@
name: UI - Pull Request

on:
  pull_request:
    branches:
      - master
    paths:
      - 'ui/**'

jobs:
  test-and-coverage:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        os: [ubuntu-latest]
        node-version: [20.x]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Setup Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}
      - name: Install dependencies
        working-directory: ./ui
        run: npm install
      - name: Run Healthcheck
        working-directory: ./ui
        run: npm run healthcheck
      - name: Build the application
        working-directory: ./ui
        run: npm run build
.gitignore
@@ -12,7 +12,6 @@ build/
/dist/
*.egg-info/
*/__pycache__/*.pyc
.idea/

# Session
Session.vim
@@ -47,8 +46,7 @@ junit-reports/
*.tfstate

# .env
ui/.env*
api/.env*
.env*

# Coverage
.coverage*
@@ -57,6 +55,3 @@ coverage*

# Node
node_modules

# Persistent data
_data/
.pre-commit-config.yaml
@@ -85,7 +85,7 @@ repos:
        # For running trufflehog in docker, use the following entry instead:
        # entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
        language: system
        stages: ["pre-commit", "pre-push"]
        stages: ["commit", "push"]

      - id: bandit
        name: bandit
@@ -97,13 +97,12 @@ repos:
      - id: safety
        name: safety
        description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
        entry: bash -c 'safety check --ignore 70612,66963'
        entry: bash -c 'safety check --ignore 70612'
        language: system

      - id: vulture
        name: vulture
        description: "Vulture finds unused code in Python programs."
        entry: bash -c 'vulture --exclude "contrib" --min-confidence 100 .'
        exclude: 'api/src/backend/'
        language: system
        files: '.*\.py'
Dockerfile
@@ -1,29 +1,30 @@
FROM python:3.12.8-alpine3.20
FROM python:3.12-alpine

LABEL maintainer="https://github.com/prowler-cloud/prowler"

# Update system dependencies and install essential tools
# Update system dependencies
#hadolint ignore=DL3018
RUN apk --no-cache upgrade && apk --no-cache add curl git
RUN apk --no-cache upgrade && apk --no-cache add curl

# Create non-root user
# Create nonroot user
RUN mkdir -p /home/prowler && \
    echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
    echo 'prowler:x:1000:' > /etc/group && \
    chown -R prowler:prowler /home/prowler
USER prowler

# Copy necessary files
# Copy necessary files
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY dashboard/ /home/prowler/dashboard/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler

# Install Python dependencies
# Install dependencies
ENV HOME='/home/prowler'
ENV PATH="$HOME/.local/bin:$PATH"
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
#hadolint ignore=DL3013
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir .

# Remove deprecated dash dependencies
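Either variant of this Dockerfile builds a self-contained Prowler CLI image from the repository root. A minimal local sketch (image tag is illustrative, and it assumes the `prowler` entrypoint installed by pip lands on the image's PATH via the `ENV PATH` line above):

``` console
docker build -t prowler-local .
docker run --rm prowler-local prowler -v
```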
README.md
@@ -3,20 +3,20 @@
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
<b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
<b><i>Prowler SaaS </b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
</p>

<p align="center">
<a href="https://goto.prowler.com/slack"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
<br>
<a href="https://goto.prowler.com/slack">Join our Prowler community!</a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
</p>
<hr>
<p align="center">
<a href="https://goto.prowler.com/slack"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://pypi.org/project/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
@@ -29,7 +29,7 @@
<p align="center">
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler?include_prereleases"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/release-date/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="Contributors" src="https://img.shields.io/github/contributors-anon/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="License" src="https://img.shields.io/github/license/prowler-cloud/prowler"></a>
@@ -43,15 +43,7 @@

# Description

**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.

## Prowler App

Prowler App is a web application that allows you to run Prowler in your cloud provider accounts and visualize the results in a user-friendly interface.

![Prowler App](docs/img/overview.png)

>More details at [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.

## Prowler CLI

@@ -71,134 +63,42 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe

| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 561 | 81 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 3 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 139 | 18 -> `prowler azure --list-services` | 4 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| AWS | 385 | 67 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 135 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |

# 💻 Installation

## Prowler App

Prowler App can be installed in different ways, depending on your environment:

> See how to use Prowler App in the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).

### Docker Compose

**Requirements**

* `Docker Compose` installed: https://docs.docker.com/compose/install/.

**Commands**

``` console
curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/docker-compose.yml
curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/.env
docker compose up -d
```

> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.

### From GitHub

**Requirements**

* `git` installed.
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.

**Commands to run the API**

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
set -a
source .env
docker compose up postgres valkey -d
cd src/backend
python manage.py migrate --database admin
gunicorn -c config/guniconf.py config.wsgi:application
```

> Now, you can access the API documentation at http://localhost:8080/api/v1/docs.

**Commands to run the API Worker**

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
set -a
source .env
cd src/backend
python -m celery -A config.celery worker -l info -E
```

**Commands to run the API Scheduler**

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
poetry shell
set -a
source .env
cd src/backend
python -m celery -A config.celery beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
```

**Commands to run the UI**

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/ui
npm install
npm run build
npm start
```

> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.

## Prowler CLI
### Pip package
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:
## Pip package
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:

```console
pip install prowler
prowler -v
```
>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)

### Containers
## Containers

The available versions of Prowler CLI are the following:
The available versions of Prowler are the following:

- `latest`: in sync with `master` branch (bear in mind that it is not a stable version)
- `v4-latest`: in sync with `v4` branch (bear in mind that it is not a stable version)
- `v3-latest`: in sync with `v3` branch (bear in mind that it is not a stable version)
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases), those are stable releases.
- `stable`: this tag always point to the latest release.
- `v4-stable`: this tag always point to the latest release for v4.
- `v3-stable`: this tag always point to the latest release for v3.

The container images are available here:
- Prowler CLI:
  - [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
  - [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
- Prowler App:
  - [DockerHub - Prowler UI](https://hub.docker.com/r/prowlercloud/prowler-ui/tags)
  - [DockerHub - Prowler API](https://hub.docker.com/r/prowlercloud/prowler-api/tags)
### From GitHub
Python >= 3.9, < 3.13 is required with pip and poetry:

``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler
poetry install
poetry shell
python prowler.py -v
```

> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
# 📐✏️ High level architecture

## Prowler App

The **Prowler App** consists of three main components:

- **Prowler UI**: A user-friendly web interface for running Prowler and viewing results, powered by Next.js.
- **Prowler API**: The backend API that executes Prowler scans and stores the results, built with Django REST Framework.
- **Prowler SDK**: A Python SDK that integrates with the Prowler CLI for advanced functionality.

![Prowler App Architecture](docs/img/prowler-app-architecture.png)

## Prowler CLI

You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine instance, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.

![Prowler CLI Architecture](docs/img/architecture.png)
@@ -1,40 +0,0 @@
# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1
DJANGO_BIND_ADDRESS=0.0.0.0
DJANGO_PORT=8000
DJANGO_DEBUG=False
# Select one of [production|devel]
DJANGO_SETTINGS_MODULE=config.django.[production|devel]
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER=[ndjson|human_readable]
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
DJANGO_WORKERS=4 # Defaults to the maximum available based on CPU cores if not set.
DJANGO_TOKEN_SIGNING_KEY=""
DJANGO_TOKEN_VERIFYING_KEY=""
# Token lifetime is in minutes
DJANGO_ACCESS_TOKEN_LIFETIME=30
DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_SECRETS_ENCRYPTION_KEY=""
# Decide whether to allow Django to manage database table partitions
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5

# PostgreSQL settings
# If running django and celery on host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=[localhost|postgres-db]
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler
POSTGRES_ADMIN_PASSWORD=S3cret
POSTGRES_USER=prowler_user
POSTGRES_PASSWORD=S3cret
POSTGRES_DB=prowler_db

# Valkey settings
# If running django and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=[localhost|valkey]
VALKEY_PORT=6379
VALKEY_DB=0
api/.gitignore (vendored)
@@ -1,168 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.pyc
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
/_data/

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
*.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/

# VSCode
.vscode/
@@ -1,91 +0,0 @@
repos:
  ## GENERAL
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.6.0
    hooks:
      - id: check-merge-conflict
      - id: check-yaml
        args: ["--unsafe"]
      - id: check-json
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: no-commit-to-branch
      - id: pretty-format-json
        args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
        exclude: 'src/backend/api/fixtures/dev/.*\.json$'

  ## TOML
  - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
    rev: v2.13.0
    hooks:
      - id: pretty-format-toml
        args: [--autofix]
        files: pyproject.toml

  ## BASH
  - repo: https://github.com/koalaman/shellcheck-precommit
    rev: v0.10.0
    hooks:
      - id: shellcheck
        exclude: contrib

  ## PYTHON
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.5.0
    hooks:
      # Run the linter.
      - id: ruff
        args: [ --fix ]
      # Run the formatter.
      - id: ruff-format

  - repo: https://github.com/python-poetry/poetry
    rev: 1.8.0
    hooks:
      - id: poetry-check
        args: ["--directory=src"]
      - id: poetry-lock
        args: ["--no-update", "--directory=src"]

  - repo: https://github.com/hadolint/hadolint
    rev: v2.13.0-beta
    hooks:
      - id: hadolint
        args: ["--ignore=DL3013", "Dockerfile"]

  - repo: local
    hooks:
      - id: pylint
        name: pylint
        entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
        language: system
        files: '.*\.py'

      - id: trufflehog
        name: TruffleHog
        description: Detect secrets in your data.
        entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
        # For running trufflehog in docker, use the following entry instead:
        # entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
        language: system
        stages: ["commit", "push"]

      - id: bandit
        name: bandit
        description: "Bandit is a tool for finding common security issues in Python code"
        entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
        language: system
        files: '.*\.py'

      - id: safety
        name: safety
        description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
        entry: bash -c 'poetry run safety check --ignore 70612,66963'
        language: system

      - id: vulture
        name: vulture
        description: "Vulture finds unused code in Python programs."
        entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
        language: system
        files: '.*\.py'
@@ -1,46 +0,0 @@
FROM python:3.12.8-alpine3.20 AS build

LABEL maintainer="https://github.com/prowler-cloud/api"

# hadolint ignore=DL3018
RUN apk --no-cache add gcc python3-dev musl-dev linux-headers curl-dev

RUN apk --no-cache upgrade && \
    addgroup -g 1000 prowler && \
    adduser -D -u 1000 -G prowler prowler
USER prowler

WORKDIR /home/prowler

COPY pyproject.toml ./

RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir poetry

COPY src/backend/ ./backend/

ENV PATH="/home/prowler/.local/bin:$PATH"

RUN poetry install && \
    rm -rf ~/.cache/pip

COPY docker-entrypoint.sh ./docker-entrypoint.sh

WORKDIR /home/prowler/backend

# Development image
# hadolint ignore=DL3006
FROM build AS dev

USER 0
# hadolint ignore=DL3018
RUN apk --no-cache add curl vim

USER prowler

ENTRYPOINT ["../docker-entrypoint.sh", "dev"]

# Production image
FROM build

ENTRYPOINT ["../docker-entrypoint.sh", "prod"]
api/README.md
@@ -1,271 +0,0 @@
# Description

This repository contains the JSON API and Task Runner components for Prowler, which together provide a complete backend that interacts with the Prowler SDK and is used by the Prowler UI.

# Components

The Prowler API is composed of the following components:

- The JSON API, which is an API built with Django Rest Framework.
- The Celery worker, which is responsible for executing the background tasks that are defined in the JSON API.
- The PostgreSQL database, which is used to store the data.
- The Valkey database, an in-memory datastore used as a message broker for the Celery workers.

## Note about Valkey

[Valkey](https://valkey.io/) is an open source (BSD) high performance key/value datastore.

Valkey exposes a Redis 7.2 compliant API. Any service that exposes the Redis API can be used with the Prowler API.

# Modify environment variables

Under the root path of the project, you can find a file called `.env.example`. This file shows all the environment variables that the project uses. You *must* create a new file called `.env` and set the values for the variables.

## Local deployment

Keep in mind that if you export the `.env` file for a local deployment, you have to do it within the context of the Poetry interpreter, not before. Otherwise, the variables will not be loaded properly.

To do this, you can run:

```console
poetry shell
set -a
source .env
```
# 🚀 Production deployment

## Docker deployment

This method requires `docker` and `docker compose`.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Build the base image

```console
docker compose --profile prod build
```

### Run the production service

This command will start the Django production server, the Celery worker, and the Valkey and PostgreSQL databases.

```console
docker compose --profile prod up -d
```

You can access the server at `http://localhost:8080`.

> **NOTE:** notice how the port is different. When developing using Docker, the port will be `8080` to prevent conflicts.

### View the Production Server Logs

To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:

```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```
## Local deployment

To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Install all dependencies with Poetry

```console
poetry install
poetry shell
```

## Start the PostgreSQL Database and Valkey

The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.

**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.

```console
docker compose up postgres valkey -d
```

## Deploy Django and the Celery worker

### Run migrations

For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:

```console
cd src/backend
python manage.py migrate --database admin
```

### Run the Celery worker

```console
cd src/backend
python -m celery -A config.celery worker -l info -E
```

### Run the Django server with Gunicorn

```console
cd src/backend
gunicorn -c config/guniconf.py config.wsgi:application
```

> By default, the Gunicorn server will try to use as many workers as your machine can handle. You can manually change that in the `src/backend/config/guniconf.py` file.
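To make that concrete, here is a minimal sketch of what such a Gunicorn config can look like (illustrative only; the actual `guniconf.py` in the repository may differ, and the `bind` and `workers` values below are assumptions):

```python
# guniconf.py - illustrative Gunicorn configuration sketch
import multiprocessing

# Where the server listens; the API defaults to port 8080 when run in Docker.
bind = "0.0.0.0:8080"

# A common Gunicorn heuristic: two workers per CPU core, plus one.
# Lower this value manually on memory-constrained machines.
workers = multiprocessing.cpu_count() * 2 + 1
```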
# 🧪 Development guide

## Local deployment

To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Start the PostgreSQL Database and Valkey

The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.

**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.

```console
docker compose up postgres valkey -d
```

### Install the Python dependencies

> You must have Poetry installed

```console
poetry install
poetry shell
```

### Apply migrations

For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:

```console
cd src/backend
python manage.py migrate --database admin
```

### Run the Django development server

```console
cd src/backend
python manage.py runserver
```

You can access the server at `http://localhost:8000`.
All changes in the code will be automatically reloaded by the server.

### Run the Celery worker

```console
python -m celery -A config.celery worker -l info -E
```

The Celery worker does not detect and reload code changes, so you need to restart it manually when you make changes.
## Docker deployment

This method requires `docker` and `docker compose`.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Build the base image

```console
docker compose --profile dev build
```

### Run the development service

This command will start the Django development server, the Celery worker, and the Valkey and PostgreSQL databases.

```console
docker compose --profile dev up -d
```

You can access the server at `http://localhost:8080`.
All changes in the code will be automatically reloaded by the server.

> **NOTE:** notice how the port is different. When developing using Docker, the port will be `8080` to prevent conflicts.

### View the development server logs

To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:

```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```

## Applying migrations

For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:

```console
poetry shell
cd src/backend
python manage.py migrate --database admin
```

## Apply fixtures

Fixtures are used to populate the database with initial development data.

```console
poetry shell
cd src/backend
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
```

> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`

## Run tests

Note that the tests will fail if you use the same `.env` file as the development environment.

For best results, run in a new shell with no environment variables set.

```console
poetry shell
cd src/backend
pytest
```
@@ -1,125 +0,0 @@
services:
  api:
    build:
      dockerfile: Dockerfile
    image: prowler-api
    env_file:
      - path: ./.env
        required: false
    ports:
      - "${DJANGO_PORT:-8000}:${DJANGO_PORT:-8000}"
    profiles:
      - prod
    depends_on:
      postgres:
        condition: service_healthy
      valkey:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "prod"

  api-dev:
    build:
      dockerfile: Dockerfile
      target: dev
    image: prowler-api-dev
    environment:
      - DJANGO_SETTINGS_MODULE=config.django.devel
      - DJANGO_LOGGING_FORMATTER=human_readable
    env_file:
      - path: ./.env
        required: false
    ports:
      - "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
    volumes:
      - "./src/backend:/home/prowler/backend"
      - "./pyproject.toml:/home/prowler/pyproject.toml"
    profiles:
      - dev
    depends_on:
      postgres:
        condition: service_healthy
      valkey:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "dev"

  postgres:
    image: postgres:16.3-alpine
    ports:
      - "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
    hostname: "postgres-db"
    volumes:
      - ./_data/postgres:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=${POSTGRES_ADMIN_USER:-prowler}
      - POSTGRES_PASSWORD=${POSTGRES_ADMIN_PASSWORD:-S3cret}
      - POSTGRES_DB=${POSTGRES_DB:-prowler_db}
    env_file:
      - path: ./.env
        required: false
    healthcheck:
      test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER:-prowler} -d ${POSTGRES_DB:-prowler_db}'"]
      interval: 5s
      timeout: 5s
      retries: 5

  valkey:
    image: valkey/valkey:7-alpine3.19
    ports:
      - "${VALKEY_PORT:-6379}:6379"
    hostname: "valkey"
    volumes:
      - ./_data/valkey:/data
    env_file:
      - path: ./.env
        required: false
    healthcheck:
      test: ["CMD-SHELL", "sh -c 'valkey-cli ping'"]
      interval: 10s
      timeout: 5s
      retries: 3

  worker:
    build:
      dockerfile: Dockerfile
    image: prowler-worker
    environment:
      - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
    env_file:
      - path: ./.env
        required: false
    profiles:
      - dev
      - prod
    depends_on:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "worker"

  worker-beat:
    build:
      dockerfile: Dockerfile
    image: prowler-worker
    environment:
      - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
    env_file:
      - path: ./.env
        required: false
    profiles:
      - dev
      - prod
    depends_on:
      valkey:
        condition: service_healthy
      postgres:
        condition: service_healthy
    entrypoint:
      - "../docker-entrypoint.sh"
      - "beat"
@@ -1,71 +0,0 @@
#!/bin/sh


apply_migrations() {
    echo "Applying database migrations..."
    poetry run python manage.py migrate --database admin
}

apply_fixtures() {
    echo "Applying Django fixtures..."
    for fixture in api/fixtures/dev/*.json; do
        if [ -f "$fixture" ]; then
            echo "Loading $fixture"
            poetry run python manage.py loaddata "$fixture" --database admin
        fi
    done
}

start_dev_server() {
    echo "Starting the development server..."
    poetry run python manage.py runserver 0.0.0.0:"${DJANGO_PORT:-8080}"
}

start_prod_server() {
    echo "Starting the Gunicorn server..."
    poetry run gunicorn -c config/guniconf.py config.wsgi:application
}

start_worker() {
    echo "Starting the worker..."
    poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans -E
}

start_worker_beat() {
    echo "Starting the worker-beat..."
    sleep 15
    poetry run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
}

manage_db_partitions() {
    if [ "${DJANGO_MANAGE_DB_PARTITIONS}" = "True" ]; then
        echo "Managing DB partitions..."
        # For now we skip the deletion of partitions until we define the data retention policy
        # --yes auto approves the operation without the need of an interactive terminal
        poetry run python manage.py pgpartition --using admin --skip-delete --yes
    fi
}

case "$1" in
    dev)
        apply_migrations
        apply_fixtures
        manage_db_partitions
        start_dev_server
        ;;
    prod)
        apply_migrations
        manage_db_partitions
        start_prod_server
        ;;
    worker)
        start_worker
        ;;
    beat)
        start_worker_beat
        ;;
    *)
        echo "Usage: $0 {dev|prod|worker|beat}"
        exit 1
        ;;
esac
@@ -1,65 +0,0 @@
# Partitions

## Overview

Partitions are used to split the data in a table into smaller chunks, allowing for more efficient querying and storage.

The Prowler API uses partitions to store findings. The partitions are created based on the UUIDv7 `id` field.

You can use the Prowler API without ever creating additional partitions. This documentation is only relevant if you want to manage partitions to gain additional query performance.
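Since UUIDv7 values start with a 48-bit Unix-millisecond timestamp, ids generated later sort after earlier ones, which is what makes range partitioning on `id` behave like time-based partitioning. A minimal sketch of that property (illustrative only; it uses the `uuid6` package that the API pins as a dependency):

```python
# Illustrative: UUIDv7 ids are time-ordered, so range partitions on `id`
# act like monthly time partitions.
import time

from uuid6 import uuid7

first = uuid7()
time.sleep(0.002)  # ensure the second id lands in a later millisecond
second = uuid7()

# Ids generated in a later millisecond always compare greater, because the
# top 48 bits of a UUIDv7 are a millisecond Unix timestamp.
assert first < second

# Recover the embedded timestamp (milliseconds since the Unix epoch).
timestamp_ms = first.int >> 80
```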
### Required Postgres Configuration

There are 3 configuration options that need to be set in the `postgres.conf` file to get the most performance out of the partitioning:

- `enable_partition_pruning = on` (default is on)
- `enable_partitionwise_join = on` (default is off)
- `enable_partitionwise_aggregate = on` (default is off)

For more information on these options, see the [Postgres documentation](https://www.postgresql.org/docs/current/runtime-config-query.html).

## Partitioning Strategy

The partitioning strategy is defined in the `api.partitions` module. The strategy is responsible for creating and deleting partitions based on the provided configuration.

## Managing Partitions

The application will run without any extra work on your part. If you want to add or delete partitions, run `python manage.py pgpartition --using admin`.

This command will generate a list of partitions to create and delete based on the provided configuration.

By default, the command will prompt you to accept the changes before applying them.

```shell
Finding:
  + 2024_nov
      name: 2024_nov
      from_values: 0192e505-9000-72c8-a47c-cce719d8fb93
      to_values: 01937f84-5418-7eb8-b2a6-e3be749e839d
      size_unit: months
      size_value: 1
  + 2024_dec
      name: 2024_dec
      from_values: 01937f84-5800-7b55-879c-9cdb46f023f6
      to_values: 01941f29-7818-7f9f-b4be-20b05bb2f574
      size_unit: months
      size_value: 1

0 partitions will be deleted
2 partitions will be created
```

If you choose to apply the partitions, tables will be generated with the following format: `<table_name>_<year>_<month>`.

For more info on the partitioning manager, see https://github.com/SectorLabs/django-postgres-extra

### Changing the Partitioning Parameters

There are 4 environment variables that can be used to change the partitioning parameters (an example configuration follows the list):

- `DJANGO_MANAGE_DB_PARTITIONS`: Allow Django to manage database partitions. By default it is set to `False`.
- `FINDINGS_TABLE_PARTITION_MONTHS`: Set the months for each partition. Setting the partition months to 1 will create partitions with a size of 1 natural month.
- `FINDINGS_TABLE_PARTITION_COUNT`: Set the number of partitions to create.
- `FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS`: Set the number of months to keep partitions before deleting them. Setting this to `None` will keep partitions indefinitely.
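As an illustration (only the variable names come from the list above; the values are made up), a deployment that keeps one year of findings in monthly partitions could set:

```shell
DJANGO_MANAGE_DB_PARTITIONS=True
FINDINGS_TABLE_PARTITION_MONTHS=1
FINDINGS_TABLE_PARTITION_COUNT=12
FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS=12
```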
api/poetry.lock (generated): file diff suppressed because it is too large.
@@ -1,55 +0,0 @@
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]

[tool.poetry]
authors = ["Prowler Team"]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
name = "prowler-api"
package-mode = false
version = "1.1.0"

[tool.poetry.dependencies]
celery = {extras = ["pytest"], version = "^5.4.0"}
django = "5.1.1"
django-celery-beat = "^2.7.0"
django-celery-results = "^2.5.1"
django-cors-headers = "4.4.0"
django-environ = "0.11.2"
django-filter = "24.3"
django-guid = "3.5.0"
django-postgres-extra = "^2.0.8"
djangorestframework = "3.15.2"
djangorestframework-jsonapi = "7.0.2"
djangorestframework-simplejwt = "^5.3.1"
drf-nested-routers = "^0.94.1"
drf-spectacular = "0.27.2"
drf-spectacular-jsonapi = "0.5.1"
gunicorn = "23.0.0"
prowler = {git = "https://github.com/prowler-cloud/prowler.git", tag = "5.0.0"}
psycopg2-binary = "2.9.9"
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
# Needed for prowler compatibility
python = ">=3.11,<3.13"
uuid6 = "2024.7.10"

[tool.poetry.group.dev.dependencies]
bandit = "1.7.9"
coverage = "7.5.4"
docker = "7.1.0"
freezegun = "1.5.1"
mypy = "1.10.1"
pylint = "3.2.5"
pytest = "8.2.2"
pytest-cov = "5.0.0"
pytest-django = "4.8.0"
pytest-env = "1.1.3"
pytest-randomly = "3.15.0"
pytest-xdist = "3.6.1"
ruff = "0.5.0"
safety = "3.2.3"
vulture = "2.11"

[tool.poetry.scripts]
celery = "src.backend.config.settings.celery"
@@ -1,3 +0,0 @@
# from django.contrib import admin

# Register your models here.
@@ -1,12 +0,0 @@
from django.apps import AppConfig


class ApiConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "api"

    def ready(self):
        from api import signals  # noqa: F401
        from api.compliance import load_prowler_compliance

        load_prowler_compliance()
@@ -1,152 +0,0 @@
from django.core.exceptions import ObjectDoesNotExist
from django.db import transaction
from rest_framework import permissions
from rest_framework.exceptions import NotAuthenticated
from rest_framework.filters import SearchFilter
from rest_framework_json_api import filters
from rest_framework_json_api.views import ModelViewSet
from rest_framework_simplejwt.authentication import JWTAuthentication

from api.db_router import MainRouter
from api.db_utils import POSTGRES_USER_VAR, rls_transaction
from api.filters import CustomDjangoFilterBackend
from api.models import Role, Tenant
from api.rbac.permissions import HasPermissions


class BaseViewSet(ModelViewSet):
    authentication_classes = [JWTAuthentication]
    required_permissions = []
    permission_classes = [permissions.IsAuthenticated, HasPermissions]
    filter_backends = [
        filters.QueryParameterValidationFilter,
        filters.OrderingFilter,
        CustomDjangoFilterBackend,
        SearchFilter,
    ]

    filterset_fields = []
    search_fields = []

    ordering_fields = "__all__"
    ordering = ["id"]

    def initial(self, request, *args, **kwargs):
        """
        Sets required_permissions before permissions are checked.
        """
        self.set_required_permissions()
        super().initial(request, *args, **kwargs)

    def set_required_permissions(self):
        """This is an abstract method that must be implemented by subclasses."""
        NotImplemented

    def get_queryset(self):
        raise NotImplementedError


class BaseRLSViewSet(BaseViewSet):
    def dispatch(self, request, *args, **kwargs):
        with transaction.atomic():
            return super().dispatch(request, *args, **kwargs)

    def initial(self, request, *args, **kwargs):
        # Ideally, this logic would be in the `.setup()` method but DRF view sets don't call it
        # https://docs.djangoproject.com/en/5.1/ref/class-based-views/base/#django.views.generic.base.View.setup
        if request.auth is None:
            raise NotAuthenticated

        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context["tenant_id"] = self.request.tenant_id
        return context


class BaseTenantViewset(BaseViewSet):
    def dispatch(self, request, *args, **kwargs):
        with transaction.atomic():
            tenant = super().dispatch(request, *args, **kwargs)

        try:
            # If the request is a POST, create the admin role
            if request.method == "POST":
                isinstance(tenant, dict) and self._create_admin_role(tenant.data["id"])
        except Exception as e:
            self._handle_creation_error(e, tenant)
            raise

        return tenant

    def _create_admin_role(self, tenant_id):
        Role.objects.using(MainRouter.admin_db).create(
            name="admin",
            tenant_id=tenant_id,
            manage_users=True,
            manage_account=True,
            manage_billing=True,
            manage_providers=True,
            manage_integrations=True,
            manage_scans=True,
            unlimited_visibility=True,
        )

    def _handle_creation_error(self, error, tenant):
        if tenant.data.get("id"):
            try:
                Tenant.objects.using(MainRouter.admin_db).filter(
                    id=tenant.data["id"]
                ).delete()
            except ObjectDoesNotExist:
                pass  # Tenant might not exist, handle gracefully

    def initial(self, request, *args, **kwargs):
        if (
            request.resolver_match.url_name != "tenant-detail"
            and request.method != "DELETE"
        ):
            user_id = str(request.user.id)

            with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
                return super().initial(request, *args, **kwargs)

        # TODO: DRY this when we have time
        if request.auth is None:
            raise NotAuthenticated

        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)


class BaseUserViewset(BaseViewSet):
    def dispatch(self, request, *args, **kwargs):
        with transaction.atomic():
            return super().dispatch(request, *args, **kwargs)

    def initial(self, request, *args, **kwargs):
        # TODO refactor after improving RLS on users
        if request.stream is not None and request.stream.method == "POST":
            return super().initial(request, *args, **kwargs)
        if request.auth is None:
            raise NotAuthenticated

        tenant_id = request.auth.get("tenant_id")
        if tenant_id is None:
            raise NotAuthenticated("Tenant ID is not present in token")

        with rls_transaction(tenant_id):
            self.request.tenant_id = tenant_id
            return super().initial(request, *args, **kwargs)
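# Illustrative only (not part of the original file): a concrete viewset would
# subclass BaseRLSViewSet roughly like this; the model, serializer, and
# permission names below are assumptions.
#
#     class ProviderViewSet(BaseRLSViewSet):
#         queryset = Provider.objects.all()
#         serializer_class = ProviderSerializer
#
#         def set_required_permissions(self):
#             self.required_permissions = ["manage_providers"]
#
#         def get_queryset(self):
#             # Rows are already tenant-scoped by Postgres RLS; filtering
#             # again here is just an explicit guard.
#             return Provider.objects.filter(tenant_id=self.request.tenant_id)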
@@ -1,209 +0,0 @@
from types import MappingProxyType

from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata

from api.models import Provider

PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
PROWLER_CHECKS = {}


def get_prowler_provider_checks(provider_type: Provider.ProviderChoices):
    """
    Retrieve all check IDs for the specified provider type.

    This function fetches the check metadata for the given cloud provider
    and returns an iterable of check IDs.

    Args:
        provider_type (Provider.ProviderChoices): The provider type
            (e.g., 'aws', 'azure') for which to retrieve check IDs.

    Returns:
        Iterable[str]: An iterable of check IDs associated with the specified provider type.
    """
    return CheckMetadata.get_bulk(provider_type).keys()


def get_prowler_provider_compliance(provider_type: Provider.ProviderChoices) -> dict:
    """
    Retrieve the Prowler compliance data for a specified provider type.

    This function fetches the compliance frameworks and their associated
    requirements for the given cloud provider.

    Args:
        provider_type (Provider.ProviderChoices): The provider type
            (e.g., 'aws', 'azure') for which to retrieve compliance data.

    Returns:
        dict: A dictionary mapping compliance framework names to their respective
            Compliance objects for the specified provider.
    """
    return Compliance.get_bulk(provider_type)


def load_prowler_compliance():
    """
    Load and initialize the Prowler compliance data and checks for all provider types.

    This function retrieves compliance data for all supported provider types,
    generates a compliance overview template, and populates the global variables
    `PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE` and `PROWLER_CHECKS` with read-only mappings
    of the compliance templates and checks, respectively.
    """
    global PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
    global PROWLER_CHECKS

    prowler_compliance = {
        provider_type: get_prowler_provider_compliance(provider_type)
        for provider_type in Provider.ProviderChoices.values
    }
    template = generate_compliance_overview_template(prowler_compliance)
    PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = MappingProxyType(template)
    PROWLER_CHECKS = MappingProxyType(load_prowler_checks(prowler_compliance))


def load_prowler_checks(prowler_compliance):
    """
    Generate a mapping of checks to the compliance frameworks that include them.

    This function processes the provided compliance data and creates a dictionary
    mapping each provider type to a dictionary where each check ID maps to a set
    of compliance names that include that check.

    Args:
        prowler_compliance (dict): The compliance data for all provider types,
            as returned by `get_prowler_provider_compliance`.

    Returns:
        dict: A nested dictionary where the first-level keys are provider types,
            and the values are dictionaries mapping check IDs to sets of compliance names.
    """
    checks = {}
    for provider_type in Provider.ProviderChoices.values:
        checks[provider_type] = {
            check_id: set() for check_id in get_prowler_provider_checks(provider_type)
        }
        for compliance_name, compliance_data in prowler_compliance[
            provider_type
        ].items():
            for requirement in compliance_data.Requirements:
                for check in requirement.Checks:
                    try:
                        checks[provider_type][check].add(compliance_name)
                    except KeyError:
                        continue
    return checks


def generate_scan_compliance(
    compliance_overview, provider_type: str, check_id: str, status: str
):
    """
    Update the compliance overview with the status of a specific check.

    This function updates the compliance overview by setting the status of the given check
    within all compliance frameworks and requirements that include it. It then updates the
    requirement status to 'FAIL' if any of its checks have failed, and adjusts the counts
    of passed and failed requirements in the compliance overview.

    Args:
        compliance_overview (dict): The compliance overview data structure to update.
        provider_type (str): The provider type (e.g., 'aws', 'azure') associated with the check.
        check_id (str): The identifier of the check whose status is being updated.
        status (str): The status of the check (e.g., 'PASS', 'FAIL', 'MUTED').

    Returns:
        None: This function modifies the compliance_overview in place.
    """
    for compliance_id in PROWLER_CHECKS[provider_type][check_id]:
        for requirement in compliance_overview[compliance_id]["requirements"].values():
            if check_id in requirement["checks"]:
                requirement["checks"][check_id] = status
                requirement["checks_status"][status.lower()] += 1

                if requirement["status"] != "FAIL" and any(
                    value == "FAIL" for value in requirement["checks"].values()
                ):
                    requirement["status"] = "FAIL"
                    compliance_overview[compliance_id]["requirements_status"]["passed"] -= 1
                    compliance_overview[compliance_id]["requirements_status"]["failed"] += 1


def generate_compliance_overview_template(prowler_compliance: dict):
    """
    Generate a compliance overview template for all provider types.

    This function creates a nested dictionary structure representing the compliance
    overview template for each provider type, compliance framework, and requirement.
    It initializes the status of all checks and requirements, and calculates initial
    counts for requirements status.

    Args:
        prowler_compliance (dict): The compliance data for all provider types,
            as returned by `get_prowler_provider_compliance`.

    Returns:
        dict: A nested dictionary representing the compliance overview template,
            structured by provider type and compliance framework.
    """
    template = {}
    for provider_type in Provider.ProviderChoices.values:
        provider_compliance = template.setdefault(provider_type, {})
        compliance_data_dict = prowler_compliance[provider_type]

        for compliance_name, compliance_data in compliance_data_dict.items():
            compliance_requirements = {}
            requirements_status = {"passed": 0, "failed": 0, "manual": 0}
            total_requirements = 0

            for requirement in compliance_data.Requirements:
                total_requirements += 1
                total_checks = len(requirement.Checks)
                checks_dict = {check: None for check in requirement.Checks}

                # Build requirement dictionary
                requirement_dict = {
                    "name": requirement.Name or requirement.Id,
                    "description": requirement.Description,
                    "attributes": [
                        dict(attribute) for attribute in requirement.Attributes
                    ],
                    "checks": checks_dict,
                    "checks_status": {
                        "pass": 0,
                        "fail": 0,
                        "manual": 0,
                        "total": total_checks,
                    },
                    "status": "PASS",
                }

                # Update requirements status
                if total_checks == 0:
                    requirements_status["manual"] += 1

                # Add requirement to compliance requirements
                compliance_requirements[requirement.Id] = requirement_dict

            # Calculate pending requirements
            pending_requirements = total_requirements - requirements_status["manual"]
            requirements_status["passed"] = pending_requirements

            # Build compliance dictionary
            compliance_dict = {
                "framework": compliance_data.Framework,
                "version": compliance_data.Version,
                "provider": provider_type,
                "description": compliance_data.Description,
                "requirements": compliance_requirements,
                "requirements_status": requirements_status,
                "total_requirements": total_requirements,
            }

            # Add compliance to provider compliance
            provider_compliance[compliance_name] = compliance_dict

    return template
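# Illustrative only (not part of the original file): the generated template is
# shaped roughly like this, keyed by provider type and compliance framework:
#
#     {
#         "aws": {
#             "<compliance_name>": {
#                 "framework": ..., "version": ..., "provider": "aws",
#                 "description": ...,
#                 "requirements": {"<requirement_id>": {...}},
#                 "requirements_status": {"passed": ..., "failed": 0, "manual": ...},
#                 "total_requirements": ...,
#             },
#         },
#     }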
@@ -1,18 +0,0 @@
class MainRouter:
    default_db = "default"
    admin_db = "admin"

    def db_for_read(self, model, **hints):  # noqa: F841
        model_table_name = model._meta.db_table
        if model_table_name.startswith("django_"):
            return self.admin_db
        return None

    def db_for_write(self, model, **hints):  # noqa: F841
        model_table_name = model._meta.db_table
        if model_table_name.startswith("django_"):
            return self.admin_db
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):  # noqa: F841
        return db == self.admin_db
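# Added for clarity (comments only, not in the original file): Django's own
# tables (django_*) are pinned to the "admin" connection, every other model
# falls through to the default routing, and migrations may only run against
# the "admin" database - which is why the README's migrate commands pass
# `--database admin`.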
@@ -1,318 +0,0 @@
import secrets
import uuid
from contextlib import contextmanager
from datetime import datetime, timedelta, timezone

from django.conf import settings
from django.contrib.auth.models import BaseUserManager
from django.core.paginator import Paginator
from django.db import connection, models, transaction
from psycopg2 import connect as psycopg2_connect
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
from rest_framework_json_api.serializers import ValidationError

DB_USER = settings.DATABASES["default"]["USER"] if not settings.TESTING else "test"
DB_PASSWORD = (
    settings.DATABASES["default"]["PASSWORD"] if not settings.TESTING else "test"
)
DB_PROWLER_USER = (
    settings.DATABASES["prowler_user"]["USER"] if not settings.TESTING else "test"
)
DB_PROWLER_PASSWORD = (
    settings.DATABASES["prowler_user"]["PASSWORD"] if not settings.TESTING else "test"
)
TASK_RUNNER_DB_TABLE = "django_celery_results_taskresult"
POSTGRES_TENANT_VAR = "api.tenant_id"
POSTGRES_USER_VAR = "api.user_id"

SET_CONFIG_QUERY = "SELECT set_config(%s, %s::text, TRUE);"


@contextmanager
def psycopg_connection(database_alias: str):
    psycopg2_connection = None
    try:
        admin_db = settings.DATABASES[database_alias]

        psycopg2_connection = psycopg2_connect(
            dbname=admin_db["NAME"],
            user=admin_db["USER"],
            password=admin_db["PASSWORD"],
            host=admin_db["HOST"],
            port=admin_db["PORT"],
        )
        yield psycopg2_connection
    finally:
        if psycopg2_connection is not None:
            psycopg2_connection.close()
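# Usage sketch (illustrative; the "admin" alias is the one MainRouter defines):
#
#     with psycopg_connection("admin") as conn:
#         ...  # raw psycopg2 connection, closed automatically on exit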
@contextmanager
def rls_transaction(value: str, parameter: str = POSTGRES_TENANT_VAR):
    """
    Creates a new database transaction setting the given configuration value for Postgres RLS.
    It validates that the value is a valid UUID.

    Args:
        value (str): Database configuration parameter value.
        parameter (str): Database configuration parameter name; by default it is 'api.tenant_id'.
    """
    with transaction.atomic():
        with connection.cursor() as cursor:
            try:
                # just in case the value is a UUID object
                uuid.UUID(str(value))
            except ValueError:
                raise ValidationError("Must be a valid UUID")
            cursor.execute(SET_CONFIG_QUERY, [parameter, value])
            yield cursor
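# Usage sketch (illustrative; not part of the original module):
#
#     with rls_transaction(tenant_id):
#         ...  # ORM queries here run with api.tenant_id set, so Postgres
#              # RLS policies scope them to that tenant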
class CustomUserManager(BaseUserManager):
    def create_user(self, email, password=None, **extra_fields):
        if not email:
            raise ValueError("The email field must be set")
        email = self.normalize_email(email)
        user = self.model(email=email, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def get_by_natural_key(self, email):
        return self.get(email__iexact=email)


def enum_to_choices(enum_class):
    """
    This function converts a Python Enum to a list of tuples, where the first element is
    the value and the second element is the name.

    It's for use with Django's `choices` attribute, which expects a list of tuples.
    """
    return [(item.value, item.name.replace("_", " ").title()) for item in enum_class]
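# Example (illustrative): for an Enum member named STATE_FAIL with value
# "fail", enum_to_choices(...) yields ("fail", "State Fail") - the
# (value, label) pairs Django's `choices` expects.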
def one_week_from_now():
    """
    Return a datetime object with a date one week from now.
    """
    return datetime.now(timezone.utc) + timedelta(days=7)


def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
    """
    Generate a random token with the specified length.
    """
    _symbols = "23456789ABCDEFGHJKMNPQRSTVWXYZ"
    return "".join(secrets.choice(symbols or _symbols) for _ in range(length))


def batch_delete(queryset, batch_size=5000):
    """
    Deletes objects in batches and returns the total number of deletions and a summary.

    Args:
        queryset (QuerySet): The queryset of objects to delete.
        batch_size (int): The number of objects to delete in each batch.

    Returns:
        tuple: (total_deleted, deletion_summary)
    """
    total_deleted = 0
    deletion_summary = {}

    paginator = Paginator(queryset.order_by("id").only("id"), batch_size)

    for page_num in paginator.page_range:
        batch_ids = [obj.id for obj in paginator.page(page_num).object_list]

        deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()

        total_deleted += deleted_count

        for model_label, count in deleted_info.items():
            deletion_summary[model_label] = deletion_summary.get(model_label, 0) + count

    return total_deleted, deletion_summary
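# Usage sketch (illustrative; the model name is an assumption):
#
#     total, summary = batch_delete(Finding.objects.filter(scan_id=scan_id))
#     # e.g. total == 12345, summary == {"api.Finding": 12345}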
# Postgres Enums
|
||||
|
||||
|
||||
class PostgresEnumMigration:
|
||||
def __init__(self, enum_name: str, enum_values: tuple):
|
||||
self.enum_name = enum_name
|
||||
self.enum_values = enum_values
|
||||
|
||||
def create_enum_type(self, apps, schema_editor): # noqa: F841
|
||||
string_enum_values = ", ".join([f"'{value}'" for value in self.enum_values])
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute(
|
||||
f"CREATE TYPE {self.enum_name} AS ENUM ({string_enum_values});"
|
||||
)
|
||||
|
||||
def drop_enum_type(self, apps, schema_editor): # noqa: F841
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute(f"DROP TYPE {self.enum_name};")
|
||||
|
||||
|
||||
class PostgresEnumField(models.Field):
|
||||
def __init__(self, enum_type_name, *args, **kwargs):
|
||||
self.enum_type_name = enum_type_name
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def db_type(self, connection):
|
||||
return self.enum_type_name
|
||||
|
||||
def from_db_value(self, value, expression, connection): # noqa: F841
|
||||
return value
|
||||
|
||||
def to_python(self, value):
|
||||
if isinstance(value, EnumType):
|
||||
return value.value
|
||||
return value
|
||||
|
||||
def get_prep_value(self, value):
|
||||
if isinstance(value, EnumType):
|
||||
return value.value
|
||||
return value
|
||||
|
||||
|
||||
class EnumType:
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __str__(self):
|
||||
return self.value
|
||||
|
||||
|
||||
def enum_adapter(enum_obj):
|
||||
return AsIs(f"'{enum_obj.value}'::{enum_obj.__class__.enum_type_name}")
|
||||
|
||||
|
||||
def get_enum_oid(connection, enum_type_name: str):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute("SELECT oid FROM pg_type WHERE typname = %s;", (enum_type_name,))
|
||||
result = cursor.fetchone()
|
||||
if result is None:
|
||||
raise ValueError(f"Enum type '{enum_type_name}' not found")
|
||||
return result[0]
|
||||
|
||||
|
||||
def register_enum(apps, schema_editor, enum_class): # noqa: F841
|
||||
with psycopg_connection(schema_editor.connection.alias) as connection:
|
||||
enum_oid = get_enum_oid(connection, enum_class.enum_type_name)
|
||||
enum_instance = new_type(
|
||||
(enum_oid,),
|
||||
enum_class.enum_type_name,
|
||||
lambda value, cur: value, # noqa: F841
|
||||
)
|
||||
register_type(enum_instance, connection)
|
||||
register_adapter(enum_class, enum_adapter)
|
||||
|
||||
|
||||
# Postgres enum definition for member role
|
||||
|
||||
|
||||
class MemberRoleEnum(EnumType):
|
||||
enum_type_name = "member_role"
|
||||
|
||||
|
||||
class MemberRoleEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("member_role", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Provider.provider
|
||||
|
||||
|
||||
class ProviderEnum(EnumType):
|
||||
enum_type_name = "provider"
|
||||
|
||||
|
||||
class ProviderEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("provider", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Scan.type
|
||||
|
||||
|
||||
class ScanTriggerEnum(EnumType):
|
||||
enum_type_name = "scan_trigger"
|
||||
|
||||
|
||||
class ScanTriggerEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("scan_trigger", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for state
|
||||
|
||||
|
||||
class StateEnum(EnumType):
|
||||
enum_type_name = "state"
|
||||
|
||||
|
||||
class StateEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("state", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Finding.Delta
|
||||
|
||||
|
||||
class FindingDeltaEnum(EnumType):
|
||||
enum_type_name = "finding_delta"
|
||||
|
||||
|
||||
class FindingDeltaEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("finding_delta", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Severity
|
||||
|
||||
|
||||
class SeverityEnum(EnumType):
|
||||
enum_type_name = "severity"
|
||||
|
||||
|
||||
class SeverityEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("severity", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Status
|
||||
|
||||
|
||||
class StatusEnum(EnumType):
|
||||
enum_type_name = "status"
|
||||
|
||||
|
||||
class StatusEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("status", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Provider secrets type
|
||||
|
||||
|
||||
class ProviderSecretTypeEnum(EnumType):
|
||||
enum_type_name = "provider_secret_type"
|
||||
|
||||
|
||||
class ProviderSecretTypeEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("provider_secret_type", *args, **kwargs)
|
||||
|
||||
|
||||
# Postgres enum definition for Provider secrets type
|
||||
|
||||
|
||||
class InvitationStateEnum(EnumType):
|
||||
enum_type_name = "invitation_state"
|
||||
|
||||
|
||||
class InvitationStateEnumField(PostgresEnumField):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__("invitation_state", *args, **kwargs)
|
||||
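The `(apps, schema_editor, ...)` signature matches Django's RunPython callbacks, so a migration can bind the enum class with functools.partial. A minimal sketch, assuming the member_role type was created earlier in the migration; the migration and dependency names below are placeholders, not taken from this diff:

# Hypothetical migration wiring for register_enum; names are placeholders.
from functools import partial

from django.db import migrations

from api.db_utils import MemberRoleEnum, register_enum


class Migration(migrations.Migration):
    dependencies = [("api", "0001_initial")]  # placeholder dependency

    operations = [
        # RunPython calls code(apps, schema_editor); partial binds enum_class.
        migrations.RunPython(
            partial(register_enum, enum_class=MemberRoleEnum),
            reverse_code=migrations.RunPython.noop,
        ),
    ]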
@@ -1,59 +0,0 @@
import uuid
from functools import wraps

from django.db import connection, transaction
from rest_framework_json_api.serializers import ValidationError

from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY


def set_tenant(func):
    """
    Decorator to set the tenant context for a Celery task based on the provided tenant_id.

    This decorator extracts the `tenant_id` from the task's keyword arguments
    and uses it to set the tenant context for the current database session.
    The `tenant_id` is then removed from the kwargs before the task function
    is executed. If `tenant_id` is not provided, a KeyError is raised.

    Args:
        func (function): The Celery task function to be decorated.

    Raises:
        KeyError: If `tenant_id` is not found in the task's keyword arguments.

    Returns:
        function: The wrapped function with tenant context set.

    Example:
        # This decorator MUST be defined last in the decorator chain

        @shared_task
        @set_tenant
        def some_task(arg1, **kwargs):
            # Task logic here
            pass

        # When calling the task
        some_task.delay(arg1, tenant_id="8db7ca86-03cc-4d42-99f6-5e480baf6ab5")

        # The tenant context will be set before the task logic executes.
    """

    @wraps(func)
    @transaction.atomic
    def wrapper(*args, **kwargs):
        try:
            tenant_id = kwargs.pop("tenant_id")
        except KeyError:
            raise KeyError("This task requires the tenant_id")
        try:
            uuid.UUID(tenant_id)
        except ValueError:
            raise ValidationError("Tenant ID must be a valid UUID")
        with connection.cursor() as cursor:
            cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])

        return func(*args, **kwargs)

    return wrapper
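The cursor statement above stores the tenant id in a Postgres session setting so that row-level security policies can read it back with current_setting(). A minimal sketch of that pairing, assuming SET_CONFIG_QUERY wraps Postgres set_config() and POSTGRES_TENANT_VAR names a custom GUC; neither constant's value appears in this diff:

# Assumed values; the real constants live in api.db_utils and are not
# shown in this diff.
POSTGRES_TENANT_VAR = "api.tenant_id"  # custom GUC name (assumption)
SET_CONFIG_QUERY = "SELECT set_config(%s, %s, TRUE)"  # TRUE = transaction-local

# A row-level security policy on a tenant-scoped table could then compare
# each row against the setting, e.g.:
#   CREATE POLICY tenant_isolation ON findings
#       USING (tenant_id = current_setting('api.tenant_id')::uuid);
# Transaction-local scope pairs with the @transaction.atomic wrapper above:
# the setting disappears when the task's transaction ends.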
@@ -1,45 +0,0 @@
from django.core.exceptions import ValidationError as django_validation_error
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_json_api.exceptions import exception_handler
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken


class ModelValidationError(ValidationError):
    def __init__(
        self,
        detail: str | None = None,
        code: str | None = None,
        pointer: str | None = None,
        status_code: int = 400,
    ):
        super().__init__(
            detail=[
                {
                    "detail": detail,
                    "status": str(status_code),
                    "source": {"pointer": pointer},
                    "code": code,
                }
            ]
        )


class InvitationTokenExpiredException(APIException):
    status_code = status.HTTP_410_GONE
    default_detail = "The invitation token has expired and is no longer valid."
    default_code = "token_expired"


def custom_exception_handler(exc, context):
    if isinstance(exc, django_validation_error):
        if hasattr(exc, "error_dict"):
            exc = ValidationError(exc.message_dict)
        else:
            exc = ValidationError(detail=exc.messages[0], code=exc.code)
    elif isinstance(exc, (TokenError, InvalidToken)):
        exc.detail["messages"] = [
            message_item["message"] for message_item in exc.detail["messages"]
        ]
    return exception_handler(exc, context)
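For the handler to take effect, DRF has to be pointed at it in settings. A one-line sketch; the dotted path is an assumption about where this module lives:

# Hypothetical settings entry; "api.exceptions" is an assumed module path.
REST_FRAMEWORK = {
    "EXCEPTION_HANDLER": "api.exceptions.custom_exception_handler",
}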
@@ -1,567 +0,0 @@
from datetime import date, datetime, timezone

from django.conf import settings
from django.db.models import Q
from django_filters.rest_framework import (
    BaseInFilter,
    BooleanFilter,
    CharFilter,
    ChoiceFilter,
    DateFilter,
    FilterSet,
    UUIDFilter,
)
from rest_framework_json_api.django_filters.backends import DjangoFilterBackend
from rest_framework_json_api.serializers import ValidationError

from api.db_utils import (
    FindingDeltaEnumField,
    InvitationStateEnumField,
    ProviderEnumField,
    SeverityEnumField,
    StatusEnumField,
)
from api.models import (
    ComplianceOverview,
    Finding,
    Invitation,
    Membership,
    PermissionChoices,
    Provider,
    ProviderGroup,
    ProviderSecret,
    Resource,
    ResourceTag,
    Role,
    Scan,
    ScanSummary,
    SeverityChoices,
    StateChoices,
    StatusChoices,
    Task,
    User,
)
from api.rls import Tenant
from api.uuid_utils import (
    datetime_to_uuid7,
    transform_into_uuid7,
    uuid7_end,
    uuid7_range,
    uuid7_start,
)
from api.v1.serializers import TaskBase


class CustomDjangoFilterBackend(DjangoFilterBackend):
    def to_html(self, _request, _queryset, _view):
        """Override this method to use the Browsable API in dev environments.

        This disables the HTML render for the default filter.
        """
        return None

    def get_filterset_class(self, view, queryset=None):
        # Check if the view has a 'get_filterset_class' method
        if hasattr(view, "get_filterset_class"):
            return view.get_filterset_class()
        # Fall back to the default implementation
        return super().get_filterset_class(view, queryset)


class UUIDInFilter(BaseInFilter, UUIDFilter):
    pass


class CharInFilter(BaseInFilter, CharFilter):
    pass


class ChoiceInFilter(BaseInFilter, ChoiceFilter):
    pass


class TenantFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = Tenant
        fields = {
            "name": ["exact", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["gte", "lte"],
        }


class MembershipFilter(FilterSet):
    date_joined = DateFilter(field_name="date_joined", lookup_expr="date")
    role = ChoiceFilter(choices=Membership.RoleChoices.choices)

    class Meta:
        model = Membership
        fields = {
            "tenant": ["exact"],
            "role": ["exact"],
            "date_joined": ["date", "gte", "lte"],
        }


class ProviderFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    connected = BooleanFilter()
    provider = ChoiceFilter(choices=Provider.ProviderChoices.choices)

    class Meta:
        model = Provider
        fields = {
            "provider": ["exact", "in"],
            "id": ["exact", "in"],
            "uid": ["exact", "icontains", "in"],
            "alias": ["exact", "icontains", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
        filter_overrides = {
            ProviderEnumField: {
                "filter_class": CharFilter,
            },
        }


class ProviderRelationshipFilterSet(FilterSet):
    provider_type = ChoiceFilter(
        choices=Provider.ProviderChoices.choices, field_name="provider__provider"
    )
    provider_type__in = ChoiceInFilter(
        choices=Provider.ProviderChoices.choices, field_name="provider__provider"
    )
    provider_uid = CharFilter(field_name="provider__uid", lookup_expr="exact")
    provider_uid__in = CharInFilter(field_name="provider__uid", lookup_expr="in")
    provider_uid__icontains = CharFilter(
        field_name="provider__uid", lookup_expr="icontains"
    )
    provider_alias = CharFilter(field_name="provider__alias", lookup_expr="exact")
    provider_alias__in = CharInFilter(field_name="provider__alias", lookup_expr="in")
    provider_alias__icontains = CharFilter(
        field_name="provider__alias", lookup_expr="icontains"
    )


class ProviderGroupFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = ProviderGroup
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }


class ScanFilter(ProviderRelationshipFilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    completed_at = DateFilter(field_name="completed_at", lookup_expr="date")
    started_at = DateFilter(field_name="started_at", lookup_expr="date")
    next_scan_at = DateFilter(field_name="next_scan_at", lookup_expr="date")
    trigger = ChoiceFilter(choices=Scan.TriggerChoices.choices)
    state = ChoiceFilter(choices=StateChoices.choices)
    state__in = ChoiceInFilter(
        field_name="state", choices=StateChoices.choices, lookup_expr="in"
    )

    class Meta:
        model = Scan
        fields = {
            "provider": ["exact", "in"],
            "name": ["exact", "icontains"],
            "started_at": ["gte", "lte"],
            "next_scan_at": ["gte", "lte"],
            "trigger": ["exact"],
        }


class TaskFilter(FilterSet):
    name = CharFilter(field_name="task_runner_task__task_name", lookup_expr="exact")
    name__icontains = CharFilter(
        field_name="task_runner_task__task_name", lookup_expr="icontains"
    )
    state = ChoiceFilter(
        choices=StateChoices.choices, method="filter_state", lookup_expr="exact"
    )
    task_state_inverse_mapping_values = {
        v: k for k, v in TaskBase.state_mapping.items()
    }

    def filter_state(self, queryset, name, value):
        if value not in StateChoices:
            raise ValidationError(
                f"Invalid state value: '{value}'. Valid values are: "
                f"{', '.join(StateChoices)}"
            )

        return queryset.filter(
            task_runner_task__status=self.task_state_inverse_mapping_values[value]
        )

    class Meta:
        model = Task
        fields = []


class ResourceTagFilter(FilterSet):
    class Meta:
        model = ResourceTag
        fields = {
            "key": ["exact", "icontains"],
            "value": ["exact", "icontains"],
        }
        search = ["text_search"]


class ResourceFilter(ProviderRelationshipFilterSet):
    tag_key = CharFilter(method="filter_tag_key")
    tag_value = CharFilter(method="filter_tag_value")
    tag = CharFilter(method="filter_tag")
    tags = CharFilter(method="filter_tag")
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = Resource
        fields = {
            "provider": ["exact", "in"],
            "uid": ["exact", "icontains"],
            "name": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
            "service": ["exact", "icontains", "in"],
            "type": ["exact", "icontains", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }

    def filter_tag_key(self, queryset, name, value):
        return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))

    def filter_tag_value(self, queryset, name, value):
        return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))

    def filter_tag(self, queryset, name, value):
        # We can't tell from the value alone which tag attribute the user
        # wants to filter on, and we don't want special filtering logic for
        # every possible provider tag spec, so we do a full text search.
        return queryset.filter(tags__text_search=value)


class FindingFilter(FilterSet):
    # Findings are filtered by provider through their scan relationship
    provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
    provider_type = ChoiceFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_type__in = ChoiceInFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
    provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
    provider_uid__icontains = CharFilter(
        field_name="scan__provider__uid", lookup_expr="icontains"
    )
    provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
    provider_alias__in = CharInFilter(
        field_name="scan__provider__alias", lookup_expr="in"
    )
    provider_alias__icontains = CharFilter(
        field_name="scan__provider__alias", lookup_expr="icontains"
    )

    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    uid = CharFilter(field_name="uid")
    delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
    status = ChoiceFilter(choices=StatusChoices.choices)
    severity = ChoiceFilter(choices=SeverityChoices)
    impact = ChoiceFilter(choices=SeverityChoices)

    resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")

    region = CharFilter(field_name="resources__region")
    region__in = CharInFilter(field_name="resources__region", lookup_expr="in")
    region__icontains = CharFilter(
        field_name="resources__region", lookup_expr="icontains"
    )

    service = CharFilter(field_name="resources__service")
    service__in = CharInFilter(field_name="resources__service", lookup_expr="in")
    service__icontains = CharFilter(
        field_name="resources__service", lookup_expr="icontains"
    )

    resource_uid = CharFilter(field_name="resources__uid")
    resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
    resource_uid__icontains = CharFilter(
        field_name="resources__uid", lookup_expr="icontains"
    )

    resource_name = CharFilter(field_name="resources__name")
    resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
    resource_name__icontains = CharFilter(
        field_name="resources__name", lookup_expr="icontains"
    )

    resource_type = CharFilter(field_name="resources__type")
    resource_type__in = CharInFilter(field_name="resources__type", lookup_expr="in")
    resource_type__icontains = CharFilter(
        field_name="resources__type", lookup_expr="icontains"
    )

    scan = UUIDFilter(method="filter_scan_id")
    scan__in = UUIDInFilter(method="filter_scan_id_in")

    inserted_at = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__date = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__gte = DateFilter(method="filter_inserted_at_gte")
    inserted_at__lte = DateFilter(method="filter_inserted_at_lte")

    class Meta:
        model = Finding
        fields = {
            "id": ["exact", "in"],
            "uid": ["exact", "in"],
            "scan": ["exact", "in"],
            "delta": ["exact", "in"],
            "status": ["exact", "in"],
            "severity": ["exact", "in"],
            "impact": ["exact", "in"],
            "check_id": ["exact", "in", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
        filter_overrides = {
            FindingDeltaEnumField: {
                "filter_class": CharFilter,
            },
            StatusEnumField: {
                "filter_class": CharFilter,
            },
            SeverityEnumField: {
                "filter_class": CharFilter,
            },
        }

    # Convert filter values to UUIDv7 values for use with partitioning
    def filter_scan_id(self, queryset, name, value):
        try:
            value_uuid = transform_into_uuid7(value)
            start = uuid7_start(value_uuid)
            end = uuid7_end(value_uuid, settings.FINDINGS_TABLE_PARTITION_MONTHS)
        except ValidationError as validation_error:
            detail = str(validation_error.detail[0])
            raise ValidationError(
                [
                    {
                        "detail": detail,
                        "status": 400,
                        "source": {"pointer": "/data/relationships/scan"},
                        "code": "invalid",
                    }
                ]
            )

        return (
            queryset.filter(id__gte=start)
            .filter(id__lt=end)
            .filter(scan__id=value_uuid)
        )

    def filter_scan_id_in(self, queryset, name, value):
        try:
            uuid_list = [
                transform_into_uuid7(value_uuid)
                for value_uuid in value
                if value_uuid is not None
            ]

            start, end = uuid7_range(uuid_list)
        except ValidationError as validation_error:
            detail = str(validation_error.detail[0])
            raise ValidationError(
                [
                    {
                        "detail": detail,
                        "status": 400,
                        "source": {"pointer": "/data/relationships/scan"},
                        "code": "invalid",
                    }
                ]
            )
        if start == end:
            return queryset.filter(id__gte=start).filter(scan__id__in=uuid_list)
        else:
            return (
                queryset.filter(id__gte=start)
                .filter(id__lt=end)
                .filter(scan__id__in=uuid_list)
            )

    def filter_inserted_at(self, queryset, name, value):
        value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(value))

        return queryset.filter(id__gte=start).filter(inserted_at__date=value)

    def filter_inserted_at_gte(self, queryset, name, value):
        value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(value))

        return queryset.filter(id__gte=start).filter(inserted_at__gte=value)

    def filter_inserted_at_lte(self, queryset, name, value):
        value = self.maybe_date_to_datetime(value)
        end = uuid7_start(datetime_to_uuid7(value))

        return queryset.filter(id__lte=end).filter(inserted_at__lte=value)

    @staticmethod
    def maybe_date_to_datetime(value):
        dt = value
        if isinstance(value, date):
            dt = datetime.combine(value, datetime.min.time(), tzinfo=timezone.utc)
        return dt


class ProviderSecretFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    provider = UUIDFilter(field_name="provider__id", lookup_expr="exact")

    class Meta:
        model = ProviderSecret
        fields = {
            "name": ["exact", "icontains"],
        }


class InvitationFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    expires_at = DateFilter(field_name="expires_at", lookup_expr="date")
    state = ChoiceFilter(choices=Invitation.State.choices)
    state__in = ChoiceInFilter(choices=Invitation.State.choices, lookup_expr="in")

    class Meta:
        model = Invitation
        fields = {
            "email": ["exact", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["date", "gte", "lte"],
            "expires_at": ["date", "gte", "lte"],
            "inviter": ["exact"],
        }
        filter_overrides = {
            InvitationStateEnumField: {
                "filter_class": CharFilter,
            }
        }


class UserFilter(FilterSet):
    date_joined = DateFilter(field_name="date_joined", lookup_expr="date")

    class Meta:
        model = User
        fields = {
            "name": ["exact", "icontains"],
            "email": ["exact", "icontains"],
            "company_name": ["exact", "icontains"],
            "date_joined": ["date", "gte", "lte"],
            "is_active": ["exact"],
        }


class RoleFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    permission_state = ChoiceFilter(
        choices=PermissionChoices.choices, method="filter_permission_state"
    )

    def filter_permission_state(self, queryset, name, value):
        return Role.filter_by_permission_state(queryset, value)

    class Meta:
        model = Role
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }


class ComplianceOverviewFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
    provider_type__in = ChoiceInFilter(choices=Provider.ProviderChoices.choices)
    scan_id = UUIDFilter(field_name="scan__id")

    class Meta:
        model = ComplianceOverview
        fields = {
            "inserted_at": ["date", "gte", "lte"],
            "compliance_id": ["exact", "icontains"],
            "framework": ["exact", "iexact", "icontains"],
            "version": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
        }


class ScanSummaryFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider_type = ChoiceFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    provider_type__in = ChoiceInFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    region = CharFilter(field_name="region")
    muted_findings = BooleanFilter(method="filter_muted_findings")

    def filter_muted_findings(self, queryset, name, value):
        if not value:
            return queryset.exclude(muted__gt=0)
        return queryset

    class Meta:
        model = ScanSummary
        fields = {
            "inserted_at": ["date", "gte", "lte"],
            "region": ["exact", "icontains", "in"],
        }


class ServiceOverviewFilter(ScanSummaryFilter):
    muted_findings = None

    def is_valid(self):
        # Check if at least one of the inserted_at filters is present
        inserted_at_filters = [
            self.data.get("inserted_at"),
            self.data.get("inserted_at__gte"),
            self.data.get("inserted_at__lte"),
        ]
        if not any(inserted_at_filters):
            raise ValidationError(
                {
                    "inserted_at": [
                        "At least one of filter[inserted_at], filter[inserted_at__gte], or "
                        "filter[inserted_at__lte] is required."
                    ]
                }
            )
        return super().is_valid()
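The scan and inserted_at filters above narrow the primary-key range before touching other columns, which is what lets Postgres prune partitions. A self-contained sketch of the underlying UUIDv7 trick; uuid7_start and friends live in api.uuid_utils and are not shown in this diff, so the boundary helper below is a stand-in based on RFC 9562 (48-bit millisecond timestamp in the most significant bits):

import uuid
from datetime import datetime, timezone


def uuid7_lower_bound(dt: datetime) -> uuid.UUID:
    """Stand-in boundary: UUIDv7 timestamp prefix set, remaining bits zeroed."""
    ms = int(dt.timestamp() * 1000)  # unix_ts_ms occupies the top 48 bits
    return uuid.UUID(int=(ms << 80) | (0x7 << 76))  # 0x7 = version nibble


start = uuid7_lower_bound(datetime(2024, 10, 18, tzinfo=timezone.utc))
# Because UUIDv7 primary keys sort by creation time, a filter like
# Finding.objects.filter(id__gte=start) lets Postgres skip partitions
# that only contain rows older than the requested date.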
@@ -1,28 +0,0 @@
[
    {
        "model": "api.user",
        "pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
        "fields": {
            "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
            "last_login": null,
            "name": "Devie Prowlerson",
            "email": "dev@prowler.com",
            "company_name": "Prowler Developers",
            "is_active": true,
            "date_joined": "2024-09-17T09:04:20.850Z"
        }
    },
    {
        "model": "api.user",
        "pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
        "fields": {
            "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
            "last_login": null,
            "name": "Devietoo Prowlerson",
            "email": "dev2@prowler.com",
            "company_name": "Prowler Developers",
            "is_active": true,
            "date_joined": "2024-09-18T09:04:20.850Z"
        }
    }
]
@@ -1,50 +0,0 @@
[
    {
        "model": "api.tenant",
        "pk": "12646005-9067-4d2a-a098-8bb378604362",
        "fields": {
            "inserted_at": "2024-03-21T23:00:00Z",
            "updated_at": "2024-03-21T23:00:00Z",
            "name": "Tenant1"
        }
    },
    {
        "model": "api.tenant",
        "pk": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
        "fields": {
            "inserted_at": "2024-03-21T23:00:00Z",
            "updated_at": "2024-03-21T23:00:00Z",
            "name": "Tenant2"
        }
    },
    {
        "model": "api.membership",
        "pk": "2b0db93a-7e0b-4edf-a851-ea448676b7eb",
        "fields": {
            "user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
            "tenant": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
            "role": "owner",
            "date_joined": "2024-09-19T11:03:59.712Z"
        }
    },
    {
        "model": "api.membership",
        "pk": "797d7cee-abc9-4598-98bb-4bf4bfb97f27",
        "fields": {
            "user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "role": "owner",
            "date_joined": "2024-09-19T11:02:59.712Z"
        }
    },
    {
        "model": "api.membership",
        "pk": "dea37563-7009-4dcf-9f18-25efb41462a7",
        "fields": {
            "user": "b6493a3a-c997-489b-8b99-278bf74de9f6",
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "role": "member",
            "date_joined": "2024-09-19T11:03:59.712Z"
        }
    }
]
@@ -1,177 +0,0 @@
[
    {
        "model": "api.provider",
        "pk": "37b065f8-26b0-4218-a665-0b23d07b27d9",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:20:27.050Z",
            "updated_at": "2024-08-01T17:20:27.050Z",
            "provider": "gcp",
            "uid": "a12322-test321",
            "alias": "gcp_testing_2",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "8851db6b-42e5-4533-aa9e-30a32d67e875",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:19:42.453Z",
            "updated_at": "2024-08-01T17:19:42.453Z",
            "provider": "gcp",
            "uid": "a12345-test123",
            "alias": "gcp_testing_1",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "b85601a8-4b45-4194-8135-03fb980ef428",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:19:09.556Z",
            "updated_at": "2024-08-01T17:19:09.556Z",
            "provider": "aws",
            "uid": "123456789020",
            "alias": "aws_testing_2",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "baa7b895-8bac-4f47-b010-4226d132856e",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:20:16.962Z",
            "updated_at": "2024-08-01T17:20:16.962Z",
            "provider": "gcp",
            "uid": "a12322-test123",
            "alias": "gcp_testing_3",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "d7c7ea89-d9af-423b-a364-1290dcad5a01",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-01T17:18:58.132Z",
            "updated_at": "2024-08-01T17:18:58.132Z",
            "provider": "aws",
            "uid": "123456789015",
            "alias": "aws_testing_1",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-06T16:03:26.176Z",
            "updated_at": "2024-08-06T16:03:26.176Z",
            "provider": "azure",
            "uid": "8851db6b-42e5-4533-aa9e-30a32d67e875",
            "alias": "azure_testing",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {},
            "scanner_args": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "26e55a24-cb2c-4cef-ac87-6f91fddb2c97",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-08-06T16:03:07.037Z",
            "updated_at": "2024-08-06T16:03:07.037Z",
            "provider": "kubernetes",
            "uid": "kubernetes-test-12345",
            "alias": "k8s_testing",
            "connected": null,
            "connection_last_checked_at": null,
            "metadata": {},
            "scanner_args": {}
        }
    },
    {
        "model": "api.provider",
        "pk": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:45:26.352Z",
            "updated_at": "2024-10-18T11:16:23.533Z",
            "provider": "aws",
            "uid": "106908755759",
            "alias": "real testing aws provider",
            "connected": true,
            "connection_last_checked_at": "2024-10-18T11:16:23.503Z",
            "metadata": {},
            "scanner_args": {}
        }
    },
    {
        "model": "api.providersecret",
        "pk": "11491b47-75ae-4f71-ad8d-3e630a72182e",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-11T08:03:05.026Z",
            "updated_at": "2024-10-11T08:04:47.033Z",
            "name": "GCP static secrets",
            "secret_type": "static",
"_secret": "Z0FBQUFBQm5DTndmZW9KakRZUHM2UHhQN2V3RzN0QmM1cERham8yMHp5cnVTT0lzdGFyS1FuVmJXUlpYSGsyU0cxR3RMMEdQYXlYMUVsaWtqLU1OZWlaVUp6OFREYlotZTVBY3BuTlZYbm9YcUJydzAxV2p5dkpLamI1Y2tUYzA0MmJUNWxsNTBRM0E1SDRCa0pPQWVlb05YU3dfeUhkLTRmOEh3dGczOGh1ZGhQcVdZdVAtYmtoSWlwNXM4VGFoVmF3dno2X1hrbk5GZjZTWjVuWEdEZUFXeHJSQjEzbTlVakhNdzYyWTdiVEpvUEc2MTNpRzUtczhEank1eGI0b3MyMlAyaGN6dlByZmtUWHByaDNUYWFqYS1tYnNBUkRKTzBacFNSRjFuVmd5bUtFUEJhd1ZVS1ZDd2xSUV9PaEtLTnc0XzVkY2lhM01WTjQwaWdJSk9wNUJSXzQ4RUNQLXFPNy1VdzdPYkZyWkVkU3RyQjVLTS1MVHN0R3k4THNKZ2NBNExaZnl3Q1EwN2dwNGRsUXptMjB0LXUzTUpzTDE2Q1hmS0ZSN2g1ZjBPeV8taFoxNUwxc2FEcktXX0dCM1IzeUZTTHNiTmNxVXBvNWViZTJScUVWV2VYTFQ4UHlid21PY1A0UjdNMGtERkZCd0lLMlJENDMzMVZUM09DQ0twd1N3VHlZd09XLUctOWhYcFJIR1p5aUlZeEUzejc2dWRYdGNsd0xOODNqRUFEczhSTWNtWU0tdFZ1ZTExaHNHUVYtd0Zxdld1LTdKVUNINzlZTGdHODhKeVVpQmRZMHRUNTJRRWhwS1F1Y3I2X2Iwc0c1NHlXSVRLZWxreEt0dVRnOTZFMkptU2VMS1dWXzdVOVRzMUNUWXM2aFlxVDJXdGo3d2cxSVZGWlI2ZWhIZzZBcEl4bEJ6UnVHc0RYWVNHcjFZUHI5ZUYyWG9rSlo0QUVSUkFCX3h2UmtJUTFzVXJUZ25vTmk2VzdoTTNta05ucmNfTi0yR1ZxN1E2MnZJOVVKOGxmMXMzdHMxVndmSVhQbUItUHgtMVpVcHJwMU5JVHJLb0Y1aHV5OEEwS0kzQkEtcFJkdkRnWGxmZnprNFhndWg1TmQyd09yTFdTRmZ3d2ZvZFUtWXp4a2VYb3JjckFIcE13MDUzX0RHSnlzM0N2ZE5IRzJzMXFMc0k4MDRyTHdLZFlWOG9SaFF0LU43Ynd6VFlEcVNvdFZ0emJEVk10aEp4dDZFTFNFNzk0UUo2WTlVLWRGYm1fanZHaFZreHBIMmtzVjhyS0xPTk9fWHhiVTJHQXZwVlVuY3JtSjFUYUdHQzhEaHFNZXhwUHBmY0kxaUVrOHo4a0FYOTdpZVJDbFRvdFlQeWo3eFZHX1ZMZ1Myc3prU3o2c3o2eXNja1U4N0Y1T0d1REVjZFRGNTByUkgyemVCSjlQYkY2bmJ4YTZodHB0cUNzd2xZcENycUdsczBIaEZPbG1jVUlqNlM2cEE3aGpVaWswTzBDLVFGUHM5UHhvM09saWNtaDhaNVlsc3FZdktKeWlheDF5OGhTODE2N3JWamdTZG5Fa3JSQ2ZUSEVfRjZOZXdreXRZLTBZRFhleVFFeC1YUzc0cWhYeEhobGxvdnZ3Rm15WFlBWXp0dm1DeTA5eExLeEFRRXVRSXBXdTNEaWdZZ3JDenItdDhoZlFiTzI0SGZ1c01FR1FNaFVweVBKR1YxWGRUMW1Mc2JVdW9raWR6UHk2ZTBnS05pV3oyZVBjREdkY3k4ZHZPUWE5S281MkJRSHF3NnpTclZ5bl90bk1wUEh6Tkp5dXlDcE5paWRqcVhxRFVObWIzRldWOGJ2aC1CRHZpbFZrb0hjNGpCMm5POGRiS2lETUpMLUVfQlhCdTZPLW9USW1LTFlTSF9zRUJYZ1NKeFFEQjNOR215ZXJDbkFndmcxWl9rWlk9",
            "provider": "8851db6b-42e5-4533-aa9e-30a32d67e875"
        }
    },
    {
        "model": "api.providersecret",
        "pk": "40191ad5-d8c2-40a9-826d-241397626b68",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-10T11:11:44.515Z",
            "updated_at": "2024-10-11T07:59:56.102Z",
            "name": "AWS static secrets",
            "secret_type": "static",
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
            "provider": "b85601a8-4b45-4194-8135-03fb980ef428"
        }
    },
    {
        "model": "api.providersecret",
        "pk": "ed89d1ea-366a-4d12-a602-f2ab77019742",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-10T11:11:44.515Z",
            "updated_at": "2024-10-11T07:59:56.102Z",
            "name": "Azure static secrets",
            "secret_type": "static",
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
            "provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2"
        }
    },
    {
        "model": "api.providersecret",
        "pk": "ae48ecde-75cd-4814-92ab-18f48719e5d9",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:45:26.412Z",
            "updated_at": "2024-10-18T10:45:26.412Z",
            "name": "Valid AWS Credentials",
            "secret_type": "static",
"_secret": "Z0FBQUFBQm5FanhHa3dXS0I3M2NmWm56SktiaGNqdDZUN0xQU1QwUi15QkhLZldFUmRENk1BXzlscG9JSUxVSTF5ekxuMkdEanlJNjhPUS1VSV9wVTBvU2l4ZnNGOVJhYW93RC1LTEhmc2pyOTJvUWwyWnpFY19WN1pRQk5IdDYwYnBDQnF1eU9nUzdwTGU3QU5qMGFyX1E4SXdpSk9paGVLcVpOVUhwb3duaXgxZ0ZxME5Pcm40QzBGWEZKY2lmRVlCMGFuVFVzemxuVjVNalZVQ2JsY2ZqNWt3Z01IYUZ0dk92YkdtSUZ5SlBvQWZoVU5DWlRFWmExNnJGVEY4Q1Bnd2VJUW9TSWdRcG9rSDNfREQwRld3Q1RYVnVYWVJLWWIxZmpsWGpwd0xQM0dtLTlYUjdHOVhhNklLWXFGTHpFQUVyVmNhYW9CU0tocGVyX3VjMkVEcVdjdFBfaVpsLTBzaUxrWTlta3dpelNtTG9xYVhBUHUzNUE4RnI1WXdJdHcxcFVfaG1XRHhDVFBKamxJb1FaQ2lsQ3FzRmxZbEJVemVkT1E2aHZfbDJqWDJPT3ViOWJGYzQ3eTNWNlFQSHBWRDFiV2tneDM4SmVqMU9Bd01TaXhPY2dmWG5RdENURkM2b2s5V3luVUZQcnFKNldnWEdYaWE2MnVNQkEwMHd6cUY5cVJkcGw4bHBtNzhPeHhkREdwSXNEc1JqQkxUR1FYRTV0UFNwbVlVSWF5LWgtbVhJZXlPZ0Q4cG9HX2E0Qld0LTF1TTFEVy1XNGdnQTRpLWpQQmFJUEdaOFJGNDVoUVJnQ25YVU5DTENMaTY4YmxtYWJFRERXTjAydVN2YnBDb3RkUE0zSDRlN1A3TXc4d2h1Wmd0LWUzZEcwMUstNUw2YnFyS2Z0NEVYMXllQW5GLVBpeU55SkNhczFIeFhrWXZpVXdwSFVrTDdiQjQtWHZJdERXVThzSnJsT2FNZzJDaUt6Y2NXYUZhUlo3VkY0R1BrSHNHNHprTmxjYmp1TXVKakRha0VtNmRFZWRmZHJWdnRCOVNjVGFVWjVQM3RwWWl4SkNmOU1pb2xqMFdOblhNY3Y3aERpOHFlWjJRc2dtRDkzZm1Qc29wdk5OQmJPbGk5ZUpGM1I2YzRJN2gxR3FEMllXR1pma1k0emVqSjZyMUliMGZsc3NfSlVDbGt4QzJTc3hHOU9FRHlZb09zVnlvcDR6WC1uclRSenI0Yy13WlFWNzJWRkwydjhmSjFZdnZ5X3NmZVF6UWRNMXo5STVyV3B0d09UUlFtOURITGhXSDVIUl9zYURJc05KWUNxekVyYkxJclNFNV9leEk4R2xsMGJod3lYeFIwaXR2dllwLTZyNWlXdDRpRkxVYkxWZFdvYUhKck5aeElBZUtKejNKS2tYVW1rTnVrRjJBQmdlZmV6ckozNjNwRmxLS1FaZzRVTTBZYzFFYi1idjBpZkQ3bWVvbEdRZXJrWFNleWZmSmFNdG1wQlp0YmxjWDV5T0tEbHRsYnNHbjRPRjl5MkttOUhRWlJtd1pmTnY4Z1lPRlZoTzFGVDdTZ0RDY1ByV0RndTd5LUNhcHNXUnNIeXdLMEw3WS1tektRTWFLQy1zakpMLWFiM3FOakE1UWU4LXlOX2VPbmd4MTZCRk9OY3Z4UGVDSWxhRlg4eHI4X1VUTDZZM0pjV0JDVi1UUjlTUl85cm1LWlZ0T1dzU0lpdWUwbXgtZ0l6eHNSNExRTV9MczJ6UkRkVElnRV9Rc0RoTDFnVHRZSEFPb2paX200TzZiRzVmRE5hOW5CTjh5Qi1WaEtueEpqRzJDY1luVWZtX1pseUpQSE5lQ0RrZ05EbWo5cU9MZ0ZkcXlqUll4UUkyejRfY2p4RXdEeC1PS1JIQVNUcmNIdkRJbzRiUktMWEQxUFM3aGNzeVFWUDdtcm5xNHlOYUU9",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555"
        }
    }
]
@@ -1,218 +0,0 @@
[
    {
        "model": "api.scan",
        "pk": "0191e280-9d2f-71c8-9b18-487a23ba185e",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "provider": "37b065f8-26b0-4218-a665-0b23d07b27d9",
            "trigger": "manual",
            "name": "test scan 1",
            "state": "completed",
            "unique_resource_count": 1,
            "duration": 5,
            "scanner_args": {
                "checks_to_execute": [
                    "accessanalyzer_enabled"
                ]
            },
            "inserted_at": "2024-09-01T17:25:27.050Z",
            "started_at": "2024-09-01T17:25:27.050Z",
            "updated_at": "2024-09-01T17:25:27.050Z",
            "completed_at": "2024-09-01T17:25:32.050Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "01920573-aa9c-73c9-bcda-f2e35c9b19d2",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "provider": "b85601a8-4b45-4194-8135-03fb980ef428",
            "trigger": "manual",
            "name": "test aws scan 2",
            "state": "completed",
            "unique_resource_count": 1,
            "duration": 20,
            "scanner_args": {
                "checks_to_execute": [
                    "accessanalyzer_enabled"
                ]
            },
            "inserted_at": "2024-09-02T17:24:27.050Z",
            "started_at": "2024-09-02T17:24:27.050Z",
            "updated_at": "2024-09-02T17:24:27.050Z",
            "completed_at": "2024-09-01T17:24:37.050Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "provider": "baa7b895-8bac-4f47-b010-4226d132856e",
            "trigger": "manual",
            "name": "test gcp scan",
            "state": "completed",
            "unique_resource_count": 10,
            "duration": 10,
            "scanner_args": {
                "checks_to_execute": [
                    "cloudsql_instance_automated_backups"
                ]
            },
            "inserted_at": "2024-09-02T19:26:27.050Z",
            "started_at": "2024-09-02T19:26:27.050Z",
            "updated_at": "2024-09-02T19:26:27.050Z",
            "completed_at": "2024-09-01T17:26:37.050Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "provider": "b85601a8-4b45-4194-8135-03fb980ef428",
            "trigger": "manual",
            "name": "test aws scan",
            "state": "completed",
            "unique_resource_count": 1,
            "duration": 35,
            "scanner_args": {
                "checks_to_execute": [
                    "accessanalyzer_enabled"
                ]
            },
            "inserted_at": "2024-09-02T19:27:27.050Z",
            "started_at": "2024-09-02T19:27:27.050Z",
            "updated_at": "2024-09-02T19:27:27.050Z",
            "completed_at": "2024-09-01T17:27:37.050Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "c281c924-23f3-4fcc-ac63-73a22154b7de",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "provider": "b85601a8-4b45-4194-8135-03fb980ef428",
            "trigger": "scheduled",
            "name": "test scheduled aws scan",
            "state": "available",
            "scanner_args": {
                "checks_to_execute": [
                    "cloudformation_stack_outputs_find_secrets"
                ]
            },
            "scheduled_at": "2030-09-02T19:20:27.050Z",
            "inserted_at": "2024-09-02T19:24:27.050Z",
            "updated_at": "2024-09-02T19:24:27.050Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "provider": "b85601a8-4b45-4194-8135-03fb980ef428",
            "trigger": "scheduled",
            "name": "test scheduled aws scan 2",
            "state": "available",
            "scanner_args": {
                "checks_to_execute": [
                    "accessanalyzer_enabled",
                    "cloudformation_stack_outputs_find_secrets"
                ]
            },
            "scheduled_at": "2030-08-02T19:31:27.050Z",
            "inserted_at": "2024-09-02T19:38:27.050Z",
            "updated_at": "2024-09-02T19:38:27.050Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "provider": "baa7b895-8bac-4f47-b010-4226d132856e",
            "trigger": "scheduled",
            "name": "test scheduled gcp scan",
            "state": "available",
            "scanner_args": {
                "checks_to_execute": [
                    "cloudsql_instance_automated_backups",
                    "iam_audit_logs_enabled"
                ]
            },
            "scheduled_at": "2030-07-02T19:30:27.050Z",
            "inserted_at": "2024-09-02T19:29:27.050Z",
            "updated_at": "2024-09-02T19:29:27.050Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
            "trigger": "scheduled",
            "name": "test scheduled azure scan",
            "state": "available",
            "scanner_args": {
                "checks_to_execute": [
                    "aks_cluster_rbac_enabled",
                    "defender_additional_email_configured_with_a_security_contact"
                ]
            },
            "scheduled_at": "2030-08-05T19:32:27.050Z",
            "inserted_at": "2024-09-02T19:29:27.050Z",
            "updated_at": "2024-09-02T19:29:27.050Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "01929f3b-ed2e-7623-ad63-7c37cd37828f",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "name": "real scan 1",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "trigger": "manual",
            "state": "completed",
            "unique_resource_count": 19,
            "progress": 100,
            "scanner_args": {
                "checks_to_execute": [
                    "accessanalyzer_enabled"
                ]
            },
            "duration": 7,
            "scheduled_at": null,
            "inserted_at": "2024-10-18T10:45:57.678Z",
            "updated_at": "2024-10-18T10:46:05.127Z",
            "started_at": "2024-10-18T10:45:57.909Z",
            "completed_at": "2024-10-18T10:46:05.127Z"
        }
    },
    {
        "model": "api.scan",
        "pk": "01929f57-c0ee-7553-be0b-cbde006fb6f7",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "name": "real scan 2",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "trigger": "manual",
            "state": "completed",
            "unique_resource_count": 20,
            "progress": 100,
            "scanner_args": {
                "checks_to_execute": [
                    "accessanalyzer_enabled",
                    "account_security_contact_information_is_registered"
                ]
            },
            "duration": 4,
            "scheduled_at": null,
            "inserted_at": "2024-10-18T11:16:21.358Z",
            "updated_at": "2024-10-18T11:16:26.060Z",
            "started_at": "2024-10-18T11:16:21.593Z",
            "completed_at": "2024-10-18T11:16:26.060Z"
        }
    }
]
@@ -1,322 +0,0 @@
[
    {
        "model": "api.resource",
        "pk": "0234477d-0b8e-439f-87d3-ce38dff3a434",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.772Z",
            "updated_at": "2024-10-18T11:16:24.466Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root",
            "name": "",
            "region": "eu-south-2",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-south':6C 'iam':3A 'other':11 'root':5A 'south':8C"
        }
    },
    {
        "model": "api.resource",
        "pk": "17ce30a3-6e77-42a5-bb08-29dfcad7396a",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.882Z",
            "updated_at": "2024-10-18T11:16:24.533Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root2",
            "name": "",
            "region": "eu-west-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
        }
    },
    {
        "model": "api.resource",
        "pk": "1f9de587-ba5b-415a-b9b0-ceed4c6c9f32",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.091Z",
            "updated_at": "2024-10-18T11:16:24.637Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root3",
            "name": "",
            "region": "ap-northeast-2",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
        }
    },
    {
        "model": "api.resource",
        "pk": "29b35668-6dad-411d-bfec-492311889892",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.008Z",
            "updated_at": "2024-10-18T11:16:24.600Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root4",
            "name": "",
            "region": "us-west-2",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
        }
    },
    {
        "model": "api.resource",
        "pk": "30505514-01d4-42bb-8b0c-471bbab27460",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T11:16:26.014Z",
            "updated_at": "2024-10-18T11:16:26.023Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root5",
            "name": "",
            "region": "us-east-1",
            "service": "account",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'account':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
        }
    },
    {
        "model": "api.resource",
        "pk": "372932f0-e4df-4968-9721-bb4f6236fae4",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.848Z",
            "updated_at": "2024-10-18T11:16:24.516Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root6",
            "name": "",
            "region": "eu-west-3",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
        }
    },
    {
        "model": "api.resource",
        "pk": "3a37d124-7637-43f6-9df7-e9aa7ef98c53",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.979Z",
            "updated_at": "2024-10-18T11:16:24.585Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root7",
            "name": "",
            "region": "sa-east-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'sa':7C 'sa-east':6C"
        }
    },
    {
        "model": "api.resource",
        "pk": "3c49318e-03c6-4f12-876f-40451ce7de3d",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.072Z",
            "updated_at": "2024-10-18T11:16:24.630Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root8",
            "name": "",
            "region": "ap-southeast-2",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
        }
    },
    {
        "model": "api.resource",
        "pk": "430bf313-8733-4bc5-ac70-5402adfce880",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.994Z",
            "updated_at": "2024-10-18T11:16:24.593Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root9",
            "name": "",
            "region": "eu-north-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-north':6C 'iam':3A 'north':8C 'other':11 'root':5A"
        }
    },
    {
        "model": "api.resource",
        "pk": "78bd2a52-82f9-45df-90a9-4ad78254fdc4",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.055Z",
            "updated_at": "2024-10-18T11:16:24.622Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root10",
            "name": "",
            "region": "ap-northeast-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
        }
    },
    {
        "model": "api.resource",
        "pk": "7973e332-795e-4a74-b4d4-a53a21c98c80",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.896Z",
            "updated_at": "2024-10-18T11:16:24.542Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root11",
            "name": "",
            "region": "us-east-2",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
        }
    },
    {
        "model": "api.resource",
        "pk": "8ca0a188-5699-436e-80fd-e566edaeb259",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.938Z",
            "updated_at": "2024-10-18T11:16:24.565Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root12",
            "name": "",
            "region": "ca-central-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'ca':7C 'ca-central':6C 'central':8C 'iam':3A 'other':11 'root':5A"
        }
    },
    {
        "model": "api.resource",
        "pk": "8fe4514f-71d7-46ab-b0dc-70cef23b4d13",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.965Z",
            "updated_at": "2024-10-18T11:16:24.578Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root13",
            "name": "",
            "region": "eu-west-2",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
        }
    },
    {
        "model": "api.resource",
        "pk": "9ab35225-dc7c-4ebd-bbc0-d81fb5d9de77",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.909Z",
            "updated_at": "2024-10-18T11:16:24.549Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root14",
            "name": "",
            "region": "ap-south-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-south':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'south':8C"
        }
    },
    {
        "model": "api.resource",
        "pk": "9be26c1d-adf0-4ba8-9ca9-c740f4a0dc4e",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.863Z",
            "updated_at": "2024-10-18T11:16:24.524Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root15",
            "name": "",
            "region": "eu-central-2",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
        }
    },
    {
        "model": "api.resource",
        "pk": "ba108c01-bcad-44f1-b211-c1d8985da89d",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.110Z",
            "updated_at": "2024-10-18T11:16:24.644Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root16",
            "name": "",
            "region": "ap-northeast-3",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
        }
    },
    {
        "model": "api.resource",
        "pk": "dc6cfb5d-6835-4c7b-9152-c18c734a6eaa",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.038Z",
            "updated_at": "2024-10-18T11:16:24.615Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root17",
            "name": "",
            "region": "eu-central-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
        }
    },
    {
        "model": "api.resource",
        "pk": "e0664164-cfda-44a4-b743-acee1c69386c",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.924Z",
            "updated_at": "2024-10-18T11:16:24.557Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root18",
            "name": "",
            "region": "us-west-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
        }
    },
    {
        "model": "api.resource",
        "pk": "e1929daa-a984-4116-8131-492a48321dba",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:05.023Z",
            "updated_at": "2024-10-18T11:16:24.607Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:iam::112233445566:root19",
            "name": "",
            "region": "ap-southeast-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
        }
    },
    {
        "model": "api.resource",
        "pk": "e37bb1f1-1669-4bb3-be86-e3378ddfbcba",
        "fields": {
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "inserted_at": "2024-10-18T10:46:04.952Z",
            "updated_at": "2024-10-18T11:16:24.571Z",
            "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
            "uid": "arn:aws:access-analyzer:us-east-1:112233445566:analyzer/ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
            "name": "",
            "region": "us-east-1",
            "service": "accessanalyzer",
            "type": "Other",
            "text_search": "'1':9A,15C '112233445566':10A 'access':4A 'access-analyzer':3A 'accessanalyzer':16 'analyzer':5A 'analyzer/consoleanalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c':11A 'arn':1A 'aws':2A 'east':8A,14C 'other':17 'us':7A,13C 'us-east':6A,12C"
        }
    }
]
File diff suppressed because it is too large
@@ -1,153 +0,0 @@
[
{
    "model": "api.providergroup",
    "pk": "3fe28fb8-e545-424c-9b8f-69aff638f430",
    "fields": {
        "name": "first_group",
        "inserted_at": "2024-11-13T11:36:19.503Z",
        "updated_at": "2024-11-13T11:36:19.503Z",
        "tenant": "12646005-9067-4d2a-a098-8bb378604362"
    }
},
{
    "model": "api.providergroup",
    "pk": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
    "fields": {
        "name": "second_group",
        "inserted_at": "2024-11-13T11:36:25.421Z",
        "updated_at": "2024-11-13T11:36:25.421Z",
        "tenant": "12646005-9067-4d2a-a098-8bb378604362"
    }
},
{
    "model": "api.providergroup",
    "pk": "481769f5-db2b-447b-8b00-1dee18db90ec",
    "fields": {
        "name": "third_group",
        "inserted_at": "2024-11-13T11:36:37.603Z",
        "updated_at": "2024-11-13T11:36:37.603Z",
        "tenant": "12646005-9067-4d2a-a098-8bb378604362"
    }
},
{
    "model": "api.providergroupmembership",
    "pk": "13625bd3-f428-4021-ac1b-b0bd41b6e02f",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
        "provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
        "inserted_at": "2024-11-13T11:55:17.138Z"
    }
},
{
    "model": "api.providergroupmembership",
    "pk": "54784ebe-42d2-4937-aa6a-e21c62879567",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
        "provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
        "inserted_at": "2024-11-13T11:55:17.138Z"
    }
},
{
    "model": "api.providergroupmembership",
    "pk": "c8bd52d5-42a5-48fe-8e0a-3eef154b8ebe",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
        "provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
        "inserted_at": "2024-11-13T11:55:41.237Z"
    }
},
{
    "model": "api.role",
    "pk": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "name": "admin_test",
        "manage_users": true,
        "manage_account": true,
        "manage_billing": true,
        "manage_providers": true,
        "manage_integrations": true,
        "manage_scans": true,
        "unlimited_visibility": true,
        "inserted_at": "2024-11-20T15:32:42.402Z",
        "updated_at": "2024-11-20T15:32:42.402Z"
    }
},
{
    "model": "api.role",
    "pk": "845ff03a-87ef-42ba-9786-6577c70c4df0",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "name": "first_role",
        "manage_users": true,
        "manage_account": true,
        "manage_billing": true,
        "manage_providers": true,
        "manage_integrations": false,
        "manage_scans": false,
        "unlimited_visibility": true,
        "inserted_at": "2024-11-20T15:31:53.239Z",
        "updated_at": "2024-11-20T15:31:53.239Z"
    }
},
{
    "model": "api.role",
    "pk": "902d726c-4bd5-413a-a2a4-f7b4754b6b20",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "name": "third_role",
        "manage_users": false,
        "manage_account": false,
        "manage_billing": false,
        "manage_providers": false,
        "manage_integrations": false,
        "manage_scans": true,
        "unlimited_visibility": false,
        "inserted_at": "2024-11-20T15:34:05.440Z",
        "updated_at": "2024-11-20T15:34:05.440Z"
    }
},
{
    "model": "api.roleprovidergrouprelationship",
    "pk": "57fd024a-0a7f-49b4-a092-fa0979a07aaf",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
        "provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
        "inserted_at": "2024-11-20T15:32:42.402Z"
    }
},
{
    "model": "api.roleprovidergrouprelationship",
    "pk": "a3cd0099-1c13-4df1-a5e5-ecdfec561b35",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
        "provider_group": "481769f5-db2b-447b-8b00-1dee18db90ec",
        "inserted_at": "2024-11-20T15:32:42.402Z"
    }
},
{
    "model": "api.roleprovidergrouprelationship",
    "pk": "cfd84182-a058-40c2-af3c-0189b174940f",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
        "provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
        "inserted_at": "2024-11-20T15:32:42.402Z"
    }
},
{
    "model": "api.userrolerelationship",
    "pk": "92339663-e954-4fd8-98fb-8bfe15949975",
    "fields": {
        "tenant": "12646005-9067-4d2a-a098-8bb378604362",
        "role": "3f01e759-bdf9-4a99-8888-1ab805b79f93",
        "user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
        "inserted_at": "2024-11-20T15:36:14.302Z"
    }
}
]
File diff suppressed because one or more lines are too long
@@ -1,49 +0,0 @@
import logging
import time

from config.custom_logging import BackendLogger


def extract_auth_info(request) -> dict:
    if getattr(request, "auth", None) is not None:
        tenant_id = request.auth.get("tenant_id", "N/A")
        user_id = request.auth.get("sub", "N/A")
    else:
        tenant_id, user_id = "N/A", "N/A"
    return {"tenant_id": tenant_id, "user_id": user_id}


class APILoggingMiddleware:
    """
    Middleware for logging API requests.

    This middleware logs details of API requests, including the typical request metadata among other useful information.

    Args:
        get_response (Callable): A callable to get the response, typically the next middleware or view.
    """

    def __init__(self, get_response):
        self.get_response = get_response
        self.logger = logging.getLogger(BackendLogger.API)

    def __call__(self, request):
        request_start_time = time.time()

        response = self.get_response(request)
        duration = time.time() - request_start_time
        auth_info = extract_auth_info(request)
        self.logger.info(
            "",
            extra={
                "user_id": auth_info["user_id"],
                "tenant_id": auth_info["tenant_id"],
                "method": request.method,
                "path": request.path,
                "query_params": request.GET.dict(),
                "status_code": response.status_code,
                "duration": duration,
            },
        )

        return response
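# A minimal sketch of enabling this middleware. The dotted path matches the
# `from api.middleware import APILoggingMiddleware` import used by the tests
# below; the surrounding MIDDLEWARE entries are illustrative assumptions.
MIDDLEWARE = [
    "django.middleware.common.CommonMiddleware",
    # ... other middleware ...
    "api.middleware.APILoggingMiddleware",  # logs every request/response pair
]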
File diff suppressed because it is too large
@@ -1,23 +0,0 @@
from django.conf import settings
from django.db import migrations

from api.db_utils import DB_PROWLER_USER

DB_NAME = settings.DATABASES["default"]["NAME"]


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0001_initial"),
        ("token_blacklist", "0012_alter_outstandingtoken_user"),
    ]

    operations = [
        migrations.RunSQL(
            f"""
            GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_blacklistedtoken TO {DB_PROWLER_USER};
            GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_outstandingtoken TO {DB_PROWLER_USER};
            GRANT SELECT, DELETE ON django_admin_log TO {DB_PROWLER_USER};
            """
        ),
    ]
@@ -1,246 +0,0 @@
# Generated by Django 5.1.1 on 2024-12-05 12:29

import api.rls
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0002_token_migrations"),
    ]

    operations = [
        migrations.CreateModel(
            name="Role",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("name", models.CharField(max_length=255)),
                ("manage_users", models.BooleanField(default=False)),
                ("manage_account", models.BooleanField(default=False)),
                ("manage_billing", models.BooleanField(default=False)),
                ("manage_providers", models.BooleanField(default=False)),
                ("manage_integrations", models.BooleanField(default=False)),
                ("manage_scans", models.BooleanField(default=False)),
                ("unlimited_visibility", models.BooleanField(default=False)),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "roles",
            },
        ),
        migrations.CreateModel(
            name="RoleProviderGroupRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_provider_group_relationship",
            },
        ),
        migrations.CreateModel(
            name="UserRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_user_relationship",
            },
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="provider_group",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.providergroup"
            ),
        ),
        migrations.AddField(
            model_name="roleprovidergrouprelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="provider_groups",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.RoleProviderGroupRelationship",
                to="api.providergroup",
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="role",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.role"
            ),
        ),
        migrations.AddField(
            model_name="userrolerelationship",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="users",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.UserRoleRelationship",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "provider_group_id"),
                name="unique_role_provider_group_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="roleprovidergrouprelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_roleprovidergrouprelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "user_id"), name="unique_role_user_relationship"
            ),
        ),
        migrations.AddConstraint(
            model_name="userrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_userrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddConstraint(
            model_name="role",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "name"), name="unique_role_per_tenant"
            ),
        ),
        migrations.AddConstraint(
            model_name="role",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_role",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.CreateModel(
            name="InvitationRoleRelationship",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "invitation",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.invitation"
                    ),
                ),
                (
                    "role",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.role"
                    ),
                ),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
                    ),
                ),
            ],
            options={
                "db_table": "role_invitation_relationship",
            },
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=models.UniqueConstraint(
                fields=("role_id", "invitation_id"),
                name="unique_role_invitation_relationship",
            ),
        ),
        migrations.AddConstraint(
            model_name="invitationrolerelationship",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_invitationrolerelationship",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        migrations.AddField(
            model_name="role",
            name="invitations",
            field=models.ManyToManyField(
                related_name="roles",
                through="api.InvitationRoleRelationship",
                to="api.invitation",
            ),
        ),
    ]
@@ -1,43 +0,0 @@
from django.db import migrations

from api.db_router import MainRouter


def create_admin_role(apps, schema_editor):
    Tenant = apps.get_model("api", "Tenant")
    Role = apps.get_model("api", "Role")
    User = apps.get_model("api", "User")
    UserRoleRelationship = apps.get_model("api", "UserRoleRelationship")

    for tenant in Tenant.objects.using(MainRouter.admin_db).all():
        admin_role, _ = Role.objects.using(MainRouter.admin_db).get_or_create(
            name="admin",
            tenant=tenant,
            defaults={
                "manage_users": True,
                "manage_account": True,
                "manage_billing": True,
                "manage_providers": True,
                "manage_integrations": True,
                "manage_scans": True,
                "unlimited_visibility": True,
            },
        )
        users = User.objects.using(MainRouter.admin_db).filter(
            membership__tenant=tenant
        )
        for user in users:
            UserRoleRelationship.objects.using(MainRouter.admin_db).get_or_create(
                user=user,
                role=admin_role,
                tenant=tenant,
            )


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0003_rbac"),
    ]

    operations = [
        migrations.RunPython(create_admin_role),
    ]
File diff suppressed because it is too large
@@ -1,6 +0,0 @@
from rest_framework_json_api.pagination import JsonApiPageNumberPagination


class ComplianceOverviewPagination(JsonApiPageNumberPagination):
    page_size = 50
    max_page_size = 100
@@ -1,203 +0,0 @@
from datetime import datetime, timezone
from typing import Generator, Optional

from dateutil.relativedelta import relativedelta
from django.conf import settings
from psqlextra.partitioning import (
    PostgresPartitioningManager,
    PostgresRangePartition,
    PostgresRangePartitioningStrategy,
    PostgresTimePartitionSize,
    PostgresPartitioningError,
)
from psqlextra.partitioning.config import PostgresPartitioningConfig
from uuid6 import UUID

from api.models import Finding, ResourceFindingMapping
from api.rls import RowLevelSecurityConstraint
from api.uuid_utils import datetime_to_uuid7


class PostgresUUIDv7RangePartition(PostgresRangePartition):
    def __init__(
        self,
        from_values: UUID,
        to_values: UUID,
        size: PostgresTimePartitionSize,
        name_format: Optional[str] = None,
        **kwargs,
    ) -> None:
        self.from_values = from_values
        self.to_values = to_values
        self.size = size
        self.name_format = name_format

        self.rls_statements = None
        if "rls_statements" in kwargs:
            self.rls_statements = kwargs["rls_statements"]

        start_timestamp_ms = self.from_values.time

        self.start_datetime = datetime.fromtimestamp(
            start_timestamp_ms / 1000, timezone.utc
        )

    def name(self) -> str:
        if not self.name_format:
            raise PostgresPartitioningError("Unknown size/unit")

        return self.start_datetime.strftime(self.name_format).lower()

    def deconstruct(self) -> dict:
        return {
            **super().deconstruct(),
            "size_unit": self.size.unit.value,
            "size_value": self.size.value,
        }

    def create(
        self,
        model,
        schema_editor,
        comment,
    ) -> None:
        super().create(model, schema_editor, comment)

        # If this model has RLS statements, add them to the partition
        if isinstance(self.rls_statements, list):
            schema_editor.add_constraint(
                model,
                constraint=RowLevelSecurityConstraint(
                    "tenant_id",
                    name=f"rls_on_{self.name()}",
                    partition_name=self.name(),
                    statements=self.rls_statements,
                ),
            )


class PostgresUUIDv7PartitioningStrategy(PostgresRangePartitioningStrategy):
    def __init__(
        self,
        size: PostgresTimePartitionSize,
        count: int,
        start_date: datetime = None,
        max_age: Optional[relativedelta] = None,
        name_format: Optional[str] = None,
        **kwargs,
    ) -> None:
        # Normalize the start date to the beginning of its month; keep None so
        # `to_create` can fall back to the start of the current month.
        self.start_date = (
            start_date.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
            if start_date
            else None
        )
        self.size = size
        self.count = count
        self.max_age = max_age
        self.name_format = name_format

        self.rls_statements = None
        if "rls_statements" in kwargs:
            self.rls_statements = kwargs["rls_statements"]

    def to_create(self) -> Generator[PostgresUUIDv7RangePartition, None, None]:
        current_datetime = (
            self.start_date if self.start_date else self.get_start_datetime()
        )

        for _ in range(self.count):
            end_datetime = (
                current_datetime + self.size.as_delta() - relativedelta(microseconds=1)
            )
            start_uuid7 = datetime_to_uuid7(current_datetime)
            end_uuid7 = datetime_to_uuid7(end_datetime)

            yield PostgresUUIDv7RangePartition(
                from_values=start_uuid7,
                to_values=end_uuid7,
                size=self.size,
                name_format=self.name_format,
                rls_statements=self.rls_statements,
            )

            current_datetime += self.size.as_delta()

    def to_delete(self) -> Generator[PostgresUUIDv7RangePartition, None, None]:
        if not self.max_age:
            return

        current_datetime = self.get_start_datetime() - self.max_age

        while True:
            end_datetime = current_datetime + self.size.as_delta()
            start_uuid7 = datetime_to_uuid7(current_datetime)
            end_uuid7 = datetime_to_uuid7(end_datetime)

            # Dropping the table will delete indexes and policies
            yield PostgresUUIDv7RangePartition(
                from_values=start_uuid7,
                to_values=end_uuid7,
                size=self.size,
                name_format=self.name_format,
            )

            current_datetime -= self.size.as_delta()

    def get_start_datetime(self) -> datetime:
        """
        Gets the start of the current month in UTC timezone.

        This function returns a `datetime` object set to the first day of the current
        month, at midnight (00:00:00), in UTC.

        Returns:
            datetime: A `datetime` object representing the start of the current month in UTC.
        """
        return datetime.now(timezone.utc).replace(
            day=1, hour=0, minute=0, second=0, microsecond=0
        )


def relative_days_or_none(value):
    if value is None:
        return None
    return relativedelta(days=value)


#
# To manage the partitions, run `python manage.py pgpartition --using admin`
#
# For more info on the partitioning manager, see https://github.com/SectorLabs/django-postgres-extra
manager = PostgresPartitioningManager(
    [
        PostgresPartitioningConfig(
            model=Finding,
            strategy=PostgresUUIDv7PartitioningStrategy(
                start_date=datetime.now(timezone.utc),
                size=PostgresTimePartitionSize(
                    months=settings.FINDINGS_TABLE_PARTITION_MONTHS
                ),
                count=settings.FINDINGS_TABLE_PARTITION_COUNT,
                max_age=relative_days_or_none(
                    settings.FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS
                ),
                name_format="%Y_%b",
                rls_statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
        # ResourceFindingMapping should always follow the Finding partitioning
        PostgresPartitioningConfig(
            model=ResourceFindingMapping,
            strategy=PostgresUUIDv7PartitioningStrategy(
                start_date=datetime.now(timezone.utc),
                size=PostgresTimePartitionSize(
                    months=settings.FINDINGS_TABLE_PARTITION_MONTHS
                ),
                count=settings.FINDINGS_TABLE_PARTITION_COUNT,
                max_age=relative_days_or_none(
                    settings.FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS
                ),
                name_format="%Y_%b",
                rls_statements=["SELECT"],
            ),
        ),
    ]
)
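# A minimal usage sketch of the strategy above, assuming a configured Django
# context so that api.uuid_utils and api.models import cleanly: with a fixed
# start date and a monthly size, `to_create` yields one UUIDv7-bounded range
# partition per month, named via `name_format`.
if __name__ == "__main__":
    example_strategy = PostgresUUIDv7PartitioningStrategy(
        start_date=datetime(2024, 10, 1, tzinfo=timezone.utc),
        size=PostgresTimePartitionSize(months=1),
        count=3,
        name_format="%Y_%b",
    )
    for partition in example_strategy.to_create():
        print(partition.name())  # prints "2024_oct", "2024_nov", "2024_dec"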
@@ -1,70 +0,0 @@
from enum import Enum
from rest_framework.permissions import BasePermission
from api.models import Provider, Role, User
from api.db_router import MainRouter
from typing import Optional
from django.db.models import QuerySet


class Permissions(Enum):
    MANAGE_USERS = "manage_users"
    MANAGE_ACCOUNT = "manage_account"
    MANAGE_BILLING = "manage_billing"
    MANAGE_PROVIDERS = "manage_providers"
    MANAGE_INTEGRATIONS = "manage_integrations"
    MANAGE_SCANS = "manage_scans"
    UNLIMITED_VISIBILITY = "unlimited_visibility"


class HasPermissions(BasePermission):
    """
    Custom permission to check if the user's role has the required permissions.
    The required permissions should be specified in the view as a list in `required_permissions`.
    """

    def has_permission(self, request, view):
        required_permissions = getattr(view, "required_permissions", [])
        if not required_permissions:
            return True

        user_roles = (
            User.objects.using(MainRouter.admin_db).get(id=request.user.id).roles.all()
        )
        if not user_roles:
            return False

        for perm in required_permissions:
            if not getattr(user_roles[0], perm.value, False):
                return False

        return True


def get_role(user: User) -> Optional[Role]:
    """
    Retrieve the first role assigned to the given user.

    Returns:
        The user's first Role instance if the user has any roles, otherwise None.
    """
    return user.roles.first()


def get_providers(role: Role) -> QuerySet[Provider]:
    """
    Return a distinct queryset of Providers accessible by the given role.

    If the role has no associated provider groups, an empty queryset is returned.

    Args:
        role: A Role instance.

    Returns:
        A QuerySet of Provider objects filtered by the role's provider groups.
        If the role has no provider groups, returns an empty queryset.
    """
    provider_groups = role.provider_groups.all()
    if not provider_groups.exists():
        return Provider.objects.none()

    return Provider.objects.filter(provider_groups__in=provider_groups).distinct()
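# A minimal sketch of how a DRF view would opt in to these checks: it lists
# HasPermissions and declares `required_permissions`. The view class and its
# response body are hypothetical, used only to illustrate the contract.
from rest_framework.response import Response
from rest_framework.views import APIView


class ExampleScanView(APIView):
    permission_classes = [HasPermissions]
    required_permissions = [Permissions.MANAGE_SCANS]

    def post(self, request):
        # Only reached when the user's first role has manage_scans=True
        return Response({"detail": "scan scheduled"})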
@@ -1,23 +0,0 @@
from contextlib import nullcontext

from rest_framework_json_api.renderers import JSONRenderer

from api.db_utils import rls_transaction


class APIJSONRenderer(JSONRenderer):
    """JSONRenderer override to apply tenant RLS when there are included resources in the request."""

    def render(self, data, accepted_media_type=None, renderer_context=None):
        request = renderer_context.get("request")
        tenant_id = getattr(request, "tenant_id", None) if request else None
        include_param_present = "include" in request.query_params if request else False

        # Use rls_transaction if needed for included resources, otherwise do nothing
        context_manager = (
            rls_transaction(tenant_id)
            if tenant_id and include_param_present
            else nullcontext()
        )
        with context_manager:
            return super().render(data, accepted_media_type, renderer_context)
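# A minimal sketch of when the RLS context above actually opens: only requests
# asking for a compound document (an `include` query parameter) need it.
#
#   GET /api/v1/scans?include=provider  -> rendered inside rls_transaction(tenant_id)
#   GET /api/v1/scans                   -> rendered inside nullcontext()
#
# The dotted path below is an assumption based on this file's location:
REST_FRAMEWORK = {
    "DEFAULT_RENDERER_CLASSES": ["api.renderers.APIJSONRenderer"],
}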
@@ -1,188 +0,0 @@
from typing import Any
from uuid import uuid4

from django.core.exceptions import ValidationError
from django.db import DEFAULT_DB_ALIAS
from django.db import models
from django.db.backends.ddl_references import Statement, Table

from api.db_utils import DB_USER, POSTGRES_TENANT_VAR


class Tenant(models.Model):
    """
    The Tenant is the basic grouping in the system. It is used to separate data between customers.
    """

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)

    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)
    name = models.CharField(max_length=100)

    class Meta:
        db_table = "tenants"

    class JSONAPIMeta:
        resource_name = "tenants"


class RowLevelSecurityConstraint(models.BaseConstraint):
    """
    Model constraint to enforce row-level security on a tenant based model, in addition to the least privileges.

    The constraint can be applied to a partitioned table by specifying the `partition_name` keyword argument.
    """

    rls_sql_query = """
        ALTER TABLE %(table_name)s ENABLE ROW LEVEL SECURITY;
        ALTER TABLE %(table_name)s FORCE ROW LEVEL SECURITY;
    """

    policy_sql_query = """
        CREATE POLICY %(db_user)s_%(table_name)s_{statement}
        ON %(table_name)s
        FOR {statement}
        TO %(db_user)s
        {clause} (
            CASE
                WHEN current_setting('%(tenant_setting)s', True) IS NULL THEN FALSE
                ELSE %(field_column)s = current_setting('%(tenant_setting)s')::uuid
            END
        );
    """

    grant_sql_query = """
        GRANT {statement} ON %(table_name)s TO %(db_user)s;
    """

    drop_sql_query = """
        ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
        ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
        REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
    """

    drop_policy_sql_query = """
        DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
    """

    def __init__(
        self, field: str, name: str, statements: list | None = None, **kwargs
    ) -> None:
        super().__init__(name=name)
        self.target_field: str = field
        self.statements = statements or ["SELECT"]
        self.partition_name = None
        if "partition_name" in kwargs:
            self.partition_name = kwargs["partition_name"]

    def create_sql(self, model: Any, schema_editor: Any) -> Any:
        field_column = schema_editor.quote_name(self.target_field)

        policy_queries = ""
        grant_queries = ""
        for statement in self.statements:
            # INSERT policies take a WITH CHECK clause; everything else uses USING
            clause = f"{'WITH CHECK' if statement == 'INSERT' else 'USING'}"
            policy_queries = f"{policy_queries}{self.policy_sql_query.format(statement=statement, clause=clause)}"
            grant_queries = (
                f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
            )

        full_create_sql_query = (
            f"{self.rls_sql_query}" f"{policy_queries}" f"{grant_queries}"
        )

        table_name = model._meta.db_table
        if self.partition_name:
            table_name = f"{table_name}_{self.partition_name}"

        return Statement(
            full_create_sql_query,
            table_name=table_name,
            field_column=field_column,
            db_user=DB_USER,
            tenant_setting=POSTGRES_TENANT_VAR,
            partition_name=self.partition_name,
        )

    def remove_sql(self, model: Any, schema_editor: Any) -> Any:
        field_column = schema_editor.quote_name(self.target_field)
        full_drop_sql_query = (
            f"{self.drop_sql_query}"
            f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
        )
        table_name = model._meta.db_table
        if self.partition_name:
            table_name = f"{table_name}_{self.partition_name}"
        return Statement(
            full_drop_sql_query,
            table_name=Table(table_name, schema_editor.quote_name),
            field_column=field_column,
            db_user=DB_USER,
            partition_name=self.partition_name,
        )

    def __eq__(self, other: object) -> bool:
        if isinstance(other, RowLevelSecurityConstraint):
            return self.name == other.name and self.target_field == other.target_field
        return super().__eq__(other)

    def deconstruct(self) -> tuple[str, tuple, dict]:
        path, _, kwargs = super().deconstruct()
        return (path, (self.target_field,), kwargs)

    def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS):  # noqa: F841
        if not hasattr(instance, "tenant_id"):
            raise ValidationError(f"{model.__name__} does not have a tenant_id field.")


class BaseSecurityConstraint(models.BaseConstraint):
    """Model constraint to grant the least privileges to the API database user."""

    grant_sql_query = """
        GRANT {statement} ON %(table_name)s TO %(db_user)s;
    """

    drop_sql_query = """
        REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
    """

    def __init__(self, name: str, statements: list | None = None) -> None:
        super().__init__(name=name)
        self.statements = statements or ["SELECT"]

    def create_sql(self, model: Any, schema_editor: Any) -> Any:
        grant_queries = ""
        for statement in self.statements:
            grant_queries = (
                f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
            )

        return Statement(
            grant_queries,
            table_name=model._meta.db_table,
            db_user=DB_USER,
        )

    def remove_sql(self, model: Any, schema_editor: Any) -> Any:
        return Statement(
            self.drop_sql_query,
            table_name=Table(model._meta.db_table, schema_editor.quote_name),
            db_user=DB_USER,
        )

    def __eq__(self, other: object) -> bool:
        if isinstance(other, BaseSecurityConstraint):
            return self.name == other.name
        return super().__eq__(other)

    def deconstruct(self) -> tuple[str, tuple, dict]:
        path, args, kwargs = super().deconstruct()
        return path, args, kwargs


class RowLevelSecurityProtectedModel(models.Model):
    tenant = models.ForeignKey("Tenant", on_delete=models.CASCADE)

    class Meta:
        abstract = True
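# A minimal sketch of a tenant-scoped model using the constraint above. The
# model and table names are hypothetical; the pattern follows the RBAC
# migration earlier in this diff, which attaches the same constraint.
class ExampleNote(RowLevelSecurityProtectedModel):
    text = models.CharField(max_length=255)

    class Meta:
        db_table = "example_notes"
        constraints = [
            RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_example_notes",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]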
@@ -1,35 +0,0 @@
from celery import states
from celery.signals import before_task_publish
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django_celery_beat.models import PeriodicTask
from django_celery_results.backends.database import DatabaseBackend

from api.models import Provider
from config.celery import celery_app


def create_task_result_on_publish(sender=None, headers=None, **kwargs):  # noqa: F841
    """Celery signal to store TaskResult entries when tasks reach the broker."""
    db_result_backend = DatabaseBackend(celery_app)
    # Build a lightweight request-like object from the task message headers
    request = type("request", (object,), headers)

    db_result_backend.store_result(
        headers["id"],
        None,
        states.PENDING,
        traceback=None,
        request=request,
    )


before_task_publish.connect(
    create_task_result_on_publish, dispatch_uid="create_task_result_on_publish"
)


@receiver(post_delete, sender=Provider)
def delete_provider_scan_task(sender, instance, **kwargs):  # noqa: F841
    # Delete the associated periodic task when the provider is deleted
    task_name = f"scan-perform-scheduled-{instance.id}"
    PeriodicTask.objects.filter(name=task_name).delete()
File diff suppressed because it is too large
@@ -1,98 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework.test import APIClient

from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header


@pytest.mark.django_db
def test_basic_authentication():
    client = APIClient()

    test_user = "test_email@prowler.com"
    test_password = "test_password"

    # Check that a 401 is returned when no basic authentication is provided
    no_auth_response = client.get(reverse("provider-list"))
    assert no_auth_response.status_code == 401

    # Check that we can create a new user without any kind of authentication
    user_creation_response = client.post(
        reverse("user-list"),
        data={
            "data": {
                "type": "users",
                "attributes": {
                    "name": "test",
                    "email": test_user,
                    "password": test_password,
                },
            }
        },
        format="vnd.api+json",
    )
    assert user_creation_response.status_code == 201

    # Check that using our new user's credentials we can authenticate and get the providers
    access_token, _ = get_api_tokens(client, test_user, test_password)
    auth_headers = get_authorization_header(access_token)

    auth_response = client.get(
        reverse("provider-list"),
        headers=auth_headers,
    )
    assert auth_response.status_code == 200


@pytest.mark.django_db
def test_refresh_token(create_test_user, tenants_fixture):
    client = APIClient()

    # Assert that we can obtain a new access token using the refresh one
    access_token, refresh_token = get_api_tokens(
        client, create_test_user.email, TEST_PASSWORD
    )
    valid_refresh_response = client.post(
        reverse("token-refresh"),
        data={
            "data": {
                "type": "tokens-refresh",
                "attributes": {"refresh": refresh_token},
            }
        },
        format="vnd.api+json",
    )
    assert valid_refresh_response.status_code == 200
    assert (
        valid_refresh_response.json()["data"]["attributes"]["refresh"] != refresh_token
    )

    # Assert the former refresh token gets invalidated
    invalid_refresh_response = client.post(
        reverse("token-refresh"),
        data={
            "data": {
                "type": "tokens-refresh",
                "attributes": {"refresh": refresh_token},
            }
        },
        format="vnd.api+json",
    )
    assert invalid_refresh_response.status_code == 400

    # Assert that the new refresh token could be used
    new_refresh_response = client.post(
        reverse("token-refresh"),
        data={
            "data": {
                "type": "tokens-refresh",
                "attributes": {
                    "refresh": valid_refresh_response.json()["data"]["attributes"][
                        "refresh"
                    ]
                },
            }
        },
        format="vnd.api+json",
    )
    assert new_refresh_response.status_code == 200
@@ -1,98 +0,0 @@
from unittest.mock import patch

import pytest
from django.urls import reverse

from conftest import TEST_USER, TEST_PASSWORD, get_api_tokens, get_authorization_header


@patch("api.v1.views.schedule_provider_scan")
@pytest.mark.django_db
def test_check_resources_between_different_tenants(
    schedule_mock,
    enforce_test_user_db_connection,
    authenticated_api_client,
    tenants_fixture,
    set_user_admin_roles_fixture,
):
    client = authenticated_api_client

    tenant1 = str(tenants_fixture[0].id)
    tenant2 = str(tenants_fixture[1].id)

    tenant1_token, _ = get_api_tokens(
        client, TEST_USER, TEST_PASSWORD, tenant_id=tenant1
    )
    tenant2_token, _ = get_api_tokens(
        client, TEST_USER, TEST_PASSWORD, tenant_id=tenant2
    )

    tenant1_headers = get_authorization_header(tenant1_token)
    tenant2_headers = get_authorization_header(tenant2_token)

    # Create a provider on tenant 1
    provider_data = {
        "data": {
            "type": "providers",
            "attributes": {
                "alias": "test_provider_tenant_1",
                "provider": "aws",
                "uid": "123456789012",
            },
        }
    }
    provider1_response = client.post(
        reverse("provider-list"),
        data=provider_data,
        format="vnd.api+json",
        headers=tenant1_headers,
    )
    assert provider1_response.status_code == 201
    provider1_id = provider1_response.json()["data"]["id"]

    # Create a provider on tenant 2
    provider_data = {
        "data": {
            "type": "providers",
            "attributes": {
                "alias": "test_provider_tenant_2",
                "provider": "aws",
                "uid": "123456789013",
            },
        }
    }
    provider2_response = client.post(
        reverse("provider-list"),
        data=provider_data,
        format="vnd.api+json",
        headers=tenant2_headers,
    )
    assert provider2_response.status_code == 201
    provider2_id = provider2_response.json()["data"]["id"]

    # Try to get the provider from tenant 1 on tenant 2 and vice versa
    tenant1_response = client.get(
        reverse("provider-detail", kwargs={"pk": provider1_id}),
        headers=tenant2_headers,
    )
    assert tenant1_response.status_code == 404
    tenant2_response = client.get(
        reverse("provider-detail", kwargs={"pk": provider1_id}),
        headers=tenant1_headers,
    )
    assert tenant2_response.status_code == 200
    assert tenant2_response.json()["data"]["id"] == provider1_id

    # Vice versa

    tenant2_response = client.get(
        reverse("provider-detail", kwargs={"pk": provider2_id}),
        headers=tenant1_headers,
    )
    assert tenant2_response.status_code == 404
    tenant1_response = client.get(
        reverse("provider-detail", kwargs={"pk": provider2_id}),
        headers=tenant2_headers,
    )
    assert tenant1_response.status_code == 200
    assert tenant1_response.json()["data"]["id"] == provider2_id
@@ -1,284 +0,0 @@
from unittest.mock import patch, MagicMock

from api.compliance import (
    get_prowler_provider_checks,
    get_prowler_provider_compliance,
    load_prowler_compliance,
    load_prowler_checks,
    generate_scan_compliance,
    generate_compliance_overview_template,
)
from api.models import Provider


class TestCompliance:
    @patch("api.compliance.CheckMetadata")
    def test_get_prowler_provider_checks(self, mock_check_metadata):
        provider_type = Provider.ProviderChoices.AWS
        mock_check_metadata.get_bulk.return_value = {
            "check1": MagicMock(),
            "check2": MagicMock(),
            "check3": MagicMock(),
        }
        checks = get_prowler_provider_checks(provider_type)
        assert set(checks) == {"check1", "check2", "check3"}
        mock_check_metadata.get_bulk.assert_called_once_with(provider_type)

    @patch("api.compliance.Compliance")
    def test_get_prowler_provider_compliance(self, mock_compliance):
        provider_type = Provider.ProviderChoices.AWS
        mock_compliance.get_bulk.return_value = {
            "compliance1": MagicMock(),
            "compliance2": MagicMock(),
        }
        compliance_data = get_prowler_provider_compliance(provider_type)
        assert compliance_data == mock_compliance.get_bulk.return_value
        mock_compliance.get_bulk.assert_called_once_with(provider_type)

    @patch("api.models.Provider.ProviderChoices")
    @patch("api.compliance.get_prowler_provider_compliance")
    @patch("api.compliance.generate_compliance_overview_template")
    @patch("api.compliance.load_prowler_checks")
    def test_load_prowler_compliance(
        self,
        mock_load_prowler_checks,
        mock_generate_compliance_overview_template,
        mock_get_prowler_provider_compliance,
        mock_provider_choices,
    ):
        mock_provider_choices.values = ["aws", "azure"]

        compliance_data_aws = {"compliance_aws": MagicMock()}
        compliance_data_azure = {"compliance_azure": MagicMock()}

        compliance_data_dict = {
            "aws": compliance_data_aws,
            "azure": compliance_data_azure,
        }

        def mock_get_compliance(provider_type):
            return compliance_data_dict[provider_type]

        mock_get_prowler_provider_compliance.side_effect = mock_get_compliance

        mock_generate_compliance_overview_template.return_value = {
            "template_key": "template_value"
        }

        mock_load_prowler_checks.return_value = {"checks_key": "checks_value"}

        load_prowler_compliance()

        from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, PROWLER_CHECKS

        assert PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE == {
            "template_key": "template_value"
        }
        assert PROWLER_CHECKS == {"checks_key": "checks_value"}

        expected_prowler_compliance = compliance_data_dict
        mock_get_prowler_provider_compliance.assert_any_call("aws")
        mock_get_prowler_provider_compliance.assert_any_call("azure")
        mock_generate_compliance_overview_template.assert_called_once_with(
            expected_prowler_compliance
        )
        mock_load_prowler_checks.assert_called_once_with(expected_prowler_compliance)

    @patch("api.compliance.get_prowler_provider_checks")
    @patch("api.models.Provider.ProviderChoices")
    def test_load_prowler_checks(
        self, mock_provider_choices, mock_get_prowler_provider_checks
    ):
        mock_provider_choices.values = ["aws"]

        mock_get_prowler_provider_checks.return_value = ["check1", "check2", "check3"]

        prowler_compliance = {
            "aws": {
                "compliance1": MagicMock(
                    Requirements=[
                        MagicMock(
                            Checks=["check1", "check2"],
                        ),
                    ],
                ),
            },
        }

        expected_checks = {
            "aws": {
                "check1": {"compliance1"},
                "check2": {"compliance1"},
                "check3": set(),
            }
        }

        checks = load_prowler_checks(prowler_compliance)
        assert checks == expected_checks
        mock_get_prowler_provider_checks.assert_called_once_with("aws")

    @patch("api.compliance.PROWLER_CHECKS", new_callable=dict)
    def test_generate_scan_compliance(self, mock_prowler_checks):
        mock_prowler_checks["aws"] = {
            "check1": {"compliance1"},
            "check2": {"compliance1", "compliance2"},
        }

        compliance_overview = {
            "compliance1": {
                "requirements": {
                    "requirement1": {
                        "checks": {"check1": None, "check2": None},
                        "checks_status": {
                            "pass": 0,
                            "fail": 0,
                            "manual": 0,
                            "total": 2,
                        },
                        "status": "PASS",
                    }
                },
                "requirements_status": {"passed": 1, "failed": 0, "manual": 0},
            },
            "compliance2": {
                "requirements": {
                    "requirement2": {
                        "checks": {"check2": None},
                        "checks_status": {
                            "pass": 0,
                            "fail": 0,
                            "manual": 0,
                            "total": 1,
                        },
                        "status": "PASS",
                    }
                },
                "requirements_status": {"passed": 1, "failed": 0, "manual": 0},
            },
        }

        provider_type = "aws"
        check_id = "check2"
        status = "FAIL"

        generate_scan_compliance(compliance_overview, provider_type, check_id, status)

        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks"
            ]["check2"]
            == "FAIL"
        )
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks_status"
            ]["fail"]
            == 1
        )
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"]["status"]
            == "FAIL"
        )
        assert compliance_overview["compliance1"]["requirements_status"]["passed"] == 0
        assert compliance_overview["compliance1"]["requirements_status"]["failed"] == 1

        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"][
                "checks"
            ]["check2"]
            == "FAIL"
        )
        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"][
                "checks_status"
            ]["fail"]
            == 1
        )
        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"]["status"]
            == "FAIL"
        )
        assert compliance_overview["compliance2"]["requirements_status"]["passed"] == 0
        assert compliance_overview["compliance2"]["requirements_status"]["failed"] == 1

        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks"
            ]["check1"]
            is None
        )

    @patch("api.models.Provider.ProviderChoices")
    def test_generate_compliance_overview_template(self, mock_provider_choices):
        mock_provider_choices.values = ["aws"]

        requirement1 = MagicMock(
            Id="requirement1",
            Name="Requirement 1",
            Description="Description of requirement 1",
            Attributes=[],
            Checks=["check1", "check2"],
        )
        requirement2 = MagicMock(
            Id="requirement2",
            Name="Requirement 2",
            Description="Description of requirement 2",
            Attributes=[],
            Checks=[],
        )
        compliance1 = MagicMock(
            Requirements=[requirement1, requirement2],
            Framework="Framework 1",
            Version="1.0",
            Description="Description of compliance1",
        )
        prowler_compliance = {"aws": {"compliance1": compliance1}}

        template = generate_compliance_overview_template(prowler_compliance)

        expected_template = {
            "aws": {
                "compliance1": {
                    "framework": "Framework 1",
                    "version": "1.0",
                    "provider": "aws",
                    "description": "Description of compliance1",
                    "requirements": {
                        "requirement1": {
                            "name": "Requirement 1",
                            "description": "Description of requirement 1",
                            "attributes": [],
                            "checks": {"check1": None, "check2": None},
                            "checks_status": {
                                "pass": 0,
                                "fail": 0,
                                "manual": 0,
                                "total": 2,
                            },
                            "status": "PASS",
                        },
                        "requirement2": {
                            "name": "Requirement 2",
                            "description": "Description of requirement 2",
                            "attributes": [],
                            "checks": {},
                            "checks_status": {
                                "pass": 0,
                                "fail": 0,
                                "manual": 0,
                                "total": 0,
                            },
                            "status": "PASS",
                        },
                    },
                    "requirements_status": {
                        "passed": 1,  # total_requirements - manual
                        "failed": 0,
                        "manual": 1,  # requirement2 has 0 checks
                    },
                    "total_requirements": 2,
                }
            }
        }

        assert template == expected_template
@@ -1,31 +0,0 @@
import pytest
from django.conf import settings
from django.db.migrations.recorder import MigrationRecorder
from django.db.utils import ConnectionRouter

from api.db_router import MainRouter
from api.rls import Tenant
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS
from unittest.mock import patch


@patch("api.db_router.MainRouter.admin_db", new="admin")
class TestMainDatabaseRouter:
    @pytest.fixture(scope="module")
    def router(self):
        testing_routers = settings.DATABASE_ROUTERS.copy()
        settings.DATABASE_ROUTERS = PROD_DATABASE_ROUTERS
        yield ConnectionRouter()
        settings.DATABASE_ROUTERS = testing_routers

    @pytest.mark.parametrize("api_model", [Tenant])
    def test_router_api_models(self, api_model, router):
        assert router.db_for_read(api_model) == "default"
        assert router.db_for_write(api_model) == "default"

        assert router.allow_migrate_model(MainRouter.admin_db, api_model)
        assert not router.allow_migrate_model("default", api_model)

    def test_router_django_models(self, router):
        assert router.db_for_read(MigrationRecorder.Migration) == MainRouter.admin_db
        assert router.db_for_read(MigrationRecorder.Migration) != "default"
@@ -1,108 +0,0 @@
from datetime import datetime, timezone
from enum import Enum
from unittest.mock import patch

from api.db_utils import enum_to_choices, one_week_from_now, generate_random_token


class TestEnumToChoices:
    def test_enum_to_choices_simple(self):
        class Color(Enum):
            RED = 1
            GREEN = 2
            BLUE = 3

        expected_result = [
            (1, "Red"),
            (2, "Green"),
            (3, "Blue"),
        ]

        result = enum_to_choices(Color)
        assert result == expected_result

    def test_enum_to_choices_with_underscores(self):
        class Status(Enum):
            PENDING_APPROVAL = "pending"
            IN_PROGRESS = "in_progress"
            COMPLETED_SUCCESSFULLY = "completed"

        expected_result = [
            ("pending", "Pending Approval"),
            ("in_progress", "In Progress"),
            ("completed", "Completed Successfully"),
        ]

        result = enum_to_choices(Status)
        assert result == expected_result

    def test_enum_to_choices_empty_enum(self):
        class EmptyEnum(Enum):
            pass

        expected_result = []

        result = enum_to_choices(EmptyEnum)
        assert result == expected_result

    def test_enum_to_choices_numeric_values(self):
        class Numbers(Enum):
            ONE = 1
            TWO = 2
            THREE = 3

        expected_result = [
            (1, "One"),
            (2, "Two"),
            (3, "Three"),
        ]

        result = enum_to_choices(Numbers)
        assert result == expected_result


class TestOneWeekFromNow:
    def test_one_week_from_now(self):
        with patch("api.db_utils.datetime") as mock_datetime:
            mock_datetime.now.return_value = datetime(2023, 1, 1, tzinfo=timezone.utc)
            expected_result = datetime(2023, 1, 8, tzinfo=timezone.utc)

            result = one_week_from_now()
            assert result == expected_result

    def test_one_week_from_now_with_timezone(self):
        with patch("api.db_utils.datetime") as mock_datetime:
            mock_datetime.now.return_value = datetime(
                2023, 6, 15, 12, 0, tzinfo=timezone.utc
            )
            expected_result = datetime(2023, 6, 22, 12, 0, tzinfo=timezone.utc)

            result = one_week_from_now()
            assert result == expected_result


class TestGenerateRandomToken:
    def test_generate_random_token_default_length(self):
        token = generate_random_token()
        assert len(token) == 14

    def test_generate_random_token_custom_length(self):
        length = 20
        token = generate_random_token(length=length)
        assert len(token) == length

    def test_generate_random_token_with_symbols(self):
        symbols = "ABC123"
        token = generate_random_token(length=10, symbols=symbols)
        assert len(token) == 10
        assert all(char in symbols for char in token)

    def test_generate_random_token_unique(self):
        tokens = {generate_random_token() for _ in range(1000)}
        # Assuming that generating 1000 tokens should result in unique values
        assert len(tokens) == 1000

    def test_generate_random_token_no_symbols_provided(self):
        token = generate_random_token(length=5, symbols="")
        # Default symbols
        assert len(token) == 5
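# A minimal sketch of the three helpers these tests exercise, reconstructed
# only from what the assertions encode; the real implementations live in
# api.db_utils and may differ in detail.
import string
from datetime import datetime, timedelta, timezone
from enum import Enum
from secrets import choice


def enum_to_choices(enum_class: type[Enum]) -> list[tuple]:
    # (value, "Title Cased Name") pairs, with underscores turned into spaces.
    return [
        (member.value, member.name.replace("_", " ").title())
        for member in enum_class
    ]


def one_week_from_now() -> datetime:
    return datetime.now(timezone.utc) + timedelta(days=7)


def generate_random_token(length: int = 14, symbols: str = "") -> str:
    # Fall back to a default alphabet when no symbols are provided.
    alphabet = symbols or (string.ascii_letters + string.digits)
    return "".join(choice(alphabet) for _ in range(length))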
@@ -1,36 +0,0 @@
import uuid
from unittest.mock import call, patch

import pytest

from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
from api.decorators import set_tenant


@pytest.mark.django_db
class TestSetTenantDecorator:
    @patch("api.decorators.connection.cursor")
    def test_set_tenant(self, mock_cursor):
        mock_cursor.return_value.__enter__.return_value = mock_cursor

        @set_tenant
        def random_func(arg):
            return arg

        tenant_id = str(uuid.uuid4())

        result = random_func("test_arg", tenant_id=tenant_id)

        assert (
            call(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
            in mock_cursor.execute.mock_calls
        )
        assert result == "test_arg"

    def test_set_tenant_exception(self):
        @set_tenant
        def random_func(arg):
            return arg

        with pytest.raises(KeyError):
            random_func("test_arg")
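# A sketch of what these tests pin down about `set_tenant`, assuming the real
# decorator in api.decorators has the same shape: pop the mandatory `tenant_id`
# kwarg (hence the KeyError above when it is missing) and set the PostgreSQL
# session variable used by row-level security before invoking the wrapped
# function. Illustrative only.
from functools import wraps

from django.db import connection

from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY


def set_tenant_sketch(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        tenant_id = kwargs.pop("tenant_id")  # raises KeyError when absent
        with connection.cursor() as cursor:
            cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
        return func(*args, **kwargs)

    return wrapper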
@@ -1,54 +0,0 @@
from unittest.mock import MagicMock, patch

import pytest
from django.http import HttpResponse
from django.test import RequestFactory

from api.middleware import APILoggingMiddleware


@pytest.mark.django_db
@patch("logging.getLogger")
def test_api_logging_middleware_logging(mock_logger):
    factory = RequestFactory()

    request = factory.get("/test-path?param1=value1&param2=value2")
    request.method = "GET"

    response = HttpResponse()
    response.status_code = 200

    get_response = MagicMock(return_value=response)

    with patch("api.middleware.extract_auth_info") as mock_extract_auth_info:
        mock_extract_auth_info.return_value = {
            "user_id": "user123",
            "tenant_id": "tenant456",
        }

        with patch("api.middleware.logging.getLogger") as mock_get_logger:
            mock_logger = MagicMock()
            mock_get_logger.return_value = mock_logger

            middleware = APILoggingMiddleware(get_response)

            with patch("api.middleware.time.time") as mock_time:
                mock_time.side_effect = [1000.0, 1001.0]  # Start time and end time

                middleware(request)

                get_response.assert_called_once_with(request)

                mock_extract_auth_info.assert_called_once_with(request)

                expected_extra = {
                    "user_id": "user123",
                    "tenant_id": "tenant456",
                    "method": "GET",
                    "path": "/test-path",
                    "query_params": {"param1": "value1", "param2": "value2"},
                    "status_code": 200,
                    "duration": 1.0,
                }

                mock_logger.info.assert_called_once_with("", extra=expected_extra)
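# A sketch of the contract this test encodes, assuming the real
# api.middleware.APILoggingMiddleware follows the same shape: time the request,
# merge auth info extracted from it, and emit a single structured log line.
# `extract_auth_info` is an assumed helper; a stand-in stub is included so the
# sketch stays self-contained.
import logging
import time


def extract_auth_info(request):
    # Stand-in for the real helper, which the test above patches out.
    return {"user_id": None, "tenant_id": None}


class APILoggingMiddlewareSketch:
    def __init__(self, get_response):
        self.get_response = get_response
        self.logger = logging.getLogger("api")

    def __call__(self, request):
        start = time.time()
        response = self.get_response(request)
        extra = {
            **extract_auth_info(request),
            "method": request.method,
            "path": request.path,
            "query_params": request.GET.dict(),
            "status_code": response.status_code,
            "duration": time.time() - start,
        }
        # The message is empty; all signal travels in `extra`, matching the
        # assertion mock_logger.info.assert_called_once_with("", extra=...).
        self.logger.info("", extra=extra)
        return response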
@@ -1,89 +0,0 @@
import pytest

from api.models import Resource, ResourceTag


@pytest.mark.django_db
class TestResourceModel:
    def test_setting_tags(self, providers_fixture):
        provider, *_ = providers_fixture

        resource = Resource.objects.create(
            tenant_id=provider.tenant_id,
            provider=provider,
            uid="arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0",
            name="My Instance 1",
            region="us-east-1",
            service="ec2",
            type="prowler-test",
        )

        tags = [
            ResourceTag.objects.create(
                tenant_id=provider.tenant_id,
                key="key",
                value="value",
            ),
            ResourceTag.objects.create(
                tenant_id=provider.tenant_id,
                key="key2",
                value="value2",
            ),
        ]

        resource.upsert_or_delete_tags(tags)

        assert len(tags) == len(resource.tags.all())

        tags_dict = resource.get_tags()

        for tag in tags:
            assert tag.key in tags_dict
            assert tag.value == tags_dict[tag.key]

    def test_adding_tags(self, resources_fixture):
        resource, *_ = resources_fixture

        tags = [
            ResourceTag.objects.create(
                tenant_id=resource.tenant_id,
                key="env",
                value="test",
            ),
        ]
        before_count = len(resource.tags.all())

        resource.upsert_or_delete_tags(tags)

        assert before_count + 1 == len(resource.tags.all())

        tags_dict = resource.get_tags()

        assert "env" in tags_dict
        assert tags_dict["env"] == "test"

    def test_adding_duplicate_tags(self, resources_fixture):
        resource, *_ = resources_fixture

        tags = resource.tags.all()

        before_count = len(resource.tags.all())

        resource.upsert_or_delete_tags(tags)

        # should be the same number of tags
        assert before_count == len(resource.tags.all())

    def test_add_tags_none(self, resources_fixture):
        resource, *_ = resources_fixture
        resource.upsert_or_delete_tags(None)

        assert len(resource.tags.all()) == 0
        assert resource.get_tags() == {}

    def test_clear_tags(self, resources_fixture):
        resource, *_ = resources_fixture
        resource.clear_tags()

        assert len(resource.tags.all()) == 0
        assert resource.get_tags() == {}
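# An ORM-free sketch of the tag behavior pinned down above, assuming
# upsert_or_delete_tags treats None as "remove everything" and replaces
# existing keys instead of duplicating them; the real methods live on
# api.models.Resource and work through the ResourceTag relation.
from dataclasses import dataclass, field


@dataclass
class ResourceSketch:
    tags: list = field(default_factory=list)  # objects exposing .key / .value

    def upsert_or_delete_tags(self, tags):
        if tags is None:
            self.tags = []  # None clears all tags, per test_add_tags_none
            return
        for tag in tags:
            # Upsert: replace an existing key or append a new one.
            self.tags = [t for t in self.tags if t.key != tag.key] + [tag]

    def get_tags(self) -> dict:
        return {t.key: t.value for t in self.tags}

    def clear_tags(self):
        self.tags = []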
@@ -1,306 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from unittest.mock import patch, ANY, Mock


@pytest.mark.django_db
class TestUserViewSet:
    def test_list_users_with_all_permissions(self, authenticated_client_rbac):
        response = authenticated_client_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_200_OK
        assert isinstance(response.json()["data"], list)

    def test_list_users_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        response = authenticated_client_no_permissions_rbac.get(reverse("user-list"))
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_retrieve_user_with_no_roles(
        self, authenticated_client_rbac_noroles, create_test_user_rbac_no_roles
    ):
        response = authenticated_client_rbac_noroles.get(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac_no_roles.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_retrieve_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_create_user_with_all_permissions(self, authenticated_client_rbac):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_create_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        valid_user_payload = {
            "name": "test",
            "password": "newpassword123",
            "email": "new_user@test.com",
        }
        response = authenticated_client_no_permissions_rbac.post(
            reverse("user-list"), data=valid_user_payload, format="vnd.api+json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["email"] == "new_user@test.com"

    def test_partial_update_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        updated_data = {
            "data": {
                "type": "users",
                "id": str(create_test_user_rbac.id),
                "attributes": {"name": "Updated Name"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id}),
            data=updated_data,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["name"] == "Updated Name"

    def test_partial_update_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        updated_data = {
            "data": {
                "type": "users",
                "attributes": {"name": "Updated Name"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("user-detail", kwargs={"pk": create_test_user.id}),
            data=updated_data,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_delete_user_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user_rbac.id})
        )
        assert response.status_code == status.HTTP_204_NO_CONTENT

    def test_delete_user_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("user-detail", kwargs={"pk": create_test_user.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_me_with_all_permissions(
        self, authenticated_client_rbac, create_test_user_rbac
    ):
        response = authenticated_client_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert (
            response.json()["data"]["attributes"]["email"]
            == create_test_user_rbac.email
        )

    def test_me_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, create_test_user
    ):
        response = authenticated_client_no_permissions_rbac.get(reverse("user-me"))
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["email"] == "rbac_limited@rbac.com"


@pytest.mark.django_db
class TestProviderViewSet:
    def test_list_providers_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        response = authenticated_client_rbac.get(reverse("provider-list"))
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == len(providers_fixture)

    def test_list_providers_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-list")
        )
        assert response.status_code == status.HTTP_200_OK
        assert len(response.json()["data"]) == 0

    def test_retrieve_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == provider.alias

    def test_retrieve_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.get(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_404_NOT_FOUND

    def test_create_provider_with_all_permissions(self, authenticated_client_rbac):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_201_CREATED
        assert response.json()["data"]["attributes"]["alias"] == "new_alias"

    def test_create_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac
    ):
        payload = {"provider": "aws", "uid": "111111111111", "alias": "new_alias"}
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-list"), data=payload, format="json"
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    def test_partial_update_provider_with_all_permissions(
        self, authenticated_client_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        payload = {
            "data": {
                "type": "providers",
                "id": provider.id,
                "attributes": {"alias": "updated_alias"},
            },
        }
        response = authenticated_client_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=payload,
            content_type="application/vnd.api+json",
        )
        assert response.status_code == status.HTTP_200_OK
        assert response.json()["data"]["attributes"]["alias"] == "updated_alias"

    def test_partial_update_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        update_payload = {
            "data": {
                "type": "providers",
                "attributes": {"alias": "updated_alias"},
            }
        }
        response = authenticated_client_no_permissions_rbac.patch(
            reverse("provider-detail", kwargs={"pk": provider.id}),
            data=update_payload,
            format="vnd.api+json",
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.delete_provider_task.delay")
    def test_delete_provider_with_all_permissions(
        self,
        mock_delete_task,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        mock_delete_task.return_value = task_mock
        mock_task_get.return_value = prowler_task

        provider1, *_ = providers_fixture
        response = authenticated_client_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_delete_task.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_delete_provider_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.delete(
            reverse("provider-detail", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN

    @patch("api.v1.views.Task.objects.get")
    @patch("api.v1.views.check_provider_connection_task.delay")
    def test_connection_with_all_permissions(
        self,
        mock_provider_connection,
        mock_task_get,
        authenticated_client_rbac,
        providers_fixture,
        tasks_fixture,
    ):
        prowler_task = tasks_fixture[0]
        task_mock = Mock()
        task_mock.id = prowler_task.id
        task_mock.status = "PENDING"
        mock_provider_connection.return_value = task_mock
        mock_task_get.return_value = prowler_task

        provider1, *_ = providers_fixture
        assert provider1.connected is None
        assert provider1.connection_last_checked_at is None

        response = authenticated_client_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider1.id})
        )
        assert response.status_code == status.HTTP_202_ACCEPTED
        mock_provider_connection.assert_called_once_with(
            provider_id=str(provider1.id), tenant_id=ANY
        )
        assert "Content-Location" in response.headers
        assert response.headers["Content-Location"] == f"/api/v1/tasks/{task_mock.id}"

    def test_connection_with_no_permissions(
        self, authenticated_client_no_permissions_rbac, providers_fixture
    ):
        provider = providers_fixture[0]
        response = authenticated_client_no_permissions_rbac.post(
            reverse("provider-connection", kwargs={"pk": provider.id})
        )
        assert response.status_code == status.HTTP_403_FORBIDDEN
@@ -1,318 +0,0 @@
from datetime import datetime, timedelta, timezone
from unittest.mock import patch, MagicMock

import pytest
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from rest_framework.exceptions import ValidationError, NotFound

from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Invitation
from api.models import Provider
from api.utils import (
    merge_dicts,
    return_prowler_provider,
    initialize_prowler_provider,
    prowler_provider_connection_test,
    get_prowler_provider_kwargs,
)
from api.utils import validate_invitation


class TestMergeDicts:
    def test_simple_merge(self):
        default_dict = {"key1": "value1", "key2": "value2"}
        replacement_dict = {"key2": "new_value2", "key3": "value3"}
        expected_result = {"key1": "value1", "key2": "new_value2", "key3": "value3"}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_nested_merge(self):
        default_dict = {
            "key1": "value1",
            "key2": {"nested_key1": "nested_value1", "nested_key2": "nested_value2"},
        }
        replacement_dict = {
            "key2": {
                "nested_key2": "new_nested_value2",
                "nested_key3": "nested_value3",
            },
            "key3": "value3",
        }
        expected_result = {
            "key1": "value1",
            "key2": {
                "nested_key1": "nested_value1",
                "nested_key2": "new_nested_value2",
                "nested_key3": "nested_value3",
            },
            "key3": "value3",
        }

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_no_overlap(self):
        default_dict = {"key1": "value1"}
        replacement_dict = {"key2": "value2"}
        expected_result = {"key1": "value1", "key2": "value2"}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_replacement_dict_empty(self):
        default_dict = {"key1": "value1", "key2": "value2"}
        replacement_dict = {}
        expected_result = {"key1": "value1", "key2": "value2"}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_default_dict_empty(self):
        default_dict = {}
        replacement_dict = {"key1": "value1", "key2": "value2"}
        expected_result = {"key1": "value1", "key2": "value2"}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_nested_empty_in_replacement_dict(self):
        default_dict = {"key1": {"nested_key1": "nested_value1"}}
        replacement_dict = {"key1": {}}
        expected_result = {"key1": {}}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_deep_nested_merge(self):
        default_dict = {"key1": {"nested_key1": {"deep_key1": "deep_value1"}}}
        replacement_dict = {"key1": {"nested_key1": {"deep_key1": "new_deep_value1"}}}
        expected_result = {"key1": {"nested_key1": {"deep_key1": "new_deep_value1"}}}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result


class TestReturnProwlerProvider:
    @pytest.mark.parametrize(
        "provider_type, expected_provider",
        [
            (Provider.ProviderChoices.AWS.value, AwsProvider),
            (Provider.ProviderChoices.GCP.value, GcpProvider),
            (Provider.ProviderChoices.AZURE.value, AzureProvider),
            (Provider.ProviderChoices.KUBERNETES.value, KubernetesProvider),
        ],
    )
    def test_return_prowler_provider(self, provider_type, expected_provider):
        provider = MagicMock()
        provider.provider = provider_type
        prowler_provider = return_prowler_provider(provider)
        assert prowler_provider == expected_provider

    def test_return_prowler_provider_unsupported_provider(self):
        provider = MagicMock()
        provider.provider = "UNSUPPORTED_PROVIDER"
        with pytest.raises(ValueError):
            return return_prowler_provider(provider)


class TestInitializeProwlerProvider:
    @patch("api.utils.return_prowler_provider")
    def test_initialize_prowler_provider(self, mock_return_prowler_provider):
        provider = MagicMock()
        provider.secret.secret = {"key": "value"}
        mock_return_prowler_provider.return_value = MagicMock()

        initialize_prowler_provider(provider)
        mock_return_prowler_provider.return_value.assert_called_once_with(key="value")


class TestProwlerProviderConnectionTest:
    @patch("api.utils.return_prowler_provider")
    def test_prowler_provider_connection_test(self, mock_return_prowler_provider):
        provider = MagicMock()
        provider.uid = "1234567890"
        provider.secret.secret = {"key": "value"}
        mock_return_prowler_provider.return_value = MagicMock()

        prowler_provider_connection_test(provider)
        mock_return_prowler_provider.return_value.test_connection.assert_called_once_with(
            key="value", provider_id="1234567890", raise_on_exception=False
        )


class TestGetProwlerProviderKwargs:
    @pytest.mark.parametrize(
        "provider_type, expected_extra_kwargs",
        [
            (
                Provider.ProviderChoices.AWS.value,
                {},
            ),
            (
                Provider.ProviderChoices.AZURE.value,
                {"subscription_ids": ["provider_uid"]},
            ),
            (
                Provider.ProviderChoices.GCP.value,
                {"project_ids": ["provider_uid"]},
            ),
            (
                Provider.ProviderChoices.KUBERNETES.value,
                {"context": "provider_uid"},
            ),
        ],
    )
    def test_get_prowler_provider_kwargs(self, provider_type, expected_extra_kwargs):
        provider_uid = "provider_uid"
        secret_dict = {"key": "value"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict

        provider = MagicMock()
        provider.provider = provider_type
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider)

        expected_result = {**secret_dict, **expected_extra_kwargs}
        assert result == expected_result

    def test_get_prowler_provider_kwargs_unsupported_provider(self):
        # Setup
        provider_uid = "provider_uid"
        secret_dict = {"key": "value"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict

        provider = MagicMock()
        provider.provider = "UNSUPPORTED_PROVIDER"
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider)

        expected_result = secret_dict.copy()
        assert result == expected_result

    def test_get_prowler_provider_kwargs_no_secret(self):
        # Setup
        provider_uid = "provider_uid"
        secret_mock = MagicMock()
        secret_mock.secret = {}

        provider = MagicMock()
        provider.provider = Provider.ProviderChoices.AWS.value
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider)

        expected_result = {}
        assert result == expected_result


class TestValidateInvitation:
    @pytest.fixture
    def invitation(self):
        invitation = MagicMock(spec=Invitation)
        invitation.token = "VALID_TOKEN"
        invitation.email = "user@example.com"
        invitation.expires_at = datetime.now(timezone.utc) + timedelta(days=1)
        invitation.state = Invitation.State.PENDING
        invitation.tenant = MagicMock()
        return invitation

    def test_valid_invitation(self, invitation):
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation

            result = validate_invitation("VALID_TOKEN", "user@example.com")

            assert result == invitation
            mock_db.get.assert_called_once_with(
                token="VALID_TOKEN", email="user@example.com"
            )

    def test_invitation_not_found_raises_validation_error(self):
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.side_effect = Invitation.DoesNotExist

            with pytest.raises(ValidationError) as exc_info:
                validate_invitation("INVALID_TOKEN", "user@example.com")

            assert exc_info.value.detail == {
                "invitation_token": "Invalid invitation code."
            }
            mock_db.get.assert_called_once_with(
                token="INVALID_TOKEN", email="user@example.com"
            )

    def test_invitation_not_found_raises_not_found(self):
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.side_effect = Invitation.DoesNotExist

            with pytest.raises(NotFound) as exc_info:
                validate_invitation(
                    "INVALID_TOKEN", "user@example.com", raise_not_found=True
                )

            assert exc_info.value.detail == "Invitation is not valid."
            mock_db.get.assert_called_once_with(
                token="INVALID_TOKEN", email="user@example.com"
            )

    def test_invitation_expired(self, invitation):
        expired_time = datetime.now(timezone.utc) - timedelta(days=1)
        invitation.expires_at = expired_time

        with patch("api.utils.Invitation.objects.using") as mock_using, patch(
            "api.utils.datetime"
        ) as mock_datetime:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation
            mock_datetime.now.return_value = datetime.now(timezone.utc)

            with pytest.raises(InvitationTokenExpiredException):
                validate_invitation("VALID_TOKEN", "user@example.com")

            # Ensure the invitation state was updated to EXPIRED
            assert invitation.state == Invitation.State.EXPIRED
            invitation.save.assert_called_once_with(using=MainRouter.admin_db)

    def test_invitation_not_pending(self, invitation):
        invitation.state = Invitation.State.ACCEPTED

        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation

            with pytest.raises(ValidationError) as exc_info:
                validate_invitation("VALID_TOKEN", "user@example.com")

            assert exc_info.value.detail == {
                "invitation_token": "This invitation is no longer valid."
            }

    def test_invitation_with_different_email(self):
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.side_effect = Invitation.DoesNotExist

            with pytest.raises(ValidationError) as exc_info:
                validate_invitation("VALID_TOKEN", "different@example.com")

            assert exc_info.value.detail == {
                "invitation_token": "Invalid invitation code."
            }
            mock_db.get.assert_called_once_with(
                token="VALID_TOKEN", email="different@example.com"
            )
@@ -1,113 +0,0 @@
from datetime import datetime, timezone
from uuid import uuid4

import pytest
from dateutil.relativedelta import relativedelta
from rest_framework_json_api.serializers import ValidationError
from uuid6 import UUID

from api.uuid_utils import (
    transform_into_uuid7,
    datetime_to_uuid7,
    datetime_from_uuid7,
    uuid7_start,
    uuid7_end,
    uuid7_range,
)


def test_transform_into_uuid7_valid():
    uuid_v7 = datetime_to_uuid7(datetime.now(timezone.utc))
    transformed_uuid = transform_into_uuid7(uuid_v7)
    assert transformed_uuid == UUID(hex=uuid_v7.hex.upper())
    assert transformed_uuid.version == 7


def test_transform_into_uuid7_invalid_version():
    uuid_v4 = uuid4()
    with pytest.raises(ValidationError) as exc_info:
        transform_into_uuid7(UUID(str(uuid_v4)))
    assert str(exc_info.value.detail[0]) == "Invalid UUIDv7 value."


@pytest.mark.parametrize(
    "input_datetime",
    [
        datetime(2024, 9, 11, 7, 20, 27, tzinfo=timezone.utc),
        datetime(2023, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
    ],
)
def test_datetime_to_uuid7(input_datetime):
    uuid7 = datetime_to_uuid7(input_datetime)
    assert isinstance(uuid7, UUID)
    assert uuid7.version == 7
    expected_timestamp_ms = int(input_datetime.timestamp() * 1000) & 0xFFFFFFFFFFFF
    assert uuid7.time == expected_timestamp_ms


@pytest.mark.parametrize(
    "input_datetime",
    [
        datetime(2024, 9, 11, 7, 20, 27, tzinfo=timezone.utc),
        datetime(2023, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
    ],
)
def test_datetime_from_uuid7(input_datetime):
    uuid7 = datetime_to_uuid7(input_datetime)
    extracted_datetime = datetime_from_uuid7(uuid7)
    assert extracted_datetime == input_datetime


def test_datetime_from_uuid7_invalid():
    uuid_v4 = uuid4()
    with pytest.raises(ValueError):
        datetime_from_uuid7(UUID(str(uuid_v4)))


def test_uuid7_start():
    dt = datetime.now(timezone.utc)
    uuid = datetime_to_uuid7(dt)
    start_uuid = uuid7_start(uuid)
    expected_dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
    expected_timestamp_ms = int(expected_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF
    assert start_uuid.time == expected_timestamp_ms
    assert start_uuid.version == 7


@pytest.mark.parametrize("months_offset", [0, 1, 10, 30, 60])
def test_uuid7_end(months_offset):
    dt = datetime.now(timezone.utc)
    uuid = datetime_to_uuid7(dt)
    end_uuid = uuid7_end(uuid, months_offset)
    expected_dt = dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    expected_dt += relativedelta(months=months_offset, microseconds=-1)
    expected_timestamp_ms = int(expected_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF
    assert end_uuid.time == expected_timestamp_ms
    assert end_uuid.version == 7


def test_uuid7_range():
    dt_now = datetime.now(timezone.utc)
    uuid_list = [
        datetime_to_uuid7(dt_now),
        datetime_to_uuid7(dt_now.replace(year=2023)),
        datetime_to_uuid7(dt_now.replace(year=2024)),
        datetime_to_uuid7(dt_now.replace(year=2025)),
    ]
    start_uuid, end_uuid = uuid7_range(uuid_list)

    # Expected start of range
    start_dt = datetime_from_uuid7(min(uuid_list, key=lambda u: u.time))
    start_dt = start_dt.replace(hour=0, minute=0, second=0, microsecond=0)
    expected_start_timestamp_ms = int(start_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF

    # Expected end of range
    end_dt = datetime_from_uuid7(max(uuid_list, key=lambda u: u.time))
    end_dt = end_dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    end_dt += relativedelta(months=1, microseconds=-1)
    expected_end_timestamp_ms = int(end_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF

    assert start_uuid.time == expected_start_timestamp_ms
    assert end_uuid.time == expected_end_timestamp_ms
    assert start_uuid.version == 7
    assert end_uuid.version == 7
File diff suppressed because it is too large
@@ -1,189 +0,0 @@
from datetime import datetime, timezone

from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.common.models import Connection
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from rest_framework.exceptions import ValidationError, NotFound

from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Provider, Invitation


def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:
    """
    Recursively merge two dictionaries, using `default_dict` as the base and `replacement_dict` for overriding values.

    Args:
        default_dict (dict): The base dictionary containing default key-value pairs.
        replacement_dict (dict): The dictionary containing values that should override those in `default_dict`.

    Returns:
        dict: A new dictionary containing all keys from `default_dict` with values from `replacement_dict` replacing
              any overlapping keys. If a key in both `default_dict` and `replacement_dict` contains dictionaries,
              this function will merge them recursively.
    """
    result = default_dict.copy()

    for key, value in replacement_dict.items():
        if key in result and isinstance(result[key], dict) and isinstance(value, dict):
            if value:
                result[key] = merge_dicts(result[key], value)
            else:
                result[key] = value
        else:
            result[key] = value

    return result


def return_prowler_provider(
    provider: Provider,
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider]:
    """Return the Prowler provider class based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secrets.

    Returns:
        AwsProvider | AzureProvider | GcpProvider | KubernetesProvider: The corresponding provider class.

    Raises:
        ValueError: If the provider type specified in `provider.provider` is not supported.
    """
    match provider.provider:
        case Provider.ProviderChoices.AWS.value:
            prowler_provider = AwsProvider
        case Provider.ProviderChoices.GCP.value:
            prowler_provider = GcpProvider
        case Provider.ProviderChoices.AZURE.value:
            prowler_provider = AzureProvider
        case Provider.ProviderChoices.KUBERNETES.value:
            prowler_provider = KubernetesProvider
        case _:
            raise ValueError(f"Provider type {provider.provider} not supported")
    return prowler_provider


def get_prowler_provider_kwargs(provider: Provider) -> dict:
    """Get the Prowler provider kwargs based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secret.

    Returns:
        dict: The provider kwargs for the corresponding provider class.
    """
    prowler_provider_kwargs = provider.secret.secret
    if provider.provider == Provider.ProviderChoices.AZURE.value:
        prowler_provider_kwargs = {
            **prowler_provider_kwargs,
            "subscription_ids": [provider.uid],
        }
    elif provider.provider == Provider.ProviderChoices.GCP.value:
        prowler_provider_kwargs = {
            **prowler_provider_kwargs,
            "project_ids": [provider.uid],
        }
    elif provider.provider == Provider.ProviderChoices.KUBERNETES.value:
        prowler_provider_kwargs = {**prowler_provider_kwargs, "context": provider.uid}
    return prowler_provider_kwargs


def initialize_prowler_provider(
    provider: Provider,
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider:
    """Initialize a Prowler provider instance based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secrets.

    Returns:
        AwsProvider | AzureProvider | GcpProvider | KubernetesProvider: An instance of the corresponding provider class
            (`AwsProvider`, `AzureProvider`, `GcpProvider`, or `KubernetesProvider`) initialized with the
            provider's secrets.
    """
    prowler_provider = return_prowler_provider(provider)
    prowler_provider_kwargs = get_prowler_provider_kwargs(provider)
    return prowler_provider(**prowler_provider_kwargs)


def prowler_provider_connection_test(provider: Provider) -> Connection:
    """Test the connection to a Prowler provider based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secrets.

    Returns:
        Connection: A connection object representing the result of the connection test for the specified provider.
    """
    prowler_provider = return_prowler_provider(provider)
    prowler_provider_kwargs = provider.secret.secret
    return prowler_provider.test_connection(
        **prowler_provider_kwargs, provider_id=provider.uid, raise_on_exception=False
    )


def validate_invitation(
    invitation_token: str, email: str, raise_not_found=False
) -> Invitation:
    """
    Validates an invitation based on the provided token and email.

    This function attempts to retrieve an Invitation object using the given
    `invitation_token` and `email`. It performs several checks to ensure that
    the invitation is valid, not expired, and in the correct state for acceptance.

    Args:
        invitation_token (str): The token associated with the invitation.
        email (str): The email address associated with the invitation.
        raise_not_found (bool, optional): If True, raises a `NotFound` exception
            when the invitation is not found. If False, raises a `ValidationError`.
            Defaults to False.

    Returns:
        Invitation: The validated Invitation object.

    Raises:
        NotFound: If `raise_not_found` is True and the invitation does not exist.
        ValidationError: If the invitation does not exist and `raise_not_found`
            is False, or if the invitation is invalid or in an incorrect state.
        InvitationTokenExpiredException: If the invitation has expired.

    Notes:
        - This function uses the admin database connector to bypass RLS protection
          since the invitation may belong to a tenant the user is not a member of yet.
        - If the invitation has expired, its state is updated to EXPIRED, and an
          `InvitationTokenExpiredException` is raised.
        - Only invitations in the PENDING state can be accepted.

    Examples:
        invitation = validate_invitation("TOKEN123", "user@example.com")
    """
    try:
        # Admin DB connector is used to bypass RLS protection since the invitation belongs to a tenant the user
        # is not a member of yet
        invitation = Invitation.objects.using(MainRouter.admin_db).get(
            token=invitation_token, email=email
        )
    except Invitation.DoesNotExist:
        if raise_not_found:
            raise NotFound(detail="Invitation is not valid.")
        else:
            raise ValidationError({"invitation_token": "Invalid invitation code."})

    # Check if the invitation has expired
    if invitation.expires_at < datetime.now(timezone.utc):
        invitation.state = Invitation.State.EXPIRED
        invitation.save(using=MainRouter.admin_db)
        raise InvitationTokenExpiredException()

    # Check the state of the invitation
    if invitation.state != Invitation.State.PENDING:
        raise ValidationError(
            {"invitation_token": "This invitation is no longer valid."}
        )

    return invitation
@@ -1,148 +0,0 @@
from datetime import datetime, timezone
from random import getrandbits

from dateutil.relativedelta import relativedelta
from rest_framework_json_api.serializers import ValidationError
from uuid6 import UUID


def transform_into_uuid7(uuid_obj: UUID) -> UUID:
    """
    Validates that the given UUID object is a UUIDv7 and returns it.

    This function checks if the provided UUID object is of version 7.
    If it is, it returns a new UUID object constructed from the uppercase
    hexadecimal representation of the input UUID. If not, it raises a ValidationError.

    Args:
        uuid_obj (UUID): The UUID object to validate and transform.

    Returns:
        UUID: A new UUIDv7 object constructed from the uppercase hexadecimal
            representation of the input UUID.

    Raises:
        ValidationError: If the provided UUID is not a version 7 UUID.
    """
    try:
        if uuid_obj.version != 7:
            raise ValueError
        return UUID(hex=uuid_obj.hex.upper())
    except ValueError:
        raise ValidationError("Invalid UUIDv7 value.")


def datetime_to_uuid7(dt: datetime) -> UUID:
    """
    Generates a UUIDv7 from a given datetime object.

    Constructs a UUIDv7 using the provided datetime timestamp.
    Ensures that the version and variant bits are set correctly.

    Args:
        dt: A datetime object representing the desired timestamp for the UUIDv7.

    Returns:
        A UUIDv7 object corresponding to the given datetime.
    """
    timestamp_ms = int(dt.timestamp() * 1000) & 0xFFFFFFFFFFFF  # 48 bits

    # Generate 12 bits of randomness for the sequence
    rand_seq = getrandbits(12)
    # Generate 62 bits of randomness for the node
    rand_node = getrandbits(62)

    # Build the UUID integer
    uuid_int = timestamp_ms << 80  # Shift timestamp to bits 80-127

    # Set the version to 7 in bits 76-79
    uuid_int |= 0x7 << 76

    # Set 12 bits of randomness in bits 64-75
    uuid_int |= rand_seq << 64

    # Set the variant to "10" in bits 62-63
    uuid_int |= 0x2 << 62

    # Set 62 bits of randomness in bits 0-61
    uuid_int |= rand_node

    return UUID(int=uuid_int)


def datetime_from_uuid7(uuid7: UUID) -> datetime:
    """
    Extracts the timestamp from a UUIDv7 and returns it as a datetime object.

    Args:
        uuid7: A UUIDv7 object.

    Returns:
        A datetime object representing the timestamp encoded in the UUIDv7.
    """
    timestamp_ms = uuid7.time
    return datetime.fromtimestamp(timestamp_ms / 1000, tz=timezone.utc)


def uuid7_start(uuid_obj: UUID) -> UUID:
    """
    Returns a UUIDv7 that represents the start of the day for the given UUID.

    Args:
        uuid_obj: A UUIDv7 object.

    Returns:
        A UUIDv7 object representing the start of the day for the given UUID's timestamp.
    """
    start_of_day = datetime_from_uuid7(uuid_obj).replace(
        hour=0, minute=0, second=0, microsecond=0
    )
    return datetime_to_uuid7(start_of_day)


def uuid7_end(uuid_obj: UUID, offset_months: int = 1) -> UUID:
    """
    Returns a UUIDv7 that represents the end of the month for the given UUID.

    Args:
        uuid_obj: A UUIDv7 object.
        offset_months: Number of months to offset from the given UUID's date. Defaults to 1 for when
            partitions are not being used; when they are, the value will be the one set at
            FINDINGS_TABLE_PARTITION_MONTHS.

    Returns:
        A UUIDv7 object representing the end of the month for the given UUID's date plus offset_months.
    """
    end_of_month = datetime_from_uuid7(uuid_obj).replace(
        day=1, hour=0, minute=0, second=0, microsecond=0
    )
    end_of_month += relativedelta(months=offset_months, microseconds=-1)
    return datetime_to_uuid7(end_of_month)


def uuid7_range(uuid_list: list[UUID]) -> list[UUID]:
    """
    For the given list of UUIDv7s, returns the start and end UUIDv7 values that represent
    the range of days covered by the UUIDs.

    Args:
        uuid_list: A list of UUIDv7 objects.

    Returns:
        A list containing two UUIDv7 objects: the start and end of the day range.

    Raises:
        ValidationError: If the list is empty or contains invalid UUIDv7 objects.
    """
    if not uuid_list:
        raise ValidationError("UUID list is empty.")

    try:
        start_uuid = min(uuid_list, key=lambda u: u.time)
        end_uuid = max(uuid_list, key=lambda u: u.time)
    except AttributeError:
        raise ValidationError("Invalid UUIDv7 objects in the list.")

    start_range = uuid7_start(start_uuid)
    end_range = uuid7_end(end_uuid)

    return [start_range, end_range]
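# A quick worked example of the bit layout datetime_to_uuid7 builds, following
# the comments in that function: 48-bit millisecond timestamp in bits 80-127,
# version nibble 0x7 in bits 76-79, variant "10" in bits 62-63, randomness
# elsewhere (left at zero here for clarity). The timestamp value is arbitrary.
from datetime import datetime, timezone

from uuid6 import UUID

ts_ms = 1_700_000_000_000                  # 2023-11-14T22:13:20Z in milliseconds
uuid_int = ts_ms << 80                     # timestamp into the top 48 bits
uuid_int |= 0x7 << 76                      # version 7
uuid_int |= 0x2 << 62                      # RFC 4122 variant "10"
u = UUID(int=uuid_int)
assert u.version == 7
assert u.time == ts_ms                     # uuid6.UUID exposes the v7 timestamp
assert datetime.fromtimestamp(u.time / 1000, tz=timezone.utc).year == 2023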
File diff suppressed because it is too large
@@ -1,114 +0,0 @@
from django.urls import include, path
from drf_spectacular.views import SpectacularRedocView
from rest_framework_nested import routers

from api.v1.views import (
    CustomTokenObtainView,
    CustomTokenRefreshView,
    FindingViewSet,
    MembershipViewSet,
    ProviderGroupViewSet,
    ProviderGroupProvidersRelationshipView,
    ProviderSecretViewSet,
    InvitationViewSet,
    InvitationAcceptViewSet,
    RoleViewSet,
    RoleProviderGroupRelationshipView,
    UserRoleRelationshipView,
    OverviewViewSet,
    ComplianceOverviewViewSet,
    ProviderViewSet,
    ResourceViewSet,
    ScanViewSet,
    ScheduleViewSet,
    SchemaView,
    TaskViewSet,
    TenantMembersViewSet,
    TenantViewSet,
    UserViewSet,
)

router = routers.DefaultRouter(trailing_slash=False)

router.register(r"users", UserViewSet, basename="user")
router.register(r"tenants", TenantViewSet, basename="tenant")
router.register(r"providers", ProviderViewSet, basename="provider")
router.register(r"provider-groups", ProviderGroupViewSet, basename="providergroup")
router.register(r"scans", ScanViewSet, basename="scan")
router.register(r"tasks", TaskViewSet, basename="task")
router.register(r"resources", ResourceViewSet, basename="resource")
router.register(r"findings", FindingViewSet, basename="finding")
router.register(r"roles", RoleViewSet, basename="role")
router.register(
    r"compliance-overviews", ComplianceOverviewViewSet, basename="complianceoverview"
)
router.register(r"overviews", OverviewViewSet, basename="overview")
router.register(r"schedules", ScheduleViewSet, basename="schedule")

tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
tenants_router.register(
    r"memberships", TenantMembersViewSet, basename="tenant-membership"
)

users_router = routers.NestedSimpleRouter(router, r"users", lookup="user")
users_router.register(r"memberships", MembershipViewSet, basename="user-membership")

urlpatterns = [
    path("tokens", CustomTokenObtainView.as_view(), name="token-obtain"),
    path("tokens/refresh", CustomTokenRefreshView.as_view(), name="token-refresh"),
    path(
        "providers/secrets",
        ProviderSecretViewSet.as_view({"get": "list", "post": "create"}),
        name="providersecret-list",
    ),
    path(
        "providers/secrets/<uuid:pk>",
        ProviderSecretViewSet.as_view(
            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
        ),
        name="providersecret-detail",
    ),
    path(
        "tenants/invitations",
        InvitationViewSet.as_view({"get": "list", "post": "create"}),
        name="invitation-list",
    ),
    path(
        "tenants/invitations/<uuid:pk>",
        InvitationViewSet.as_view(
            {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
        ),
        name="invitation-detail",
    ),
    path(
        "invitations/accept",
        InvitationAcceptViewSet.as_view({"post": "accept"}),
        name="invitation-accept",
    ),
    path(
        "roles/<uuid:pk>/relationships/provider_groups",
        RoleProviderGroupRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="role-provider-groups-relationship",
    ),
    path(
        "users/<uuid:pk>/relationships/roles",
        UserRoleRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="user-roles-relationship",
    ),
    path(
        "provider-groups/<uuid:pk>/relationships/providers",
        ProviderGroupProvidersRelationshipView.as_view(
            {"post": "create", "patch": "partial_update", "delete": "destroy"}
        ),
        name="provider_group-providers-relationship",
    ),
    path("", include(router.urls)),
    path("", include(tenants_router.urls)),
    path("", include(users_router.urls)),
    path("schema", SchemaView.as_view(), name="schema"),
    path("docs", SpectacularRedocView.as_view(url_name="schema"), name="docs"),
]
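# A small illustration of how these registrations resolve, assuming the stock
# DefaultRouter / drf-nested-routers naming conventions (basename plus
# "-list"/"-detail", and a "<lookup>_pk" kwarg for nested routes). Paths in the
# comments are shown relative to wherever this urlconf is mounted; with
# trailing_slash=False none of them end in "/".
from uuid import uuid4

from django.urls import reverse

reverse("provider-list")                                          # providers
reverse("provider-detail", kwargs={"pk": uuid4()})                # providers/<uuid>
reverse("tenant-membership-list", kwargs={"tenant_pk": uuid4()})  # tenants/<uuid>/memberships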
File diff suppressed because it is too large
@@ -1,22 +0,0 @@
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _


class MaximumLengthValidator:
    def __init__(self, max_length=72):
        self.max_length = max_length

    def validate(self, password, user=None):
        if len(password) > self.max_length:
            raise ValidationError(
                _(
                    "This password is too long. It must contain no more than %(max_length)d characters."
                ),
                code="password_too_long",
                params={"max_length": self.max_length},
            )

    def get_help_text(self):
        return _(
            f"Your password must contain no more than {self.max_length} characters."
        )
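# How a validator like this is typically wired up in Django settings; the
# dotted path below is an assumption, so point it at wherever the class
# actually lives in this project. A 72-character ceiling also matches bcrypt's
# effective input limit, which is presumably why it is the default here.
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "api.validators.MaximumLengthValidator",  # hypothetical module path
        "OPTIONS": {"max_length": 72},
    },
]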
@@ -1,16 +0,0 @@
"""
ASGI config for backend project.

It exposes the ASGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/5.0/howto/deployment/asgi/
"""

import os

from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.production")

application = get_asgi_application()
@@ -1,47 +0,0 @@
from celery import Celery, Task

celery_app = Celery("tasks")

celery_app.config_from_object("django.conf:settings", namespace="CELERY")
celery_app.conf.update(result_extended=True, result_expires=None)

celery_app.autodiscover_tasks(["api"])


class RLSTask(Task):
    def apply_async(
        self,
        args=None,
        kwargs=None,
        task_id=None,
        producer=None,
        link=None,
        link_error=None,
        shadow=None,
        **options,
    ):
        from django_celery_results.models import TaskResult

        from api.models import Task as APITask

        result = super().apply_async(
            args=args,
            kwargs=kwargs,
            task_id=task_id,
            producer=producer,
            link=link,
            link_error=link_error,
            shadow=shadow,
            **options,
        )
        task_result_instance = TaskResult.objects.get(task_id=result.task_id)
        from api.db_utils import rls_transaction

        tenant_id = kwargs.get("tenant_id")
        with rls_transaction(tenant_id):
            APITask.objects.create(
                id=task_result_instance.task_id,
                tenant_id=tenant_id,
                task_runner_task=task_result_instance,
            )
        return result
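# A sketch of how a task opts into this base class via Celery's standard
# `base=` mechanism; the task name and body are illustrative only. Because
# apply_async reads kwargs["tenant_id"], callers must pass tenant_id as a
# keyword argument or the APITask bookkeeping row cannot be scoped.
@celery_app.task(base=RLSTask, name="example-rls-task")  # hypothetical task
def example_rls_task(tenant_id: str):
    return tenant_id


# Usage: example_rls_task.apply_async(kwargs={"tenant_id": "<tenant-uuid>"})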
@@ -1,230 +0,0 @@
import json
import logging
from enum import StrEnum

from django_guid.log_filters import CorrelationId

from config.env import env


class BackendLogger(StrEnum):
    GUNICORN = "gunicorn"
    GUNICORN_ACCESS = "gunicorn.access"
    GUNICORN_ERROR = "gunicorn.error"
    DJANGO = "django"
    SECURITY = "django.security"
    DB = "django.db"
    API = "api"
    TASKS = "tasks"


# Formatters


class NDJSONFormatter(logging.Formatter):
    """NDJSON custom formatter for logging messages.

    If available, it will include all kinds of API request metadata.
    """

    def format(self, record):
        log_record = {
            "timestamp": self.formatTime(record, self.datefmt),
            "level": record.levelname,
            "message": record.getMessage(),
            "logger": record.name,
            "module": record.module,
            "pathname": record.pathname,
            "lineno": record.lineno,
            "funcName": record.funcName,
            "process": record.process,
            "thread": record.thread,
            "transaction_id": record.transaction_id
            if hasattr(record, "transaction_id")
            else None,
        }

        # Add REST API extra fields
        if hasattr(record, "user_id"):
            log_record["user_id"] = record.user_id
        if hasattr(record, "tenant_id"):
            log_record["tenant_id"] = record.tenant_id
        if hasattr(record, "method"):
            log_record["method"] = record.method
        if hasattr(record, "path"):
            log_record["path"] = record.path
        if hasattr(record, "query_params"):
            log_record["query_params"] = record.query_params
        if hasattr(record, "duration"):
            log_record["duration"] = record.duration
        if hasattr(record, "status_code"):
            log_record["status_code"] = record.status_code

        if record.exc_info:
            log_record["exc_info"] = self.formatException(record.exc_info)

        return json.dumps(log_record)
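# For reference, one access-log record emitted by this formatter looks roughly
# like the following (values illustrative; whitespace added here for
# readability, the real output is a single JSON object per line):
#
# {"timestamp": "2024-01-01 12:00:00", "level": "INFO", "message": "",
#  "logger": "api", "module": "middleware", "pathname": "/app/api/middleware.py",
#  "lineno": 42, "funcName": "__call__", "process": 1, "thread": 140000000000000,
#  "transaction_id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "user_id": "user123",
#  "tenant_id": "tenant456", "method": "GET", "path": "/api/v1/providers",
#  "query_params": {}, "duration": 0.05, "status_code": 200}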

class HumanReadableFormatter(logging.Formatter):
    """Human-readable custom formatter for logging messages.

    If available, it will include all kinds of API request metadata.
    """

    def format(self, record):
        log_components = [
            f"{self.formatTime(record, self.datefmt)}",
            f"[{record.name}]",
            f"{record.levelname}:",
            f"({record.module})",
            f"[module={record.module}",
            f"path={record.pathname}",
            f"line={record.lineno}",
            f"function={record.funcName}",
            f"process={record.process}",
            f"thread={record.thread}",
            f"transaction-id={getattr(record, 'transaction_id', None)}]",
            f"{record.getMessage()}",
        ]

        # Add REST API extra fields
        if hasattr(record, "user_id"):
            log_components.append(f"({record.user_id})")
        if hasattr(record, "tenant_id"):
            log_components.append(f"[{record.tenant_id}]")
        if hasattr(record, "method"):
            log_components.append(f'"{record.method} {record.path}"')
        if hasattr(record, "query_params"):
            log_components.append(f"with parameters {record.query_params}")
        if hasattr(record, "duration"):
            log_components.append(f"done in {record.duration}s:")
        if hasattr(record, "status_code"):
            log_components.append(f"{record.status_code}")

        if record.exc_info:
            log_components.append(self.formatException(record.exc_info))

        return " ".join(log_components)
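The same record rendered by this formatter would look roughly like the following (values invented for illustration):

# 2024-08-21 10:15:04 [api] INFO: (middleware) [module=middleware path=/app/api/middleware.py
# line=42 function=__call__ process=7 thread=140201 transaction-id=7f3a...] Request finished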

# Filters


class TransactionIdFilter(CorrelationId):
    """Logging filter class.

    Used to override the `correlation_id_field` parameter in the parent class to use a different name.
    """

    CORRELATION_ID_FIELD = "transaction_id"

    def __init__(self):
        super().__init__(correlation_id_field=self.CORRELATION_ID_FIELD)


# Logging settings

LEVEL = env("DJANGO_LOGGING_LEVEL", default="INFO")
FORMATTER = env("DJANGO_LOGGING_FORMATTER", default="ndjson")

LOGGING = {
    "version": 1,
    "disable_existing_loggers": True,
    "filters": {"transaction_id": {"()": TransactionIdFilter}},
    "formatters": {
        "ndjson": {
            "()": NDJSONFormatter,
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
        "human_readable": {
            "()": HumanReadableFormatter,
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    },
    "handlers": {
        "gunicorn_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "django_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "api_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "db_console": {
            "level": "DEBUG" if LEVEL == "DEBUG" else "INFO",
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "security_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
        "tasks_console": {
            "level": LEVEL,
            "class": "logging.StreamHandler",
            "formatter": FORMATTER,
            "filters": ["transaction_id"],
        },
    },
    "loggers": {
        BackendLogger.GUNICORN: {
            "handlers": ["gunicorn_console"],
            "level": LEVEL,
            "propagate": False,
        },
        BackendLogger.GUNICORN_ACCESS: {
            "handlers": ["gunicorn_console"],
            "level": "CRITICAL",
            "propagate": False,
        },
        BackendLogger.GUNICORN_ERROR: {
            "handlers": ["gunicorn_console"],
            "level": LEVEL,
            "propagate": False,
        },
        BackendLogger.DJANGO: {
            "handlers": ["django_console"],
            "level": "WARNING",
            "propagate": True,
        },
        BackendLogger.DB: {
            "handlers": ["db_console"],
            "level": LEVEL,
            "propagate": False,
        },
        BackendLogger.SECURITY: {
            "handlers": ["security_console"],
            "level": LEVEL,
            "propagate": False,
        },
        BackendLogger.API: {
            "handlers": ["api_console"],
            "level": LEVEL,
            "propagate": False,
        },
        BackendLogger.TASKS: {
            "handlers": ["tasks_console"],
            "level": LEVEL,
            "propagate": False,
        },
    },
    # Gunicorn required configuration
    "root": {
        "level": "ERROR",
        "handlers": ["gunicorn_console"],
    },
}
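Application code can then pick up one of these pre-wired loggers through the enum; a short sketch (the `config.custom_logging` module path matches the import in the settings file later in this diff, the log message is illustrative):

# Fetching a configured, namespaced logger by its BackendLogger name.
import logging

from config.custom_logging import BackendLogger

logger = logging.getLogger(BackendLogger.TASKS)
logger.info("Task queued")  # illustrative message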
@@ -1,209 +0,0 @@
from datetime import timedelta

from config.custom_logging import LOGGING  # noqa
from config.env import BASE_DIR, env  # noqa
from config.settings.celery import *  # noqa
from config.settings.partitions import *  # noqa

SECRET_KEY = env("SECRET_KEY", default="secret")
DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = ["localhost", "127.0.0.1"]

# Application definition

INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "django.contrib.postgres",
    "psqlextra",
    "api",
    "rest_framework",
    "corsheaders",
    "drf_spectacular",
    "django_guid",
    "rest_framework_json_api",
    "django_celery_results",
    "django_celery_beat",
    "rest_framework_simplejwt.token_blacklist",
]

MIDDLEWARE = [
    "django_guid.middleware.guid_middleware",
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "corsheaders.middleware.CorsMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
    "api.middleware.APILoggingMiddleware",
]

CORS_ALLOWED_ORIGINS = ["http://localhost", "http://127.0.0.1"]

ROOT_URLCONF = "config.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]

REST_FRAMEWORK = {
    "DEFAULT_SCHEMA_CLASS": "drf_spectacular_jsonapi.schemas.openapi.JsonApiAutoSchema",
    "DEFAULT_AUTHENTICATION_CLASSES": (
        "rest_framework_simplejwt.authentication.JWTAuthentication",
    ),
    "PAGE_SIZE": 10,
    "EXCEPTION_HANDLER": "api.exceptions.custom_exception_handler",
    "DEFAULT_PAGINATION_CLASS": "drf_spectacular_jsonapi.schemas.pagination.JsonApiPageNumberPagination",
    "DEFAULT_PARSER_CLASSES": (
        "rest_framework_json_api.parsers.JSONParser",
        "rest_framework.parsers.FormParser",
        "rest_framework.parsers.MultiPartParser",
    ),
    "DEFAULT_RENDERER_CLASSES": ("api.renderers.APIJSONRenderer",),
    "DEFAULT_METADATA_CLASS": "rest_framework_json_api.metadata.JSONAPIMetadata",
    "DEFAULT_FILTER_BACKENDS": (
        "rest_framework_json_api.filters.QueryParameterValidationFilter",
        "rest_framework_json_api.filters.OrderingFilter",
        "rest_framework_json_api.django_filters.backends.DjangoFilterBackend",
        "rest_framework.filters.SearchFilter",
    ),
    "SEARCH_PARAM": "filter[search]",
    "TEST_REQUEST_RENDERER_CLASSES": (
        "rest_framework_json_api.renderers.JSONRenderer",
    ),
    "TEST_REQUEST_DEFAULT_FORMAT": "vnd.api+json",
    "JSON_API_UNIFORM_EXCEPTIONS": True,
}
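With this configuration, clients speak JSON:API; a sketch of what a filtered, paginated request could look like (the endpoint path and pagination parameter names are assumptions based on JSON:API conventions, not taken from this diff):

# GET /api/v1/findings?filter[search]=s3&page[number]=2
# Accept: application/vnd.api+json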

SPECTACULAR_SETTINGS = {
    "SERVE_INCLUDE_SCHEMA": False,
    "COMPONENT_SPLIT_REQUEST": True,
    "PREPROCESSING_HOOKS": [
        "drf_spectacular_jsonapi.hooks.fix_nested_path_parameters",
    ],
}

WSGI_APPLICATION = "config.wsgi.application"

DJANGO_GUID = {
    "GUID_HEADER_NAME": "Transaction-ID",
    "VALIDATE_GUID": True,
    "RETURN_HEADER": True,
    "EXPOSE_HEADER": True,
    "INTEGRATIONS": [],
    "IGNORE_URLS": [],
    "UUID_LENGTH": 32,
}

DATABASE_ROUTERS = ["api.db_router.MainRouter"]


# Password validation
# https://docs.djangoproject.com/en/5.0/ref/settings/#auth-password-validators

AUTH_USER_MODEL = "api.User"

AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
        "OPTIONS": {"min_length": 12},
    },
    {
        "NAME": "api.validators.MaximumLengthValidator",
        "OPTIONS": {
            "max_length": 72,
        },
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]

SIMPLE_JWT = {
    # Token lifetime settings
    "ACCESS_TOKEN_LIFETIME": timedelta(
        minutes=env.int("DJANGO_ACCESS_TOKEN_LIFETIME", 30)
    ),
    "REFRESH_TOKEN_LIFETIME": timedelta(
        minutes=env.int("DJANGO_REFRESH_TOKEN_LIFETIME", 60 * 24)
    ),
    "ROTATE_REFRESH_TOKENS": True,
    "BLACKLIST_AFTER_ROTATION": True,
    # Algorithm and keys
    "ALGORITHM": "RS256",
    "SIGNING_KEY": env.str("DJANGO_TOKEN_SIGNING_KEY", "").replace("\\n", "\n"),
    "VERIFYING_KEY": env.str("DJANGO_TOKEN_VERIFYING_KEY", "").replace("\\n", "\n"),
    # Authorization header configuration
    "AUTH_HEADER_TYPES": ("Bearer",),
    "AUTH_HEADER_NAME": "HTTP_AUTHORIZATION",
    # Custom serializers
    "TOKEN_OBTAIN_SERIALIZER": "api.serializers.TokenSerializer",
    "TOKEN_REFRESH_SERIALIZER": "api.serializers.TokenRefreshSerializer",
    # Standard JWT claims
    "TOKEN_TYPE_CLAIM": "typ",
    "JTI_CLAIM": "jti",
    "USER_ID_FIELD": "id",
    "USER_ID_CLAIM": "sub",
    # Issuer and audience claims. For now these keep their default values; they may change in the future.
    "AUDIENCE": env.str("DJANGO_JWT_AUDIENCE", "https://api.prowler.com"),
    "ISSUER": env.str("DJANGO_JWT_ISSUER", "https://api.prowler.com"),
    # Additional security settings
    "UPDATE_LAST_LOGIN": True,
}
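Since the RSA key material arrives via environment variables, newlines can be stored escaped; the `.replace("\\n", "\n")` calls above undo that at load time. A sketch of how such a variable might be provided (key body elided):

# DJANGO_TOKEN_SIGNING_KEY="-----BEGIN PRIVATE KEY-----\nMIIE...\n-----END PRIVATE KEY-----"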

SECRETS_ENCRYPTION_KEY = env.str("DJANGO_SECRETS_ENCRYPTION_KEY", "")

# Internationalization
# https://docs.djangoproject.com/en/5.0/topics/i18n/

LANGUAGE_CODE = "en-us"
LANGUAGES = [
    ("en", "English"),
]

TIME_ZONE = "UTC"

USE_I18N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/5.0/howto/static-files/

STATIC_URL = "static/"

# Default primary key field type
# https://docs.djangoproject.com/en/5.0/ref/settings/#default-auto-field

DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"

# Cache settings
CACHE_MAX_AGE = env.int("DJANGO_CACHE_MAX_AGE", 3600)
CACHE_STALE_WHILE_REVALIDATE = env.int("DJANGO_STALE_WHILE_REVALIDATE", 60)


TESTING = False
@@ -1,40 +0,0 @@
from config.django.base import *  # noqa
from config.env import env


DEBUG = env.bool("DJANGO_DEBUG", default=True)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["*"])

# Database
DATABASES = {
    "prowler_user": {
        "ENGINE": "psqlextra.backend",
        "NAME": env("POSTGRES_DB", default="prowler_db"),
        "USER": env("POSTGRES_USER", default="prowler_user"),
        "PASSWORD": env("POSTGRES_PASSWORD", default="prowler"),
        "HOST": env("POSTGRES_HOST", default="postgres-db"),
        "PORT": env("POSTGRES_PORT", default="5432"),
    },
    "admin": {
        "ENGINE": "psqlextra.backend",
        "NAME": env("POSTGRES_DB", default="prowler_db"),
        "USER": env("POSTGRES_ADMIN_USER", default="prowler"),
        "PASSWORD": env("POSTGRES_ADMIN_PASSWORD", default="S3cret"),
        "HOST": env("POSTGRES_HOST", default="postgres-db"),
        "PORT": env("POSTGRES_PORT", default="5432"),
    },
}
DATABASES["default"] = DATABASES["prowler_user"]

REST_FRAMEWORK["DEFAULT_RENDERER_CLASSES"] = tuple(  # noqa: F405
    render_class
    for render_class in REST_FRAMEWORK["DEFAULT_RENDERER_CLASSES"]  # noqa: F405
) + ("rest_framework_json_api.renderers.BrowsableAPIRenderer",)

REST_FRAMEWORK["DEFAULT_FILTER_BACKENDS"] = tuple(  # noqa: F405
    filter_backend
    for filter_backend in REST_FRAMEWORK["DEFAULT_FILTER_BACKENDS"]  # noqa: F405
    if "DjangoFilterBackend" not in filter_backend
) + ("api.filters.CustomDjangoFilterBackend",)

SECRETS_ENCRYPTION_KEY = "ZMiYVo7m4Fbe2eXXPyrwxdJss2WSalXSv3xHBcJkPl0="
@@ -1,28 +0,0 @@
from config.django.base import *  # noqa
from config.env import env


DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["localhost", "127.0.0.1"])

# Database
# TODO Use Django database routers https://docs.djangoproject.com/en/5.0/topics/db/multi-db/#automatic-database-routing
DATABASES = {
    "prowler_user": {
        "ENGINE": "django.db.backends.postgresql",
        "NAME": env("POSTGRES_DB"),
        "USER": env("POSTGRES_USER"),
        "PASSWORD": env("POSTGRES_PASSWORD"),
        "HOST": env("POSTGRES_HOST"),
        "PORT": env("POSTGRES_PORT"),
    },
    "admin": {
        "ENGINE": "psqlextra.backend",
        "NAME": env("POSTGRES_DB"),
        "USER": env("POSTGRES_ADMIN_USER"),
        "PASSWORD": env("POSTGRES_ADMIN_PASSWORD"),
        "HOST": env("POSTGRES_HOST"),
        "PORT": env("POSTGRES_PORT"),
    },
}
DATABASES["default"] = DATABASES["prowler_user"]
@@ -1,26 +0,0 @@
from config.django.base import *  # noqa
from config.env import env


DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["localhost", "127.0.0.1"])


DATABASES = {
    "default": {
        "ENGINE": "psqlextra.backend",
        "NAME": "prowler_db_test",
        "USER": env("POSTGRES_USER", default="prowler_admin"),
        "PASSWORD": env("POSTGRES_PASSWORD", default="postgres"),
        "HOST": env("POSTGRES_HOST", default="localhost"),
        "PORT": env("POSTGRES_PORT", default="5432"),
    },
}

DATABASE_ROUTERS = []
TESTING = True
SECRETS_ENCRYPTION_KEY = "ZMiYVo7m4Fbe2eXXPyrwxdJss2WSalXSv3xHBcJkPl0="

# JWT

SIMPLE_JWT["ALGORITHM"] = "HS256"  # noqa: F405
@@ -1,7 +0,0 @@
from pathlib import Path

import environ

env = environ.Env()

BASE_DIR = Path(__file__).resolve().parent.parent
@@ -1,43 +0,0 @@
import logging
import multiprocessing
import os

from config.env import env

# Ensure the environment variable for Django settings is set
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.production")

# Import Django and set it up before accessing settings
import django  # noqa: E402

django.setup()
from config.django.production import LOGGING as DJANGO_LOGGERS, DEBUG  # noqa: E402
from config.custom_logging import BackendLogger  # noqa: E402

BIND_ADDRESS = env("DJANGO_BIND_ADDRESS", default="127.0.0.1")
PORT = env("DJANGO_PORT", default=8000)

# Server settings
bind = f"{BIND_ADDRESS}:{PORT}"

workers = env.int("DJANGO_WORKERS", default=multiprocessing.cpu_count() * 2 + 1)
reload = DEBUG

# Logging
logconfig_dict = DJANGO_LOGGERS
gunicorn_logger = logging.getLogger(BackendLogger.GUNICORN)


# Hooks
def on_starting(_):
    gunicorn_logger.info(f"Starting gunicorn server with {workers} workers")
    if reload:
        gunicorn_logger.warning("Reload settings enabled (dev mode)")


def on_reload(_):
    gunicorn_logger.warning("Gunicorn server has reloaded")


def when_ready(_):
    gunicorn_logger.info("Gunicorn server is ready")
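Gunicorn reads the module-level names above (`bind`, `workers`, `reload`, `logconfig_dict`, and the hook functions) when pointed at this file; a sketch of the launch command, with the config module path assumed since the filename is not shown in this diff:

# gunicorn -c python:config.guniconf config.wsgi:application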
@@ -1,3 +0,0 @@
from config.celery import celery_app

__all__ = ("celery_app",)
@@ -1,13 +0,0 @@
from config.env import env

VALKEY_HOST = env("VALKEY_HOST", default="valkey")
VALKEY_PORT = env("VALKEY_PORT", default="6379")
VALKEY_DB = env("VALKEY_DB", default="0")

CELERY_BROKER_URL = f"redis://{VALKEY_HOST}:{VALKEY_PORT}/{VALKEY_DB}"
CELERY_RESULT_BACKEND = "django-db"
CELERY_TASK_TRACK_STARTED = True

CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True

CELERY_DEADLOCK_ATTEMPTS = env.int("DJANGO_CELERY_DEADLOCK_ATTEMPTS", default=5)
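Valkey speaks the Redis wire protocol, which is why the broker URL uses the `redis://` scheme. With the defaults above it resolves to:

# CELERY_BROKER_URL == "redis://valkey:6379/0"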
Some files were not shown because too many files have changed in this diff.