Mirror of https://github.com/prowler-cloud/prowler.git (synced 2025-12-19 05:17:47 +00:00)
chore(api): Merge API repository
39
api/.env.example
Normal file
@@ -0,0 +1,39 @@
# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1
DJANGO_BIND_ADDRESS=0.0.0.0
DJANGO_PORT=8000
DJANGO_DEBUG=False
# Select one of [production|devel]
DJANGO_SETTINGS_MODULE=config.django.[production|devel]
# Select one of [ndjson|human_readable]
DJANGO_LOGGING_FORMATTER=[ndjson|human_readable]
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
DJANGO_WORKERS=4 # Defaults to the maximum available based on CPU cores if not set.
DJANGO_TOKEN_SIGNING_KEY=""
DJANGO_TOKEN_VERIFYING_KEY=""
# Token lifetime is in minutes
DJANGO_ACCESS_TOKEN_LIFETIME=30
DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_SECRETS_ENCRYPTION_KEY=""
# Decide whether to allow Django to manage database table partitions
DJANGO_MANAGE_DB_PARTITIONS=[True|False]

# PostgreSQL settings
# If running django and celery on host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=[localhost|postgres-db]
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler
POSTGRES_ADMIN_PASSWORD=S3cret
POSTGRES_USER=prowler_user
POSTGRES_PASSWORD=S3cret
POSTGRES_DB=prowler_db

# Valkey settings
# If running django and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=[localhost|valkey]
VALKEY_PORT=6379
VALKEY_DB=0
5
api/.github/CODEOWNERS
vendored
Normal file
@@ -0,0 +1,5 @@
* @prowler-cloud/api

# To protect a repository fully against unauthorized changes, you also need to define an owner for the CODEOWNERS file itself.
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-and-branch-protection
/.github/ @prowler-cloud/api
97
api/.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
name: 🐞 Bug Report
|
||||
description: Create a report to help us improve
|
||||
title: "[Bug]: "
|
||||
labels: ["bug", "status/needs-triage"]
|
||||
|
||||
body:
|
||||
- type: textarea
|
||||
id: reproduce
|
||||
attributes:
|
||||
label: Steps to Reproduce
|
||||
description: Steps to reproduce the behavior
|
||||
placeholder: |-
|
||||
1. What command are you running?
|
||||
2. Cloud provider you are launching
|
||||
3. Environment you have, like single account, multi-account, organizations, multi or single subscription, etc.
|
||||
4. See error
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: A clear and concise description of what you expected to happen.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: actual
|
||||
attributes:
|
||||
label: Actual Result with Screenshots or Logs
|
||||
description: If applicable, add screenshots to help explain your problem. You can also add logs (anonymize them first!). Here is a command that may help to produce a debug log to share: `prowler <your arguments> --log-level DEBUG --log-file $(date +%F)_debug.log`. Then attach the log file here.
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: type
|
||||
attributes:
|
||||
label: How did you install Prowler?
|
||||
options:
|
||||
- Cloning the repository from github.com (git clone)
|
||||
- From pip package (pip install prowler)
|
||||
- From brew (brew install prowler)
|
||||
- Docker (docker pull toniblyx/prowler)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: environment
|
||||
attributes:
|
||||
label: Environment Resource
|
||||
description: From where are you running Prowler?
|
||||
placeholder: |-
|
||||
1. EC2 instance
|
||||
2. Fargate task
|
||||
3. Docker container locally
|
||||
4. EKS
|
||||
5. Cloud9
|
||||
6. CodeBuild
|
||||
7. Workstation
|
||||
8. Other(please specify)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: os
|
||||
attributes:
|
||||
label: OS used
|
||||
description: Which OS are you using?
|
||||
placeholder: |-
|
||||
1. Amazon Linux 2
|
||||
2. MacOS
|
||||
3. Alpine Linux
|
||||
4. Windows
|
||||
5. Other(please specify)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: prowler-version
|
||||
attributes:
|
||||
label: Prowler version
|
||||
description: Which Prowler version are you using?
|
||||
placeholder: |-
|
||||
prowler --version
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: pip-version
|
||||
attributes:
|
||||
label: Pip version
|
||||
description: Which pip version are you using?
|
||||
placeholder: |-
|
||||
pip --version
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: additional
|
||||
attributes:
|
||||
description: Additional context
|
||||
label: Context
|
||||
validations:
|
||||
required: false
|
||||
1
api/.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1 @@
blank_issues_enabled: false
36
api/.github/ISSUE_TEMPLATE/feature-request.yml
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
name: 💡 Feature Request
|
||||
description: Suggest an idea for this project
|
||||
labels: ["enhancement", "status/needs-triage"]
|
||||
|
||||
|
||||
body:
|
||||
- type: textarea
|
||||
id: Problem
|
||||
attributes:
|
||||
label: New feature motivation
|
||||
description: Is your feature request related to a problem? Please describe
|
||||
placeholder: |-
|
||||
1. A clear and concise description of what the problem is. Ex. I'm always frustrated when
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: Solution
|
||||
attributes:
|
||||
label: Solution Proposed
|
||||
description: A clear and concise description of what you want to happen.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: Alternatives
|
||||
attributes:
|
||||
label: Describe alternatives you've considered
|
||||
description: A clear and concise description of any alternative solutions or features you've considered.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: Context
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: Add any other context or screenshots about the feature request here.
|
||||
validations:
|
||||
required: false
|
||||
15
api/.github/labeler.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
documentation:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "docs/**"
|
||||
|
||||
backend/api:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "src/backend/api/**"
|
||||
|
||||
backend/backend:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "src/backend/backend/**"
|
||||
|
||||
github_actions:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: ".github/workflows/*"
|
||||
13
api/.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,13 @@
### Context

Please include relevant motivation and context for this PR.


### Description

Please include a summary of the change and which issue is fixed. List any dependencies that are required for this change.


### License

By submitting this pull request, I confirm that my contribution is made under the terms of the AGPL-3.0 license.
57
api/.github/workflows/codeql.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "main"]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ "main" ]
|
||||
schedule:
|
||||
- cron: '00 12 * * *'
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'python' ]
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# For details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
18
api/.github/workflows/find-secrets.yml
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
name: find-secrets
|
||||
|
||||
on: pull_request
|
||||
|
||||
jobs:
|
||||
trufflehog:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: TruffleHog OSS
|
||||
uses: trufflesecurity/trufflehog@v3.68.2
|
||||
with:
|
||||
path: ./
|
||||
base: ${{ github.event.repository.default_branch }}
|
||||
head: HEAD
|
||||
15
api/.github/workflows/labeler.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
name: "Pull Request Labeler"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
branches:
|
||||
- "main"
|
||||
|
||||
jobs:
|
||||
labeler:
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@v5
|
||||
139
api/.github/workflows/pull-request.yml
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
name: pr-lint-test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "main"
|
||||
pull_request:
|
||||
branches:
|
||||
- "main"
|
||||
|
||||
|
||||
env:
|
||||
POSTGRES_HOST: localhost
|
||||
POSTGRES_PORT: 5432
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_DB: prowler_db_test
|
||||
VALKEY_HOST: localhost
|
||||
VALKEY_PORT: 6379
|
||||
VALKEY_DB: 0
|
||||
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.12"]
|
||||
|
||||
# Service containers to run with `test`
|
||||
services:
|
||||
# Label used to access the service container
|
||||
postgres:
|
||||
image: postgres
|
||||
env:
|
||||
POSTGRES_HOST: ${{ env.POSTGRES_HOST }}
|
||||
POSTGRES_PORT: ${{ env.POSTGRES_PORT }}
|
||||
POSTGRES_USER: ${{ env.POSTGRES_USER }}
|
||||
POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
|
||||
POSTGRES_DB: ${{ env.POSTGRES_DB }}
|
||||
# Set health checks to wait until postgres has started
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
valkey:
|
||||
image: valkey/valkey:7-alpine3.19
|
||||
env:
|
||||
VALKEY_HOST: ${{ env.VALKEY_HOST }}
|
||||
VALKEY_PORT: ${{ env.VALKEY_PORT }}
|
||||
VALKEY_DB: ${{ env.VALKEY_DB }}
|
||||
# Set health checks to wait until valkey has started
|
||||
ports:
|
||||
- 6379:6379
|
||||
options: >-
|
||||
--health-cmd "valkey-cli ping"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Test if changes are in non-ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@v42
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
.github/**
|
||||
README.md
|
||||
docs/**
|
||||
permissions/**
|
||||
mkdocs.yml
|
||||
- name: Install poetry
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
- name: Install dependencies
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry install
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
sed -E 's/.*"v([^"]+)".*/\1/' \
|
||||
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
|
||||
&& chmod +x /tmp/hadolint
|
||||
|
||||
- name: Poetry check
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry lock --check
|
||||
- name: Lint with ruff
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run ruff check . --exclude contrib
|
||||
- name: Check Format with ruff
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run ruff format --check . --exclude contrib
|
||||
- name: Lint with pylint
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
|
||||
- name: Bandit
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
|
||||
- name: Safety
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run safety check --ignore 70612,66963
|
||||
- name: Vulture
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
|
||||
- name: Hadolint
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
/tmp/hadolint Dockerfile --ignore=DL3013
|
||||
- name: Test with pytest
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./src/backend --cov-report=xml src/backend
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@v4
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
168
api/.gitignore
vendored
Normal file
@@ -0,0 +1,168 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.pyc
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
/_data/
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
||||
.pdm.toml
|
||||
.pdm-python
|
||||
.pdm-build/
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
*.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
.idea/
|
||||
|
||||
# VSCode
|
||||
.vscode/
|
||||
91
api/.pre-commit-config.yaml
Normal file
@@ -0,0 +1,91 @@
|
||||
repos:
|
||||
## GENERAL
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.6.0
|
||||
hooks:
|
||||
- id: check-merge-conflict
|
||||
- id: check-yaml
|
||||
args: ["--unsafe"]
|
||||
- id: check-json
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
- id: no-commit-to-branch
|
||||
- id: pretty-format-json
|
||||
args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
|
||||
exclude: 'src/backend/api/fixtures/dev/.*\.json$'
|
||||
|
||||
## TOML
|
||||
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
|
||||
rev: v2.13.0
|
||||
hooks:
|
||||
- id: pretty-format-toml
|
||||
args: [--autofix]
|
||||
files: pyproject.toml
|
||||
|
||||
## BASH
|
||||
- repo: https://github.com/koalaman/shellcheck-precommit
|
||||
rev: v0.10.0
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
exclude: contrib
|
||||
## PYTHON
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.5.0
|
||||
hooks:
|
||||
# Run the linter.
|
||||
- id: ruff
|
||||
args: [ --fix ]
|
||||
# Run the formatter.
|
||||
- id: ruff-format
|
||||
|
||||
- repo: https://github.com/python-poetry/poetry
|
||||
rev: 1.8.0
|
||||
hooks:
|
||||
- id: poetry-check
|
||||
args: ["--directory=src"]
|
||||
- id: poetry-lock
|
||||
args: ["--no-update", "--directory=src"]
|
||||
|
||||
- repo: https://github.com/hadolint/hadolint
|
||||
rev: v2.13.0-beta
|
||||
hooks:
|
||||
- id: hadolint
|
||||
args: ["--ignore=DL3013", "Dockerfile"]
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint
|
||||
entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: trufflehog
|
||||
name: TruffleHog
|
||||
description: Detect secrets in your data.
|
||||
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
|
||||
# For running trufflehog in docker, use the following entry instead:
|
||||
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
|
||||
language: system
|
||||
stages: ["commit", "push"]
|
||||
|
||||
- id: bandit
|
||||
name: bandit
|
||||
description: "Bandit is a tool for finding common security issues in Python code"
|
||||
entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
|
||||
- id: safety
|
||||
name: safety
|
||||
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
|
||||
entry: bash -c 'poetry run safety check --ignore 70612,66963'
|
||||
language: system
|
||||
|
||||
- id: vulture
|
||||
name: vulture
|
||||
description: "Vulture finds unused code in Python programs."
|
||||
entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
|
||||
language: system
|
||||
files: '.*\.py'
|
||||
46
api/Dockerfile
Normal file
@@ -0,0 +1,46 @@
|
||||
FROM python:3.12-alpine AS build
|
||||
|
||||
LABEL maintainer="https://github.com/prowler-cloud/api"
|
||||
|
||||
# hadolint ignore=DL3018
|
||||
RUN apk --no-cache add gcc python3-dev musl-dev linux-headers curl-dev
|
||||
|
||||
RUN apk --no-cache upgrade && \
|
||||
addgroup -g 1000 prowler && \
|
||||
adduser -D -u 1000 -G prowler prowler
|
||||
USER prowler
|
||||
|
||||
WORKDIR /home/prowler
|
||||
|
||||
COPY pyproject.toml ./
|
||||
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir poetry
|
||||
|
||||
COPY src/backend/ ./backend/
|
||||
|
||||
ENV PATH="/home/prowler/.local/bin:$PATH"
|
||||
|
||||
RUN poetry install && \
|
||||
rm -rf ~/.cache/pip
|
||||
|
||||
COPY docker-entrypoint.sh ./docker-entrypoint.sh
|
||||
|
||||
WORKDIR /home/prowler/backend
|
||||
|
||||
# Development image
|
||||
# hadolint ignore=DL3006
|
||||
FROM build AS dev
|
||||
|
||||
USER 0
|
||||
# hadolint ignore=DL3018
|
||||
RUN apk --no-cache add curl vim
|
||||
|
||||
USER prowler
|
||||
|
||||
ENTRYPOINT ["../docker-entrypoint.sh", "dev"]
|
||||
|
||||
# Production image
|
||||
FROM build
|
||||
|
||||
ENTRYPOINT ["../docker-entrypoint.sh", "prod"]
|
||||
271
api/README.md
Normal file
@@ -0,0 +1,271 @@
# Description

This repository contains the JSON API and Task Runner components for Prowler, which together provide the backend that interacts with the Prowler SDK and is used by the Prowler UI.

# Components

The Prowler API is composed of the following components:

- The JSON API, built with Django REST Framework.
- The Celery worker, which executes the background tasks defined in the JSON API.
- The PostgreSQL database, which stores the data.
- The Valkey database, an in-memory datastore used as a message broker for the Celery workers.

## Note about Valkey

[Valkey](https://valkey.io/) is an open source (BSD) high-performance key/value datastore.

Valkey exposes a Redis 7.2 compliant API, so any service that exposes the Redis API can be used with the Prowler API.
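
For example, once the broker is running (whether it is the bundled Valkey container or another Redis-compatible service), you can check that it is reachable on the host and port configured in `.env`. This is only a quick sanity check; `redis-cli` works the same way if `valkey-cli` is not installed:

```console
valkey-cli -h localhost -p 6379 ping
# Expected output: PONG
```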

# Modify environment variables

Under the root path of the project, you can find a file called `.env.example`. This file lists all the environment variables that the project uses. You *must* create a new file called `.env` and set the values for the variables.
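
As a minimal sketch for a local (non-Docker) deployment, a `.env` could look like the following. The values are illustrative and taken from `.env.example`; the key values are placeholders you must replace with your own:

```console
cat > .env <<'EOF'
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1
DJANGO_PORT=8000
DJANGO_DEBUG=False
DJANGO_SETTINGS_MODULE=config.django.devel
DJANGO_LOGGING_FORMATTER=human_readable
DJANGO_LOGGING_LEVEL=INFO
DJANGO_TOKEN_SIGNING_KEY="<your-signing-key>"
DJANGO_TOKEN_VERIFYING_KEY="<your-verifying-key>"
DJANGO_SECRETS_ENCRYPTION_KEY="<your-encryption-key>"
DJANGO_MANAGE_DB_PARTITIONS=False
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_ADMIN_USER=prowler
POSTGRES_ADMIN_PASSWORD=S3cret
POSTGRES_USER=prowler_user
POSTGRES_PASSWORD=S3cret
POSTGRES_DB=prowler_db
VALKEY_HOST=localhost
VALKEY_PORT=6379
VALKEY_DB=0
EOF
```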

## Local deployment

Keep in mind that, if you export the `.env` file for a local deployment, you have to do it from within the Poetry environment, not before entering it. Otherwise, the variables will not be loaded properly.

To do this, you can run:

```console
poetry shell
set -a
source .env
```

# 🚀 Production deployment

## Docker deployment

This method requires `docker` and `docker compose`.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Build the base image

```console
docker compose --profile prod build
```

### Run the production service

This command starts the Django production server, the Celery worker, and the Valkey and PostgreSQL databases.

```console
docker compose --profile prod up -d
```

You can access the server at `http://localhost:8080`.

> **NOTE:** notice that the port is different. When running with Docker Compose, the port is `8080` to prevent conflicts.

### View the Production Server Logs

To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:

```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```

## Local deployment

To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Install all dependencies with Poetry

```console
poetry install
poetry shell
```

## Start the PostgreSQL Database and Valkey

The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.

**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.

```console
docker compose up postgres valkey -d
```

## Deploy Django and the Celery worker

### Run migrations

For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:

```console
cd src/backend
python manage.py migrate --database admin
```

### Run the Celery worker

```console
cd src/backend
python -m celery -A config.celery worker -l info -E
```

### Run the Django server with Gunicorn

```console
cd src/backend
gunicorn -c config/guniconf.py config.wsgi:application
```

> By default, the Gunicorn server will try to use as many workers as your machine can handle. You can manually change that in the `src/backend/config/guniconf.py` file.
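
If you just want to try a different worker count without editing the config file, Gunicorn also accepts it on the command line, and command-line arguments generally take precedence over the config file. As a sketch (the value `4` is arbitrary):

```console
cd src/backend
gunicorn -w 4 -c config/guniconf.py config.wsgi:application
```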

# 🧪 Development guide

## Local deployment

To use this method, you'll need to set up a Python virtual environment (version ">=3.11,<3.13") and keep dependencies updated. Additionally, ensure that `poetry` and `docker compose` are installed.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Start the PostgreSQL Database and Valkey

The PostgreSQL database (version 16.3) and Valkey (version 7) are required for the development environment. To make development easier, we have provided a `docker-compose` file that will start these components for you.

**Note:** Make sure to use the specified versions, as there are features in our setup that may not be compatible with older versions of PostgreSQL and Valkey.

```console
docker compose up postgres valkey -d
```

### Install the Python dependencies

> You must have Poetry installed

```console
poetry install
poetry shell
```

### Apply migrations

For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:

```console
cd src/backend
python manage.py migrate --database admin
```

### Run the Django development server

```console
cd src/backend
python manage.py runserver
```

You can access the server at `http://localhost:8000`.
Code changes are automatically reloaded by the server.

### Run the Celery worker

```console
python -m celery -A config.celery worker -l info -E
```

The Celery worker does not detect and reload code changes, so you need to restart it manually when you make changes.
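
If restarting the worker by hand gets tedious, one common workaround (a sketch, not part of this repository's tooling, assuming the `watchdog` package that provides `watchmedo` is installed) is to wrap the worker so it restarts whenever a Python file changes:

```console
pip install watchdog
cd src/backend
watchmedo auto-restart --directory=. --patterns='*.py' --recursive -- \
  python -m celery -A config.celery worker -l info -E
```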

## Docker deployment

This method requires `docker` and `docker compose`.

### Clone the repository

```console
# HTTPS
git clone https://github.com/prowler-cloud/api.git

# SSH
git clone git@github.com:prowler-cloud/api.git
```

### Build the base image

```console
docker compose --profile dev build
```

### Run the development service

This command starts the Django development server, the Celery worker, and the Valkey and PostgreSQL databases.

```console
docker compose --profile dev up -d
```

You can access the server at `http://localhost:8080`.
Code changes are automatically reloaded by the server.

> **NOTE:** notice that the port is different. When developing with Docker, the port is `8080` to prevent conflicts.

### View the development server logs

To view the logs for any component (e.g., Django, Celery worker), you can use the following command with a wildcard. This command will follow logs for any container that matches the specified pattern:

```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```

## Applying migrations

For migrations, you need to force the `admin` database router. Assuming you have the correct environment variables and Python virtual environment, run:

```console
poetry shell
cd src/backend
python manage.py migrate --database admin
```

## Apply fixtures

Fixtures are used to populate the database with initial development data.

```console
poetry shell
cd src/backend
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
```

> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`

## Run tests

Note that the tests will fail if you use the same `.env` file as the development environment.

For best results, run them in a new shell with no environment variables set.

```console
poetry shell
cd src/backend
pytest
```
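
If you prefer not to open a fresh terminal, a rough way to approximate a clean environment is to strip it explicitly with `env -i` (a sketch; only `PATH` and `HOME` are passed back so that Poetry can still resolve its own tooling):

```console
env -i PATH="$PATH" HOME="$HOME" sh -c 'cd src/backend && poetry run pytest'
```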
125
api/docker-compose.yml
Normal file
@@ -0,0 +1,125 @@
|
||||
services:
|
||||
api:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
image: prowler-api
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
ports:
|
||||
- "${DJANGO_PORT:-8000}:${DJANGO_PORT:-8000}"
|
||||
profiles:
|
||||
- prod
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "prod"
|
||||
|
||||
api-dev:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
target: dev
|
||||
image: prowler-api-dev
|
||||
environment:
|
||||
- DJANGO_SETTINGS_MODULE=config.django.devel
|
||||
- DJANGO_LOGGING_FORMATTER=human_readable
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
ports:
|
||||
- "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
|
||||
volumes:
|
||||
- "./src/backend:/home/prowler/backend"
|
||||
- "./pyproject.toml:/home/prowler/pyproject.toml"
|
||||
profiles:
|
||||
- dev
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "dev"
|
||||
|
||||
postgres:
|
||||
image: postgres:16.3-alpine
|
||||
ports:
|
||||
- "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
|
||||
hostname: "postgres-db"
|
||||
volumes:
|
||||
- ./_data/postgres:/var/lib/postgresql/data
|
||||
environment:
|
||||
- POSTGRES_USER=${POSTGRES_ADMIN_USER:-prowler}
|
||||
- POSTGRES_PASSWORD=${POSTGRES_ADMIN_PASSWORD:-S3cret}
|
||||
- POSTGRES_DB=${POSTGRES_DB:-prowler_db}
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER:-prowler} -d ${POSTGRES_DB:-prowler_db}'"]
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
valkey:
|
||||
image: valkey/valkey:7-alpine3.19
|
||||
ports:
|
||||
- "${VALKEY_PORT:-6379}:6379"
|
||||
hostname: "valkey"
|
||||
volumes:
|
||||
- ./_data/valkey:/data
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "sh -c 'valkey-cli ping'"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
|
||||
worker:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
image: prowler-worker
|
||||
environment:
|
||||
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
profiles:
|
||||
- dev
|
||||
- prod
|
||||
depends_on:
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "worker"
|
||||
|
||||
worker-beat:
|
||||
build:
|
||||
dockerfile: Dockerfile
|
||||
image: prowler-worker
|
||||
environment:
|
||||
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
|
||||
env_file:
|
||||
- path: ./.env
|
||||
required: false
|
||||
profiles:
|
||||
- dev
|
||||
- prod
|
||||
depends_on:
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
entrypoint:
|
||||
- "../docker-entrypoint.sh"
|
||||
- "beat"
|
||||
71
api/docker-entrypoint.sh
Executable file
@@ -0,0 +1,71 @@
|
||||
#!/bin/sh
|
||||
|
||||
|
||||
apply_migrations() {
|
||||
echo "Applying database migrations..."
|
||||
poetry run python manage.py migrate --database admin
|
||||
}
|
||||
|
||||
apply_fixtures() {
|
||||
echo "Applying Django fixtures..."
|
||||
for fixture in api/fixtures/dev/*.json; do
|
||||
if [ -f "$fixture" ]; then
|
||||
echo "Loading $fixture"
|
||||
poetry run python manage.py loaddata "$fixture" --database admin
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
start_dev_server() {
|
||||
echo "Starting the development server..."
|
||||
poetry run python manage.py runserver 0.0.0.0:"${DJANGO_PORT:-8080}"
|
||||
}
|
||||
|
||||
start_prod_server() {
|
||||
echo "Starting the Gunicorn server..."
|
||||
poetry run gunicorn -c config/guniconf.py config.wsgi:application
|
||||
}
|
||||
|
||||
start_worker() {
|
||||
echo "Starting the worker..."
|
||||
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans -E
|
||||
}
|
||||
|
||||
start_worker_beat() {
|
||||
echo "Starting the worker-beat..."
|
||||
sleep 15
|
||||
poetry run python -m celery -A config.celery beat -l "${DJANGO_LOGGING_LEVEL:-info}" --scheduler django_celery_beat.schedulers:DatabaseScheduler
|
||||
}
|
||||
|
||||
manage_db_partitions() {
|
||||
if [ "${DJANGO_MANAGE_DB_PARTITIONS}" = "True" ]; then
|
||||
echo "Managing DB partitions..."
|
||||
# For now we skip the deletion of partitions until we define the data retention policy
|
||||
# --yes auto approves the operation without the need of an interactive terminal
|
||||
poetry run python manage.py pgpartition --using admin --skip-delete --yes
|
||||
fi
|
||||
}
|
||||
|
||||
case "$1" in
|
||||
dev)
|
||||
apply_migrations
|
||||
apply_fixtures
|
||||
manage_db_partitions
|
||||
start_dev_server
|
||||
;;
|
||||
prod)
|
||||
apply_migrations
|
||||
manage_db_partitions
|
||||
start_prod_server
|
||||
;;
|
||||
worker)
|
||||
start_worker
|
||||
;;
|
||||
beat)
|
||||
start_worker_beat
|
||||
;;
|
||||
*)
|
||||
echo "Usage: $0 {dev|prod|worker|beat}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
65
api/docs/partitions.md
Normal file
@@ -0,0 +1,65 @@
# Partitions

## Overview

Partitions are used to split the data in a table into smaller chunks, allowing for more efficient querying and storage.

The Prowler API uses partitions to store findings. The partitions are created based on the UUIDv7 `id` field.

You can use the Prowler API without ever creating additional partitions. This documentation is only relevant if you want to manage partitions to gain additional query performance.

### Required Postgres Configuration

There are 3 configuration options that need to be set in the `postgresql.conf` file to get the most performance out of the partitioning (one way to apply them is sketched after this list):

- `enable_partition_pruning = on` (default is on)
- `enable_partitionwise_join = on` (default is off)
- `enable_partitionwise_aggregate = on` (default is off)

For more information on these options, see the [Postgres documentation](https://www.postgresql.org/docs/current/runtime-config-query.html).
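
As a sketch of one way to apply these settings without editing `postgresql.conf` by hand, you can set them with `ALTER SYSTEM` from `psql` and reload the configuration. The connection parameters below assume the defaults from `.env.example`, and the admin user is assumed to have superuser rights:

```shell
psql -h localhost -p 5432 -U prowler -d prowler_db <<'SQL'
ALTER SYSTEM SET enable_partition_pruning = on;
ALTER SYSTEM SET enable_partitionwise_join = on;
ALTER SYSTEM SET enable_partitionwise_aggregate = on;
SELECT pg_reload_conf();
SQL
```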

## Partitioning Strategy

The partitioning strategy is defined in the `api.partitions` module. The strategy is responsible for creating and deleting partitions based on the provided configuration.

## Managing Partitions

The application will run without any extra work on your part. If you want to add or delete partitions, you can use the following command:

To manage the partitions, run `python manage.py pgpartition --using admin`.

This command will generate a list of partitions to create and delete based on the provided configuration.

By default, the command will prompt you to accept the changes before applying them.

```shell
Finding:
  + 2024_nov
      name: 2024_nov
      from_values: 0192e505-9000-72c8-a47c-cce719d8fb93
      to_values: 01937f84-5418-7eb8-b2a6-e3be749e839d
      size_unit: months
      size_value: 1
  + 2024_dec
      name: 2024_dec
      from_values: 01937f84-5800-7b55-879c-9cdb46f023f6
      to_values: 01941f29-7818-7f9f-b4be-20b05bb2f574
      size_unit: months
      size_value: 1

0 partitions will be deleted
2 partitions will be created
```

If you choose to apply the partitions, tables will be generated with the following format: `<table_name>_<year>_<month>`.

For more info on the partitioning manager, see https://github.com/SectorLabs/django-postgres-extra

### Changing the Partitioning Parameters

There are 4 environment variables that can be used to change the partitioning parameters (see the sketch after this list for an example):

- `DJANGO_MANAGE_DB_PARTITIONS`: Allow Django to manage database partitions. By default, it is set to `False`.
- `FINDINGS_TABLE_PARTITION_MONTHS`: Set the number of months covered by each partition. Setting this to 1 will create partitions with a size of 1 natural month.
- `FINDINGS_TABLE_PARTITION_COUNT`: Set the number of partitions to create.
- `FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS`: Set the number of months to keep partitions before deleting them. Setting this to `None` will keep partitions indefinitely.
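
For example, a sketch of creating three one-month partitions ahead of time and applying the plan non-interactively; the `--skip-delete` and `--yes` flags are the same ones used by `docker-entrypoint.sh`, and the variable values are illustrative:

```shell
export DJANGO_MANAGE_DB_PARTITIONS=True
export FINDINGS_TABLE_PARTITION_MONTHS=1
export FINDINGS_TABLE_PARTITION_COUNT=3

cd src/backend
python manage.py pgpartition --using admin --skip-delete --yes
```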
4963
api/poetry.lock
generated
Normal file
File diff suppressed because it is too large
55
api/pyproject.toml
Normal file
@@ -0,0 +1,55 @@
|
||||
[build-system]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
requires = ["poetry-core"]
|
||||
|
||||
[tool.poetry]
|
||||
authors = ["Prowler Team"]
|
||||
description = "Prowler's API (Django/DRF)"
|
||||
license = "AGPL-3.0"
|
||||
name = "prowler-api"
|
||||
package-mode = false
|
||||
version = "1.0.0"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
celery = {extras = ["pytest"], version = "^5.4.0"}
|
||||
django = "5.1.1"
|
||||
django-celery-beat = "^2.7.0"
|
||||
django-celery-results = "^2.5.1"
|
||||
django-cors-headers = "4.4.0"
|
||||
django-environ = "0.11.2"
|
||||
django-filter = "24.3"
|
||||
django-guid = "3.5.0"
|
||||
django-postgres-extra = "^2.0.8"
|
||||
djangorestframework = "3.15.2"
|
||||
djangorestframework-jsonapi = "7.0.2"
|
||||
djangorestframework-simplejwt = "^5.3.1"
|
||||
drf-nested-routers = "^0.94.1"
|
||||
drf-spectacular = "0.27.2"
|
||||
drf-spectacular-jsonapi = "0.5.1"
|
||||
gunicorn = "23.0.0"
|
||||
prowler = {git = "https://github.com/prowler-cloud/prowler.git", branch = "master"}
|
||||
psycopg2-binary = "2.9.9"
|
||||
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
|
||||
# Needed for prowler compatibility
|
||||
python = ">=3.11,<3.13"
|
||||
uuid6 = "2024.7.10"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
bandit = "1.7.9"
|
||||
coverage = "7.5.4"
|
||||
docker = "7.1.0"
|
||||
freezegun = "1.5.1"
|
||||
mypy = "1.10.1"
|
||||
pylint = "3.2.5"
|
||||
pytest = "8.2.2"
|
||||
pytest-cov = "5.0.0"
|
||||
pytest-django = "4.8.0"
|
||||
pytest-env = "1.1.3"
|
||||
pytest-randomly = "3.15.0"
|
||||
pytest-xdist = "3.6.1"
|
||||
ruff = "0.5.0"
|
||||
safety = "3.2.3"
|
||||
vulture = "2.11"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
0
api/src/__init__.py
Normal file
0
api/src/backend/__init__.py
Normal file
0
api/src/backend/api/__init__.py
Normal file
3
api/src/backend/api/admin.py
Normal file
@@ -0,0 +1,3 @@
|
||||
# from django.contrib import admin
|
||||
|
||||
# Register your models here.
|
||||
12
api/src/backend/api/apps.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ApiConfig(AppConfig):
|
||||
default_auto_field = "django.db.models.BigAutoField"
|
||||
name = "api"
|
||||
|
||||
def ready(self):
|
||||
from api import signals # noqa: F401
|
||||
from api.compliance import load_prowler_compliance
|
||||
|
||||
load_prowler_compliance()
|
||||
103
api/src/backend/api/base_views.py
Normal file
@@ -0,0 +1,103 @@
|
||||
import uuid
|
||||
|
||||
from django.db import transaction, connection
|
||||
from rest_framework import permissions
|
||||
from rest_framework.exceptions import NotAuthenticated
|
||||
from rest_framework.filters import SearchFilter
|
||||
from rest_framework_json_api import filters
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from rest_framework_json_api.views import ModelViewSet
|
||||
from rest_framework_simplejwt.authentication import JWTAuthentication
|
||||
|
||||
from api.filters import CustomDjangoFilterBackend
|
||||
|
||||
|
||||
class BaseViewSet(ModelViewSet):
|
||||
authentication_classes = [JWTAuthentication]
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
filter_backends = [
|
||||
filters.QueryParameterValidationFilter,
|
||||
filters.OrderingFilter,
|
||||
CustomDjangoFilterBackend,
|
||||
SearchFilter,
|
||||
]
|
||||
|
||||
filterset_fields = []
|
||||
search_fields = []
|
||||
|
||||
ordering_fields = "__all__"
|
||||
ordering = ["id"]
|
||||
|
||||
def get_queryset(self):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class BaseRLSViewSet(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with transaction.atomic():
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
# Ideally, this logic would be in the `.setup()` method but DRF view sets don't call it
|
||||
# https://docs.djangoproject.com/en/5.1/ref/class-based-views/base/#django.views.generic.base.View.setup
|
||||
if request.auth is None:
|
||||
raise NotAuthenticated
|
||||
|
||||
tenant_id = request.auth.get("tenant_id")
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
try:
|
||||
uuid.UUID(tenant_id)
|
||||
except ValueError:
|
||||
raise ValidationError("Tenant ID must be a valid UUID")
|
||||
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
|
||||
self.request.tenant_id = tenant_id
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
context["tenant_id"] = self.request.tenant_id
|
||||
return context
|
||||
|
||||
|
||||
class BaseTenantViewset(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with transaction.atomic():
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
user_id = str(request.user.id)
|
||||
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(f"SELECT set_config('api.user_id', '{user_id}', TRUE);")
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
|
||||
class BaseUserViewset(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with transaction.atomic():
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
# TODO refactor after improving RLS on users
|
||||
if request.stream is not None and request.stream.method == "POST":
|
||||
return super().initial(request, *args, **kwargs)
|
||||
if request.auth is None:
|
||||
raise NotAuthenticated
|
||||
|
||||
tenant_id = request.auth.get("tenant_id")
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
try:
|
||||
uuid.UUID(tenant_id)
|
||||
except ValueError:
|
||||
raise ValidationError("Tenant ID must be a valid UUID")
|
||||
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
|
||||
self.request.tenant_id = tenant_id
|
||||
return super().initial(request, *args, **kwargs)
|
||||
209
api/src/backend/api/compliance.py
Normal file
@@ -0,0 +1,209 @@
|
||||
from types import MappingProxyType
|
||||
|
||||
from prowler.lib.check.compliance_models import Compliance
|
||||
from prowler.lib.check.models import CheckMetadata
|
||||
|
||||
from api.models import Provider
|
||||
|
||||
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
|
||||
PROWLER_CHECKS = {}
|
||||
|
||||
|
||||
def get_prowler_provider_checks(provider_type: Provider.ProviderChoices):
|
||||
"""
|
||||
Retrieve all check IDs for the specified provider type.
|
||||
|
||||
This function fetches the check metadata for the given cloud provider
|
||||
and returns an iterable of check IDs.
|
||||
|
||||
Args:
|
||||
provider_type (Provider.ProviderChoices): The provider type
|
||||
(e.g., 'aws', 'azure') for which to retrieve check IDs.
|
||||
|
||||
Returns:
|
||||
Iterable[str]: An iterable of check IDs associated with the specified provider type.
|
||||
"""
|
||||
return CheckMetadata.get_bulk(provider_type).keys()
|
||||
|
||||
|
||||
def get_prowler_provider_compliance(provider_type: Provider.ProviderChoices) -> dict:
|
||||
"""
|
||||
Retrieve the Prowler compliance data for a specified provider type.
|
||||
|
||||
This function fetches the compliance frameworks and their associated
|
||||
requirements for the given cloud provider.
|
||||
|
||||
Args:
|
||||
provider_type (Provider.ProviderChoices): The provider type
|
||||
(e.g., 'aws', 'azure') for which to retrieve compliance data.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary mapping compliance framework names to their respective
|
||||
Compliance objects for the specified provider.
|
||||
"""
|
||||
return Compliance.get_bulk(provider_type)
|
||||
|
||||
|
||||
def load_prowler_compliance():
|
||||
"""
|
||||
Load and initialize the Prowler compliance data and checks for all provider types.
|
||||
|
||||
This function retrieves compliance data for all supported provider types,
|
||||
generates a compliance overview template, and populates the global variables
|
||||
`PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE` and `PROWLER_CHECKS` with read-only mappings
|
||||
of the compliance templates and checks, respectively.
|
||||
"""
|
||||
global PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
|
||||
global PROWLER_CHECKS
|
||||
|
||||
prowler_compliance = {
|
||||
provider_type: get_prowler_provider_compliance(provider_type)
|
||||
for provider_type in Provider.ProviderChoices.values
|
||||
}
|
||||
template = generate_compliance_overview_template(prowler_compliance)
|
||||
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = MappingProxyType(template)
|
||||
PROWLER_CHECKS = MappingProxyType(load_prowler_checks(prowler_compliance))
|
||||
|
||||
|
||||
def load_prowler_checks(prowler_compliance):
|
||||
"""
|
||||
Generate a mapping of checks to the compliance frameworks that include them.
|
||||
|
||||
This function processes the provided compliance data and creates a dictionary
|
||||
    mapping each provider type to a dictionary where each check ID maps to a set
    of compliance names that include that check.

    Args:
        prowler_compliance (dict): The compliance data for all provider types,
            as returned by `get_prowler_provider_compliance`.

    Returns:
        dict: A nested dictionary where the first-level keys are provider types,
            and the values are dictionaries mapping check IDs to sets of compliance names.
    """
    checks = {}
    for provider_type in Provider.ProviderChoices.values:
        checks[provider_type] = {
            check_id: set() for check_id in get_prowler_provider_checks(provider_type)
        }
        for compliance_name, compliance_data in prowler_compliance[
            provider_type
        ].items():
            for requirement in compliance_data.Requirements:
                for check in requirement.Checks:
                    try:
                        checks[provider_type][check].add(compliance_name)
                    except KeyError:
                        continue
    return checks


def generate_scan_compliance(
    compliance_overview, provider_type: str, check_id: str, status: str
):
    """
    Update the compliance overview with the status of a specific check.

    This function updates the compliance overview by setting the status of the given check
    within all compliance frameworks and requirements that include it. It then updates the
    requirement status to 'FAIL' if any of its checks have failed, and adjusts the counts
    of passed and failed requirements in the compliance overview.

    Args:
        compliance_overview (dict): The compliance overview data structure to update.
        provider_type (str): The provider type (e.g., 'aws', 'azure') associated with the check.
        check_id (str): The identifier of the check whose status is being updated.
        status (str): The status of the check (e.g., 'PASS', 'FAIL', 'MUTED').

    Returns:
        None: This function modifies the compliance_overview in place.
    """
    for compliance_id in PROWLER_CHECKS[provider_type][check_id]:
        for requirement in compliance_overview[compliance_id]["requirements"].values():
            if check_id in requirement["checks"]:
                requirement["checks"][check_id] = status
                requirement["checks_status"][status.lower()] += 1

                if requirement["status"] != "FAIL" and any(
                    value == "FAIL" for value in requirement["checks"].values()
                ):
                    requirement["status"] = "FAIL"
                    compliance_overview[compliance_id]["requirements_status"]["passed"] -= 1
                    compliance_overview[compliance_id]["requirements_status"]["failed"] += 1


def generate_compliance_overview_template(prowler_compliance: dict):
    """
    Generate a compliance overview template for all provider types.

    This function creates a nested dictionary structure representing the compliance
    overview template for each provider type, compliance framework, and requirement.
    It initializes the status of all checks and requirements, and calculates initial
    counts for requirements status.

    Args:
        prowler_compliance (dict): The compliance data for all provider types,
            as returned by `get_prowler_provider_compliance`.

    Returns:
        dict: A nested dictionary representing the compliance overview template,
            structured by provider type and compliance framework.
    """
    template = {}
    for provider_type in Provider.ProviderChoices.values:
        provider_compliance = template.setdefault(provider_type, {})
        compliance_data_dict = prowler_compliance[provider_type]

        for compliance_name, compliance_data in compliance_data_dict.items():
            compliance_requirements = {}
            requirements_status = {"passed": 0, "failed": 0, "manual": 0}
            total_requirements = 0

            for requirement in compliance_data.Requirements:
                total_requirements += 1
                total_checks = len(requirement.Checks)
                checks_dict = {check: None for check in requirement.Checks}

                # Build requirement dictionary
                requirement_dict = {
                    "name": requirement.Name or requirement.Id,
                    "description": requirement.Description,
                    "attributes": [
                        dict(attribute) for attribute in requirement.Attributes
                    ],
                    "checks": checks_dict,
                    "checks_status": {
                        "pass": 0,
                        "fail": 0,
                        "manual": 0,
                        "total": total_checks,
                    },
                    "status": "PASS",
                }

                # Update requirements status
                if total_checks == 0:
                    requirements_status["manual"] += 1

                # Add requirement to compliance requirements
                compliance_requirements[requirement.Id] = requirement_dict

            # Calculate pending requirements
            pending_requirements = total_requirements - requirements_status["manual"]
            requirements_status["passed"] = pending_requirements

            # Build compliance dictionary
            compliance_dict = {
                "framework": compliance_data.Framework,
                "version": compliance_data.Version,
                "provider": provider_type,
                "description": compliance_data.Description,
                "requirements": compliance_requirements,
                "requirements_status": requirements_status,
                "total_requirements": total_requirements,
            }

            # Add compliance to provider compliance
            provider_compliance[compliance_name] = compliance_dict

    return template
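Taken together, the template builder and the per-check updater appear meant to be composed: the template is built once from the bundled compliance data, a per-scan copy is taken, and each finding's status is folded in. A minimal sketch of that flow, assuming `get_prowler_provider_compliance` and `PROWLER_CHECKS` as referenced above and a hypothetical `finding_statuses` list of `(check_id, status)` pairs from a finished scan:

import copy

# Sketch only: build the shared template once, then fold one scan's results into a copy.
template = generate_compliance_overview_template(get_prowler_provider_compliance())

def build_overview_for_scan(provider_type: str, finding_statuses):
    # Work on a deep copy so the module-level template stays pristine between scans.
    overview = copy.deepcopy(template[provider_type])
    for check_id, status in finding_statuses:
        generate_scan_compliance(overview, provider_type, check_id, status)
    return overview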
18
api/src/backend/api/db_router.py
Normal file
18
api/src/backend/api/db_router.py
Normal file
@@ -0,0 +1,18 @@
class MainRouter:
    default_db = "default"
    admin_db = "admin"

    def db_for_read(self, model, **hints):  # noqa: F841
        model_table_name = model._meta.db_table
        if model_table_name.startswith("django_"):
            return self.admin_db
        return None

    def db_for_write(self, model, **hints):  # noqa: F841
        model_table_name = model._meta.db_table
        if model_table_name.startswith("django_"):
            return self.admin_db
        return None

    def allow_migrate(self, db, app_label, model_name=None, **hints):  # noqa: F841
        return db == self.admin_db
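A router like this only takes effect once it is registered in the Django settings; a minimal wiring sketch, assuming the module path `api.db_router` used in this diff and that both aliases the router returns exist in `DATABASES`:

# Assumed settings wiring: Django consults routers in order for every read/write/migration.
DATABASE_ROUTERS = ["api.db_router.MainRouter"]
# "default" and "admin" must both be configured under DATABASES for the routing above to resolve.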
271
api/src/backend/api/db_utils.py
Normal file
271
api/src/backend/api/db_utils.py
Normal file
@@ -0,0 +1,271 @@
import secrets
from contextlib import contextmanager
from datetime import datetime, timezone, timedelta

from django.conf import settings
from django.contrib.auth.models import BaseUserManager
from django.db import models, transaction, connection
from psycopg2 import connect as psycopg2_connect
from psycopg2.extensions import new_type, register_type, register_adapter, AsIs

DB_USER = settings.DATABASES["default"]["USER"] if not settings.TESTING else "test"
DB_PASSWORD = (
    settings.DATABASES["default"]["PASSWORD"] if not settings.TESTING else "test"
)
DB_PROWLER_USER = (
    settings.DATABASES["prowler_user"]["USER"] if not settings.TESTING else "test"
)
DB_PROWLER_PASSWORD = (
    settings.DATABASES["prowler_user"]["PASSWORD"] if not settings.TESTING else "test"
)
TASK_RUNNER_DB_TABLE = "django_celery_results_taskresult"
POSTGRES_TENANT_VAR = "api.tenant_id"
POSTGRES_USER_VAR = "api.user_id"


@contextmanager
def psycopg_connection(database_alias: str):
    psycopg2_connection = None
    try:
        admin_db = settings.DATABASES[database_alias]

        psycopg2_connection = psycopg2_connect(
            dbname=admin_db["NAME"],
            user=admin_db["USER"],
            password=admin_db["PASSWORD"],
            host=admin_db["HOST"],
            port=admin_db["PORT"],
        )
        yield psycopg2_connection
    finally:
        if psycopg2_connection is not None:
            psycopg2_connection.close()


@contextmanager
def tenant_transaction(tenant_id: str):
    with transaction.atomic():
        with connection.cursor() as cursor:
            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
            yield cursor


class CustomUserManager(BaseUserManager):
    def create_user(self, email, password=None, **extra_fields):
        if not email:
            raise ValueError("The email field must be set")
        email = self.normalize_email(email)
        user = self.model(email=email, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def get_by_natural_key(self, email):
        return self.get(email__iexact=email)


def enum_to_choices(enum_class):
    """
    This function converts a Python Enum to a list of tuples, where the first element is the value and the second element is the name.

    It's for use with Django's `choices` attribute, which expects a list of tuples.
    """
    return [(item.value, item.name.replace("_", " ").title()) for item in enum_class]


def one_week_from_now():
    """
    Return a datetime object with a date one week from now.
    """
    return datetime.now(timezone.utc) + timedelta(days=7)


def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
    """
    Generate a random token with the specified length.
    """
    _symbols = "23456789ABCDEFGHJKMNPQRSTVWXYZ"
    return "".join(secrets.choice(symbols or _symbols) for _ in range(length))


# Postgres Enums


class PostgresEnumMigration:
    def __init__(self, enum_name: str, enum_values: tuple):
        self.enum_name = enum_name
        self.enum_values = enum_values

    def create_enum_type(self, apps, schema_editor):  # noqa: F841
        string_enum_values = ", ".join([f"'{value}'" for value in self.enum_values])
        with schema_editor.connection.cursor() as cursor:
            cursor.execute(
                f"CREATE TYPE {self.enum_name} AS ENUM ({string_enum_values});"
            )

    def drop_enum_type(self, apps, schema_editor):  # noqa: F841
        with schema_editor.connection.cursor() as cursor:
            cursor.execute(f"DROP TYPE {self.enum_name};")


class PostgresEnumField(models.Field):
    def __init__(self, enum_type_name, *args, **kwargs):
        self.enum_type_name = enum_type_name
        super().__init__(*args, **kwargs)

    def db_type(self, connection):
        return self.enum_type_name

    def from_db_value(self, value, expression, connection):  # noqa: F841
        return value

    def to_python(self, value):
        if isinstance(value, EnumType):
            return value.value
        return value

    def get_prep_value(self, value):
        if isinstance(value, EnumType):
            return value.value
        return value


class EnumType:
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return self.value


def enum_adapter(enum_obj):
    return AsIs(f"'{enum_obj.value}'::{enum_obj.__class__.enum_type_name}")


def get_enum_oid(connection, enum_type_name: str):
    with connection.cursor() as cursor:
        cursor.execute("SELECT oid FROM pg_type WHERE typname = %s;", (enum_type_name,))
        result = cursor.fetchone()
        if result is None:
            raise ValueError(f"Enum type '{enum_type_name}' not found")
        return result[0]


def register_enum(apps, schema_editor, enum_class):  # noqa: F841
    with psycopg_connection(schema_editor.connection.alias) as connection:
        enum_oid = get_enum_oid(connection, enum_class.enum_type_name)
        enum_instance = new_type(
            (enum_oid,),
            enum_class.enum_type_name,
            lambda value, cur: value,  # noqa: F841
        )
        register_type(enum_instance, connection)
        register_adapter(enum_class, enum_adapter)


# Postgres enum definition for member role


class MemberRoleEnum(EnumType):
    enum_type_name = "member_role"


class MemberRoleEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("member_role", *args, **kwargs)


# Postgres enum definition for Provider.provider


class ProviderEnum(EnumType):
    enum_type_name = "provider"


class ProviderEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("provider", *args, **kwargs)


# Postgres enum definition for Scan.type


class ScanTriggerEnum(EnumType):
    enum_type_name = "scan_trigger"


class ScanTriggerEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("scan_trigger", *args, **kwargs)


# Postgres enum definition for state


class StateEnum(EnumType):
    enum_type_name = "state"


class StateEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("state", *args, **kwargs)


# Postgres enum definition for Finding.Delta


class FindingDeltaEnum(EnumType):
    enum_type_name = "finding_delta"


class FindingDeltaEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("finding_delta", *args, **kwargs)


# Postgres enum definition for Severity


class SeverityEnum(EnumType):
    enum_type_name = "severity"


class SeverityEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("severity", *args, **kwargs)


# Postgres enum definition for Status


class StatusEnum(EnumType):
    enum_type_name = "status"


class StatusEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("status", *args, **kwargs)


# Postgres enum definition for Provider secrets type


class ProviderSecretTypeEnum(EnumType):
    enum_type_name = "provider_secret_type"


class ProviderSecretTypeEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("provider_secret_type", *args, **kwargs)

# Postgres enum definition for Invitation state


class InvitationStateEnum(EnumType):
    enum_type_name = "invitation_state"


class InvitationStateEnumField(PostgresEnumField):
    def __init__(self, *args, **kwargs):
        super().__init__("invitation_state", *args, **kwargs)
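The `tenant_transaction` helper pairs a transaction-local `set_config` call with Postgres row-level security, so tenant-scoped queries must run inside it. A minimal usage sketch, assuming the `Provider` model imported elsewhere in this diff is RLS-protected by a policy that reads `api.tenant_id`:

# Sketch only: run tenant-scoped ORM queries inside one transaction so the
# transaction-local set_config() covers every statement.
from api.db_utils import tenant_transaction
from api.models import Provider

def count_tenant_providers(tenant_id: str) -> int:
    with tenant_transaction(tenant_id):
        # RLS policies that read api.tenant_id now only expose this tenant's rows.
        return Provider.objects.count()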
52
api/src/backend/api/decorators.py
Normal file
52
api/src/backend/api/decorators.py
Normal file
@@ -0,0 +1,52 @@
from functools import wraps

from django.db import connection, transaction


def set_tenant(func):
    """
    Decorator to set the tenant context for a Celery task based on the provided tenant_id.

    This decorator extracts the `tenant_id` from the task's keyword arguments,
    and uses it to set the tenant context for the current database session.
    The `tenant_id` is then removed from the kwargs before the task function
    is executed. If `tenant_id` is not provided, a KeyError is raised.

    Args:
        func (function): The Celery task function to be decorated.

    Raises:
        KeyError: If `tenant_id` is not found in the task's keyword arguments.

    Returns:
        function: The wrapped function with tenant context set.

    Example:
        # This decorator MUST be defined the last in the decorator chain

        @shared_task
        @set_tenant
        def some_task(arg1, **kwargs):
            # Task logic here
            pass

        # When calling the task
        some_task.delay(arg1, tenant_id="1234-abcd-5678")

        # The tenant context will be set before the task logic executes.
    """

    @wraps(func)
    @transaction.atomic
    def wrapper(*args, **kwargs):
        try:
            tenant_id = kwargs.pop("tenant_id")
        except KeyError:
            raise KeyError("This task requires the tenant_id")

        with connection.cursor() as cursor:
            cursor.execute(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")

        return func(*args, **kwargs)

    return wrapper
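The decorator only sets the `api.tenant_id` session variable; the actual isolation presumably comes from row-level security policies that read it. A hedged sketch of what such a policy could look like, expressed as a Django migration operation (the policy and table names here are illustrative, not taken from this repository):

from django.db import migrations

# Illustrative only: a policy of roughly this shape is assumed to exist on each
# tenant-scoped table, comparing its tenant_id column against the variable set
# by @set_tenant / tenant_transaction.
tenant_rls_policy = migrations.RunSQL(
    sql=(
        "CREATE POLICY tenant_isolation ON providers "
        "USING (tenant_id = current_setting('api.tenant_id')::uuid);"
    ),
    reverse_sql="DROP POLICY tenant_isolation ON providers;",
)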
45
api/src/backend/api/exceptions.py
Normal file
45
api/src/backend/api/exceptions.py
Normal file
@@ -0,0 +1,45 @@
from django.core.exceptions import ValidationError as django_validation_error
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_json_api.exceptions import exception_handler
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken


class ModelValidationError(ValidationError):
    def __init__(
        self,
        detail: str | None = None,
        code: str | None = None,
        pointer: str | None = None,
        status_code: int = 400,
    ):
        super().__init__(
            detail=[
                {
                    "detail": detail,
                    "status": str(status_code),
                    "source": {"pointer": pointer},
                    "code": code,
                }
            ]
        )


class InvitationTokenExpiredException(APIException):
    status_code = status.HTTP_410_GONE
    default_detail = "The invitation token has expired and is no longer valid."
    default_code = "token_expired"


def custom_exception_handler(exc, context):
    if isinstance(exc, django_validation_error):
        if hasattr(exc, "error_dict"):
            exc = ValidationError(exc.message_dict)
        else:
            exc = ValidationError(detail=exc.messages[0], code=exc.code)
    elif isinstance(exc, (TokenError, InvalidToken)):
        exc.detail["messages"] = [
            message_item["message"] for message_item in exc.detail["messages"]
        ]
    return exception_handler(exc, context)
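A handler like this only runs once DRF is pointed at it; a minimal settings sketch, assuming the module path `api.exceptions` shown in this diff:

# Assumed REST framework wiring for the custom handler above.
REST_FRAMEWORK = {
    "EXCEPTION_HANDLER": "api.exceptions.custom_exception_handler",
}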
484
api/src/backend/api/filters.py
Normal file
484
api/src/backend/api/filters.py
Normal file
@@ -0,0 +1,484 @@
from datetime import date, datetime, timezone

from django.conf import settings
from django.db.models import Q
from django_filters.rest_framework import (
    BaseInFilter,
    FilterSet,
    BooleanFilter,
    CharFilter,
    UUIDFilter,
    DateFilter,
    ChoiceFilter,
)
from rest_framework_json_api.django_filters.backends import DjangoFilterBackend
from rest_framework_json_api.serializers import ValidationError

from api.db_utils import (
    ProviderEnumField,
    FindingDeltaEnumField,
    StatusEnumField,
    SeverityEnumField,
    InvitationStateEnumField,
)
from api.models import (
    User,
    Membership,
    Provider,
    ProviderGroup,
    Resource,
    ResourceTag,
    Scan,
    Task,
    StateChoices,
    Finding,
    SeverityChoices,
    StatusChoices,
    ProviderSecret,
    Invitation,
    ComplianceOverview,
)
from api.rls import Tenant
from api.uuid_utils import (
    datetime_to_uuid7,
    uuid7_start,
    uuid7_end,
    uuid7_range,
    transform_into_uuid7,
)
from api.v1.serializers import TaskBase


class CustomDjangoFilterBackend(DjangoFilterBackend):
    def to_html(self, _request, _queryset, _view):
        """Override this method to use the Browsable API in dev environments.

        This disables the HTML render for the default filter.
        """
        return None


class UUIDInFilter(BaseInFilter, UUIDFilter):
    pass


class CharInFilter(BaseInFilter, CharFilter):
    pass


class ChoiceInFilter(BaseInFilter, ChoiceFilter):
    pass


class TenantFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = Tenant
        fields = {
            "name": ["exact", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["gte", "lte"],
        }


class MembershipFilter(FilterSet):
    date_joined = DateFilter(field_name="date_joined", lookup_expr="date")
    role = ChoiceFilter(choices=Membership.RoleChoices.choices)

    class Meta:
        model = Membership
        fields = {
            "tenant": ["exact"],
            "role": ["exact"],
            "date_joined": ["date", "gte", "lte"],
        }


class ProviderFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    connected = BooleanFilter()
    provider = ChoiceFilter(choices=Provider.ProviderChoices.choices)

    class Meta:
        model = Provider
        fields = {
            "provider": ["exact", "in"],
            "id": ["exact", "in"],
            "uid": ["exact", "icontains", "in"],
            "alias": ["exact", "icontains", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
        filter_overrides = {
            ProviderEnumField: {
                "filter_class": CharFilter,
            },
        }


class ProviderRelationshipFilterSet(FilterSet):
    provider_type = ChoiceFilter(
        choices=Provider.ProviderChoices.choices, field_name="provider__provider"
    )
    provider_type__in = ChoiceInFilter(
        choices=Provider.ProviderChoices.choices, field_name="provider__provider"
    )
    provider_uid = CharFilter(field_name="provider__uid", lookup_expr="exact")
    provider_uid__in = CharInFilter(field_name="provider__uid", lookup_expr="in")
    provider_uid__icontains = CharFilter(
        field_name="provider__uid", lookup_expr="icontains"
    )
    provider_alias = CharFilter(field_name="provider__alias", lookup_expr="exact")
    provider_alias__in = CharInFilter(field_name="provider__alias", lookup_expr="in")
    provider_alias__icontains = CharFilter(
        field_name="provider__alias", lookup_expr="icontains"
    )


class ProviderGroupFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = ProviderGroup
        fields = {
            "id": ["exact", "in"],
            "name": ["exact", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }


class ScanFilter(ProviderRelationshipFilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    completed_at = DateFilter(field_name="completed_at", lookup_expr="date")
    started_at = DateFilter(field_name="started_at", lookup_expr="date")
    trigger = ChoiceFilter(choices=Scan.TriggerChoices.choices)

    class Meta:
        model = Scan
        fields = {
            "provider": ["exact", "in"],
            "name": ["exact", "icontains"],
            "started_at": ["gte", "lte"],
            "trigger": ["exact"],
        }


class TaskFilter(FilterSet):
    name = CharFilter(field_name="task_runner_task__task_name", lookup_expr="exact")
    name__icontains = CharFilter(
        field_name="task_runner_task__task_name", lookup_expr="icontains"
    )
    state = ChoiceFilter(
        choices=StateChoices.choices, method="filter_state", lookup_expr="exact"
    )
    task_state_inverse_mapping_values = {
        v: k for k, v in TaskBase.state_mapping.items()
    }

    def filter_state(self, queryset, name, value):
        if value not in StateChoices:
            raise ValidationError(
                f"Invalid state value: '{value}'. Valid values are: "
                f"{', '.join(StateChoices)}"
            )

        return queryset.filter(
            task_runner_task__status=self.task_state_inverse_mapping_values[value]
        )

    class Meta:
        model = Task
        fields = []


class ResourceTagFilter(FilterSet):
    class Meta:
        model = ResourceTag
        fields = {
            "key": ["exact", "icontains"],
            "value": ["exact", "icontains"],
        }
        search = ["text_search"]


class ResourceFilter(ProviderRelationshipFilterSet):
    tag_key = CharFilter(method="filter_tag_key")
    tag_value = CharFilter(method="filter_tag_value")
    tag = CharFilter(method="filter_tag")
    tags = CharFilter(method="filter_tag")
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    class Meta:
        model = Resource
        fields = {
            "provider": ["exact", "in"],
            "uid": ["exact", "icontains"],
            "name": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
            "service": ["exact", "icontains", "in"],
            "type": ["exact", "icontains", "in"],
            "inserted_at": ["gte", "lte"],
            "updated_at": ["gte", "lte"],
        }

    def filter_tag_key(self, queryset, name, value):
        return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))

    def filter_tag_value(self, queryset, name, value):
        return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))

    def filter_tag(self, queryset, name, value):
        # We won't know what the user wants to filter on just based on the value,
        # and we don't want to build special filtering logic for every possible
        # provider tag spec, so we'll just do a full text search
        return queryset.filter(tags__text_search=value)


class FindingFilter(FilterSet):
    # We filter providers from the scan in findings
    provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
    provider_type = ChoiceFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_type__in = ChoiceInFilter(
        choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
    )
    provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
    provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
    provider_uid__icontains = CharFilter(
        field_name="scan__provider__uid", lookup_expr="icontains"
    )
    provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
    provider_alias__in = CharInFilter(
        field_name="scan__provider__alias", lookup_expr="in"
    )
    provider_alias__icontains = CharFilter(
        field_name="scan__provider__alias", lookup_expr="icontains"
    )

    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")

    uid = CharFilter(field_name="uid")
    delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
    status = ChoiceFilter(choices=StatusChoices.choices)
    severity = ChoiceFilter(choices=SeverityChoices)
    impact = ChoiceFilter(choices=SeverityChoices)

    resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")

    region = CharFilter(field_name="resources__region")
    region__in = CharInFilter(field_name="resources__region", lookup_expr="in")
    region__icontains = CharFilter(
        field_name="resources__region", lookup_expr="icontains"
    )

    service = CharFilter(field_name="resources__service")
    service__in = CharInFilter(field_name="resources__service", lookup_expr="in")
    service__icontains = CharFilter(
        field_name="resources__service", lookup_expr="icontains"
    )

    resource_uid = CharFilter(field_name="resources__uid")
    resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
    resource_uid__icontains = CharFilter(
        field_name="resources__uid", lookup_expr="icontains"
    )

    resource_name = CharFilter(field_name="resources__name")
    resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
    resource_name__icontains = CharFilter(
        field_name="resources__name", lookup_expr="icontains"
    )

    resource_type = CharFilter(field_name="resources__type")
    resource_type__in = CharInFilter(field_name="resources__type", lookup_expr="in")
    resource_type__icontains = CharFilter(
        field_name="resources__type", lookup_expr="icontains"
    )

    scan = UUIDFilter(method="filter_scan_id")
    scan__in = UUIDInFilter(method="filter_scan_id_in")

    inserted_at = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__date = DateFilter(method="filter_inserted_at", lookup_expr="date")
    inserted_at__gte = DateFilter(method="filter_inserted_at_gte")
    inserted_at__lte = DateFilter(method="filter_inserted_at_lte")

    class Meta:
        model = Finding
        fields = {
            "id": ["exact", "in"],
            "uid": ["exact", "in"],
            "scan": ["exact", "in"],
            "delta": ["exact", "in"],
            "status": ["exact", "in"],
            "severity": ["exact", "in"],
            "impact": ["exact", "in"],
            "check_id": ["exact", "in", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["gte", "lte"],
        }
        filter_overrides = {
            FindingDeltaEnumField: {
                "filter_class": CharFilter,
            },
            StatusEnumField: {
                "filter_class": CharFilter,
            },
            SeverityEnumField: {
                "filter_class": CharFilter,
            },
        }

    # Convert filter values to UUIDv7 values for use with partitioning
    def filter_scan_id(self, queryset, name, value):
        try:
            value_uuid = transform_into_uuid7(value)
            start = uuid7_start(value_uuid)
            end = uuid7_end(value_uuid, settings.FINDINGS_TABLE_PARTITION_MONTHS)
        except ValidationError as validation_error:
            detail = str(validation_error.detail[0])
            raise ValidationError(
                [
                    {
                        "detail": detail,
                        "status": 400,
                        "source": {"pointer": "/data/relationships/scan"},
                        "code": "invalid",
                    }
                ]
            )

        return (
            queryset.filter(id__gte=start)
            .filter(id__lt=end)
            .filter(scan__id=value_uuid)
        )

    def filter_scan_id_in(self, queryset, name, value):
        try:
            uuid_list = [
                transform_into_uuid7(value_uuid)
                for value_uuid in value
                if value_uuid is not None
            ]

            start, end = uuid7_range(uuid_list)
        except ValidationError as validation_error:
            detail = str(validation_error.detail[0])
            raise ValidationError(
                [
                    {
                        "detail": detail,
                        "status": 400,
                        "source": {"pointer": "/data/relationships/scan"},
                        "code": "invalid",
                    }
                ]
            )
        if start == end:
            return queryset.filter(id__gte=start).filter(scan__id__in=uuid_list)
        else:
            return (
                queryset.filter(id__gte=start)
                .filter(id__lt=end)
                .filter(scan__id__in=uuid_list)
            )

    def filter_inserted_at(self, queryset, name, value):
        value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(value))

        return queryset.filter(id__gte=start).filter(inserted_at__date=value)

    def filter_inserted_at_gte(self, queryset, name, value):
        value = self.maybe_date_to_datetime(value)
        start = uuid7_start(datetime_to_uuid7(value))

        return queryset.filter(id__gte=start).filter(inserted_at__gte=value)

    def filter_inserted_at_lte(self, queryset, name, value):
        value = self.maybe_date_to_datetime(value)
        end = uuid7_start(datetime_to_uuid7(value))

        return queryset.filter(id__lte=end).filter(inserted_at__lte=value)

    @staticmethod
    def maybe_date_to_datetime(value):
        dt = value
        if isinstance(value, date):
            dt = datetime.combine(value, datetime.min.time(), tzinfo=timezone.utc)
        return dt


class ProviderSecretFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    provider = UUIDFilter(field_name="provider__id", lookup_expr="exact")

    class Meta:
        model = ProviderSecret
        fields = {
            "name": ["exact", "icontains"],
        }


class InvitationFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
    expires_at = DateFilter(field_name="expires_at", lookup_expr="date")
    state = ChoiceFilter(choices=Invitation.State.choices)
    state__in = ChoiceInFilter(choices=Invitation.State.choices, lookup_expr="in")

    class Meta:
        model = Invitation
        fields = {
            "email": ["exact", "icontains"],
            "inserted_at": ["date", "gte", "lte"],
            "updated_at": ["date", "gte", "lte"],
            "expires_at": ["date", "gte", "lte"],
            "inviter": ["exact"],
        }
        filter_overrides = {
            InvitationStateEnumField: {
                "filter_class": CharFilter,
            }
        }


class UserFilter(FilterSet):
    date_joined = DateFilter(field_name="date_joined", lookup_expr="date")

    class Meta:
        model = User
        fields = {
            "name": ["exact", "icontains"],
            "email": ["exact", "icontains"],
            "company_name": ["exact", "icontains"],
            "date_joined": ["date", "gte", "lte"],
            "is_active": ["exact"],
        }


class ComplianceOverviewFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
    provider_type__in = ChoiceInFilter(choices=Provider.ProviderChoices.choices)
    scan_id = UUIDFilter(field_name="scan__id")

    class Meta:
        model = ComplianceOverview
        fields = {
            "inserted_at": ["date", "gte", "lte"],
            "compliance_id": ["exact", "icontains"],
            "framework": ["exact", "iexact", "icontains"],
            "version": ["exact", "icontains"],
            "region": ["exact", "icontains", "in"],
        }
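The custom FindingFilter methods above translate scan and date filters into bounds on the UUIDv7 primary key so Postgres can prune time-based partitions before applying the ordinary predicate. A minimal illustration of the same idea, assuming only the uuid7 helpers already imported in this file; the helper function itself is illustrative:

# Sketch only: restrict Finding rows to one UTC day by first bounding the
# UUIDv7 id (partition pruning), then applying the regular date filter.
from datetime import datetime, timedelta

from api.models import Finding
from api.uuid_utils import datetime_to_uuid7, uuid7_start

def findings_for_day(day: datetime):
    start = uuid7_start(datetime_to_uuid7(day))
    end = uuid7_start(datetime_to_uuid7(day + timedelta(days=1)))
    return Finding.objects.filter(
        id__gte=start, id__lt=end, inserted_at__date=day.date()
    )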
28
api/src/backend/api/fixtures/dev/0_dev_users.json
Normal file
28
api/src/backend/api/fixtures/dev/0_dev_users.json
Normal file
@@ -0,0 +1,28 @@
[
    {
        "model": "api.user",
        "pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
        "fields": {
            "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
            "last_login": null,
            "name": "Devie Prowlerson",
            "email": "dev@prowler.com",
            "company_name": "Prowler Developers",
            "is_active": true,
            "date_joined": "2024-09-17T09:04:20.850Z"
        }
    },
    {
        "model": "api.user",
        "pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
        "fields": {
            "password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
            "last_login": null,
            "name": "Devietoo Prowlerson",
            "email": "dev2@prowler.com",
            "company_name": "Prowler Developers",
            "is_active": true,
            "date_joined": "2024-09-18T09:04:20.850Z"
        }
    }
]
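The hashed passwords above are the kind of values CustomUserManager (in db_utils.py) produces through set_password; creating an equivalent development user programmatically could look like the following sketch, assuming api.models.User uses that manager as its default (the email, password, and name here are illustrative, not taken from the fixtures):

# Hypothetical dev helper: mirrors what the fixture rows represent.
from api.models import User  # assumed to expose CustomUserManager as `objects`

user = User.objects.create_user(
    email="dev3@prowler.com",        # illustrative address
    password="an-illustrative-pass", # hashed by set_password inside create_user
    name="Dev Three",
    company_name="Prowler Developers",
)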
50
api/src/backend/api/fixtures/dev/1_dev_tenants.json
Normal file
50
api/src/backend/api/fixtures/dev/1_dev_tenants.json
Normal file
@@ -0,0 +1,50 @@
[
    {
        "model": "api.tenant",
        "pk": "12646005-9067-4d2a-a098-8bb378604362",
        "fields": {
            "inserted_at": "2024-03-21T23:00:00Z",
            "updated_at": "2024-03-21T23:00:00Z",
            "name": "Tenant1"
        }
    },
    {
        "model": "api.tenant",
        "pk": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
        "fields": {
            "inserted_at": "2024-03-21T23:00:00Z",
            "updated_at": "2024-03-21T23:00:00Z",
            "name": "Tenant2"
        }
    },
    {
        "model": "api.membership",
        "pk": "2b0db93a-7e0b-4edf-a851-ea448676b7eb",
        "fields": {
            "user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
            "tenant": "0412980b-06e3-436a-ab98-3c9b1d0333d3",
            "role": "owner",
            "date_joined": "2024-09-19T11:03:59.712Z"
        }
    },
    {
        "model": "api.membership",
        "pk": "797d7cee-abc9-4598-98bb-4bf4bfb97f27",
        "fields": {
            "user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "role": "owner",
            "date_joined": "2024-09-19T11:02:59.712Z"
        }
    },
    {
        "model": "api.membership",
        "pk": "dea37563-7009-4dcf-9f18-25efb41462a7",
        "fields": {
            "user": "b6493a3a-c997-489b-8b99-278bf74de9f6",
            "tenant": "12646005-9067-4d2a-a098-8bb378604362",
            "role": "member",
            "date_joined": "2024-09-19T11:03:59.712Z"
        }
    }
]
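These dev fixtures are presumably meant to seed a development database; a minimal sketch using Django's standard loaddata command from Python, with the fixture names taken from the files in this directory:

# Assumed workflow: load the bundled dev fixtures in order.
from django.core.management import call_command

for fixture in (
    "0_dev_users",
    "1_dev_tenants",
    "2_dev_providers",
    "3_dev_scans",
    "4_dev_resources",
):
    call_command("loaddata", fixture)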
177
api/src/backend/api/fixtures/dev/2_dev_providers.json
Normal file
177
api/src/backend/api/fixtures/dev/2_dev_providers.json
Normal file
@@ -0,0 +1,177 @@
|
||||
[
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "37b065f8-26b0-4218-a665-0b23d07b27d9",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:20:27.050Z",
|
||||
"updated_at": "2024-08-01T17:20:27.050Z",
|
||||
"provider": "gcp",
|
||||
"uid": "a12322-test321",
|
||||
"alias": "gcp_testing_2",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "8851db6b-42e5-4533-aa9e-30a32d67e875",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:19:42.453Z",
|
||||
"updated_at": "2024-08-01T17:19:42.453Z",
|
||||
"provider": "gcp",
|
||||
"uid": "a12345-test123",
|
||||
"alias": "gcp_testing_1",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:19:09.556Z",
|
||||
"updated_at": "2024-08-01T17:19:09.556Z",
|
||||
"provider": "aws",
|
||||
"uid": "123456789020",
|
||||
"alias": "aws_testing_2",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "baa7b895-8bac-4f47-b010-4226d132856e",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:20:16.962Z",
|
||||
"updated_at": "2024-08-01T17:20:16.962Z",
|
||||
"provider": "gcp",
|
||||
"uid": "a12322-test123",
|
||||
"alias": "gcp_testing_3",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "d7c7ea89-d9af-423b-a364-1290dcad5a01",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-01T17:18:58.132Z",
|
||||
"updated_at": "2024-08-01T17:18:58.132Z",
|
||||
"provider": "aws",
|
||||
"uid": "123456789015",
|
||||
"alias": "aws_testing_1",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-06T16:03:26.176Z",
|
||||
"updated_at": "2024-08-06T16:03:26.176Z",
|
||||
"provider": "azure",
|
||||
"uid": "8851db6b-42e5-4533-aa9e-30a32d67e875",
|
||||
"alias": "azure_testing",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {},
|
||||
"scanner_args": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "26e55a24-cb2c-4cef-ac87-6f91fddb2c97",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-08-06T16:03:07.037Z",
|
||||
"updated_at": "2024-08-06T16:03:07.037Z",
|
||||
"provider": "kubernetes",
|
||||
"uid": "kubernetes-test-12345",
|
||||
"alias": "k8s_testing",
|
||||
"connected": null,
|
||||
"connection_last_checked_at": null,
|
||||
"metadata": {},
|
||||
"scanner_args": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.provider",
|
||||
"pk": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:45:26.352Z",
|
||||
"updated_at": "2024-10-18T11:16:23.533Z",
|
||||
"provider": "aws",
|
||||
"uid": "106908755759",
|
||||
"alias": "real testing aws provider",
|
||||
"connected": true,
|
||||
"connection_last_checked_at": "2024-10-18T11:16:23.503Z",
|
||||
"metadata": {},
|
||||
"scanner_args": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providersecret",
|
||||
"pk": "11491b47-75ae-4f71-ad8d-3e630a72182e",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-11T08:03:05.026Z",
|
||||
"updated_at": "2024-10-11T08:04:47.033Z",
|
||||
"name": "GCP static secrets",
|
||||
"secret_type": "static",
|
||||
"_secret": "Z0FBQUFBQm5DTndmZW9KakRZUHM2UHhQN2V3RzN0QmM1cERham8yMHp5cnVTT0lzdGFyS1FuVmJXUlpYSGsyU0cxR3RMMEdQYXlYMUVsaWtqLU1OZWlaVUp6OFREYlotZTVBY3BuTlZYbm9YcUJydzAxV2p5dkpLamI1Y2tUYzA0MmJUNWxsNTBRM0E1SDRCa0pPQWVlb05YU3dfeUhkLTRmOEh3dGczOGh1ZGhQcVdZdVAtYmtoSWlwNXM4VGFoVmF3dno2X1hrbk5GZjZTWjVuWEdEZUFXeHJSQjEzbTlVakhNdzYyWTdiVEpvUEc2MTNpRzUtczhEank1eGI0b3MyMlAyaGN6dlByZmtUWHByaDNUYWFqYS1tYnNBUkRKTzBacFNSRjFuVmd5bUtFUEJhd1ZVS1ZDd2xSUV9PaEtLTnc0XzVkY2lhM01WTjQwaWdJSk9wNUJSXzQ4RUNQLXFPNy1VdzdPYkZyWkVkU3RyQjVLTS1MVHN0R3k4THNKZ2NBNExaZnl3Q1EwN2dwNGRsUXptMjB0LXUzTUpzTDE2Q1hmS0ZSN2g1ZjBPeV8taFoxNUwxc2FEcktXX0dCM1IzeUZTTHNiTmNxVXBvNWViZTJScUVWV2VYTFQ4UHlid21PY1A0UjdNMGtERkZCd0lLMlJENDMzMVZUM09DQ0twd1N3VHlZd09XLUctOWhYcFJIR1p5aUlZeEUzejc2dWRYdGNsd0xOODNqRUFEczhSTWNtWU0tdFZ1ZTExaHNHUVYtd0Zxdld1LTdKVUNINzlZTGdHODhKeVVpQmRZMHRUNTJRRWhwS1F1Y3I2X2Iwc0c1NHlXSVRLZWxreEt0dVRnOTZFMkptU2VMS1dWXzdVOVRzMUNUWXM2aFlxVDJXdGo3d2cxSVZGWlI2ZWhIZzZBcEl4bEJ6UnVHc0RYWVNHcjFZUHI5ZUYyWG9rSlo0QUVSUkFCX3h2UmtJUTFzVXJUZ25vTmk2VzdoTTNta05ucmNfTi0yR1ZxN1E2MnZJOVVKOGxmMXMzdHMxVndmSVhQbUItUHgtMVpVcHJwMU5JVHJLb0Y1aHV5OEEwS0kzQkEtcFJkdkRnWGxmZnprNFhndWg1TmQyd09yTFdTRmZ3d2ZvZFUtWXp4a2VYb3JjckFIcE13MDUzX0RHSnlzM0N2ZE5IRzJzMXFMc0k4MDRyTHdLZFlWOG9SaFF0LU43Ynd6VFlEcVNvdFZ0emJEVk10aEp4dDZFTFNFNzk0UUo2WTlVLWRGYm1fanZHaFZreHBIMmtzVjhyS0xPTk9fWHhiVTJHQXZwVlVuY3JtSjFUYUdHQzhEaHFNZXhwUHBmY0kxaUVrOHo4a0FYOTdpZVJDbFRvdFlQeWo3eFZHX1ZMZ1Myc3prU3o2c3o2eXNja1U4N0Y1T0d1REVjZFRGNTByUkgyemVCSjlQYkY2bmJ4YTZodHB0cUNzd2xZcENycUdsczBIaEZPbG1jVUlqNlM2cEE3aGpVaWswTzBDLVFGUHM5UHhvM09saWNtaDhaNVlsc3FZdktKeWlheDF5OGhTODE2N3JWamdTZG5Fa3JSQ2ZUSEVfRjZOZXdreXRZLTBZRFhleVFFeC1YUzc0cWhYeEhobGxvdnZ3Rm15WFlBWXp0dm1DeTA5eExLeEFRRXVRSXBXdTNEaWdZZ3JDenItdDhoZlFiTzI0SGZ1c01FR1FNaFVweVBKR1YxWGRUMW1Mc2JVdW9raWR6UHk2ZTBnS05pV3oyZVBjREdkY3k4ZHZPUWE5S281MkJRSHF3NnpTclZ5bl90bk1wUEh6Tkp5dXlDcE5paWRqcVhxRFVObWIzRldWOGJ2aC1CRHZpbFZrb0hjNGpCMm5POGRiS2lETUpMLUVfQlhCdTZPLW9USW1LTFlTSF9zRUJYZ1NKeFFEQjNOR215ZXJDbkFndmcxWl9rWlk9",
|
||||
"provider": "8851db6b-42e5-4533-aa9e-30a32d67e875"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providersecret",
|
||||
"pk": "40191ad5-d8c2-40a9-826d-241397626b68",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-10T11:11:44.515Z",
|
||||
"updated_at": "2024-10-11T07:59:56.102Z",
|
||||
"name": "AWS static secrets",
|
||||
"secret_type": "static",
|
||||
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providersecret",
|
||||
"pk": "ed89d1ea-366a-4d12-a602-f2ab77019742",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-10T11:11:44.515Z",
|
||||
"updated_at": "2024-10-11T07:59:56.102Z",
|
||||
"name": "Azure static secrets",
|
||||
"secret_type": "static",
|
||||
"_secret": "Z0FBQUFBQm5DTnI4Y1RyV19UWEJzc3kzQUExcU5tdlQzbFVLeDdZMWd1MzkwWkl2UF9oZGhiVEJHVWpSMXV4MjYyN3g2OVpvNVpkQUQ3S0VGaGdQLTFhQWE3MkpWZUt2cnVhODc4d3FpY3FVZkpwdHJzNUJPeFRwZ3N4bGpPZTlkNWRNdFlwTHU3aTNWR3JjSzJwLWRITHdfQWpXb1F0c1l3bVFxbnFrTEpPTGgxcnF1VUprSzZ5dGRQU2VGYmZhTTlwbVpsNFBNWlFhVW9RbjJyYnZ5N0oweE5kV0ZEaUdpUUpNVExOa3oyQ2dNREVSenJ0TEFZc0RrRWpXNUhyMmtybGNLWDVOR0FabEl4QVR1bkZyb2hBLWc1MFNIekVyeXI0SmVreHBjRnJ1YUlVdXpVbW9JZkk0aEgxYlM1VGhSRlhtcS14YzdTYUhXR2xodElmWjZuNUVwaHozX1RVTG1QWHdPZWd4clNHYnAyOTBsWEl5UU83RGxZb0RKWjdadjlsTmJtSHQ0Yl9uaDJoODB0QV9sWmFYbFAxcjA1bmhNVlNqc2xEeHlvcUJFbVZvY250ZENnMnZLT1psb1JDclB3WVR6NGdZb2pzb3U4Ny04QlB0UTZub0dMOXZEUTZEcVJhZldCWEZZSDdLTy02UVZqck5zVTZwS3pObGlOejNJeHUzbFRabFM2V2xaekZVRjZtX3VzZlplendnOWQzT01WMFd3ejNadHVlTFlqRGR2dk5Da29zOFYwOUdOaEc4OHhHRnJFMmJFMk12VDNPNlBBTGlsXy13cUM1QkVYb0o1Z2U4ZXJnWXpZdm1sWjA5bzQzb2NFWC1xbmIycGZRbGtCaGNaOWlkX094UUNNampwbkZoREctNWI4QnZRaE8zM3BEQ1BwNzA1a3BzOGczZXdIM2s1NHFGN1ZTbmJhZkc4RVdfM0ZIZU5udTBYajd1RGxpWXZpRWdSMmhHa2RKOEIzbmM0X2F1OGxrN2p6LW9UVldDOFVpREoxZ1UzcTBZX19OQ0xJb0syWlhNSlQ4MzQwdzRtVG94Y01GS3FMLV95UVlxOTFORk8zdjE5VGxVaXdhbGlzeHdoYWNzazZWai1GUGtUM2gzR0ZWTTY4SThWeVFnZldIaklOTTJqTTg1VkhEYW5wNmdEVllXMmJCV2tpVmVYeUV2c0E1T00xbHJRNzgzVG9wb0Q1cV81UEhqYUFsQ2p1a0VpRDVINl9SVkpyZVRNVnVXQUxwY3NWZnJrNmRVREpiLWNHYUpXWmxkQlhNbWhuR1NmQ1BaVDlidUxCWHJMaHhZbk1FclVBaEVZeWg1ZlFoenZzRHlKbV8wa3lmMGZrd3NmTDZjQkE0UXNSUFhpTWtUUHBrX29BVzc4QzEtWEJIQW1GMGFuZVlXQWZIOXJEamloeGFCeHpYMHNjMFVfNXpQdlJfSkk2bzFROU5NU0c1SHREWW1nbkFNZFZ0UjdPRGdjaF96RGplY1hjdFFzLVR6MTVXYlRjbHIxQ2JRejRpVko5NWhBU0ZHR3ZvczU5elljRGpHRTdIc0FsSm5fUHEwT1gtTS1lN3M3X3ZZRnlkYUZoZXRQeEJsZlhLdFdTUzU1NUl4a29aOWZIdTlPM0Fnak1xYWVkYTNiMmZXUHlXS2lwUVBZLXQyaUxuRmtQNFFieE9SVmdZVW9WTHlzbnBPZlNIdGVHOE1LNVNESjN3cGtVSHVpT1NJWHE1ZzNmUTVTOC0xX3NGSmJqU19IbjZfQWtMRG1YNUQtRy13TUJIZFlyOXJkQzFQbkdZVXVzM2czbS1HWHFBT1pXdVd3N09tcG82SVhnY1ZtUWxqTEg2UzJCUmllb2pweVN2aGwwS1FVRUhjNEN2amRMc3MwVU4zN3dVMWM5Slg4SERtenFaQk1yMWx0LWtxVWtLZVVtbU4yejVEM2h6TEt0RGdfWE09",
|
||||
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providersecret",
|
||||
"pk": "ae48ecde-75cd-4814-92ab-18f48719e5d9",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:45:26.412Z",
|
||||
"updated_at": "2024-10-18T10:45:26.412Z",
|
||||
"name": "Valid AWS Credentials",
|
||||
"secret_type": "static",
|
||||
"_secret": "Z0FBQUFBQm5FanhHa3dXS0I3M2NmWm56SktiaGNqdDZUN0xQU1QwUi15QkhLZldFUmRENk1BXzlscG9JSUxVSTF5ekxuMkdEanlJNjhPUS1VSV9wVTBvU2l4ZnNGOVJhYW93RC1LTEhmc2pyOTJvUWwyWnpFY19WN1pRQk5IdDYwYnBDQnF1eU9nUzdwTGU3QU5qMGFyX1E4SXdpSk9paGVLcVpOVUhwb3duaXgxZ0ZxME5Pcm40QzBGWEZKY2lmRVlCMGFuVFVzemxuVjVNalZVQ2JsY2ZqNWt3Z01IYUZ0dk92YkdtSUZ5SlBvQWZoVU5DWlRFWmExNnJGVEY4Q1Bnd2VJUW9TSWdRcG9rSDNfREQwRld3Q1RYVnVYWVJLWWIxZmpsWGpwd0xQM0dtLTlYUjdHOVhhNklLWXFGTHpFQUVyVmNhYW9CU0tocGVyX3VjMkVEcVdjdFBfaVpsLTBzaUxrWTlta3dpelNtTG9xYVhBUHUzNUE4RnI1WXdJdHcxcFVfaG1XRHhDVFBKamxJb1FaQ2lsQ3FzRmxZbEJVemVkT1E2aHZfbDJqWDJPT3ViOWJGYzQ3eTNWNlFQSHBWRDFiV2tneDM4SmVqMU9Bd01TaXhPY2dmWG5RdENURkM2b2s5V3luVUZQcnFKNldnWEdYaWE2MnVNQkEwMHd6cUY5cVJkcGw4bHBtNzhPeHhkREdwSXNEc1JqQkxUR1FYRTV0UFNwbVlVSWF5LWgtbVhJZXlPZ0Q4cG9HX2E0Qld0LTF1TTFEVy1XNGdnQTRpLWpQQmFJUEdaOFJGNDVoUVJnQ25YVU5DTENMaTY4YmxtYWJFRERXTjAydVN2YnBDb3RkUE0zSDRlN1A3TXc4d2h1Wmd0LWUzZEcwMUstNUw2YnFyS2Z0NEVYMXllQW5GLVBpeU55SkNhczFIeFhrWXZpVXdwSFVrTDdiQjQtWHZJdERXVThzSnJsT2FNZzJDaUt6Y2NXYUZhUlo3VkY0R1BrSHNHNHprTmxjYmp1TXVKakRha0VtNmRFZWRmZHJWdnRCOVNjVGFVWjVQM3RwWWl4SkNmOU1pb2xqMFdOblhNY3Y3aERpOHFlWjJRc2dtRDkzZm1Qc29wdk5OQmJPbGk5ZUpGM1I2YzRJN2gxR3FEMllXR1pma1k0emVqSjZyMUliMGZsc3NfSlVDbGt4QzJTc3hHOU9FRHlZb09zVnlvcDR6WC1uclRSenI0Yy13WlFWNzJWRkwydjhmSjFZdnZ5X3NmZVF6UWRNMXo5STVyV3B0d09UUlFtOURITGhXSDVIUl9zYURJc05KWUNxekVyYkxJclNFNV9leEk4R2xsMGJod3lYeFIwaXR2dllwLTZyNWlXdDRpRkxVYkxWZFdvYUhKck5aeElBZUtKejNKS2tYVW1rTnVrRjJBQmdlZmV6ckozNjNwRmxLS1FaZzRVTTBZYzFFYi1idjBpZkQ3bWVvbEdRZXJrWFNleWZmSmFNdG1wQlp0YmxjWDV5T0tEbHRsYnNHbjRPRjl5MkttOUhRWlJtd1pmTnY4Z1lPRlZoTzFGVDdTZ0RDY1ByV0RndTd5LUNhcHNXUnNIeXdLMEw3WS1tektRTWFLQy1zakpMLWFiM3FOakE1UWU4LXlOX2VPbmd4MTZCRk9OY3Z4UGVDSWxhRlg4eHI4X1VUTDZZM0pjV0JDVi1UUjlTUl85cm1LWlZ0T1dzU0lpdWUwbXgtZ0l6eHNSNExRTV9MczJ6UkRkVElnRV9Rc0RoTDFnVHRZSEFPb2paX200TzZiRzVmRE5hOW5CTjh5Qi1WaEtueEpqRzJDY1luVWZtX1pseUpQSE5lQ0RrZ05EbWo5cU9MZ0ZkcXlqUll4UUkyejRfY2p4RXdEeC1PS1JIQVNUcmNIdkRJbzRiUktMWEQxUFM3aGNzeVFWUDdtcm5xNHlOYUU9",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555"
|
||||
}
|
||||
}
|
||||
]
|
||||
218
api/src/backend/api/fixtures/dev/3_dev_scans.json
Normal file
218
api/src/backend/api/fixtures/dev/3_dev_scans.json
Normal file
@@ -0,0 +1,218 @@
|
||||
[
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "0191e280-9d2f-71c8-9b18-487a23ba185e",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "37b065f8-26b0-4218-a665-0b23d07b27d9",
|
||||
"trigger": "manual",
|
||||
"name": "test scan 1",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 1,
|
||||
"duration": 5,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled"
|
||||
]
|
||||
},
|
||||
"inserted_at": "2024-09-01T17:25:27.050Z",
|
||||
"started_at": "2024-09-01T17:25:27.050Z",
|
||||
"updated_at": "2024-09-01T17:25:27.050Z",
|
||||
"completed_at": "2024-09-01T17:25:32.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01920573-aa9c-73c9-bcda-f2e35c9b19d2",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"trigger": "manual",
|
||||
"name": "test aws scan 2",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 1,
|
||||
"duration": 20,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled"
|
||||
]
|
||||
},
|
||||
"inserted_at": "2024-09-02T17:24:27.050Z",
|
||||
"started_at": "2024-09-02T17:24:27.050Z",
|
||||
"updated_at": "2024-09-02T17:24:27.050Z",
|
||||
"completed_at": "2024-09-01T17:24:37.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "baa7b895-8bac-4f47-b010-4226d132856e",
|
||||
"trigger": "manual",
|
||||
"name": "test gcp scan",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 10,
|
||||
"duration": 10,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"cloudsql_instance_automated_backups"
|
||||
]
|
||||
},
|
||||
"inserted_at": "2024-09-02T19:26:27.050Z",
|
||||
"started_at": "2024-09-02T19:26:27.050Z",
|
||||
"updated_at": "2024-09-02T19:26:27.050Z",
|
||||
"completed_at": "2024-09-01T17:26:37.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01920573-ea5b-77fd-a93f-1ed2ae12f728",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"trigger": "manual",
|
||||
"name": "test aws scan",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 1,
|
||||
"duration": 35,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled"
|
||||
]
|
||||
},
|
||||
"inserted_at": "2024-09-02T19:27:27.050Z",
|
||||
"started_at": "2024-09-02T19:27:27.050Z",
|
||||
"updated_at": "2024-09-02T19:27:27.050Z",
|
||||
"completed_at": "2024-09-01T17:27:37.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "c281c924-23f3-4fcc-ac63-73a22154b7de",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"trigger": "scheduled",
|
||||
"name": "test scheduled aws scan",
|
||||
"state": "available",
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"cloudformation_stack_outputs_find_secrets"
|
||||
]
|
||||
},
|
||||
"scheduled_at": "2030-09-02T19:20:27.050Z",
|
||||
"inserted_at": "2024-09-02T19:24:27.050Z",
|
||||
"updated_at": "2024-09-02T19:24:27.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "b85601a8-4b45-4194-8135-03fb980ef428",
|
||||
"trigger": "scheduled",
|
||||
"name": "test scheduled aws scan 2",
|
||||
"state": "available",
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled",
|
||||
"cloudformation_stack_outputs_find_secrets"
|
||||
]
|
||||
},
|
||||
"scheduled_at": "2030-08-02T19:31:27.050Z",
|
||||
"inserted_at": "2024-09-02T19:38:27.050Z",
|
||||
"updated_at": "2024-09-02T19:38:27.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "baa7b895-8bac-4f47-b010-4226d132856e",
|
||||
"trigger": "scheduled",
|
||||
"name": "test scheduled gcp scan",
|
||||
"state": "available",
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"cloudsql_instance_automated_backups",
|
||||
"iam_audit_logs_enabled"
|
||||
]
|
||||
},
|
||||
"scheduled_at": "2030-07-02T19:30:27.050Z",
|
||||
"inserted_at": "2024-09-02T19:29:27.050Z",
|
||||
"updated_at": "2024-09-02T19:29:27.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "25c8907c-b26e-4ec0-966b-a1f53a39d8e6",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
|
||||
"trigger": "scheduled",
|
||||
"name": "test scheduled azure scan",
|
||||
"state": "available",
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"aks_cluster_rbac_enabled",
|
||||
"defender_additional_email_configured_with_a_security_contact"
|
||||
]
|
||||
},
|
||||
"scheduled_at": "2030-08-05T19:32:27.050Z",
|
||||
"inserted_at": "2024-09-02T19:29:27.050Z",
|
||||
"updated_at": "2024-09-02T19:29:27.050Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01929f3b-ed2e-7623-ad63-7c37cd37828f",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "real scan 1",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"trigger": "manual",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 19,
|
||||
"progress": 100,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled"
|
||||
]
|
||||
},
|
||||
"duration": 7,
|
||||
"scheduled_at": null,
|
||||
"inserted_at": "2024-10-18T10:45:57.678Z",
|
||||
"updated_at": "2024-10-18T10:46:05.127Z",
|
||||
"started_at": "2024-10-18T10:45:57.909Z",
|
||||
"completed_at": "2024-10-18T10:46:05.127Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.scan",
|
||||
"pk": "01929f57-c0ee-7553-be0b-cbde006fb6f7",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"name": "real scan 2",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"trigger": "manual",
|
||||
"state": "completed",
|
||||
"unique_resource_count": 20,
|
||||
"progress": 100,
|
||||
"scanner_args": {
|
||||
"checks_to_execute": [
|
||||
"accessanalyzer_enabled",
|
||||
"account_security_contact_information_is_registered"
|
||||
]
|
||||
},
|
||||
"duration": 4,
|
||||
"scheduled_at": null,
|
||||
"inserted_at": "2024-10-18T11:16:21.358Z",
|
||||
"updated_at": "2024-10-18T11:16:26.060Z",
|
||||
"started_at": "2024-10-18T11:16:21.593Z",
|
||||
"completed_at": "2024-10-18T11:16:26.060Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
322
api/src/backend/api/fixtures/dev/4_dev_resources.json
Normal file
322
api/src/backend/api/fixtures/dev/4_dev_resources.json
Normal file
@@ -0,0 +1,322 @@
|
||||
[
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "0234477d-0b8e-439f-87d3-ce38dff3a434",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.772Z",
|
||||
"updated_at": "2024-10-18T11:16:24.466Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root",
|
||||
"name": "",
|
||||
"region": "eu-south-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-south':6C 'iam':3A 'other':11 'root':5A 'south':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "17ce30a3-6e77-42a5-bb08-29dfcad7396a",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.882Z",
|
||||
"updated_at": "2024-10-18T11:16:24.533Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root2",
|
||||
"name": "",
|
||||
"region": "eu-west-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "1f9de587-ba5b-415a-b9b0-ceed4c6c9f32",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.091Z",
|
||||
"updated_at": "2024-10-18T11:16:24.637Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root3",
|
||||
"name": "",
|
||||
"region": "ap-northeast-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "29b35668-6dad-411d-bfec-492311889892",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.008Z",
|
||||
"updated_at": "2024-10-18T11:16:24.600Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root4",
|
||||
"name": "",
|
||||
"region": "us-west-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "30505514-01d4-42bb-8b0c-471bbab27460",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T11:16:26.014Z",
|
||||
"updated_at": "2024-10-18T11:16:26.023Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root5",
|
||||
"name": "",
|
||||
"region": "us-east-1",
|
||||
"service": "account",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'account':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "372932f0-e4df-4968-9721-bb4f6236fae4",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.848Z",
|
||||
"updated_at": "2024-10-18T11:16:24.516Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root6",
|
||||
"name": "",
|
||||
"region": "eu-west-3",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "3a37d124-7637-43f6-9df7-e9aa7ef98c53",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.979Z",
|
||||
"updated_at": "2024-10-18T11:16:24.585Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root7",
|
||||
"name": "",
|
||||
"region": "sa-east-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'sa':7C 'sa-east':6C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "3c49318e-03c6-4f12-876f-40451ce7de3d",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.072Z",
|
||||
"updated_at": "2024-10-18T11:16:24.630Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root8",
|
||||
"name": "",
|
||||
"region": "ap-southeast-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "430bf313-8733-4bc5-ac70-5402adfce880",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.994Z",
|
||||
"updated_at": "2024-10-18T11:16:24.593Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root9",
|
||||
"name": "",
|
||||
"region": "eu-north-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-north':6C 'iam':3A 'north':8C 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "78bd2a52-82f9-45df-90a9-4ad78254fdc4",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.055Z",
|
||||
"updated_at": "2024-10-18T11:16:24.622Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root10",
|
||||
"name": "",
|
||||
"region": "ap-northeast-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "7973e332-795e-4a74-b4d4-a53a21c98c80",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.896Z",
|
||||
"updated_at": "2024-10-18T11:16:24.542Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root11",
|
||||
"name": "",
|
||||
"region": "us-east-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'east':8C 'iam':3A 'other':11 'root':5A 'us':7C 'us-east':6C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "8ca0a188-5699-436e-80fd-e566edaeb259",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.938Z",
|
||||
"updated_at": "2024-10-18T11:16:24.565Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root12",
|
||||
"name": "",
|
||||
"region": "ca-central-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'ca':7C 'ca-central':6C 'central':8C 'iam':3A 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "8fe4514f-71d7-46ab-b0dc-70cef23b4d13",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.965Z",
|
||||
"updated_at": "2024-10-18T11:16:24.578Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root13",
|
||||
"name": "",
|
||||
"region": "eu-west-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'eu':7C 'eu-west':6C 'iam':3A 'other':11 'root':5A 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "9ab35225-dc7c-4ebd-bbc0-d81fb5d9de77",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.909Z",
|
||||
"updated_at": "2024-10-18T11:16:24.549Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root14",
|
||||
"name": "",
|
||||
"region": "ap-south-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-south':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'south':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "9be26c1d-adf0-4ba8-9ca9-c740f4a0dc4e",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.863Z",
|
||||
"updated_at": "2024-10-18T11:16:24.524Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root15",
|
||||
"name": "",
|
||||
"region": "eu-central-2",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'2':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "ba108c01-bcad-44f1-b211-c1d8985da89d",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.110Z",
|
||||
"updated_at": "2024-10-18T11:16:24.644Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root16",
|
||||
"name": "",
|
||||
"region": "ap-northeast-3",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'3':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-northeast':6C 'arn':1A 'aws':2A 'iam':3A 'northeast':8C 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "dc6cfb5d-6835-4c7b-9152-c18c734a6eaa",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.038Z",
|
||||
"updated_at": "2024-10-18T11:16:24.615Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root17",
|
||||
"name": "",
|
||||
"region": "eu-central-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'central':8C 'eu':7C 'eu-central':6C 'iam':3A 'other':11 'root':5A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "e0664164-cfda-44a4-b743-acee1c69386c",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.924Z",
|
||||
"updated_at": "2024-10-18T11:16:24.557Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root18",
|
||||
"name": "",
|
||||
"region": "us-west-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'us':7C 'us-west':6C 'west':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "e1929daa-a984-4116-8131-492a48321dba",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:05.023Z",
|
||||
"updated_at": "2024-10-18T11:16:24.607Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:iam::112233445566:root19",
|
||||
"name": "",
|
||||
"region": "ap-southeast-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9C '112233445566':4A 'accessanalyzer':10 'ap':7C 'ap-southeast':6C 'arn':1A 'aws':2A 'iam':3A 'other':11 'root':5A 'southeast':8C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.resource",
|
||||
"pk": "e37bb1f1-1669-4bb3-be86-e3378ddfbcba",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"inserted_at": "2024-10-18T10:46:04.952Z",
|
||||
"updated_at": "2024-10-18T11:16:24.571Z",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"uid": "arn:aws:access-analyzer:us-east-1:112233445566:analyzer/ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
|
||||
"name": "",
|
||||
"region": "us-east-1",
|
||||
"service": "accessanalyzer",
|
||||
"type": "Other",
|
||||
"text_search": "'1':9A,15C '112233445566':10A 'access':4A 'access-analyzer':3A 'accessanalyzer':16 'analyzer':5A 'analyzer/consoleanalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c':11A 'arn':1A 'aws':2A 'east':8A,14C 'other':17 'us':7A,13C 'us-east':6A,12C"
|
||||
}
|
||||
}
|
||||
]
|
||||
2498
api/src/backend/api/fixtures/dev/5_dev_findings.json
Normal file
File diff suppressed because it is too large
62
api/src/backend/api/fixtures/dev/6_dev_rbac.json
Normal file
@@ -0,0 +1,62 @@
|
||||
[
|
||||
{
|
||||
"model": "api.providergroup",
|
||||
"pk": "3fe28fb8-e545-424c-9b8f-69aff638f430",
|
||||
"fields": {
|
||||
"name": "first_group",
|
||||
"inserted_at": "2024-11-13T11:36:19.503Z",
|
||||
"updated_at": "2024-11-13T11:36:19.503Z",
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroup",
|
||||
"pk": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
|
||||
"fields": {
|
||||
"name": "second_group",
|
||||
"inserted_at": "2024-11-13T11:36:25.421Z",
|
||||
"updated_at": "2024-11-13T11:36:25.421Z",
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroup",
|
||||
"pk": "481769f5-db2b-447b-8b00-1dee18db90ec",
|
||||
"fields": {
|
||||
"name": "third_group",
|
||||
"inserted_at": "2024-11-13T11:36:37.603Z",
|
||||
"updated_at": "2024-11-13T11:36:37.603Z",
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroupmembership",
|
||||
"pk": "13625bd3-f428-4021-ac1b-b0bd41b6e02f",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
|
||||
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
|
||||
"inserted_at": "2024-11-13T11:55:17.138Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroupmembership",
|
||||
"pk": "54784ebe-42d2-4937-aa6a-e21c62879567",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"provider_group": "3fe28fb8-e545-424c-9b8f-69aff638f430",
|
||||
"inserted_at": "2024-11-13T11:55:17.138Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"model": "api.providergroupmembership",
|
||||
"pk": "c8bd52d5-42a5-48fe-8e0a-3eef154b8ebe",
|
||||
"fields": {
|
||||
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
|
||||
"provider": "15fce1fa-ecaa-433f-a9dc-62553f3a2555",
|
||||
"provider_group": "525e91e7-f3f3-4254-bbc3-27ce1ade86b1",
|
||||
"inserted_at": "2024-11-13T11:55:41.237Z"
|
||||
}
|
||||
}
|
||||
]
|
||||
1
api/src/backend/api/fixtures/dev/7_dev_compliance.json
Normal file
File diff suppressed because one or more lines are too long
49
api/src/backend/api/middleware.py
Normal file
@@ -0,0 +1,49 @@
import logging
import time

from config.custom_logging import BackendLogger


def extract_auth_info(request) -> dict:
    if getattr(request, "auth", None) is not None:
        tenant_id = request.auth.get("tenant_id", "N/A")
        user_id = request.auth.get("sub", "N/A")
    else:
        tenant_id, user_id = "N/A", "N/A"
    return {"tenant_id": tenant_id, "user_id": user_id}


class APILoggingMiddleware:
    """
    Middleware for logging API requests.

    This middleware logs details of API requests, including the typical request metadata and other useful information.

    Args:
        get_response (Callable): A callable to get the response, typically the next middleware or view.
    """

    def __init__(self, get_response):
        self.get_response = get_response
        self.logger = logging.getLogger(BackendLogger.API)

    def __call__(self, request):
        request_start_time = time.time()

        response = self.get_response(request)
        duration = time.time() - request_start_time
        auth_info = extract_auth_info(request)
        self.logger.info(
            "",
            extra={
                "user_id": auth_info["user_id"],
                "tenant_id": auth_info["tenant_id"],
                "method": request.method,
                "path": request.path,
                "query_params": request.GET.dict(),
                "status_code": response.status_code,
                "duration": duration,
            },
        )

        return response
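For context, a middleware like APILoggingMiddleware only runs if it is listed in Django's MIDDLEWARE setting. A minimal sketch of how it would typically be registered; the surrounding entries and the ordering are illustrative assumptions, not the project's actual settings module:

# Sketch of registering the logging middleware in a Django settings module.
# The stock entries and their order are assumptions for illustration.
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.middleware.common.CommonMiddleware",
    # Placed last so the final response status code (and any request.auth set
    # during view processing) is available when the log record is emitted.
    "api.middleware.APILoggingMiddleware",
]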
1485
api/src/backend/api/migrations/0001_initial.py
Normal file
File diff suppressed because it is too large
23
api/src/backend/api/migrations/0002_token_migrations.py
Normal file
@@ -0,0 +1,23 @@
from django.conf import settings
from django.db import migrations

from api.db_utils import DB_PROWLER_USER

DB_NAME = settings.DATABASES["default"]["NAME"]


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0001_initial"),
        ("token_blacklist", "0012_alter_outstandingtoken_user"),
    ]

    operations = [
        migrations.RunSQL(
            f"""
            GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_blacklistedtoken TO {DB_PROWLER_USER};
            GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_outstandingtoken TO {DB_PROWLER_USER};
            GRANT SELECT, DELETE ON django_admin_log TO {DB_PROWLER_USER};
            """
        ),
    ]
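Note that migrations.RunSQL is irreversible when no reverse SQL is given, so the grants above cannot be undone with a backwards migrate. A hedged sketch of what a reversible variant could look like, mirroring one grant with a REVOKE; this is an illustration only, not the migration the project ships, and the literal user name is an assumption:

# Illustrative only: a reversible version of one of the grants above.
from django.db import migrations

DB_PROWLER_USER = "prowler_user"  # assumption; the real value comes from api.db_utils

reversible_grant = migrations.RunSQL(
    sql=f"GRANT SELECT, INSERT, UPDATE, DELETE ON token_blacklist_blacklistedtoken TO {DB_PROWLER_USER};",
    reverse_sql=f"REVOKE SELECT, INSERT, UPDATE, DELETE ON token_blacklist_blacklistedtoken FROM {DB_PROWLER_USER};",
)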
0
api/src/backend/api/migrations/__init__.py
Normal file
858
api/src/backend/api/models.py
Normal file
@@ -0,0 +1,858 @@
|
||||
import json
|
||||
import re
|
||||
from uuid import uuid4, UUID
|
||||
|
||||
from cryptography.fernet import Fernet
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import AbstractBaseUser
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.contrib.postgres.search import SearchVector, SearchVectorField
|
||||
from django.core.validators import MinLengthValidator
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_celery_results.models import TaskResult
|
||||
from prowler.lib.check.models import Severity
|
||||
from psqlextra.models import PostgresPartitionedModel
|
||||
from psqlextra.types import PostgresPartitioningMethod
|
||||
from uuid6 import uuid7
|
||||
|
||||
from api.db_utils import (
|
||||
MemberRoleEnumField,
|
||||
enum_to_choices,
|
||||
ProviderEnumField,
|
||||
StateEnumField,
|
||||
ScanTriggerEnumField,
|
||||
FindingDeltaEnumField,
|
||||
SeverityEnumField,
|
||||
StatusEnumField,
|
||||
CustomUserManager,
|
||||
ProviderSecretTypeEnumField,
|
||||
InvitationStateEnumField,
|
||||
one_week_from_now,
|
||||
generate_random_token,
|
||||
)
|
||||
from api.exceptions import ModelValidationError
|
||||
from api.rls import (
|
||||
RowLevelSecurityProtectedModel,
|
||||
)
|
||||
from api.rls import (
|
||||
Tenant,
|
||||
RowLevelSecurityConstraint,
|
||||
BaseSecurityConstraint,
|
||||
)
|
||||
|
||||
fernet = Fernet(settings.SECRETS_ENCRYPTION_KEY.encode())
|
||||
|
||||
# Convert Prowler Severity enum to Django TextChoices
|
||||
SeverityChoices = enum_to_choices(Severity)
|
||||
|
||||
|
||||
class StatusChoices(models.TextChoices):
|
||||
"""
|
||||
This list is based on the finding status in the Prowler CLI.
|
||||
|
||||
However, it adds another state, MUTED, which is not in the CLI.
|
||||
"""
|
||||
|
||||
FAIL = "FAIL", _("Fail")
|
||||
PASS = "PASS", _("Pass")
|
||||
MANUAL = "MANUAL", _("Manual")
|
||||
MUTED = "MUTED", _("Muted")
|
||||
|
||||
|
||||
class StateChoices(models.TextChoices):
|
||||
AVAILABLE = "available", _("Available")
|
||||
SCHEDULED = "scheduled", _("Scheduled")
|
||||
EXECUTING = "executing", _("Executing")
|
||||
COMPLETED = "completed", _("Completed")
|
||||
FAILED = "failed", _("Failed")
|
||||
CANCELLED = "cancelled", _("Cancelled")
|
||||
|
||||
|
||||
class User(AbstractBaseUser):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
name = models.CharField(max_length=150, validators=[MinLengthValidator(3)])
|
||||
email = models.EmailField(
|
||||
max_length=254,
|
||||
unique=True,
|
||||
help_text="Case insensitive",
|
||||
error_messages={"unique": "Please check the email address and try again."},
|
||||
)
|
||||
company_name = models.CharField(max_length=150, blank=True)
|
||||
is_active = models.BooleanField(default=True)
|
||||
date_joined = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
|
||||
USERNAME_FIELD = "email"
|
||||
REQUIRED_FIELDS = ["name"]
|
||||
|
||||
objects = CustomUserManager()
|
||||
|
||||
def is_member_of_tenant(self, tenant_id):
|
||||
return self.memberships.filter(tenant_id=tenant_id).exists()
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if self.email:
|
||||
self.email = self.email.strip().lower()
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
class Meta:
|
||||
db_table = "users"
|
||||
|
||||
constraints = [
|
||||
BaseSecurityConstraint(
|
||||
name="statements_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
)
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "users"
|
||||
|
||||
|
||||
class Membership(models.Model):
|
||||
class RoleChoices(models.TextChoices):
|
||||
OWNER = "owner", _("Owner")
|
||||
MEMBER = "member", _("Member")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
user = models.ForeignKey(
|
||||
User,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="memberships",
|
||||
related_query_name="membership",
|
||||
)
|
||||
tenant = models.ForeignKey(
|
||||
Tenant,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="memberships",
|
||||
related_query_name="membership",
|
||||
)
|
||||
role = MemberRoleEnumField(choices=RoleChoices.choices, default=RoleChoices.MEMBER)
|
||||
date_joined = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
|
||||
class Meta:
|
||||
db_table = "memberships"
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("user", "tenant"),
|
||||
name="unique_resources_by_membership",
|
||||
),
|
||||
BaseSecurityConstraint(
|
||||
name="statements_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "memberships"
|
||||
|
||||
|
||||
class Provider(RowLevelSecurityProtectedModel):
|
||||
class ProviderChoices(models.TextChoices):
|
||||
AWS = "aws", _("AWS")
|
||||
AZURE = "azure", _("Azure")
|
||||
GCP = "gcp", _("GCP")
|
||||
KUBERNETES = "kubernetes", _("Kubernetes")
|
||||
|
||||
@staticmethod
|
||||
def validate_aws_uid(value):
|
||||
if not re.match(r"^\d{12}$", value):
|
||||
raise ModelValidationError(
|
||||
detail="AWS provider ID must be exactly 12 digits.",
|
||||
code="aws-uid",
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def validate_azure_uid(value):
|
||||
try:
|
||||
val = UUID(value, version=4)
|
||||
if str(val) != value:
|
||||
raise ValueError
|
||||
except ValueError:
|
||||
raise ModelValidationError(
|
||||
detail="Azure provider ID must be a valid UUID.",
|
||||
code="azure-uid",
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def validate_gcp_uid(value):
|
||||
if not re.match(r"^[a-z][a-z0-9-]{5,29}$", value):
|
||||
raise ModelValidationError(
|
||||
detail="GCP provider ID must be 6 to 30 characters, start with a letter, and contain only lowercase "
|
||||
"letters, numbers, and hyphens.",
|
||||
code="gcp-uid",
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def validate_kubernetes_uid(value):
|
||||
if not re.match(r"^[a-z0-9]([-a-z0-9]{1,61}[a-z0-9])?$", value):
|
||||
raise ModelValidationError(
|
||||
detail="K8s provider ID must be up to 63 characters, start and end with a lowercase letter or number, "
|
||||
"and contain only lowercase alphanumeric characters and hyphens.",
|
||||
code="kubernetes-uid",
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
provider = ProviderEnumField(
|
||||
choices=ProviderChoices.choices, default=ProviderChoices.AWS
|
||||
)
|
||||
uid = models.CharField(
|
||||
"Unique identifier for the provider, set by the provider",
|
||||
max_length=63,
|
||||
blank=False,
|
||||
validators=[MinLengthValidator(3)],
|
||||
)
|
||||
alias = models.CharField(
|
||||
blank=True, null=True, max_length=100, validators=[MinLengthValidator(3)]
|
||||
)
|
||||
connected = models.BooleanField(null=True, blank=True)
|
||||
connection_last_checked_at = models.DateTimeField(null=True, blank=True)
|
||||
metadata = models.JSONField(default=dict, blank=True)
|
||||
scanner_args = models.JSONField(default=dict, blank=True)
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
getattr(self, f"validate_{self.provider}_uid")(self.uid)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.full_clean()
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "providers"
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "provider", "uid"),
|
||||
name="unique_provider_uids",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "providers"
|
||||
|
||||
|
||||
class ProviderGroup(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
name = models.CharField(max_length=255)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
providers = models.ManyToManyField(
|
||||
Provider, through="ProviderGroupMembership", related_name="provider_groups"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = "provider_groups"
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["tenant_id", "name"],
|
||||
name="unique_group_name_per_tenant",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "provider-groups"
|
||||
|
||||
|
||||
class ProviderGroupMembership(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
provider = models.ForeignKey(
|
||||
Provider,
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
provider_group = models.ForeignKey(
|
||||
ProviderGroup,
|
||||
on_delete=models.CASCADE,
|
||||
)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
|
||||
class Meta:
|
||||
db_table = "provider_group_memberships"
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=["provider_id", "provider_group"],
|
||||
name="unique_provider_group_membership",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "provider-group-memberships"
|
||||
|
||||
|
||||
class Task(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
task_runner_task = models.OneToOneField(
|
||||
TaskResult,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="task",
|
||||
related_query_name="task",
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "tasks"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["id", "task_runner_task"],
|
||||
name="tasks_id_trt_id_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "tasks"
|
||||
|
||||
|
||||
class Scan(RowLevelSecurityProtectedModel):
|
||||
class TriggerChoices(models.TextChoices):
|
||||
SCHEDULED = "scheduled", _("Scheduled")
|
||||
MANUAL = "manual", _("Manual")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid7, editable=False)
|
||||
name = models.CharField(
|
||||
blank=True, null=True, max_length=100, validators=[MinLengthValidator(3)]
|
||||
)
|
||||
provider = models.ForeignKey(
|
||||
Provider,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="scans",
|
||||
related_query_name="scan",
|
||||
)
|
||||
task = models.ForeignKey(
|
||||
Task,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="scans",
|
||||
related_query_name="scan",
|
||||
null=True,
|
||||
blank=True,
|
||||
)
|
||||
trigger = ScanTriggerEnumField(
|
||||
choices=TriggerChoices.choices,
|
||||
)
|
||||
state = StateEnumField(choices=StateChoices.choices, default=StateChoices.AVAILABLE)
|
||||
unique_resource_count = models.IntegerField(default=0)
|
||||
progress = models.IntegerField(default=0)
|
||||
scanner_args = models.JSONField(default=dict)
|
||||
duration = models.IntegerField(null=True, blank=True)
|
||||
scheduled_at = models.DateTimeField(null=True, blank=True)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
started_at = models.DateTimeField(null=True, blank=True)
|
||||
completed_at = models.DateTimeField(null=True, blank=True)
|
||||
# TODO: mutelist foreign key
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "scans"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["provider", "state", "trigger", "scheduled_at"],
|
||||
name="scans_prov_state_trig_sche_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "scans"
|
||||
|
||||
|
||||
class ResourceTag(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
|
||||
key = models.TextField(blank=False)
|
||||
value = models.TextField(blank=False)
|
||||
|
||||
text_search = models.GeneratedField(
|
||||
expression=SearchVector("key", weight="A", config="simple")
|
||||
+ SearchVector("value", weight="B", config="simple"),
|
||||
output_field=SearchVectorField(),
|
||||
db_persist=True,
|
||||
null=True,
|
||||
editable=False,
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "resource_tags"
|
||||
|
||||
indexes = [
|
||||
GinIndex(fields=["text_search"], name="gin_resource_tags_search_idx"),
|
||||
]
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "key", "value"),
|
||||
name="unique_resource_tags_by_tenant_key_value",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class Resource(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
|
||||
provider = models.ForeignKey(
|
||||
Provider,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="resources",
|
||||
related_query_name="resource",
|
||||
)
|
||||
|
||||
uid = models.TextField(
|
||||
"Unique identifier for the resource, set by the provider", blank=False
|
||||
)
|
||||
name = models.TextField("Name of the resource, as set in the provider", blank=False)
|
||||
region = models.TextField(
|
||||
"Location of the resource, as set by the provider", blank=False
|
||||
)
|
||||
service = models.TextField(
|
||||
"Service of the resource, as set by the provider", blank=False
|
||||
)
|
||||
type = models.TextField("Type of the resource, as set by the provider", blank=False)
|
||||
|
||||
text_search = models.GeneratedField(
|
||||
expression=SearchVector("uid", weight="A", config="simple")
|
||||
+ SearchVector("name", weight="B", config="simple")
|
||||
+ SearchVector("region", weight="C", config="simple")
|
||||
+ SearchVector("service", "type", weight="D", config="simple"),
|
||||
output_field=SearchVectorField(),
|
||||
db_persist=True,
|
||||
null=True,
|
||||
editable=False,
|
||||
)
|
||||
|
||||
tags = models.ManyToManyField(
|
||||
ResourceTag,
|
||||
verbose_name="Tags associated with the resource, by provider",
|
||||
through="ResourceTagMapping",
|
||||
)
|
||||
|
||||
def get_tags(self) -> dict:
|
||||
return {tag.key: tag.value for tag in self.tags.all()}
|
||||
|
||||
def clear_tags(self):
|
||||
self.tags.clear()
|
||||
self.save()
|
||||
|
||||
def upsert_or_delete_tags(self, tags: list[ResourceTag] | None):
|
||||
if tags is None:
|
||||
self.clear_tags()
|
||||
return
|
||||
|
||||
# Add new relationships with the tenant_id field
|
||||
for tag in tags:
|
||||
ResourceTagMapping.objects.update_or_create(
|
||||
tag=tag, resource=self, tenant_id=self.tenant_id
|
||||
)
|
||||
|
||||
# Save the instance
|
||||
self.save()
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "resources"
|
||||
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["uid", "region", "service", "name"],
|
||||
name="resource_uid_reg_serv_name_idx",
|
||||
),
|
||||
GinIndex(fields=["text_search"], name="gin_resources_search_idx"),
|
||||
]
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "provider_id", "uid"),
|
||||
name="unique_resources_by_provider",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "resources"
|
||||
|
||||
|
||||
class ResourceTagMapping(RowLevelSecurityProtectedModel):
|
||||
# NOTE that we don't really need a primary key here,
|
||||
# but everything is easier with django if we do
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
resource = models.ForeignKey(Resource, on_delete=models.CASCADE)
|
||||
tag = models.ForeignKey(ResourceTag, on_delete=models.CASCADE)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "resource_tag_mappings"
|
||||
|
||||
# django will automatically create indexes for:
|
||||
# - resource_id
|
||||
# - tag_id
|
||||
# - tenant_id
|
||||
# - id
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "resource_id", "tag_id"),
|
||||
name="unique_resource_tag_mappings_by_tenant",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
"""
|
||||
Defines the Finding model.
|
||||
|
||||
The Finding model uses a partitioned table to store findings. The partitions are created based on the UUIDv7 `id` field.
|
||||
|
||||
Note that when creating migrations for this model, you must use `python manage.py pgmakemigrations`.
|
||||
"""
|
||||
|
||||
class PartitioningMeta:
|
||||
method = PostgresPartitioningMethod.RANGE
|
||||
key = ["id"]
|
||||
|
||||
class DeltaChoices(models.TextChoices):
|
||||
NEW = "new", _("New")
|
||||
CHANGED = "changed", _("Changed")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid7, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
|
||||
uid = models.CharField(max_length=300)
|
||||
delta = FindingDeltaEnumField(
|
||||
choices=DeltaChoices.choices,
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
|
||||
status = StatusEnumField(choices=StatusChoices)
|
||||
status_extended = models.TextField(blank=True, null=True)
|
||||
|
||||
severity = SeverityEnumField(choices=SeverityChoices)
|
||||
|
||||
impact = SeverityEnumField(choices=SeverityChoices)
|
||||
impact_extended = models.TextField(blank=True, null=True)
|
||||
|
||||
raw_result = models.JSONField(default=dict)
|
||||
tags = models.JSONField(default=dict, null=True, blank=True)
|
||||
check_id = models.CharField(max_length=100, blank=False, null=False)
|
||||
check_metadata = models.JSONField(default=dict, null=False)
|
||||
|
||||
# Relationships
|
||||
scan = models.ForeignKey(to=Scan, related_name="findings", on_delete=models.CASCADE)
|
||||
|
||||
# many-to-many Resources. Relationship is defined on Resource
|
||||
resources = models.ManyToManyField(
|
||||
Resource,
|
||||
verbose_name="Resources associated with the finding",
|
||||
through="ResourceFindingMapping",
|
||||
related_name="findings",
|
||||
)
|
||||
|
||||
# TODO: Add resource search
|
||||
text_search = models.GeneratedField(
|
||||
expression=SearchVector(
|
||||
"impact_extended", "status_extended", weight="A", config="simple"
|
||||
),
|
||||
output_field=SearchVectorField(),
|
||||
db_persist=True,
|
||||
null=True,
|
||||
editable=False,
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "findings"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "UPDATE", "INSERT", "DELETE"],
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s_default",
|
||||
partition_name="default",
|
||||
statements=["SELECT", "UPDATE", "INSERT", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
indexes = [
|
||||
models.Index(fields=["uid"], name="findings_uid_idx"),
|
||||
models.Index(
|
||||
fields=[
|
||||
"scan_id",
|
||||
"impact",
|
||||
"severity",
|
||||
"status",
|
||||
"check_id",
|
||||
"delta",
|
||||
],
|
||||
name="findings_filter_idx",
|
||||
),
|
||||
GinIndex(fields=["text_search"], name="gin_findings_search_idx"),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "findings"
|
||||
|
||||
def add_resources(self, resources: list[Resource] | None):
|
||||
# Add new relationships with the tenant_id field
|
||||
for resource in resources:
|
||||
ResourceFindingMapping.objects.update_or_create(
|
||||
resource=resource, finding=self, tenant_id=self.tenant_id
|
||||
)
|
||||
|
||||
# Save the instance
|
||||
self.save()
|
||||
|
||||
|
||||
class ResourceFindingMapping(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
"""
|
||||
Defines the ResourceFindingMapping model.
|
||||
|
||||
ResourceFindingMapping is used to map a Finding to a Resource.
|
||||
|
||||
It follows the same partitioning strategy as the Finding model.
|
||||
"""
|
||||
|
||||
# NOTE that we don't really need a primary key here,
|
||||
# but everything is easier with django if we do
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
resource = models.ForeignKey(Resource, on_delete=models.CASCADE)
|
||||
finding = models.ForeignKey(Finding, on_delete=models.CASCADE)
|
||||
|
||||
class PartitioningMeta:
|
||||
method = PostgresPartitioningMethod.RANGE
|
||||
key = ["finding_id"]
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "resource_finding_mappings"
|
||||
base_manager_name = "objects"
|
||||
abstract = False
|
||||
|
||||
# django will automatically create indexes for:
|
||||
# - resource_id
|
||||
# - finding_id
|
||||
# - tenant_id
|
||||
# - id
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "resource_id", "finding_id"),
|
||||
name="unique_resource_finding_mappings_by_tenant",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name=f"rls_on_{db_table}_default",
|
||||
partition_name="default",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class ProviderSecret(RowLevelSecurityProtectedModel):
|
||||
class TypeChoices(models.TextChoices):
|
||||
STATIC = "static", _("Key-value pairs")
|
||||
ROLE = "role", _("Role assumption")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
name = models.CharField(
|
||||
blank=True, null=True, max_length=100, validators=[MinLengthValidator(3)]
|
||||
)
|
||||
secret_type = ProviderSecretTypeEnumField(choices=TypeChoices.choices)
|
||||
_secret = models.BinaryField(db_column="secret")
|
||||
provider = models.OneToOneField(
|
||||
Provider,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="secret",
|
||||
related_query_name="secret",
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "provider_secrets"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "provider-secrets"
|
||||
|
||||
@property
|
||||
def secret(self):
|
||||
if isinstance(self._secret, memoryview):
|
||||
encrypted_bytes = self._secret.tobytes()
|
||||
elif isinstance(self._secret, str):
|
||||
encrypted_bytes = self._secret.encode()
|
||||
else:
|
||||
encrypted_bytes = self._secret
|
||||
decrypted_data = fernet.decrypt(encrypted_bytes)
|
||||
return json.loads(decrypted_data.decode())
|
||||
|
||||
@secret.setter
|
||||
def secret(self, value):
|
||||
encrypted_data = fernet.encrypt(json.dumps(value).encode())
|
||||
self._secret = encrypted_data
|
||||
|
||||
|
||||
class Invitation(RowLevelSecurityProtectedModel):
|
||||
class State(models.TextChoices):
|
||||
PENDING = "pending", _("Invitation is pending")
|
||||
ACCEPTED = "accepted", _("Invitation was accepted by a user")
|
||||
EXPIRED = "expired", _("Invitation expired after the configured time")
|
||||
REVOKED = "revoked", _("Invitation was revoked by a user")
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
email = models.EmailField(max_length=254, blank=False, null=False)
|
||||
state = InvitationStateEnumField(choices=State.choices, default=State.PENDING)
|
||||
token = models.CharField(
|
||||
max_length=14,
|
||||
unique=True,
|
||||
default=generate_random_token,
|
||||
editable=False,
|
||||
blank=False,
|
||||
null=False,
|
||||
validators=[MinLengthValidator(14)],
|
||||
)
|
||||
expires_at = models.DateTimeField(default=one_week_from_now)
|
||||
inviter = models.ForeignKey(
|
||||
User,
|
||||
on_delete=models.SET_NULL,
|
||||
related_name="invitations",
|
||||
related_query_name="invitation",
|
||||
null=True,
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "invitations"
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant", "token", "email"),
|
||||
name="unique_tenant_token_email_by_invitation",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "invitations"
|
||||
|
||||
|
||||
class ComplianceOverview(RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
compliance_id = models.CharField(max_length=100, blank=False, null=False)
|
||||
framework = models.CharField(max_length=100, blank=False, null=False)
|
||||
version = models.CharField(max_length=50, blank=True)
|
||||
description = models.TextField(blank=True)
|
||||
region = models.CharField(max_length=50, blank=True)
|
||||
requirements = models.JSONField(default=dict)
|
||||
requirements_passed = models.IntegerField(default=0)
|
||||
requirements_failed = models.IntegerField(default=0)
|
||||
requirements_manual = models.IntegerField(default=0)
|
||||
total_requirements = models.IntegerField(default=0)
|
||||
|
||||
scan = models.ForeignKey(
|
||||
Scan,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="compliance_overviews",
|
||||
related_query_name="compliance_overview",
|
||||
null=True,
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "compliance_overviews"
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant", "scan", "compliance_id", "region"),
|
||||
name="unique_tenant_scan_region_compliance_by_compliance_overview",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "DELETE"],
|
||||
),
|
||||
]
|
||||
indexes = [
|
||||
models.Index(fields=["compliance_id"], name="comp_ov_cp_id_idx"),
|
||||
models.Index(fields=["requirements_failed"], name="comp_ov_req_fail_idx"),
|
||||
models.Index(
|
||||
fields=["compliance_id", "requirements_failed"],
|
||||
name="comp_ov_cp_id_req_fail_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "compliance-overviews"
|
||||
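The ProviderSecret.secret property above stores credentials as a Fernet-encrypted JSON blob keyed by DJANGO_SECRETS_ENCRYPTION_KEY. A minimal sketch of the same round trip in isolation, handy for verifying a key before wiring it into settings; the generated key and the payload are assumptions for illustration only:

# Minimal sketch of the encrypt/decrypt round trip used by ProviderSecret.
import json
from cryptography.fernet import Fernet

key = Fernet.generate_key()  # in the API this comes from settings.SECRETS_ENCRYPTION_KEY
fernet = Fernet(key)

credentials = {"role_arn": "arn:aws:iam::112233445566:role/example"}  # illustrative payload

encrypted = fernet.encrypt(json.dumps(credentials).encode())  # what gets stored in the BinaryField
decrypted = json.loads(fernet.decrypt(encrypted).decode())    # what the property returns

assert decrypted == credentials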
6
api/src/backend/api/pagination.py
Normal file
@@ -0,0 +1,6 @@
from rest_framework_json_api.pagination import JsonApiPageNumberPagination


class ComplianceOverviewPagination(JsonApiPageNumberPagination):
    page_size = 50
    max_page_size = 100
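ComplianceOverviewPagination builds on JsonApiPageNumberPagination, which exposes JSON:API style page[number] and page[size] query parameters, capped here at 100 items per page. A short sketch of what a client request could look like; the host, endpoint path, and response shape are assumptions for illustration:

# Sketch only: paging through compliance overviews with JSON:API query params.
import requests

response = requests.get(
    "http://localhost:8000/api/v1/compliance-overviews",  # endpoint path assumed
    params={"page[number]": 2, "page[size]": 50},         # page size capped by max_page_size = 100
    headers={"Accept": "application/vnd.api+json"},
)
print(response.json().get("meta", {}).get("pagination"))  # page counts, if exposed by the renderer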
203
api/src/backend/api/partitions.py
Normal file
@@ -0,0 +1,203 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Generator, Optional
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from django.conf import settings
|
||||
from psqlextra.partitioning import (
|
||||
PostgresPartitioningManager,
|
||||
PostgresRangePartition,
|
||||
PostgresRangePartitioningStrategy,
|
||||
PostgresTimePartitionSize,
|
||||
PostgresPartitioningError,
|
||||
)
|
||||
from psqlextra.partitioning.config import PostgresPartitioningConfig
|
||||
from uuid6 import UUID
|
||||
|
||||
from api.models import Finding, ResourceFindingMapping
|
||||
from api.rls import RowLevelSecurityConstraint
|
||||
from api.uuid_utils import datetime_to_uuid7
|
||||
|
||||
|
||||
class PostgresUUIDv7RangePartition(PostgresRangePartition):
|
||||
def __init__(
|
||||
self,
|
||||
from_values: UUID,
|
||||
to_values: UUID,
|
||||
size: PostgresTimePartitionSize,
|
||||
name_format: Optional[str] = None,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
self.from_values = from_values
|
||||
self.to_values = to_values
|
||||
self.size = size
|
||||
self.name_format = name_format
|
||||
|
||||
self.rls_statements = None
|
||||
if "rls_statements" in kwargs:
|
||||
self.rls_statements = kwargs["rls_statements"]
|
||||
|
||||
start_timestamp_ms = self.from_values.time
|
||||
|
||||
self.start_datetime = datetime.fromtimestamp(
|
||||
start_timestamp_ms / 1000, timezone.utc
|
||||
)
|
||||
|
||||
def name(self) -> str:
|
||||
if not self.name_format:
|
||||
raise PostgresPartitioningError("Unknown size/unit")
|
||||
|
||||
return self.start_datetime.strftime(self.name_format).lower()
|
||||
|
||||
def deconstruct(self) -> dict:
|
||||
return {
|
||||
**super().deconstruct(),
|
||||
"size_unit": self.size.unit.value,
|
||||
"size_value": self.size.value,
|
||||
}
|
||||
|
||||
def create(
|
||||
self,
|
||||
model,
|
||||
schema_editor,
|
||||
comment,
|
||||
) -> None:
|
||||
super().create(model, schema_editor, comment)
|
||||
|
||||
# if this model has RLS statements, add them to the partition
|
||||
if isinstance(self.rls_statements, list):
|
||||
schema_editor.add_constraint(
|
||||
model,
|
||||
constraint=RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name=f"rls_on_{self.name()}",
|
||||
partition_name=self.name(),
|
||||
statements=self.rls_statements,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class PostgresUUIDv7PartitioningStrategy(PostgresRangePartitioningStrategy):
|
||||
def __init__(
|
||||
self,
|
||||
size: PostgresTimePartitionSize,
|
||||
count: int,
|
||||
start_date: datetime = None,
|
||||
max_age: Optional[relativedelta] = None,
|
||||
name_format: Optional[str] = None,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
self.start_date = start_date.replace(
|
||||
day=1, hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
self.size = size
|
||||
self.count = count
|
||||
self.max_age = max_age
|
||||
self.name_format = name_format
|
||||
|
||||
self.rls_statements = None
|
||||
if "rls_statements" in kwargs:
|
||||
self.rls_statements = kwargs["rls_statements"]
|
||||
|
||||
def to_create(self) -> Generator[PostgresUUIDv7RangePartition, None, None]:
|
||||
current_datetime = (
|
||||
self.start_date if self.start_date else self.get_start_datetime()
|
||||
)
|
||||
|
||||
for _ in range(self.count):
|
||||
end_datetime = (
|
||||
current_datetime + self.size.as_delta() - relativedelta(microseconds=1)
|
||||
)
|
||||
start_uuid7 = datetime_to_uuid7(current_datetime)
|
||||
end_uuid7 = datetime_to_uuid7(end_datetime)
|
||||
|
||||
yield PostgresUUIDv7RangePartition(
|
||||
from_values=start_uuid7,
|
||||
to_values=end_uuid7,
|
||||
size=self.size,
|
||||
name_format=self.name_format,
|
||||
rls_statements=self.rls_statements,
|
||||
)
|
||||
|
||||
current_datetime += self.size.as_delta()
|
||||
|
||||
def to_delete(self) -> Generator[PostgresUUIDv7RangePartition, None, None]:
|
||||
if not self.max_age:
|
||||
return
|
||||
|
||||
current_datetime = self.get_start_datetime() - self.max_age
|
||||
|
||||
while True:
|
||||
end_datetime = current_datetime + self.size.as_delta()
|
||||
start_uuid7 = datetime_to_uuid7(current_datetime)
|
||||
end_uuid7 = datetime_to_uuid7(end_datetime)
|
||||
|
||||
# dropping table will delete indexes and policies
|
||||
yield PostgresUUIDv7RangePartition(
|
||||
from_values=start_uuid7,
|
||||
to_values=end_uuid7,
|
||||
size=self.size,
|
||||
name_format=self.name_format,
|
||||
)
|
||||
|
||||
current_datetime -= self.size.as_delta()
|
||||
|
||||
def get_start_datetime(self) -> datetime:
|
||||
"""
|
||||
Gets the start of the current month in UTC timezone.
|
||||
|
||||
This function returns a `datetime` object set to the first day of the current
|
||||
month, at midnight (00:00:00), in UTC.
|
||||
|
||||
Returns:
|
||||
datetime: A `datetime` object representing the start of the current month in UTC.
|
||||
"""
|
||||
return datetime.now(timezone.utc).replace(
|
||||
day=1, hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
|
||||
|
||||
def relative_days_or_none(value):
|
||||
if value is None:
|
||||
return None
|
||||
return relativedelta(days=value)
|
||||
|
||||
|
||||
#
|
||||
# To manage the partitions, run `python manage.py pgpartition --using admin`
|
||||
#
|
||||
# For more info on the partitioning manager, see https://github.com/SectorLabs/django-postgres-extra
|
||||
manager = PostgresPartitioningManager(
|
||||
[
|
||||
PostgresPartitioningConfig(
|
||||
model=Finding,
|
||||
strategy=PostgresUUIDv7PartitioningStrategy(
|
||||
start_date=datetime.now(timezone.utc),
|
||||
size=PostgresTimePartitionSize(
|
||||
months=settings.FINDINGS_TABLE_PARTITION_MONTHS
|
||||
),
|
||||
count=settings.FINDINGS_TABLE_PARTITION_COUNT,
|
||||
max_age=relative_days_or_none(
|
||||
settings.FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS
|
||||
),
|
||||
name_format="%Y_%b",
|
||||
rls_statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
# ResourceFindingMapping should always follow the Finding partitioning
|
||||
PostgresPartitioningConfig(
|
||||
model=ResourceFindingMapping,
|
||||
strategy=PostgresUUIDv7PartitioningStrategy(
|
||||
start_date=datetime.now(timezone.utc),
|
||||
size=PostgresTimePartitionSize(
|
||||
months=settings.FINDINGS_TABLE_PARTITION_MONTHS
|
||||
),
|
||||
count=settings.FINDINGS_TABLE_PARTITION_COUNT,
|
||||
max_age=relative_days_or_none(
|
||||
settings.FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS
|
||||
),
|
||||
name_format="%Y_%b",
|
||||
rls_statements=["SELECT"],
|
||||
),
|
||||
),
|
||||
]
|
||||
)
|
||||
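The partitioning strategy above converts datetimes to UUIDv7 boundaries with api.uuid_utils.datetime_to_uuid7, so that a range partition on the id column lines up with a time window. A minimal sketch of that conversion under the standard UUIDv7 layout (48-bit millisecond timestamp in the most significant bits); this is an assumption about the helper's behaviour, not its actual implementation:

# Illustrative sketch: smallest UUIDv7 value for a given instant, so that
# [uuid7_lower_bound(start), uuid7_lower_bound(end)) can bound a range partition.
from datetime import datetime, timezone
from uuid import UUID


def uuid7_lower_bound(dt: datetime) -> UUID:
    unix_ts_ms = int(dt.timestamp() * 1000)
    value = unix_ts_ms << 80   # 48-bit millisecond timestamp in the top bits
    value |= 0x7 << 76         # version 7
    value |= 0x2 << 62         # RFC 4122 variant
    return UUID(int=value)     # remaining random bits left at zero


print(uuid7_lower_bound(datetime(2024, 10, 1, tzinfo=timezone.utc)))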
23
api/src/backend/api/renderers.py
Normal file
@@ -0,0 +1,23 @@
from contextlib import nullcontext

from rest_framework_json_api.renderers import JSONRenderer

from api.db_utils import tenant_transaction


class APIJSONRenderer(JSONRenderer):
    """JSONRenderer override to apply tenant RLS when there are included resources in the request."""

    def render(self, data, accepted_media_type=None, renderer_context=None):
        request = renderer_context.get("request")
        tenant_id = getattr(request, "tenant_id", None) if request else None
        include_param_present = "include" in request.query_params if request else False

        # Use tenant_transaction if needed for included resources, otherwise do nothing
        context_manager = (
            tenant_transaction(tenant_id)
            if tenant_id and include_param_present
            else nullcontext()
        )
        with context_manager:
            return super().render(data, accepted_media_type, renderer_context)
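APIJSONRenderer wraps rendering in tenant_transaction so that the row-level security policies defined in rls.py below (which compare tenant_id against a Postgres session setting) can see the caller's tenant. A rough sketch of the idea behind such a helper; the session variable name and the implementation details are assumptions here, the real logic lives in api.db_utils:

# Rough sketch of a tenant-scoped transaction: set the Postgres session
# variable that the RLS policies read, for the duration of one transaction.
from contextlib import contextmanager

from django.db import connection, transaction


@contextmanager
def tenant_transaction_sketch(tenant_id: str):
    with transaction.atomic():
        with connection.cursor() as cursor:
            # "api.tenant_id" is a placeholder; the project defines the real
            # name as POSTGRES_TENANT_VAR in api.db_utils.
            cursor.execute("SELECT set_config('api.tenant_id', %s, TRUE)", [str(tenant_id)])
        yield  # ORM queries inside this block are filtered by the RLS policies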
188
api/src/backend/api/rls.py
Normal file
@@ -0,0 +1,188 @@
|
||||
from typing import Any
|
||||
from uuid import uuid4
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import DEFAULT_DB_ALIAS
|
||||
from django.db import models
|
||||
from django.db.backends.ddl_references import Statement, Table
|
||||
|
||||
from api.db_utils import DB_USER, POSTGRES_TENANT_VAR
|
||||
|
||||
|
||||
class Tenant(models.Model):
|
||||
"""
|
||||
The Tenant is the basic grouping in the system. It is used to separate data between customers.
|
||||
"""
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
name = models.CharField(max_length=100)
|
||||
|
||||
class Meta:
|
||||
db_table = "tenants"
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "tenants"
|
||||
|
||||
|
||||
class RowLevelSecurityConstraint(models.BaseConstraint):
|
||||
"""
|
||||
Model constraint to enforce row-level security on a tenant based model, in addition to the least privileges.
|
||||
|
||||
The constraint can be applied to a partitioned table by specifying the `partition_name` keyword argument.
|
||||
"""
|
||||
|
||||
rls_sql_query = """
|
||||
ALTER TABLE %(table_name)s ENABLE ROW LEVEL SECURITY;
|
||||
ALTER TABLE %(table_name)s FORCE ROW LEVEL SECURITY;
|
||||
"""
|
||||
|
||||
policy_sql_query = """
|
||||
CREATE POLICY %(db_user)s_%(table_name)s_{statement}
|
||||
ON %(table_name)s
|
||||
FOR {statement}
|
||||
TO %(db_user)s
|
||||
{clause} (
|
||||
CASE
|
||||
WHEN current_setting('%(tenant_setting)s', True) IS NULL THEN FALSE
|
||||
ELSE %(field_column)s = current_setting('%(tenant_setting)s')::uuid
|
||||
END
|
||||
);
|
||||
"""
|
||||
|
||||
grant_sql_query = """
|
||||
GRANT {statement} ON %(table_name)s TO %(db_user)s;
|
||||
"""
|
||||
|
||||
drop_sql_query = """
|
||||
ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
|
||||
ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
|
||||
REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
|
||||
"""
|
||||
|
||||
drop_policy_sql_query = """
|
||||
DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, field: str, name: str, statements: list | None = None, **kwargs
|
||||
) -> None:
|
||||
super().__init__(name=name)
|
||||
self.target_field: str = field
|
||||
self.statements = statements or ["SELECT"]
|
||||
self.partition_name = None
|
||||
if "partition_name" in kwargs:
|
||||
self.partition_name = kwargs["partition_name"]
|
||||
|
||||
def create_sql(self, model: Any, schema_editor: Any) -> Any:
|
||||
field_column = schema_editor.quote_name(self.target_field)
|
||||
|
||||
policy_queries = ""
|
||||
grant_queries = ""
|
||||
for statement in self.statements:
|
||||
clause = f"{'WITH CHECK' if statement == 'INSERT' else 'USING'}"
|
||||
policy_queries = f"{policy_queries}{self.policy_sql_query.format(statement=statement, clause=clause)}"
|
||||
grant_queries = (
|
||||
f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
|
||||
)
|
||||
|
||||
full_create_sql_query = (
|
||||
f"{self.rls_sql_query}" f"{policy_queries}" f"{grant_queries}"
|
||||
)
|
||||
|
||||
table_name = model._meta.db_table
|
||||
if self.partition_name:
|
||||
table_name = f"{table_name}_{self.partition_name}"
|
||||
|
||||
return Statement(
|
||||
full_create_sql_query,
|
||||
table_name=table_name,
|
||||
field_column=field_column,
|
||||
db_user=DB_USER,
|
||||
tenant_setting=POSTGRES_TENANT_VAR,
|
||||
partition_name=self.partition_name,
|
||||
)
|
||||
|
||||
def remove_sql(self, model: Any, schema_editor: Any) -> Any:
|
||||
field_column = schema_editor.quote_name(self.target_field)
|
||||
full_drop_sql_query = (
|
||||
f"{self.drop_sql_query}"
|
||||
f"{''.join([self.drop_policy_sql_query.format(statement) for statement in self.statements])}"
|
||||
)
|
||||
table_name = model._meta.db_table
|
||||
if self.partition_name:
|
||||
table_name = f"{table_name}_{self.partition_name}"
|
||||
return Statement(
|
||||
full_drop_sql_query,
|
||||
table_name=Table(table_name, schema_editor.quote_name),
|
||||
field_column=field_column,
|
||||
db_user=DB_USER,
|
||||
partition_name=self.partition_name,
|
||||
)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if isinstance(other, RowLevelSecurityConstraint):
|
||||
return self.name == other.name and self.target_field == other.target_field
|
||||
return super().__eq__(other)
|
||||
|
||||
def deconstruct(self) -> tuple[str, tuple, dict]:
|
||||
path, _, kwargs = super().deconstruct()
|
||||
return (path, (self.target_field,), kwargs)
|
||||
|
||||
def validate(self, model, instance, exclude=None, using=DEFAULT_DB_ALIAS): # noqa: F841
|
||||
if not hasattr(instance, "tenant_id"):
|
||||
raise ValidationError(f"{model.__name__} does not have a tenant_id field.")
|
||||
|
||||
|
||||
class BaseSecurityConstraint(models.BaseConstraint):
|
||||
"""Model constraint to grant the least privileges to the API database user."""
|
||||
|
||||
grant_sql_query = """
|
||||
GRANT {statement} ON %(table_name)s TO %(db_user)s;
|
||||
"""
|
||||
|
||||
drop_sql_query = """
|
||||
REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
|
||||
"""
|
||||
|
||||
def __init__(self, name: str, statements: list | None = None) -> None:
|
||||
super().__init__(name=name)
|
||||
self.statements = statements or ["SELECT"]
|
||||
|
||||
def create_sql(self, model: Any, schema_editor: Any) -> Any:
|
||||
grant_queries = ""
|
||||
for statement in self.statements:
|
||||
grant_queries = (
|
||||
f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
|
||||
)
|
||||
|
||||
return Statement(
|
||||
grant_queries,
|
||||
table_name=model._meta.db_table,
|
||||
db_user=DB_USER,
|
||||
)
|
||||
|
||||
def remove_sql(self, model: Any, schema_editor: Any) -> Any:
|
||||
return Statement(
|
||||
self.drop_sql_query,
|
||||
table_name=Table(model._meta.db_table, schema_editor.quote_name),
|
||||
db_user=DB_USER,
|
||||
)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if isinstance(other, BaseSecurityConstraint):
|
||||
return self.name == other.name
|
||||
return super().__eq__(other)
|
||||
|
||||
def deconstruct(self) -> tuple[str, tuple, dict]:
|
||||
path, args, kwargs = super().deconstruct()
|
||||
return path, args, kwargs
|
||||
|
||||
|
||||
class RowLevelSecurityProtectedModel(models.Model):
|
||||
tenant = models.ForeignKey("Tenant", on_delete=models.CASCADE)
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
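The constraints defined in `rls.py` are meant to be attached through a model's `Meta.constraints`. A short illustrative sketch of that wiring, assuming a hypothetical tenant-scoped model (the model name, table name, and statement list are examples, not part of this commit):

from django.db import models

from api.rls import RowLevelSecurityConstraint, RowLevelSecurityProtectedModel


class ExampleFinding(RowLevelSecurityProtectedModel):
    # Hypothetical model used only to show how the RLS constraint is declared;
    # the tenant foreign key (tenant_id) comes from RowLevelSecurityProtectedModel.
    name = models.CharField(max_length=100)

    class Meta:
        db_table = "example_findings"
        constraints = [
            RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_example_findings",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]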
35
api/src/backend/api/signals.py
Normal file
@@ -0,0 +1,35 @@
from celery import states
from celery.signals import before_task_publish
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django_celery_beat.models import PeriodicTask
from django_celery_results.backends.database import DatabaseBackend

from api.models import Provider
from config.celery import celery_app


def create_task_result_on_publish(sender=None, headers=None, **kwargs):  # noqa: F841
    """Celery signal to store TaskResult entries when tasks reach the broker."""
    db_result_backend = DatabaseBackend(celery_app)
    request = type("request", (object,), headers)

    db_result_backend.store_result(
        headers["id"],
        None,
        states.PENDING,
        traceback=None,
        request=request,
    )


before_task_publish.connect(
    create_task_result_on_publish, dispatch_uid="create_task_result_on_publish"
)


@receiver(post_delete, sender=Provider)
def delete_provider_scan_task(sender, instance, **kwargs):  # noqa: F841
    # Delete the associated periodic task when the provider is deleted
    task_name = f"scan-perform-scheduled-{instance.id}"
    PeriodicTask.objects.filter(name=task_name).delete()
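The `post_delete` receiver above assumes each provider has a django-celery-beat entry named `scan-perform-scheduled-<provider id>`. A hedged sketch of how such an entry could be registered (the task name, schedule, and helper name are placeholders, not taken from this commit):

from django_celery_beat.models import IntervalSchedule, PeriodicTask


def schedule_provider_scan_example(provider):
    # Illustrative only: create a daily periodic task matching the name pattern
    # that delete_provider_scan_task() removes when the provider is deleted.
    schedule, _ = IntervalSchedule.objects.get_or_create(
        every=24, period=IntervalSchedule.HOURS
    )
    return PeriodicTask.objects.get_or_create(
        name=f"scan-perform-scheduled-{provider.id}",
        defaults={
            "interval": schedule,
            "task": "scan-perform-scheduled",  # placeholder registered task name
            "kwargs": "{}",
        },
    )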
6951
api/src/backend/api/specs/v1.yaml
Normal file
File diff suppressed because it is too large
0
api/src/backend/api/tests/__init__.py
Normal file
100
api/src/backend/api/tests/integration/test_authentication.py
Normal file
@@ -0,0 +1,100 @@
import pytest
from django.urls import reverse
from unittest.mock import patch
from rest_framework.test import APIClient

from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header


@patch("api.v1.views.MainRouter.admin_db", new="default")
@pytest.mark.django_db
def test_basic_authentication():
    client = APIClient()

    test_user = "test_email@prowler.com"
    test_password = "test_password"

    # Check that a 401 is returned when no basic authentication is provided
    no_auth_response = client.get(reverse("provider-list"))
    assert no_auth_response.status_code == 401

    # Check that we can create a new user without any kind of authentication
    user_creation_response = client.post(
        reverse("user-list"),
        data={
            "data": {
                "type": "users",
                "attributes": {
                    "name": "test",
                    "email": test_user,
                    "password": test_password,
                },
            }
        },
        format="vnd.api+json",
    )
    assert user_creation_response.status_code == 201

    # Check that using our new user's credentials we can authenticate and get the providers
    access_token, _ = get_api_tokens(client, test_user, test_password)
    auth_headers = get_authorization_header(access_token)

    auth_response = client.get(
        reverse("provider-list"),
        headers=auth_headers,
    )
    assert auth_response.status_code == 200


@pytest.mark.django_db
def test_refresh_token(create_test_user, tenants_fixture):
    client = APIClient()

    # Assert that we can obtain a new access token using the refresh one
    access_token, refresh_token = get_api_tokens(
        client, create_test_user.email, TEST_PASSWORD
    )
    valid_refresh_response = client.post(
        reverse("token-refresh"),
        data={
            "data": {
                "type": "tokens-refresh",
                "attributes": {"refresh": refresh_token},
            }
        },
        format="vnd.api+json",
    )
    assert valid_refresh_response.status_code == 200
    assert (
        valid_refresh_response.json()["data"]["attributes"]["refresh"] != refresh_token
    )

    # Assert the former refresh token gets invalidated
    invalid_refresh_response = client.post(
        reverse("token-refresh"),
        data={
            "data": {
                "type": "tokens-refresh",
                "attributes": {"refresh": refresh_token},
            }
        },
        format="vnd.api+json",
    )
    assert invalid_refresh_response.status_code == 400

    # Assert that the new refresh token could be used
    new_refresh_response = client.post(
        reverse("token-refresh"),
        data={
            "data": {
                "type": "tokens-refresh",
                "attributes": {
                    "refresh": valid_refresh_response.json()["data"]["attributes"][
                        "refresh"
                    ]
                },
            }
        },
        format="vnd.api+json",
    )
    assert new_refresh_response.status_code == 200
97
api/src/backend/api/tests/integration/test_tenants.py
Normal file
@@ -0,0 +1,97 @@
from unittest.mock import patch

import pytest
from django.urls import reverse

from conftest import TEST_USER, TEST_PASSWORD, get_api_tokens, get_authorization_header


@patch("api.v1.views.schedule_provider_scan")
@pytest.mark.django_db
def test_check_resources_between_different_tenants(
    schedule_mock,
    enforce_test_user_db_connection,
    authenticated_api_client,
    tenants_fixture,
):
    client = authenticated_api_client

    tenant1 = str(tenants_fixture[0].id)
    tenant2 = str(tenants_fixture[1].id)

    tenant1_token, _ = get_api_tokens(
        client, TEST_USER, TEST_PASSWORD, tenant_id=tenant1
    )
    tenant2_token, _ = get_api_tokens(
        client, TEST_USER, TEST_PASSWORD, tenant_id=tenant2
    )

    tenant1_headers = get_authorization_header(tenant1_token)
    tenant2_headers = get_authorization_header(tenant2_token)

    # Create a provider on tenant 1
    provider_data = {
        "data": {
            "type": "providers",
            "attributes": {
                "alias": "test_provider_tenant_1",
                "provider": "aws",
                "uid": "123456789012",
            },
        }
    }
    provider1_response = client.post(
        reverse("provider-list"),
        data=provider_data,
        format="vnd.api+json",
        headers=tenant1_headers,
    )
    assert provider1_response.status_code == 201
    provider1_id = provider1_response.json()["data"]["id"]

    # Create a provider on tenant 2
    provider_data = {
        "data": {
            "type": "providers",
            "attributes": {
                "alias": "test_provider_tenant_2",
                "provider": "aws",
                "uid": "123456789013",
            },
        }
    }
    provider2_response = client.post(
        reverse("provider-list"),
        data=provider_data,
        format="vnd.api+json",
        headers=tenant2_headers,
    )
    assert provider2_response.status_code == 201
    provider2_id = provider2_response.json()["data"]["id"]

    # Try to get the provider from tenant 1 on tenant 2 and vice versa
    tenant1_response = client.get(
        reverse("provider-detail", kwargs={"pk": provider1_id}),
        headers=tenant2_headers,
    )
    assert tenant1_response.status_code == 404
    tenant2_response = client.get(
        reverse("provider-detail", kwargs={"pk": provider1_id}),
        headers=tenant1_headers,
    )
    assert tenant2_response.status_code == 200
    assert tenant2_response.json()["data"]["id"] == provider1_id

    # Vice versa

    tenant2_response = client.get(
        reverse("provider-detail", kwargs={"pk": provider2_id}),
        headers=tenant1_headers,
    )
    assert tenant2_response.status_code == 404
    tenant1_response = client.get(
        reverse("provider-detail", kwargs={"pk": provider2_id}),
        headers=tenant2_headers,
    )
    assert tenant1_response.status_code == 200
    assert tenant1_response.json()["data"]["id"] == provider2_id
284
api/src/backend/api/tests/test_compliance.py
Normal file
@@ -0,0 +1,284 @@
from unittest.mock import patch, MagicMock

from api.compliance import (
    get_prowler_provider_checks,
    get_prowler_provider_compliance,
    load_prowler_compliance,
    load_prowler_checks,
    generate_scan_compliance,
    generate_compliance_overview_template,
)
from api.models import Provider


class TestCompliance:
    @patch("api.compliance.CheckMetadata")
    def test_get_prowler_provider_checks(self, mock_check_metadata):
        provider_type = Provider.ProviderChoices.AWS
        mock_check_metadata.get_bulk.return_value = {
            "check1": MagicMock(),
            "check2": MagicMock(),
            "check3": MagicMock(),
        }
        checks = get_prowler_provider_checks(provider_type)
        assert set(checks) == {"check1", "check2", "check3"}
        mock_check_metadata.get_bulk.assert_called_once_with(provider_type)

    @patch("api.compliance.Compliance")
    def test_get_prowler_provider_compliance(self, mock_compliance):
        provider_type = Provider.ProviderChoices.AWS
        mock_compliance.get_bulk.return_value = {
            "compliance1": MagicMock(),
            "compliance2": MagicMock(),
        }
        compliance_data = get_prowler_provider_compliance(provider_type)
        assert compliance_data == mock_compliance.get_bulk.return_value
        mock_compliance.get_bulk.assert_called_once_with(provider_type)

    @patch("api.models.Provider.ProviderChoices")
    @patch("api.compliance.get_prowler_provider_compliance")
    @patch("api.compliance.generate_compliance_overview_template")
    @patch("api.compliance.load_prowler_checks")
    def test_load_prowler_compliance(
        self,
        mock_load_prowler_checks,
        mock_generate_compliance_overview_template,
        mock_get_prowler_provider_compliance,
        mock_provider_choices,
    ):
        mock_provider_choices.values = ["aws", "azure"]

        compliance_data_aws = {"compliance_aws": MagicMock()}
        compliance_data_azure = {"compliance_azure": MagicMock()}

        compliance_data_dict = {
            "aws": compliance_data_aws,
            "azure": compliance_data_azure,
        }

        def mock_get_compliance(provider_type):
            return compliance_data_dict[provider_type]

        mock_get_prowler_provider_compliance.side_effect = mock_get_compliance

        mock_generate_compliance_overview_template.return_value = {
            "template_key": "template_value"
        }

        mock_load_prowler_checks.return_value = {"checks_key": "checks_value"}

        load_prowler_compliance()

        from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, PROWLER_CHECKS

        assert PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE == {
            "template_key": "template_value"
        }
        assert PROWLER_CHECKS == {"checks_key": "checks_value"}

        expected_prowler_compliance = compliance_data_dict
        mock_get_prowler_provider_compliance.assert_any_call("aws")
        mock_get_prowler_provider_compliance.assert_any_call("azure")
        mock_generate_compliance_overview_template.assert_called_once_with(
            expected_prowler_compliance
        )
        mock_load_prowler_checks.assert_called_once_with(expected_prowler_compliance)

    @patch("api.compliance.get_prowler_provider_checks")
    @patch("api.models.Provider.ProviderChoices")
    def test_load_prowler_checks(
        self, mock_provider_choices, mock_get_prowler_provider_checks
    ):
        mock_provider_choices.values = ["aws"]

        mock_get_prowler_provider_checks.return_value = ["check1", "check2", "check3"]

        prowler_compliance = {
            "aws": {
                "compliance1": MagicMock(
                    Requirements=[
                        MagicMock(
                            Checks=["check1", "check2"],
                        ),
                    ],
                ),
            },
        }

        expected_checks = {
            "aws": {
                "check1": {"compliance1"},
                "check2": {"compliance1"},
                "check3": set(),
            }
        }

        checks = load_prowler_checks(prowler_compliance)
        assert checks == expected_checks
        mock_get_prowler_provider_checks.assert_called_once_with("aws")

    @patch("api.compliance.PROWLER_CHECKS", new_callable=dict)
    def test_generate_scan_compliance(self, mock_prowler_checks):
        mock_prowler_checks["aws"] = {
            "check1": {"compliance1"},
            "check2": {"compliance1", "compliance2"},
        }

        compliance_overview = {
            "compliance1": {
                "requirements": {
                    "requirement1": {
                        "checks": {"check1": None, "check2": None},
                        "checks_status": {
                            "pass": 0,
                            "fail": 0,
                            "manual": 0,
                            "total": 2,
                        },
                        "status": "PASS",
                    }
                },
                "requirements_status": {"passed": 1, "failed": 0, "manual": 0},
            },
            "compliance2": {
                "requirements": {
                    "requirement2": {
                        "checks": {"check2": None},
                        "checks_status": {
                            "pass": 0,
                            "fail": 0,
                            "manual": 0,
                            "total": 1,
                        },
                        "status": "PASS",
                    }
                },
                "requirements_status": {"passed": 1, "failed": 0, "manual": 0},
            },
        }

        provider_type = "aws"
        check_id = "check2"
        status = "FAIL"

        generate_scan_compliance(compliance_overview, provider_type, check_id, status)

        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks"
            ]["check2"]
            == "FAIL"
        )
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks_status"
            ]["fail"]
            == 1
        )
        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"]["status"]
            == "FAIL"
        )
        assert compliance_overview["compliance1"]["requirements_status"]["passed"] == 0
        assert compliance_overview["compliance1"]["requirements_status"]["failed"] == 1

        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"][
                "checks"
            ]["check2"]
            == "FAIL"
        )
        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"][
                "checks_status"
            ]["fail"]
            == 1
        )
        assert (
            compliance_overview["compliance2"]["requirements"]["requirement2"]["status"]
            == "FAIL"
        )
        assert compliance_overview["compliance2"]["requirements_status"]["passed"] == 0
        assert compliance_overview["compliance2"]["requirements_status"]["failed"] == 1

        assert (
            compliance_overview["compliance1"]["requirements"]["requirement1"][
                "checks"
            ]["check1"]
            is None
        )

    @patch("api.models.Provider.ProviderChoices")
    def test_generate_compliance_overview_template(self, mock_provider_choices):
        mock_provider_choices.values = ["aws"]

        requirement1 = MagicMock(
            Id="requirement1",
            Name="Requirement 1",
            Description="Description of requirement 1",
            Attributes=[],
            Checks=["check1", "check2"],
        )
        requirement2 = MagicMock(
            Id="requirement2",
            Name="Requirement 2",
            Description="Description of requirement 2",
            Attributes=[],
            Checks=[],
        )
        compliance1 = MagicMock(
            Requirements=[requirement1, requirement2],
            Framework="Framework 1",
            Version="1.0",
            Description="Description of compliance1",
        )
        prowler_compliance = {"aws": {"compliance1": compliance1}}

        template = generate_compliance_overview_template(prowler_compliance)

        expected_template = {
            "aws": {
                "compliance1": {
                    "framework": "Framework 1",
                    "version": "1.0",
                    "provider": "aws",
                    "description": "Description of compliance1",
                    "requirements": {
                        "requirement1": {
                            "name": "Requirement 1",
                            "description": "Description of requirement 1",
                            "attributes": [],
                            "checks": {"check1": None, "check2": None},
                            "checks_status": {
                                "pass": 0,
                                "fail": 0,
                                "manual": 0,
                                "total": 2,
                            },
                            "status": "PASS",
                        },
                        "requirement2": {
                            "name": "Requirement 2",
                            "description": "Description of requirement 2",
                            "attributes": [],
                            "checks": {},
                            "checks_status": {
                                "pass": 0,
                                "fail": 0,
                                "manual": 0,
                                "total": 0,
                            },
                            "status": "PASS",
                        },
                    },
                    "requirements_status": {
                        "passed": 1,  # total_requirements - manual
                        "failed": 0,
                        "manual": 1,  # requirement2 has 0 checks
                    },
                    "total_requirements": 2,
                }
            }
        }

        assert template == expected_template
29
api/src/backend/api/tests/test_database.py
Normal file
@@ -0,0 +1,29 @@
import pytest
from django.conf import settings
from django.db.migrations.recorder import MigrationRecorder
from django.db.utils import ConnectionRouter

from api.db_router import MainRouter
from api.rls import Tenant
from config.django.base import DATABASE_ROUTERS as PROD_DATABASE_ROUTERS


class TestMainDatabaseRouter:
    @pytest.fixture(scope="module")
    def router(self):
        testing_routers = settings.DATABASE_ROUTERS.copy()
        settings.DATABASE_ROUTERS = PROD_DATABASE_ROUTERS
        yield ConnectionRouter()
        settings.DATABASE_ROUTERS = testing_routers

    @pytest.mark.parametrize("api_model", [Tenant])
    def test_router_api_models(self, api_model, router):
        assert router.db_for_read(api_model) == "default"
        assert router.db_for_write(api_model) == "default"

        assert router.allow_migrate_model(MainRouter.admin_db, api_model)
        assert not router.allow_migrate_model("default", api_model)

    def test_router_django_models(self, router):
        assert router.db_for_read(MigrationRecorder.Migration) == MainRouter.admin_db
        assert not router.db_for_read(MigrationRecorder.Migration) == "default"
108
api/src/backend/api/tests/test_db_utils.py
Normal file
@@ -0,0 +1,108 @@
from datetime import datetime, timezone
from enum import Enum
from unittest.mock import patch

from api.db_utils import enum_to_choices, one_week_from_now, generate_random_token


class TestEnumToChoices:
    def test_enum_to_choices_simple(self):
        class Color(Enum):
            RED = 1
            GREEN = 2
            BLUE = 3

        expected_result = [
            (1, "Red"),
            (2, "Green"),
            (3, "Blue"),
        ]

        result = enum_to_choices(Color)
        assert result == expected_result

    def test_enum_to_choices_with_underscores(self):
        class Status(Enum):
            PENDING_APPROVAL = "pending"
            IN_PROGRESS = "in_progress"
            COMPLETED_SUCCESSFULLY = "completed"

        expected_result = [
            ("pending", "Pending Approval"),
            ("in_progress", "In Progress"),
            ("completed", "Completed Successfully"),
        ]

        result = enum_to_choices(Status)
        assert result == expected_result

    def test_enum_to_choices_empty_enum(self):
        class EmptyEnum(Enum):
            pass

        expected_result = []

        result = enum_to_choices(EmptyEnum)
        assert result == expected_result

    def test_enum_to_choices_numeric_values(self):
        class Numbers(Enum):
            ONE = 1
            TWO = 2
            THREE = 3

        expected_result = [
            (1, "One"),
            (2, "Two"),
            (3, "Three"),
        ]

        result = enum_to_choices(Numbers)
        assert result == expected_result


class TestOneWeekFromNow:
    def test_one_week_from_now(self):
        with patch("api.db_utils.datetime") as mock_datetime:
            mock_datetime.now.return_value = datetime(2023, 1, 1, tzinfo=timezone.utc)
            expected_result = datetime(2023, 1, 8, tzinfo=timezone.utc)

            result = one_week_from_now()
            assert result == expected_result

    def test_one_week_from_now_with_timezone(self):
        with patch("api.db_utils.datetime") as mock_datetime:
            mock_datetime.now.return_value = datetime(
                2023, 6, 15, 12, 0, tzinfo=timezone.utc
            )
            expected_result = datetime(2023, 6, 22, 12, 0, tzinfo=timezone.utc)

            result = one_week_from_now()
            assert result == expected_result


class TestGenerateRandomToken:
    def test_generate_random_token_default_length(self):
        token = generate_random_token()
        assert len(token) == 14

    def test_generate_random_token_custom_length(self):
        length = 20
        token = generate_random_token(length=length)
        assert len(token) == length

    def test_generate_random_token_with_symbols(self):
        symbols = "ABC123"
        token = generate_random_token(length=10, symbols=symbols)
        assert len(token) == 10
        assert all(char in symbols for char in token)

    def test_generate_random_token_unique(self):
        tokens = {generate_random_token() for _ in range(1000)}
        # Assuming that generating 1000 tokens should result in unique values
        assert len(tokens) == 1000

    def test_generate_random_token_no_symbols_provided(self):
        token = generate_random_token(length=5, symbols="")
        # Default symbols
        assert len(token) == 5
34
api/src/backend/api/tests/test_decorators.py
Normal file
@@ -0,0 +1,34 @@
from unittest.mock import patch, call

import pytest

from api.decorators import set_tenant


@pytest.mark.django_db
class TestSetTenantDecorator:
    @patch("api.decorators.connection.cursor")
    def test_set_tenant(self, mock_cursor):
        mock_cursor.return_value.__enter__.return_value = mock_cursor

        @set_tenant
        def random_func(arg):
            return arg

        tenant_id = "1234-abcd-5678"

        result = random_func("test_arg", tenant_id=tenant_id)

        assert (
            call(f"SELECT set_config('api.tenant_id', '{tenant_id}', TRUE);")
            in mock_cursor.execute.mock_calls
        )
        assert result == "test_arg"

    def test_set_tenant_exception(self):
        @set_tenant
        def random_func(arg):
            return arg

        with pytest.raises(KeyError):
            random_func("test_arg")
54
api/src/backend/api/tests/test_middleware.py
Normal file
@@ -0,0 +1,54 @@
from unittest.mock import MagicMock, patch

import pytest
from django.http import HttpResponse
from django.test import RequestFactory

from api.middleware import APILoggingMiddleware


@pytest.mark.django_db
@patch("logging.getLogger")
def test_api_logging_middleware_logging(mock_logger):
    factory = RequestFactory()

    request = factory.get("/test-path?param1=value1&param2=value2")
    request.method = "GET"

    response = HttpResponse()
    response.status_code = 200

    get_response = MagicMock(return_value=response)

    with patch("api.middleware.extract_auth_info") as mock_extract_auth_info:
        mock_extract_auth_info.return_value = {
            "user_id": "user123",
            "tenant_id": "tenant456",
        }

        with patch("api.middleware.logging.getLogger") as mock_get_logger:
            mock_logger = MagicMock()
            mock_get_logger.return_value = mock_logger

            middleware = APILoggingMiddleware(get_response)

            with patch("api.middleware.time.time") as mock_time:
                mock_time.side_effect = [1000.0, 1001.0]  # Start time and end time

                middleware(request)

                get_response.assert_called_once_with(request)

                mock_extract_auth_info.assert_called_once_with(request)

                expected_extra = {
                    "user_id": "user123",
                    "tenant_id": "tenant456",
                    "method": "GET",
                    "path": "/test-path",
                    "query_params": {"param1": "value1", "param2": "value2"},
                    "status_code": 200,
                    "duration": 1.0,
                }

                mock_logger.info.assert_called_once_with("", extra=expected_extra)
89
api/src/backend/api/tests/test_models.py
Normal file
@@ -0,0 +1,89 @@
import pytest

from api.models import Resource, ResourceTag


@pytest.mark.django_db
class TestResourceModel:
    def test_setting_tags(self, providers_fixture):
        provider, *_ = providers_fixture

        resource = Resource.objects.create(
            tenant_id=provider.tenant_id,
            provider=provider,
            uid="arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0",
            name="My Instance 1",
            region="us-east-1",
            service="ec2",
            type="prowler-test",
        )

        tags = [
            ResourceTag.objects.create(
                tenant_id=provider.tenant_id,
                key="key",
                value="value",
            ),
            ResourceTag.objects.create(
                tenant_id=provider.tenant_id,
                key="key2",
                value="value2",
            ),
        ]

        resource.upsert_or_delete_tags(tags)

        assert len(tags) == len(resource.tags.all())

        tags_dict = resource.get_tags()

        for tag in tags:
            assert tag.key in tags_dict
            assert tag.value == tags_dict[tag.key]

    def test_adding_tags(self, resources_fixture):
        resource, *_ = resources_fixture

        tags = [
            ResourceTag.objects.create(
                tenant_id=resource.tenant_id,
                key="env",
                value="test",
            ),
        ]
        before_count = len(resource.tags.all())

        resource.upsert_or_delete_tags(tags)

        assert before_count + 1 == len(resource.tags.all())

        tags_dict = resource.get_tags()

        assert "env" in tags_dict
        assert tags_dict["env"] == "test"

    def test_adding_duplicate_tags(self, resources_fixture):
        resource, *_ = resources_fixture

        tags = resource.tags.all()

        before_count = len(resource.tags.all())

        resource.upsert_or_delete_tags(tags)

        # should be the same number of tags
        assert before_count == len(resource.tags.all())

    def test_add_tags_none(self, resources_fixture):
        resource, *_ = resources_fixture
        resource.upsert_or_delete_tags(None)

        assert len(resource.tags.all()) == 0
        assert resource.get_tags() == {}

    def test_clear_tags(self, resources_fixture):
        resource, *_ = resources_fixture
        resource.clear_tags()

        assert len(resource.tags.all()) == 0
        assert resource.get_tags() == {}
318
api/src/backend/api/tests/test_utils.py
Normal file
@@ -0,0 +1,318 @@
from datetime import datetime, timedelta, timezone
from unittest.mock import patch, MagicMock

import pytest
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from rest_framework.exceptions import ValidationError, NotFound

from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Invitation
from api.models import Provider
from api.utils import (
    merge_dicts,
    return_prowler_provider,
    initialize_prowler_provider,
    prowler_provider_connection_test,
    get_prowler_provider_kwargs,
)
from api.utils import validate_invitation


class TestMergeDicts:
    def test_simple_merge(self):
        default_dict = {"key1": "value1", "key2": "value2"}
        replacement_dict = {"key2": "new_value2", "key3": "value3"}
        expected_result = {"key1": "value1", "key2": "new_value2", "key3": "value3"}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_nested_merge(self):
        default_dict = {
            "key1": "value1",
            "key2": {"nested_key1": "nested_value1", "nested_key2": "nested_value2"},
        }
        replacement_dict = {
            "key2": {
                "nested_key2": "new_nested_value2",
                "nested_key3": "nested_value3",
            },
            "key3": "value3",
        }
        expected_result = {
            "key1": "value1",
            "key2": {
                "nested_key1": "nested_value1",
                "nested_key2": "new_nested_value2",
                "nested_key3": "nested_value3",
            },
            "key3": "value3",
        }

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_no_overlap(self):
        default_dict = {"key1": "value1"}
        replacement_dict = {"key2": "value2"}
        expected_result = {"key1": "value1", "key2": "value2"}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_replacement_dict_empty(self):
        default_dict = {"key1": "value1", "key2": "value2"}
        replacement_dict = {}
        expected_result = {"key1": "value1", "key2": "value2"}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_default_dict_empty(self):
        default_dict = {}
        replacement_dict = {"key1": "value1", "key2": "value2"}
        expected_result = {"key1": "value1", "key2": "value2"}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_nested_empty_in_replacement_dict(self):
        default_dict = {"key1": {"nested_key1": "nested_value1"}}
        replacement_dict = {"key1": {}}
        expected_result = {"key1": {}}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result

    def test_deep_nested_merge(self):
        default_dict = {"key1": {"nested_key1": {"deep_key1": "deep_value1"}}}
        replacement_dict = {"key1": {"nested_key1": {"deep_key1": "new_deep_value1"}}}
        expected_result = {"key1": {"nested_key1": {"deep_key1": "new_deep_value1"}}}

        result = merge_dicts(default_dict, replacement_dict)
        assert result == expected_result


class TestReturnProwlerProvider:
    @pytest.mark.parametrize(
        "provider_type, expected_provider",
        [
            (Provider.ProviderChoices.AWS.value, AwsProvider),
            (Provider.ProviderChoices.GCP.value, GcpProvider),
            (Provider.ProviderChoices.AZURE.value, AzureProvider),
            (Provider.ProviderChoices.KUBERNETES.value, KubernetesProvider),
        ],
    )
    def test_return_prowler_provider(self, provider_type, expected_provider):
        provider = MagicMock()
        provider.provider = provider_type
        prowler_provider = return_prowler_provider(provider)
        assert prowler_provider == expected_provider

    def test_return_prowler_provider_unsupported_provider(self):
        provider = MagicMock()
        provider.provider = "UNSUPPORTED_PROVIDER"
        with pytest.raises(ValueError):
            return return_prowler_provider(provider)


class TestInitializeProwlerProvider:
    @patch("api.utils.return_prowler_provider")
    def test_initialize_prowler_provider(self, mock_return_prowler_provider):
        provider = MagicMock()
        provider.secret.secret = {"key": "value"}
        mock_return_prowler_provider.return_value = MagicMock()

        initialize_prowler_provider(provider)
        mock_return_prowler_provider.return_value.assert_called_once_with(key="value")


class TestProwlerProviderConnectionTest:
    @patch("api.utils.return_prowler_provider")
    def test_prowler_provider_connection_test(self, mock_return_prowler_provider):
        provider = MagicMock()
        provider.uid = "1234567890"
        provider.secret.secret = {"key": "value"}
        mock_return_prowler_provider.return_value = MagicMock()

        prowler_provider_connection_test(provider)
        mock_return_prowler_provider.return_value.test_connection.assert_called_once_with(
            key="value", provider_id="1234567890", raise_on_exception=False
        )


class TestGetProwlerProviderKwargs:
    @pytest.mark.parametrize(
        "provider_type, expected_extra_kwargs",
        [
            (
                Provider.ProviderChoices.AWS.value,
                {},
            ),
            (
                Provider.ProviderChoices.AZURE.value,
                {"subscription_ids": ["provider_uid"]},
            ),
            (
                Provider.ProviderChoices.GCP.value,
                {"project_ids": ["provider_uid"]},
            ),
            (
                Provider.ProviderChoices.KUBERNETES.value,
                {"context": "provider_uid"},
            ),
        ],
    )
    def test_get_prowler_provider_kwargs(self, provider_type, expected_extra_kwargs):
        provider_uid = "provider_uid"
        secret_dict = {"key": "value"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict

        provider = MagicMock()
        provider.provider = provider_type
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider)

        expected_result = {**secret_dict, **expected_extra_kwargs}
        assert result == expected_result

    def test_get_prowler_provider_kwargs_unsupported_provider(self):
        # Setup
        provider_uid = "provider_uid"
        secret_dict = {"key": "value"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict

        provider = MagicMock()
        provider.provider = "UNSUPPORTED_PROVIDER"
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider)

        expected_result = secret_dict.copy()
        assert result == expected_result

    def test_get_prowler_provider_kwargs_no_secret(self):
        # Setup
        provider_uid = "provider_uid"
        secret_mock = MagicMock()
        secret_mock.secret = {}

        provider = MagicMock()
        provider.provider = Provider.ProviderChoices.AWS.value
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider)

        expected_result = {}
        assert result == expected_result


class TestValidateInvitation:
    @pytest.fixture
    def invitation(self):
        invitation = MagicMock(spec=Invitation)
        invitation.token = "VALID_TOKEN"
        invitation.email = "user@example.com"
        invitation.expires_at = datetime.now(timezone.utc) + timedelta(days=1)
        invitation.state = Invitation.State.PENDING
        invitation.tenant = MagicMock()
        return invitation

    def test_valid_invitation(self, invitation):
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation

            result = validate_invitation("VALID_TOKEN", "user@example.com")

            assert result == invitation
            mock_db.get.assert_called_once_with(
                token="VALID_TOKEN", email="user@example.com"
            )

    def test_invitation_not_found_raises_validation_error(self):
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.side_effect = Invitation.DoesNotExist

            with pytest.raises(ValidationError) as exc_info:
                validate_invitation("INVALID_TOKEN", "user@example.com")

            assert exc_info.value.detail == {
                "invitation_token": "Invalid invitation code."
            }
            mock_db.get.assert_called_once_with(
                token="INVALID_TOKEN", email="user@example.com"
            )

    def test_invitation_not_found_raises_not_found(self):
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.side_effect = Invitation.DoesNotExist

            with pytest.raises(NotFound) as exc_info:
                validate_invitation(
                    "INVALID_TOKEN", "user@example.com", raise_not_found=True
                )

            assert exc_info.value.detail == "Invitation is not valid."
            mock_db.get.assert_called_once_with(
                token="INVALID_TOKEN", email="user@example.com"
            )

    def test_invitation_expired(self, invitation):
        expired_time = datetime.now(timezone.utc) - timedelta(days=1)
        invitation.expires_at = expired_time

        with patch("api.utils.Invitation.objects.using") as mock_using, patch(
            "api.utils.datetime"
        ) as mock_datetime:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation
            mock_datetime.now.return_value = datetime.now(timezone.utc)

            with pytest.raises(InvitationTokenExpiredException):
                validate_invitation("VALID_TOKEN", "user@example.com")

            # Ensure the invitation state was updated to EXPIRED
            assert invitation.state == Invitation.State.EXPIRED
            invitation.save.assert_called_once_with(using=MainRouter.admin_db)

    def test_invitation_not_pending(self, invitation):
        invitation.state = Invitation.State.ACCEPTED

        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.return_value = invitation

            with pytest.raises(ValidationError) as exc_info:
                validate_invitation("VALID_TOKEN", "user@example.com")

            assert exc_info.value.detail == {
                "invitation_token": "This invitation is no longer valid."
            }

    def test_invitation_with_different_email(self):
        with patch("api.utils.Invitation.objects.using") as mock_using:
            mock_db = mock_using.return_value
            mock_db.get.side_effect = Invitation.DoesNotExist

            with pytest.raises(ValidationError) as exc_info:
                validate_invitation("VALID_TOKEN", "different@example.com")

            assert exc_info.value.detail == {
                "invitation_token": "Invalid invitation code."
            }
            mock_db.get.assert_called_once_with(
                token="VALID_TOKEN", email="different@example.com"
            )
113
api/src/backend/api/tests/test_uuid_utils.py
Normal file
@@ -0,0 +1,113 @@
from datetime import datetime, timezone
from uuid import uuid4

import pytest
from dateutil.relativedelta import relativedelta
from rest_framework_json_api.serializers import ValidationError
from uuid6 import UUID

from api.uuid_utils import (
    transform_into_uuid7,
    datetime_to_uuid7,
    datetime_from_uuid7,
    uuid7_start,
    uuid7_end,
    uuid7_range,
)


def test_transform_into_uuid7_valid():
    uuid_v7 = datetime_to_uuid7(datetime.now(timezone.utc))
    transformed_uuid = transform_into_uuid7(uuid_v7)
    assert transformed_uuid == UUID(hex=uuid_v7.hex.upper())
    assert transformed_uuid.version == 7


def test_transform_into_uuid7_invalid_version():
    uuid_v4 = uuid4()
    with pytest.raises(ValidationError) as exc_info:
        transform_into_uuid7(UUID(str(uuid_v4)))
    assert str(exc_info.value.detail[0]) == "Invalid UUIDv7 value."


@pytest.mark.parametrize(
    "input_datetime",
    [
        datetime(2024, 9, 11, 7, 20, 27, tzinfo=timezone.utc),
        datetime(2023, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
    ],
)
def test_datetime_to_uuid7(input_datetime):
    uuid7 = datetime_to_uuid7(input_datetime)
    assert isinstance(uuid7, UUID)
    assert uuid7.version == 7
    expected_timestamp_ms = int(input_datetime.timestamp() * 1000) & 0xFFFFFFFFFFFF
    assert uuid7.time == expected_timestamp_ms


@pytest.mark.parametrize(
    "input_datetime",
    [
        datetime(2024, 9, 11, 7, 20, 27, tzinfo=timezone.utc),
        datetime(2023, 1, 1, 0, 0, 0, tzinfo=timezone.utc),
    ],
)
def test_datetime_from_uuid7(input_datetime):
    uuid7 = datetime_to_uuid7(input_datetime)
    extracted_datetime = datetime_from_uuid7(uuid7)
    assert extracted_datetime == input_datetime


def test_datetime_from_uuid7_invalid():
    uuid_v4 = uuid4()
    with pytest.raises(ValueError):
        datetime_from_uuid7(UUID(str(uuid_v4)))


def test_uuid7_start():
    dt = datetime.now(timezone.utc)
    uuid = datetime_to_uuid7(dt)
    start_uuid = uuid7_start(uuid)
    expected_dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
    expected_timestamp_ms = int(expected_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF
    assert start_uuid.time == expected_timestamp_ms
    assert start_uuid.version == 7


@pytest.mark.parametrize("months_offset", [0, 1, 10, 30, 60])
def test_uuid7_end(months_offset):
    dt = datetime.now(timezone.utc)
    uuid = datetime_to_uuid7(dt)
    end_uuid = uuid7_end(uuid, months_offset)
    expected_dt = dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    expected_dt += relativedelta(months=months_offset, microseconds=-1)
    expected_timestamp_ms = int(expected_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF
    assert end_uuid.time == expected_timestamp_ms
    assert end_uuid.version == 7


def test_uuid7_range():
    dt_now = datetime.now(timezone.utc)
    uuid_list = [
        datetime_to_uuid7(dt_now),
        datetime_to_uuid7(dt_now.replace(year=2023)),
        datetime_to_uuid7(dt_now.replace(year=2024)),
        datetime_to_uuid7(dt_now.replace(year=2025)),
    ]
    start_uuid, end_uuid = uuid7_range(uuid_list)

    # Expected start of range
    start_dt = datetime_from_uuid7(min(uuid_list, key=lambda u: u.time))
    start_dt = start_dt.replace(hour=0, minute=0, second=0, microsecond=0)
    expected_start_timestamp_ms = int(start_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF

    # Expected end of range
    end_dt = datetime_from_uuid7(max(uuid_list, key=lambda u: u.time))
    end_dt = end_dt.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    end_dt += relativedelta(months=1, microseconds=-1)
    expected_end_timestamp_ms = int(end_dt.timestamp() * 1000) & 0xFFFFFFFFFFFF

    assert start_uuid.time == expected_start_timestamp_ms
    assert end_uuid.time == expected_end_timestamp_ms
    assert start_uuid.version == 7
    assert end_uuid.version == 7
3264
api/src/backend/api/tests/test_views.py
Normal file
File diff suppressed because it is too large
189
api/src/backend/api/utils.py
Normal file
189
api/src/backend/api/utils.py
Normal file
@@ -0,0 +1,189 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from prowler.providers.aws.aws_provider import AwsProvider
|
||||
from prowler.providers.azure.azure_provider import AzureProvider
|
||||
from prowler.providers.common.models import Connection
|
||||
from prowler.providers.gcp.gcp_provider import GcpProvider
|
||||
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
|
||||
from rest_framework.exceptions import ValidationError, NotFound
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.exceptions import InvitationTokenExpiredException
|
||||
from api.models import Provider, Invitation
|
||||
|
||||
|
||||
def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:
|
||||
"""
|
||||
Recursively merge two dictionaries, using `default_dict` as the base and `replacement_dict` for overriding values.
|
||||
|
||||
Args:
|
||||
default_dict (dict): The base dictionary containing default key-value pairs.
|
||||
replacement_dict (dict): The dictionary containing values that should override those in `default_dict`.
|
||||
|
||||
Returns:
|
||||
dict: A new dictionary containing all keys from `default_dict` with values from `replacement_dict` replacing
|
||||
any overlapping keys. If a key in both `default_dict` and `replacement_dict` contains dictionaries,
|
||||
this function will merge them recursively.
|
||||
"""
|
||||
result = default_dict.copy()
|
||||
|
||||
for key, value in replacement_dict.items():
|
||||
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
|
||||
if value:
|
||||
result[key] = merge_dicts(result[key], value)
|
||||
else:
|
||||
result[key] = value
|
||||
else:
|
||||
result[key] = value
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def return_prowler_provider(
|
||||
provider: Provider,
|
||||
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider]:
|
||||
"""Return the Prowler provider class based on the given provider type.
|
||||
|
||||
Args:
|
||||
provider (Provider): The provider object containing the provider type and associated secrets.
|
||||
|
||||
Returns:
|
||||
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider: The corresponding provider class.
|
||||
|
||||
Raises:
|
||||
ValueError: If the provider type specified in `provider.provider` is not supported.
|
||||
"""
|
||||
match provider.provider:
|
||||
case Provider.ProviderChoices.AWS.value:
|
||||
prowler_provider = AwsProvider
|
||||
case Provider.ProviderChoices.GCP.value:
|
||||
prowler_provider = GcpProvider
|
||||
case Provider.ProviderChoices.AZURE.value:
|
||||
prowler_provider = AzureProvider
|
||||
case Provider.ProviderChoices.KUBERNETES.value:
|
||||
prowler_provider = KubernetesProvider
|
||||
case _:
|
||||
raise ValueError(f"Provider type {provider.provider} not supported")
|
||||
return prowler_provider
|
||||
|
||||
|
||||
def get_prowler_provider_kwargs(provider: Provider) -> dict:
|
||||
"""Get the Prowler provider kwargs based on the given provider type.
|
||||
|
||||
Args:
|
||||
provider (Provider): The provider object containing the provider type and associated secret.
|
||||
|
||||
Returns:
|
||||
dict: The provider kwargs for the corresponding provider class.
|
||||
"""
|
||||
prowler_provider_kwargs = provider.secret.secret
|
||||
if provider.provider == Provider.ProviderChoices.AZURE.value:
|
||||
prowler_provider_kwargs = {
|
||||
**prowler_provider_kwargs,
|
||||
"subscription_ids": [provider.uid],
|
||||
}
|
||||
elif provider.provider == Provider.ProviderChoices.GCP.value:
|
||||
prowler_provider_kwargs = {
|
||||
**prowler_provider_kwargs,
|
||||
"project_ids": [provider.uid],
|
||||
}
|
||||
elif provider.provider == Provider.ProviderChoices.KUBERNETES.value:
|
||||
prowler_provider_kwargs = {**prowler_provider_kwargs, "context": provider.uid}
|
||||
return prowler_provider_kwargs
|
||||
|
||||
|
||||
def initialize_prowler_provider(
|
||||
provider: Provider,
|
||||
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider:
|
||||
"""Initialize a Prowler provider instance based on the given provider type.
|
||||
|
||||
Args:
|
||||
provider (Provider): The provider object containing the provider type and associated secrets.
|
||||
|
||||
Returns:
|
||||
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider: An instance of the corresponding provider class
|
||||
(`AwsProvider`, `AzureProvider`, `GcpProvider`, or `KubernetesProvider`) initialized with the
|
||||
provider's secrets.
|
||||
"""
|
||||
prowler_provider = return_prowler_provider(provider)
|
||||
prowler_provider_kwargs = get_prowler_provider_kwargs(provider)
|
||||
return prowler_provider(**prowler_provider_kwargs)
|
||||
|
||||
|
||||
def prowler_provider_connection_test(provider: Provider) -> Connection:
|
||||
"""Test the connection to a Prowler provider based on the given provider type.
|
||||
|
||||
Args:
|
||||
provider (Provider): The provider object containing the provider type and associated secrets.
|
||||
|
||||
Returns:
|
||||
Connection: A connection object representing the result of the connection test for the specified provider.
|
||||
"""
|
||||
prowler_provider = return_prowler_provider(provider)
|
||||
prowler_provider_kwargs = provider.secret.secret
|
||||
return prowler_provider.test_connection(
|
||||
**prowler_provider_kwargs, provider_id=provider.uid, raise_on_exception=False
|
||||
)
|
||||
|
||||
|
||||
def validate_invitation(
|
||||
invitation_token: str, email: str, raise_not_found=False
|
||||
) -> Invitation:
|
||||
"""
|
||||
Validates an invitation based on the provided token and email.
|
||||
|
||||
This function attempts to retrieve an Invitation object using the given
|
||||
`invitation_token` and `email`. It performs several checks to ensure that
|
||||
the invitation is valid, not expired, and in the correct state for acceptance.
|
||||
|
||||
Args:
|
||||
invitation_token (str): The token associated with the invitation.
|
||||
email (str): The email address associated with the invitation.
|
||||
raise_not_found (bool, optional): If True, raises a `NotFound` exception
|
||||
when the invitation is not found. If False, raises a `ValidationError`.
|
||||
Defaults to False.
|
||||
|
||||
Returns:
|
||||
Invitation: The validated Invitation object.
|
||||
|
||||
Raises:
|
||||
NotFound: If `raise_not_found` is True and the invitation does not exist.
|
||||
ValidationError: If the invitation does not exist and `raise_not_found`
|
||||
is False, or if the invitation is invalid or in an incorrect state.
|
||||
InvitationTokenExpiredException: If the invitation has expired.
|
||||
|
||||
Notes:
|
||||
- This function uses the admin database connector to bypass RLS protection
|
||||
since the invitation may belong to a tenant the user is not a member of yet.
|
||||
- If the invitation has expired, its state is updated to EXPIRED, and an
|
||||
`InvitationTokenExpiredException` is raised.
|
||||
- Only invitations in the PENDING state can be accepted.
|
||||
|
||||
Examples:
|
||||
invitation = validate_invitation("TOKEN123", "user@example.com")
|
||||
"""
|
||||
try:
|
||||
# Admin DB connector is used to bypass RLS protection since the invitation belongs to a tenant the user
|
||||
# is not a member of yet
|
||||
invitation = Invitation.objects.using(MainRouter.admin_db).get(
|
||||
token=invitation_token, email=email
|
||||
)
|
||||
except Invitation.DoesNotExist:
|
||||
if raise_not_found:
|
||||
raise NotFound(detail="Invitation is not valid.")
|
||||
else:
|
||||
raise ValidationError({"invitation_token": "Invalid invitation code."})
|
||||
|
||||
# Check if the invitation has expired
|
||||
if invitation.expires_at < datetime.now(timezone.utc):
|
||||
invitation.state = Invitation.State.EXPIRED
|
||||
invitation.save(using=MainRouter.admin_db)
|
||||
raise InvitationTokenExpiredException()
|
||||
|
||||
# Check the state of the invitation
|
||||
if invitation.state != Invitation.State.PENDING:
|
||||
raise ValidationError(
|
||||
{"invitation_token": "This invitation is no longer valid."}
|
||||
)
|
||||
|
||||
return invitation
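# Illustrative sketch (not part of the original diff): how a caller might use
# validate_invitation and react to the exceptions documented above. The wrapper name
# `example_accept_invitation` and its `user` argument are hypothetical.
def example_accept_invitation(invitation_token: str, user):
    try:
        invitation = validate_invitation(
            invitation_token, user.email, raise_not_found=True
        )
    except InvitationTokenExpiredException:
        # The helper has already flipped the invitation state to EXPIRED.
        raise
    # Only PENDING, non-expired invitations reach this point; membership creation
    # for the invitation's tenant would happen here.
    return invitation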
|
||||
148
api/src/backend/api/uuid_utils.py
Normal file
@@ -0,0 +1,148 @@
|
||||
from datetime import datetime, timezone
|
||||
from random import getrandbits
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from uuid6 import UUID
|
||||
|
||||
|
||||
def transform_into_uuid7(uuid_obj: UUID) -> UUID:
|
||||
"""
|
||||
Validates that the given UUID object is a UUIDv7 and returns it.
|
||||
|
||||
This function checks if the provided UUID object is of version 7.
|
||||
If it is, it returns a new UUID object constructed from the uppercase
|
||||
hexadecimal representation of the input UUID. If not, it raises a ValidationError.
|
||||
|
||||
Args:
|
||||
uuid_obj (UUID): The UUID object to validate and transform.
|
||||
|
||||
Returns:
|
||||
UUID: A new UUIDv7 object constructed from the uppercase hexadecimal
|
||||
representation of the input UUID.
|
||||
|
||||
Raises:
|
||||
ValidationError: If the provided UUID is not a version 7 UUID.
|
||||
"""
|
||||
try:
|
||||
if uuid_obj.version != 7:
|
||||
raise ValueError
|
||||
return UUID(hex=uuid_obj.hex.upper())
|
||||
except ValueError:
|
||||
raise ValidationError("Invalid UUIDv7 value.")
|
||||
|
||||
|
||||
def datetime_to_uuid7(dt: datetime) -> UUID:
|
||||
"""
|
||||
Generates a UUIDv7 from a given datetime object.
|
||||
|
||||
Constructs a UUIDv7 using the provided datetime timestamp.
|
||||
Ensures that the version and variant bits are set correctly.
|
||||
|
||||
Args:
|
||||
dt: A datetime object representing the desired timestamp for the UUIDv7.
|
||||
|
||||
Returns:
|
||||
A UUIDv7 object corresponding to the given datetime.
|
||||
"""
|
||||
timestamp_ms = int(dt.timestamp() * 1000) & 0xFFFFFFFFFFFF # 48 bits
|
||||
|
||||
# Generate 12 bits of randomness for the sequence
|
||||
rand_seq = getrandbits(12)
|
||||
# Generate 62 bits of randomness for the node
|
||||
rand_node = getrandbits(62)
|
||||
|
||||
# Build the UUID integer
|
||||
uuid_int = timestamp_ms << 80 # Shift timestamp to bits 80-127
|
||||
|
||||
# Set the version to 7 in bits 76-79
|
||||
uuid_int |= 0x7 << 76
|
||||
|
||||
# Set 12 bits of randomness in bits 64-75
|
||||
uuid_int |= rand_seq << 64
|
||||
|
||||
# Set the variant to "10" in bits 62-63
|
||||
uuid_int |= 0x2 << 62
|
||||
|
||||
# Set 62 bits of randomness in bits 0-61
|
||||
uuid_int |= rand_node
|
||||
|
||||
return UUID(int=uuid_int)
|
||||
|
||||
|
||||
def datetime_from_uuid7(uuid7: UUID) -> datetime:
|
||||
"""
|
||||
Extracts the timestamp from a UUIDv7 and returns it as a datetime object.
|
||||
|
||||
Args:
|
||||
uuid7: A UUIDv7 object.
|
||||
|
||||
Returns:
|
||||
A datetime object representing the timestamp encoded in the UUIDv7.
|
||||
"""
|
||||
timestamp_ms = uuid7.time
|
||||
return datetime.fromtimestamp(timestamp_ms / 1000, tz=timezone.utc)
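# Worked example (not part of the original diff), using only the helpers above: the
# timestamp survives the round trip at millisecond precision, while the random sequence
# and node bits differ on every call. The chosen datetime is arbitrary.
def _example_uuid7_roundtrip() -> bool:
    original = datetime(2024, 1, 2, 3, 4, 5, tzinfo=timezone.utc)
    generated = datetime_to_uuid7(original)      # version 7, variant "10"
    recovered = datetime_from_uuid7(generated)   # millisecond-precision timestamp
    return recovered == original                 # True for whole-second inputs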
|
||||
|
||||
|
||||
def uuid7_start(uuid_obj: UUID) -> UUID:
|
||||
"""
|
||||
Returns a UUIDv7 that represents the start of the day for the given UUID.
|
||||
|
||||
Args:
|
||||
uuid_obj: A UUIDv7 object.
|
||||
|
||||
Returns:
|
||||
A UUIDv7 object representing the start of the day for the given UUID's timestamp.
|
||||
"""
|
||||
start_of_day = datetime_from_uuid7(uuid_obj).replace(
|
||||
hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
return datetime_to_uuid7(start_of_day)
|
||||
|
||||
|
||||
def uuid7_end(uuid_obj: UUID, offset_months: int = 1) -> UUID:
|
||||
"""
|
||||
Returns a UUIDv7 that represents the end of the month for the given UUID.
|
||||
|
||||
Args:
|
||||
uuid_obj: A UUIDv7 object.
|
||||
offset_months: Number of months to offset from the given UUID's date. Defaults to 1 so the function
works when partitions are not being used; when they are, pass the value configured in FINDINGS_TABLE_PARTITION_MONTHS.
|
||||
|
||||
Returns:
|
||||
A UUIDv7 object representing the end of the month for the given UUID's date plus offset_months.
|
||||
"""
|
||||
end_of_month = datetime_from_uuid7(uuid_obj).replace(
|
||||
day=1, hour=0, minute=0, second=0, microsecond=0
|
||||
)
|
||||
end_of_month += relativedelta(months=offset_months, microseconds=-1)
|
||||
return datetime_to_uuid7(end_of_month)
|
||||
|
||||
|
||||
def uuid7_range(uuid_list: list[UUID]) -> list[UUID]:
|
||||
"""
|
||||
For the given list of UUIDv7s, returns the start and end UUIDv7 values that represent
|
||||
the range of days covered by the UUIDs.
|
||||
|
||||
Args:
|
||||
uuid_list: A list of UUIDv7 objects.
|
||||
|
||||
Returns:
|
||||
A list containing two UUIDv7 objects: the start and end of the day range.
|
||||
|
||||
Raises:
|
||||
ValidationError: If the list is empty or contains invalid UUIDv7 objects.
|
||||
"""
|
||||
if not uuid_list:
|
||||
raise ValidationError("UUID list is empty.")
|
||||
|
||||
try:
|
||||
start_uuid = min(uuid_list, key=lambda u: u.time)
|
||||
end_uuid = max(uuid_list, key=lambda u: u.time)
|
||||
except AttributeError:
|
||||
raise ValidationError("Invalid UUIDv7 objects in the list.")
|
||||
|
||||
start_range = uuid7_start(start_uuid)
|
||||
end_range = uuid7_end(end_uuid)
|
||||
|
||||
return [start_range, end_range]
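# Illustrative example (not part of the original diff): given UUIDv7 primary keys created
# at different times, uuid7_range produces boundary UUIDs suitable for range filtering a
# UUIDv7-keyed (and possibly partitioned) table, e.g. id__gte=start, id__lte=end.
# The datetimes below are arbitrary example values.
def _example_uuid7_range() -> list[UUID]:
    ids = [
        datetime_to_uuid7(datetime(2024, 3, 5, 10, 30, tzinfo=timezone.utc)),
        datetime_to_uuid7(datetime(2024, 3, 20, 8, 0, tzinfo=timezone.utc)),
    ]
    start, end = uuid7_range(ids)
    # start encodes 2024-03-05 00:00:00 UTC; end encodes the end of March 2024
    # (with the default one-month offset).
    return [start, end]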
|
||||
0
api/src/backend/api/v1/__init__.py
Normal file
1272
api/src/backend/api/v1/serializers.py
Normal file
File diff suppressed because it is too large
86
api/src/backend/api/v1/urls.py
Normal file
@@ -0,0 +1,86 @@
|
||||
from django.urls import path, include
|
||||
from drf_spectacular.views import SpectacularRedocView
|
||||
from rest_framework_nested import routers
|
||||
|
||||
from api.v1.views import (
|
||||
CustomTokenObtainView,
|
||||
CustomTokenRefreshView,
|
||||
SchemaView,
|
||||
UserViewSet,
|
||||
TenantViewSet,
|
||||
TenantMembersViewSet,
|
||||
MembershipViewSet,
|
||||
ProviderViewSet,
|
||||
ScanViewSet,
|
||||
TaskViewSet,
|
||||
ResourceViewSet,
|
||||
FindingViewSet,
|
||||
ProviderGroupViewSet,
|
||||
ProviderSecretViewSet,
|
||||
InvitationViewSet,
|
||||
InvitationAcceptViewSet,
|
||||
OverviewViewSet,
|
||||
ComplianceOverviewViewSet,
|
||||
)
|
||||
|
||||
router = routers.DefaultRouter(trailing_slash=False)
|
||||
|
||||
router.register(r"users", UserViewSet, basename="user")
|
||||
router.register(r"tenants", TenantViewSet, basename="tenant")
|
||||
router.register(r"providers", ProviderViewSet, basename="provider")
|
||||
router.register(r"provider_groups", ProviderGroupViewSet, basename="providergroup")
|
||||
router.register(r"scans", ScanViewSet, basename="scan")
|
||||
router.register(r"tasks", TaskViewSet, basename="task")
|
||||
router.register(r"resources", ResourceViewSet, basename="resource")
|
||||
router.register(r"findings", FindingViewSet, basename="finding")
|
||||
router.register(
|
||||
r"compliance-overviews", ComplianceOverviewViewSet, basename="complianceoverview"
|
||||
)
|
||||
router.register(r"overviews", OverviewViewSet, basename="overview")
|
||||
|
||||
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
|
||||
tenants_router.register(
|
||||
r"memberships", TenantMembersViewSet, basename="tenant-membership"
|
||||
)
|
||||
|
||||
users_router = routers.NestedSimpleRouter(router, r"users", lookup="user")
|
||||
users_router.register(r"memberships", MembershipViewSet, basename="user-membership")
|
||||
|
||||
urlpatterns = [
|
||||
path("tokens", CustomTokenObtainView.as_view(), name="token-obtain"),
|
||||
path("tokens/refresh", CustomTokenRefreshView.as_view(), name="token-refresh"),
|
||||
path(
|
||||
"providers/secrets",
|
||||
ProviderSecretViewSet.as_view({"get": "list", "post": "create"}),
|
||||
name="providersecret-list",
|
||||
),
|
||||
path(
|
||||
"providers/secrets/<uuid:pk>",
|
||||
ProviderSecretViewSet.as_view(
|
||||
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
|
||||
),
|
||||
name="providersecret-detail",
|
||||
),
|
||||
path(
|
||||
"tenants/invitations",
|
||||
InvitationViewSet.as_view({"get": "list", "post": "create"}),
|
||||
name="invitation-list",
|
||||
),
|
||||
path(
|
||||
"tenants/invitations/<uuid:pk>",
|
||||
InvitationViewSet.as_view(
|
||||
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
|
||||
),
|
||||
name="invitation-detail",
|
||||
),
|
||||
path(
|
||||
"invitations/accept",
|
||||
InvitationAcceptViewSet.as_view({"post": "accept"}),
|
||||
name="invitation-accept",
|
||||
),
|
||||
path("", include(router.urls)),
|
||||
path("", include(tenants_router.urls)),
|
||||
path("", include(users_router.urls)),
|
||||
path("schema", SchemaView.as_view(), name="schema"),
|
||||
path("docs", SpectacularRedocView.as_view(url_name="schema"), name="docs"),
|
||||
]
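# Illustrative mapping (not part of the original diff): with this module mounted under
# "api/v1/" in config/urls.py, the routers above are expected to expose endpoints such as
#   /api/v1/providers                    (list/create via ProviderViewSet)
#   /api/v1/providers/<pk>               (retrieve/update/delete)
#   /api/v1/tenants/<tenant_pk>/memberships
#   /api/v1/users/<user_pk>/memberships
#   /api/v1/tokens  and  /api/v1/tokens/refresh
# The exact HTTP methods depend on each ViewSet's configuration.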
|
||||
1384
api/src/backend/api/v1/views.py
Normal file
File diff suppressed because it is too large
22
api/src/backend/api/validators.py
Normal file
@@ -0,0 +1,22 @@
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
|
||||
class MaximumLengthValidator:
|
||||
def __init__(self, max_length=72):
|
||||
self.max_length = max_length
|
||||
|
||||
def validate(self, password, user=None):
|
||||
if len(password) > self.max_length:
|
||||
raise ValidationError(
|
||||
_(
|
||||
"This password is too long. It must contain no more than %(max_length)d characters."
|
||||
),
|
||||
code="password_too_long",
|
||||
params={"max_length": self.max_length},
|
||||
)
|
||||
|
||||
def get_help_text(self):
|
||||
return _(
|
||||
f"Your password must contain no more than {self.max_length} characters."
|
||||
)
|
||||
0
api/src/backend/config/__init__.py
Normal file
16
api/src/backend/config/asgi.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""
|
||||
ASGI config for backend project.
|
||||
|
||||
It exposes the ASGI callable as a module-level variable named ``application``.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/5.0/howto/deployment/asgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from django.core.asgi import get_asgi_application
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.production")
|
||||
|
||||
application = get_asgi_application()
|
||||
46
api/src/backend/config/celery.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from celery import Celery, Task
|
||||
|
||||
celery_app = Celery("tasks")
|
||||
|
||||
celery_app.config_from_object("django.conf:settings", namespace="CELERY")
|
||||
celery_app.conf.update(result_extended=True)
|
||||
|
||||
celery_app.autodiscover_tasks(["api"])
|
||||
|
||||
|
||||
class RLSTask(Task):
|
||||
def apply_async(
|
||||
self,
|
||||
args=None,
|
||||
kwargs=None,
|
||||
task_id=None,
|
||||
producer=None,
|
||||
link=None,
|
||||
link_error=None,
|
||||
shadow=None,
|
||||
**options,
|
||||
):
|
||||
from api.models import Task as APITask
|
||||
from django_celery_results.models import TaskResult
|
||||
|
||||
result = super().apply_async(
|
||||
args=args,
|
||||
kwargs=kwargs,
|
||||
task_id=task_id,
|
||||
producer=producer,
|
||||
link=link,
|
||||
link_error=link_error,
|
||||
shadow=shadow,
|
||||
**options,
|
||||
)
|
||||
task_result_instance = TaskResult.objects.get(task_id=result.task_id)
|
||||
from api.db_utils import tenant_transaction
|
||||
|
||||
tenant_id = kwargs.get("tenant_id")
|
||||
with tenant_transaction(tenant_id):
|
||||
APITask.objects.create(
|
||||
id=task_result_instance.task_id,
|
||||
tenant_id=tenant_id,
|
||||
task_runner_task=task_result_instance,
|
||||
)
|
||||
return result
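# Illustrative sketch (not part of the original diff): a task that uses RLSTask as its base
# gets a matching api.models.Task row created inside the tenant transaction every time it is
# queued. The task below is a hypothetical example, not one of the real API tasks.
from celery import shared_task


@shared_task(base=RLSTask, name="example-rls-task")
def example_rls_task(tenant_id: str = None):
    """Placeholder body; real tasks receive tenant_id so RLSTask can scope the Task row."""
    return tenant_id


# Queuing it would look like:
#   example_rls_task.apply_async(kwargs={"tenant_id": str(tenant.id)})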
|
||||
230
api/src/backend/config/custom_logging.py
Normal file
@@ -0,0 +1,230 @@
|
||||
import json
|
||||
import logging
|
||||
from enum import StrEnum
|
||||
|
||||
from django_guid.log_filters import CorrelationId
|
||||
|
||||
from config.env import env
|
||||
|
||||
|
||||
class BackendLogger(StrEnum):
|
||||
GUNICORN = "gunicorn"
|
||||
GUNICORN_ACCESS = "gunicorn.access"
|
||||
GUNICORN_ERROR = "gunicorn.error"
|
||||
DJANGO = "django"
|
||||
SECURITY = "django.security"
|
||||
DB = "django.db"
|
||||
API = "api"
|
||||
TASKS = "tasks"
|
||||
|
||||
|
||||
# Formatters
|
||||
|
||||
|
||||
class NDJSONFormatter(logging.Formatter):
|
||||
"""NDJSON custom formatter for logging messages.
|
||||
|
||||
If available, it will include all kinds of API request metadata.
|
||||
"""
|
||||
|
||||
def format(self, record):
|
||||
log_record = {
|
||||
"timestamp": self.formatTime(record, self.datefmt),
|
||||
"level": record.levelname,
|
||||
"message": record.getMessage(),
|
||||
"logger": record.name,
|
||||
"module": record.module,
|
||||
"pathname": record.pathname,
|
||||
"lineno": record.lineno,
|
||||
"funcName": record.funcName,
|
||||
"process": record.process,
|
||||
"thread": record.thread,
|
||||
"transaction_id": record.transaction_id
|
||||
if hasattr(record, "transaction_id")
|
||||
else None,
|
||||
}
|
||||
|
||||
# Add REST API extra fields
|
||||
if hasattr(record, "user_id"):
|
||||
log_record["user_id"] = record.user_id
|
||||
if hasattr(record, "tenant_id"):
|
||||
log_record["tenant_id"] = record.tenant_id
|
||||
if hasattr(record, "method"):
|
||||
log_record["method"] = record.method
|
||||
if hasattr(record, "path"):
|
||||
log_record["path"] = record.path
|
||||
if hasattr(record, "query_params"):
|
||||
log_record["query_params"] = record.query_params
|
||||
if hasattr(record, "duration"):
|
||||
log_record["duration"] = record.duration
|
||||
if hasattr(record, "status_code"):
|
||||
log_record["status_code"] = record.status_code
|
||||
|
||||
if record.exc_info:
|
||||
log_record["exc_info"] = self.formatException(record.exc_info)
|
||||
|
||||
return json.dumps(log_record)
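# Illustrative example (not part of the original diff) of the shape this formatter is
# expected to emit for a request log record; all field values below are made up.
#
# {"timestamp": "2024-01-02 03:04:05", "level": "INFO", "message": "Request completed",
#  "logger": "api", "module": "middleware", "pathname": "/app/api/middleware.py",
#  "lineno": 42, "funcName": "__call__", "process": 1, "thread": 140000000000000,
#  "transaction_id": "3c6e0b8a9c15224a8228b9a98ca1531d", "method": "GET",
#  "path": "/api/v1/providers", "duration": 0.123, "status_code": 200}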
|
||||
|
||||
|
||||
class HumanReadableFormatter(logging.Formatter):
|
||||
"""Human-readable custom formatter for logging messages.
|
||||
|
||||
If available, it will include all kinds of API request metadata.
|
||||
"""
|
||||
|
||||
def format(self, record):
|
||||
log_components = [
|
||||
f"{self.formatTime(record, self.datefmt)}",
|
||||
f"[{record.name}]",
|
||||
f"{record.levelname}:",
|
||||
f"({record.module})",
|
||||
f"[module={record.module}",
|
||||
f"path={record.pathname}",
|
||||
f"line={record.lineno}",
|
||||
f"function={record.funcName}",
|
||||
f"process={record.process}",
|
||||
f"thread={record.thread}",
|
||||
f"transaction-id={record.transaction_id if hasattr(record, 'transaction_id') else None}]",
|
||||
f"{record.getMessage()}",
|
||||
]
|
||||
|
||||
# Add REST API extra fields
|
||||
if hasattr(record, "user_id"):
|
||||
log_components.append(f"({record.user_id})")
|
||||
if hasattr(record, "tenant_id"):
|
||||
log_components.append(f"[{record.tenant_id}]")
|
||||
if hasattr(record, "method"):
|
||||
log_components.append(f'"{record.method} {record.path}"')
|
||||
if hasattr(record, "query_params"):
|
||||
log_components.append(f"with parameters {record.query_params}")
|
||||
if hasattr(record, "duration"):
|
||||
log_components.append(f"done in {record.duration}s:")
|
||||
if hasattr(record, "status_code"):
|
||||
log_components.append(f"{record.status_code}")
|
||||
|
||||
if record.exc_info:
|
||||
log_components.append(self.formatException(record.exc_info))
|
||||
|
||||
return " ".join(log_components)
|
||||
|
||||
|
||||
# Filters
|
||||
|
||||
|
||||
class TransactionIdFilter(CorrelationId):
|
||||
"""Logging filter class.
|
||||
|
||||
Used to override the `correlation_id_field` parameter in the parent class to use a different name.
|
||||
"""
|
||||
|
||||
CORRELATION_ID_FIELD = "transaction_id"
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(correlation_id_field=self.CORRELATION_ID_FIELD)
|
||||
|
||||
|
||||
# Logging settings
|
||||
|
||||
LEVEL = env("DJANGO_LOGGING_LEVEL", default="INFO")
|
||||
FORMATTER = env("DJANGO_LOGGING_FORMATTER", default="ndjson")
|
||||
|
||||
LOGGING = {
|
||||
"version": 1,
|
||||
"disable_existing_loggers": True,
|
||||
"filters": {"transaction_id": {"()": TransactionIdFilter}},
|
||||
"formatters": {
|
||||
"ndjson": {
|
||||
"()": NDJSONFormatter,
|
||||
"datefmt": "%Y-%m-%d %H:%M:%S",
|
||||
},
|
||||
"human_readable": {
|
||||
"()": HumanReadableFormatter,
|
||||
"datefmt": "%Y-%m-%d %H:%M:%S",
|
||||
},
|
||||
},
|
||||
"handlers": {
|
||||
"gunicorn_console": {
|
||||
"level": LEVEL,
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": FORMATTER,
|
||||
"filters": ["transaction_id"],
|
||||
},
|
||||
"django_console": {
|
||||
"level": LEVEL,
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": FORMATTER,
|
||||
"filters": ["transaction_id"],
|
||||
},
|
||||
"api_console": {
|
||||
"level": LEVEL,
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": FORMATTER,
|
||||
"filters": ["transaction_id"],
|
||||
},
|
||||
"db_console": {
|
||||
"level": f"{'DEBUG' if LEVEL == 'DEBUG' else 'INFO'}",
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": FORMATTER,
|
||||
"filters": ["transaction_id"],
|
||||
},
|
||||
"security_console": {
|
||||
"level": LEVEL,
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": FORMATTER,
|
||||
"filters": ["transaction_id"],
|
||||
},
|
||||
"tasks_console": {
|
||||
"level": LEVEL,
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": FORMATTER,
|
||||
"filters": ["transaction_id"],
|
||||
},
|
||||
},
|
||||
"loggers": {
|
||||
BackendLogger.GUNICORN: {
|
||||
"handlers": ["gunicorn_console"],
|
||||
"level": LEVEL,
|
||||
"propagate": False,
|
||||
},
|
||||
BackendLogger.GUNICORN_ACCESS: {
|
||||
"handlers": ["gunicorn_console"],
|
||||
"level": "CRITICAL",
|
||||
"propagate": False,
|
||||
},
|
||||
BackendLogger.GUNICORN_ERROR: {
|
||||
"handlers": ["gunicorn_console"],
|
||||
"level": LEVEL,
|
||||
"propagate": False,
|
||||
},
|
||||
BackendLogger.DJANGO: {
|
||||
"handlers": ["django_console"],
|
||||
"level": "WARNING",
|
||||
"propagate": True,
|
||||
},
|
||||
BackendLogger.DB: {
|
||||
"handlers": ["db_console"],
|
||||
"level": LEVEL,
|
||||
"propagate": False,
|
||||
},
|
||||
BackendLogger.SECURITY: {
|
||||
"handlers": ["security_console"],
|
||||
"level": LEVEL,
|
||||
"propagate": False,
|
||||
},
|
||||
BackendLogger.API: {
|
||||
"handlers": ["api_console"],
|
||||
"level": LEVEL,
|
||||
"propagate": False,
|
||||
},
|
||||
BackendLogger.TASKS: {
|
||||
"handlers": ["tasks_console"],
|
||||
"level": LEVEL,
|
||||
"propagate": False,
|
||||
},
|
||||
},
|
||||
# Root logger configuration required by Gunicorn
|
||||
"root": {
|
||||
"level": "ERROR",
|
||||
"handlers": ["gunicorn_console"],
|
||||
},
|
||||
}
|
||||
0
api/src/backend/config/django/__init__.py
Normal file
209
api/src/backend/config/django/base.py
Normal file
@@ -0,0 +1,209 @@
|
||||
from datetime import timedelta
|
||||
|
||||
from config.custom_logging import LOGGING # noqa
|
||||
from config.env import BASE_DIR, env # noqa
|
||||
from config.settings.celery import * # noqa
|
||||
from config.settings.partitions import * # noqa
|
||||
|
||||
SECRET_KEY = env("SECRET_KEY", default="secret")
|
||||
DEBUG = env.bool("DJANGO_DEBUG", default=False)
|
||||
ALLOWED_HOSTS = ["localhost", "127.0.0.1"]
|
||||
|
||||
# Application definition
|
||||
|
||||
INSTALLED_APPS = [
|
||||
"django.contrib.admin",
|
||||
"django.contrib.auth",
|
||||
"django.contrib.contenttypes",
|
||||
"django.contrib.sessions",
|
||||
"django.contrib.messages",
|
||||
"django.contrib.staticfiles",
|
||||
"django.contrib.postgres",
|
||||
"psqlextra",
|
||||
"api",
|
||||
"rest_framework",
|
||||
"corsheaders",
|
||||
"drf_spectacular",
|
||||
"django_guid",
|
||||
"rest_framework_json_api",
|
||||
"django_celery_results",
|
||||
"django_celery_beat",
|
||||
"rest_framework_simplejwt.token_blacklist",
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
"django_guid.middleware.guid_middleware",
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"django.contrib.sessions.middleware.SessionMiddleware",
|
||||
"corsheaders.middleware.CorsMiddleware",
|
||||
"django.middleware.common.CommonMiddleware",
|
||||
"django.middleware.csrf.CsrfViewMiddleware",
|
||||
"django.contrib.auth.middleware.AuthenticationMiddleware",
|
||||
"django.contrib.messages.middleware.MessageMiddleware",
|
||||
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
||||
"api.middleware.APILoggingMiddleware",
|
||||
]
|
||||
|
||||
CORS_ALLOWED_ORIGINS = ["http://localhost", "http://127.0.0.1"]
|
||||
|
||||
ROOT_URLCONF = "config.urls"
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
"BACKEND": "django.template.backends.django.DjangoTemplates",
|
||||
"DIRS": [],
|
||||
"APP_DIRS": True,
|
||||
"OPTIONS": {
|
||||
"context_processors": [
|
||||
"django.template.context_processors.debug",
|
||||
"django.template.context_processors.request",
|
||||
"django.contrib.auth.context_processors.auth",
|
||||
"django.contrib.messages.context_processors.messages",
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
REST_FRAMEWORK = {
|
||||
"DEFAULT_SCHEMA_CLASS": "drf_spectacular_jsonapi.schemas.openapi.JsonApiAutoSchema",
|
||||
"DEFAULT_AUTHENTICATION_CLASSES": (
|
||||
"rest_framework_simplejwt.authentication.JWTAuthentication",
|
||||
),
|
||||
"PAGE_SIZE": 10,
|
||||
"EXCEPTION_HANDLER": "api.exceptions.custom_exception_handler",
|
||||
"DEFAULT_PAGINATION_CLASS": "drf_spectacular_jsonapi.schemas.pagination.JsonApiPageNumberPagination",
|
||||
"DEFAULT_PARSER_CLASSES": (
|
||||
"rest_framework_json_api.parsers.JSONParser",
|
||||
"rest_framework.parsers.FormParser",
|
||||
"rest_framework.parsers.MultiPartParser",
|
||||
),
|
||||
"DEFAULT_RENDERER_CLASSES": ("api.renderers.APIJSONRenderer",),
|
||||
"DEFAULT_METADATA_CLASS": "rest_framework_json_api.metadata.JSONAPIMetadata",
|
||||
"DEFAULT_FILTER_BACKENDS": (
|
||||
"rest_framework_json_api.filters.QueryParameterValidationFilter",
|
||||
"rest_framework_json_api.filters.OrderingFilter",
|
||||
"rest_framework_json_api.django_filters.backends.DjangoFilterBackend",
|
||||
"rest_framework.filters.SearchFilter",
|
||||
),
|
||||
"SEARCH_PARAM": "filter[search]",
|
||||
"TEST_REQUEST_RENDERER_CLASSES": (
|
||||
"rest_framework_json_api.renderers.JSONRenderer",
|
||||
),
|
||||
"TEST_REQUEST_DEFAULT_FORMAT": "vnd.api+json",
|
||||
"JSON_API_UNIFORM_EXCEPTIONS": True,
|
||||
}
|
||||
|
||||
SPECTACULAR_SETTINGS = {
|
||||
"SERVE_INCLUDE_SCHEMA": False,
|
||||
"COMPONENT_SPLIT_REQUEST": True,
|
||||
"PREPROCESSING_HOOKS": [
|
||||
"drf_spectacular_jsonapi.hooks.fix_nested_path_parameters",
|
||||
],
|
||||
}
|
||||
|
||||
WSGI_APPLICATION = "config.wsgi.application"
|
||||
|
||||
DJANGO_GUID = {
|
||||
"GUID_HEADER_NAME": "Transaction-ID",
|
||||
"VALIDATE_GUID": True,
|
||||
"RETURN_HEADER": True,
|
||||
"EXPOSE_HEADER": True,
|
||||
"INTEGRATIONS": [],
|
||||
"IGNORE_URLS": [],
|
||||
"UUID_LENGTH": 32,
|
||||
}
|
||||
|
||||
DATABASE_ROUTERS = ["api.db_router.MainRouter"]
|
||||
|
||||
|
||||
# Password validation
|
||||
# https://docs.djangoproject.com/en/5.0/ref/settings/#auth-password-validators
|
||||
|
||||
AUTH_USER_MODEL = "api.User"
|
||||
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
|
||||
},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
|
||||
"OPTIONS": {"min_length": 12},
|
||||
},
|
||||
{
|
||||
"NAME": "api.validators.MaximumLengthValidator",
|
||||
"OPTIONS": {
|
||||
"max_length": 72,
|
||||
},
|
||||
},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
|
||||
},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
|
||||
},
|
||||
]
|
||||
|
||||
SIMPLE_JWT = {
|
||||
# Token lifetime settings
|
||||
"ACCESS_TOKEN_LIFETIME": timedelta(
|
||||
minutes=env.int("DJANGO_ACCESS_TOKEN_LIFETIME", 30)
|
||||
),
|
||||
"REFRESH_TOKEN_LIFETIME": timedelta(
|
||||
minutes=env.int("DJANGO_REFRESH_TOKEN_LIFETIME", 60 * 24)
|
||||
),
|
||||
"ROTATE_REFRESH_TOKENS": True,
|
||||
"BLACKLIST_AFTER_ROTATION": True,
|
||||
# Algorithm and keys
|
||||
"ALGORITHM": "RS256",
|
||||
"SIGNING_KEY": env.str("DJANGO_TOKEN_SIGNING_KEY", "").replace("\\n", "\n"),
|
||||
"VERIFYING_KEY": env.str("DJANGO_TOKEN_VERIFYING_KEY", "").replace("\\n", "\n"),
|
||||
# Authorization header configuration
|
||||
"AUTH_HEADER_TYPES": ("Bearer",),
|
||||
"AUTH_HEADER_NAME": "HTTP_AUTHORIZATION",
|
||||
# Custom serializers
|
||||
"TOKEN_OBTAIN_SERIALIZER": "api.serializers.TokenSerializer",
|
||||
"TOKEN_REFRESH_SERIALIZER": "api.serializers.TokenRefreshSerializer",
|
||||
# Standard JWT claims
|
||||
"TOKEN_TYPE_CLAIM": "typ",
|
||||
"JTI_CLAIM": "jti",
|
||||
"USER_ID_FIELD": "id",
|
||||
"USER_ID_CLAIM": "sub",
|
||||
# Issuer and audience claims. For now these keep their default values; they may change in the future.
|
||||
"AUDIENCE": env.str("DJANGO_JWT_AUDIENCE", "https://api.prowler.com"),
|
||||
"ISSUER": env.str("DJANGO_JWT_ISSUER", "https://api.prowler.com"),
|
||||
# Additional security settings
|
||||
"UPDATE_LAST_LOGIN": True,
|
||||
}
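# Illustrative sketch (not part of the original diff): SIMPLE_JWT is configured for RS256,
# so DJANGO_TOKEN_SIGNING_KEY and DJANGO_TOKEN_VERIFYING_KEY must hold a PEM-encoded RSA
# key pair. One way to generate such a pair with the `cryptography` package (assumed to be
# available; run this outside the settings module, e.g. in a one-off script):
#
#   from cryptography.hazmat.primitives import serialization
#   from cryptography.hazmat.primitives.asymmetric import rsa
#
#   private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
#   signing_pem = private_key.private_bytes(
#       serialization.Encoding.PEM,
#       serialization.PrivateFormat.PKCS8,
#       serialization.NoEncryption(),
#   ).decode()
#   verifying_pem = private_key.public_key().public_bytes(
#       serialization.Encoding.PEM,
#       serialization.PublicFormat.SubjectPublicKeyInfo,
#   ).decode()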
|
||||
|
||||
SECRETS_ENCRYPTION_KEY = env.str("DJANGO_SECRETS_ENCRYPTION_KEY", "")
|
||||
|
||||
# Internationalization
|
||||
# https://docs.djangoproject.com/en/5.0/topics/i18n/
|
||||
|
||||
LANGUAGE_CODE = "en-us"
|
||||
LANGUAGES = [
|
||||
("en", "English"),
|
||||
]
|
||||
|
||||
TIME_ZONE = "UTC"
|
||||
|
||||
USE_I18N = True
|
||||
|
||||
USE_TZ = True
|
||||
|
||||
# Static files (CSS, JavaScript, Images)
|
||||
# https://docs.djangoproject.com/en/5.0/howto/static-files/
|
||||
|
||||
STATIC_URL = "static/"
|
||||
|
||||
# Default primary key field type
|
||||
# https://docs.djangoproject.com/en/5.0/ref/settings/#default-auto-field
|
||||
|
||||
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
|
||||
|
||||
# Cache settings
|
||||
CACHE_MAX_AGE = env.int("DJANGO_CACHE_MAX_AGE", 3600)
|
||||
CACHE_STALE_WHILE_REVALIDATE = env.int("DJANGO_STALE_WHILE_REVALIDATE", 60)
|
||||
|
||||
|
||||
TESTING = False
|
||||
40
api/src/backend/config/django/devel.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from config.django.base import * # noqa
|
||||
from config.env import env
|
||||
|
||||
|
||||
DEBUG = env.bool("DJANGO_DEBUG", default=True)
|
||||
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["*"])
|
||||
|
||||
# Database
|
||||
DATABASES = {
|
||||
"prowler_user": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_DB", default="prowler_db"),
|
||||
"USER": env("POSTGRES_USER", default="prowler_user"),
|
||||
"PASSWORD": env("POSTGRES_PASSWORD", default="prowler"),
|
||||
"HOST": env("POSTGRES_HOST", default="postgres-db"),
|
||||
"PORT": env("POSTGRES_PORT", default="5432"),
|
||||
},
|
||||
"admin": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_DB", default="prowler_db"),
|
||||
"USER": env("POSTGRES_ADMIN_USER", default="prowler"),
|
||||
"PASSWORD": env("POSTGRES_ADMIN_PASSWORD", default="S3cret"),
|
||||
"HOST": env("POSTGRES_HOST", default="postgres-db"),
|
||||
"PORT": env("POSTGRES_PORT", default="5432"),
|
||||
},
|
||||
}
|
||||
DATABASES["default"] = DATABASES["prowler_user"]
|
||||
|
||||
REST_FRAMEWORK["DEFAULT_RENDERER_CLASSES"] = tuple( # noqa: F405
|
||||
render_class
|
||||
for render_class in REST_FRAMEWORK["DEFAULT_RENDERER_CLASSES"] # noqa: F405
|
||||
) + ("rest_framework_json_api.renderers.BrowsableAPIRenderer",)
|
||||
|
||||
REST_FRAMEWORK["DEFAULT_FILTER_BACKENDS"] = tuple( # noqa: F405
|
||||
filter_backend
|
||||
for filter_backend in REST_FRAMEWORK["DEFAULT_FILTER_BACKENDS"] # noqa: F405
|
||||
if "DjangoFilterBackend" not in filter_backend
|
||||
) + ("api.filters.CustomDjangoFilterBackend",)
|
||||
|
||||
SECRETS_ENCRYPTION_KEY = "ZMiYVo7m4Fbe2eXXPyrwxdJss2WSalXSv3xHBcJkPl0="
|
||||
28
api/src/backend/config/django/production.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from config.django.base import * # noqa
|
||||
from config.env import env
|
||||
|
||||
|
||||
DEBUG = env.bool("DJANGO_DEBUG", default=False)
|
||||
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["localhost", "127.0.0.1"])
|
||||
|
||||
# Database
|
||||
# TODO Use Django database routers https://docs.djangoproject.com/en/5.0/topics/db/multi-db/#automatic-database-routing
|
||||
DATABASES = {
|
||||
"prowler_user": {
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
"NAME": env("POSTGRES_DB"),
|
||||
"USER": env("POSTGRES_USER"),
|
||||
"PASSWORD": env("POSTGRES_PASSWORD"),
|
||||
"HOST": env("POSTGRES_HOST"),
|
||||
"PORT": env("POSTGRES_PORT"),
|
||||
},
|
||||
"admin": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_DB"),
|
||||
"USER": env("POSTGRES_ADMIN_USER"),
|
||||
"PASSWORD": env("POSTGRES_ADMIN_PASSWORD"),
|
||||
"HOST": env("POSTGRES_HOST"),
|
||||
"PORT": env("POSTGRES_PORT"),
|
||||
},
|
||||
}
|
||||
DATABASES["default"] = DATABASES["prowler_user"]
|
||||
26
api/src/backend/config/django/testing.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from config.django.base import * # noqa
|
||||
from config.env import env
|
||||
|
||||
|
||||
DEBUG = env.bool("DJANGO_DEBUG", default=False)
|
||||
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["localhost", "127.0.0.1"])
|
||||
|
||||
|
||||
DATABASES = {
|
||||
"default": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": "prowler_db_test",
|
||||
"USER": env("POSTGRES_USER", default="prowler"),
|
||||
"PASSWORD": env("POSTGRES_PASSWORD", default="S3cret"),
|
||||
"HOST": env("POSTGRES_HOST", default="localhost"),
|
||||
"PORT": env("POSTGRES_PORT", default="5432"),
|
||||
},
|
||||
}
|
||||
|
||||
DATABASE_ROUTERS = []
|
||||
TESTING = True
|
||||
SECRETS_ENCRYPTION_KEY = "ZMiYVo7m4Fbe2eXXPyrwxdJss2WSalXSv3xHBcJkPl0="
|
||||
|
||||
# JWT
|
||||
|
||||
SIMPLE_JWT["ALGORITHM"] = "HS256" # noqa: F405
|
||||
7
api/src/backend/config/env.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from pathlib import Path
|
||||
|
||||
import environ
|
||||
|
||||
env = environ.Env()
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
43
api/src/backend/config/guniconf.py
Normal file
@@ -0,0 +1,43 @@
|
||||
import logging
|
||||
import multiprocessing
|
||||
import os
|
||||
|
||||
from config.env import env
|
||||
|
||||
# Ensure the environment variable for Django settings is set
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.production")
|
||||
|
||||
# Import Django and set it up before accessing settings
|
||||
import django # noqa: E402
|
||||
|
||||
django.setup()
|
||||
from config.django.production import LOGGING as DJANGO_LOGGERS, DEBUG # noqa: E402
|
||||
from config.custom_logging import BackendLogger # noqa: E402
|
||||
|
||||
BIND_ADDRESS = env("DJANGO_BIND_ADDRESS", default="127.0.0.1")
|
||||
PORT = env("DJANGO_PORT", default=8000)
|
||||
|
||||
# Server settings
|
||||
bind = f"{BIND_ADDRESS}:{PORT}"
|
||||
|
||||
workers = env.int("DJANGO_WORKERS", default=multiprocessing.cpu_count() * 2 + 1)
|
||||
reload = DEBUG
|
||||
|
||||
# Logging
|
||||
logconfig_dict = DJANGO_LOGGERS
|
||||
gunicorn_logger = logging.getLogger(BackendLogger.GUNICORN)
|
||||
|
||||
|
||||
# Hooks
|
||||
def on_starting(_):
|
||||
gunicorn_logger.info(f"Starting gunicorn server with {workers} workers")
|
||||
if reload:
|
||||
gunicorn_logger.warning("Reload settings enabled (dev mode)")
|
||||
|
||||
|
||||
def on_reload(_):
|
||||
gunicorn_logger.warning("Gunicorn server has reloaded")
|
||||
|
||||
|
||||
def when_ready(_):
|
||||
gunicorn_logger.info("Gunicorn server is ready")
|
||||
3
api/src/backend/config/settings/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from config.celery import celery_app
|
||||
|
||||
__all__ = ("celery_app",)
|
||||
11
api/src/backend/config/settings/celery.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from config.env import env
|
||||
|
||||
VALKEY_HOST = env("VALKEY_HOST", default="valkey")
|
||||
VALKEY_PORT = env("VALKEY_PORT", default="6379")
|
||||
VALKEY_DB = env("VALKEY_DB", default="0")
|
||||
|
||||
CELERY_BROKER_URL = f"redis://{VALKEY_HOST}:{VALKEY_PORT}/{VALKEY_DB}"
|
||||
CELERY_RESULT_BACKEND = "django-db"
|
||||
CELERY_TASK_TRACK_STARTED = True
|
||||
|
||||
CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True
|
||||
16
api/src/backend/config/settings/partitions.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from config.env import env
|
||||
|
||||
# Partitioning
|
||||
PSQLEXTRA_PARTITIONING_MANAGER = "api.partitions.manager"
|
||||
|
||||
# Number of months covered by each partition. Setting this to 1 creates partitions spanning one natural month.
|
||||
FINDINGS_TABLE_PARTITION_MONTHS = env.int("FINDINGS_TABLE_PARTITION_MONTHS", 1)
|
||||
|
||||
# Set the number of partitions to create
|
||||
FINDINGS_TABLE_PARTITION_COUNT = env.int("FINDINGS_TABLE_PARTITION_COUNT", 7)
|
||||
|
||||
# Set the number of months to keep partitions before deleting them
|
||||
# Setting this to None will keep partitions indefinitely
|
||||
FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS = env.int(
|
||||
"FINDINGS_TABLE_PARTITION_MAX_AGE_MONTHS", None
|
||||
)
|
||||
7
api/src/backend/config/urls.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from django.contrib import admin
|
||||
from django.urls import include, path
|
||||
|
||||
urlpatterns = [
|
||||
path("admin/", admin.site.urls),
|
||||
path("api/v1/", include("api.v1.urls")),
|
||||
]
|
||||
16
api/src/backend/config/wsgi.py
Normal file
@@ -0,0 +1,16 @@
|
||||
"""
|
||||
WSGI config for backend project.
|
||||
|
||||
It exposes the WSGI callable as a module-level variable named ``application``.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/5.0/howto/deployment/wsgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from django.core.wsgi import get_wsgi_application
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.production")
|
||||
|
||||
application = get_wsgi_application()
|
||||
546
api/src/backend/conftest.py
Normal file
@@ -0,0 +1,546 @@
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
from django.conf import settings
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from django.db import connections as django_connections, connection as django_connection
|
||||
from django.urls import reverse
|
||||
from django_celery_results.models import TaskResult
|
||||
from prowler.lib.check.models import Severity
|
||||
from prowler.lib.outputs.finding import Status
|
||||
from rest_framework import status
|
||||
from rest_framework.test import APIClient
|
||||
|
||||
from api.models import (
|
||||
Finding,
|
||||
)
|
||||
from api.models import (
|
||||
User,
|
||||
Provider,
|
||||
ProviderGroup,
|
||||
Resource,
|
||||
ResourceTag,
|
||||
Scan,
|
||||
StateChoices,
|
||||
Task,
|
||||
Membership,
|
||||
ProviderSecret,
|
||||
Invitation,
|
||||
ComplianceOverview,
|
||||
)
|
||||
from api.rls import Tenant
|
||||
from api.v1.serializers import TokenSerializer
|
||||
|
||||
API_JSON_CONTENT_TYPE = "application/vnd.api+json"
|
||||
NO_TENANT_HTTP_STATUS = status.HTTP_401_UNAUTHORIZED
|
||||
TEST_USER = "dev@prowler.com"
|
||||
TEST_PASSWORD = "testing_psswd"
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def enforce_test_user_db_connection(django_db_setup, django_db_blocker):
|
||||
"""Ensure tests use the test user for database connections."""
|
||||
test_user = "test"
|
||||
test_password = "test"
|
||||
|
||||
with django_db_blocker.unblock():
|
||||
with django_connection.cursor() as cursor:
|
||||
# Required for testing purposes using APIClient
|
||||
cursor.execute(f"GRANT ALL PRIVILEGES ON django_session TO {test_user};")
|
||||
|
||||
original_user = settings.DATABASES["default"]["USER"]
|
||||
original_password = settings.DATABASES["default"]["PASSWORD"]
|
||||
|
||||
django_connections["default"].settings_dict["USER"] = test_user
|
||||
django_connections["default"].settings_dict["PASSWORD"] = test_password
|
||||
|
||||
django_connections["default"].close()
|
||||
django_connections["default"].connect()
|
||||
|
||||
yield
|
||||
|
||||
with django_db_blocker.unblock():
|
||||
django_connections["default"].settings_dict["USER"] = original_user
|
||||
django_connections["default"].settings_dict["PASSWORD"] = original_password
|
||||
|
||||
django_connections["default"].close()
|
||||
django_connections["default"].connect()
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def disable_logging():
|
||||
logging.disable(logging.CRITICAL)
|
||||
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
|
||||
def create_test_user(django_db_setup, django_db_blocker):
|
||||
with django_db_blocker.unblock():
|
||||
user = User.objects.create_user(
|
||||
name="testing",
|
||||
email=TEST_USER,
|
||||
password=TEST_PASSWORD,
|
||||
)
|
||||
return user
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_client(create_test_user, tenants_fixture, client):
|
||||
client.user = create_test_user
|
||||
serializer = TokenSerializer(
|
||||
data={"type": "tokens", "email": TEST_USER, "password": TEST_PASSWORD}
|
||||
)
|
||||
serializer.is_valid()
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
return client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def authenticated_api_client(create_test_user, tenants_fixture):
|
||||
client = APIClient()
|
||||
serializer = TokenSerializer(
|
||||
data={"type": "tokens", "email": TEST_USER, "password": TEST_PASSWORD}
|
||||
)
|
||||
serializer.is_valid()
|
||||
access_token = serializer.validated_data["access"]
|
||||
client.defaults["HTTP_AUTHORIZATION"] = f"Bearer {access_token}"
|
||||
return client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def tenants_fixture(create_test_user):
|
||||
user = create_test_user
|
||||
tenant1 = Tenant.objects.create(
|
||||
name="Tenant One",
|
||||
)
|
||||
Membership.objects.create(
|
||||
user=user,
|
||||
tenant=tenant1,
|
||||
)
|
||||
tenant2 = Tenant.objects.create(
|
||||
name="Tenant Two",
|
||||
)
|
||||
Membership.objects.create(
|
||||
user=user,
|
||||
tenant=tenant2,
|
||||
role=Membership.RoleChoices.OWNER,
|
||||
)
|
||||
tenant3 = Tenant.objects.create(
|
||||
name="Tenant Three",
|
||||
)
|
||||
return tenant1, tenant2, tenant3
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def invitations_fixture(create_test_user, tenants_fixture):
|
||||
user = create_test_user
|
||||
*_, tenant = tenants_fixture
|
||||
valid_invitation = Invitation.objects.create(
|
||||
email="testing@prowler.com",
|
||||
state=Invitation.State.PENDING,
|
||||
token="TESTING1234567",
|
||||
inviter=user,
|
||||
tenant=tenant,
|
||||
)
|
||||
expired_invitation = Invitation.objects.create(
|
||||
email="testing@prowler.com",
|
||||
state=Invitation.State.EXPIRED,
|
||||
token="TESTING1234568",
|
||||
expires_at=datetime.now(timezone.utc) - timedelta(days=1),
|
||||
inviter=user,
|
||||
tenant=tenant,
|
||||
)
|
||||
return valid_invitation, expired_invitation
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def providers_fixture(tenants_fixture):
|
||||
tenant, *_ = tenants_fixture
|
||||
provider1 = Provider.objects.create(
|
||||
provider="aws",
|
||||
uid="123456789012",
|
||||
alias="aws_testing_1",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
provider2 = Provider.objects.create(
|
||||
provider="aws",
|
||||
uid="123456789013",
|
||||
alias="aws_testing_2",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
provider3 = Provider.objects.create(
|
||||
provider="gcp",
|
||||
uid="a12322-test321",
|
||||
alias="gcp_testing",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
provider4 = Provider.objects.create(
|
||||
provider="kubernetes",
|
||||
uid="kubernetes-test-12345",
|
||||
alias="k8s_testing",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
provider5 = Provider.objects.create(
|
||||
provider="azure",
|
||||
uid="37b065f8-26b0-4218-a665-0b23d07b27d9",
|
||||
alias="azure_testing",
|
||||
tenant_id=tenant.id,
|
||||
scanner_args={"key1": "value1", "key2": {"key21": "value21"}},
|
||||
)
|
||||
|
||||
return provider1, provider2, provider3, provider4, provider5
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def provider_groups_fixture(tenants_fixture):
|
||||
tenant, *_ = tenants_fixture
|
||||
pgroup1 = ProviderGroup.objects.create(
|
||||
name="Group One",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
pgroup2 = ProviderGroup.objects.create(
|
||||
name="Group Two",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
pgroup3 = ProviderGroup.objects.create(
|
||||
name="Group Three",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
return pgroup1, pgroup2, pgroup3
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def provider_secret_fixture(providers_fixture):
|
||||
return tuple(
|
||||
ProviderSecret.objects.create(
|
||||
tenant_id=provider.tenant_id,
|
||||
provider=provider,
|
||||
secret_type=ProviderSecret.TypeChoices.STATIC,
|
||||
secret={"key": "value"},
|
||||
name=provider.alias,
|
||||
)
|
||||
for provider in providers_fixture
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def scans_fixture(tenants_fixture, providers_fixture):
|
||||
tenant, *_ = tenants_fixture
|
||||
provider, provider2, *_ = providers_fixture
|
||||
|
||||
scan1 = Scan.objects.create(
|
||||
name="Scan 1",
|
||||
provider=provider,
|
||||
trigger=Scan.TriggerChoices.MANUAL,
|
||||
state=StateChoices.AVAILABLE,
|
||||
tenant_id=tenant.id,
|
||||
started_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
scan2 = Scan.objects.create(
|
||||
name="Scan 2",
|
||||
provider=provider,
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.FAILED,
|
||||
tenant_id=tenant.id,
|
||||
started_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
scan3 = Scan.objects.create(
|
||||
name="Scan 3",
|
||||
provider=provider2,
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
tenant_id=tenant.id,
|
||||
started_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
return scan1, scan2, scan3
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def tasks_fixture(tenants_fixture):
|
||||
tenant, *_ = tenants_fixture
|
||||
|
||||
task_runner_task1 = TaskResult.objects.create(
|
||||
task_id="81a1b34b-ff6e-498e-979c-d6a83260167f",
|
||||
task_name="task_runner_task1",
|
||||
task_kwargs='{"kwarg1": "value1"}',
|
||||
status="SUCCESS",
|
||||
)
|
||||
task_runner_task2 = TaskResult.objects.create(
|
||||
task_id="4d0260a5-2e1f-4a34-a976-8c5acb9f5499",
|
||||
task_name="task_runner_task1",
|
||||
status="PENDING",
|
||||
)
|
||||
task1 = Task.objects.create(
|
||||
id=task_runner_task1.task_id,
|
||||
task_runner_task=task_runner_task1,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
task2 = Task.objects.create(
|
||||
id=task_runner_task2.task_id,
|
||||
task_runner_task=task_runner_task2,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
return task1, task2
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def resources_fixture(providers_fixture):
|
||||
provider, *_ = providers_fixture
|
||||
|
||||
tags = [
|
||||
ResourceTag.objects.create(
|
||||
tenant_id=provider.tenant_id,
|
||||
key="key",
|
||||
value="value",
|
||||
),
|
||||
ResourceTag.objects.create(
|
||||
tenant_id=provider.tenant_id,
|
||||
key="key2",
|
||||
value="value2",
|
||||
),
|
||||
]
|
||||
|
||||
resource1 = Resource.objects.create(
|
||||
tenant_id=provider.tenant_id,
|
||||
provider=provider,
|
||||
uid="arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0",
|
||||
name="My Instance 1",
|
||||
region="us-east-1",
|
||||
service="ec2",
|
||||
type="prowler-test",
|
||||
)
|
||||
|
||||
resource1.upsert_or_delete_tags(tags)
|
||||
|
||||
resource2 = Resource.objects.create(
|
||||
tenant_id=provider.tenant_id,
|
||||
provider=provider,
|
||||
uid="arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef1",
|
||||
name="My Instance 2",
|
||||
region="eu-west-1",
|
||||
service="s3",
|
||||
type="prowler-test",
|
||||
)
|
||||
resource2.upsert_or_delete_tags(tags)
|
||||
|
||||
resource3 = Resource.objects.create(
|
||||
tenant_id=providers_fixture[1].tenant_id,
|
||||
provider=providers_fixture[1],
|
||||
uid="arn:aws:ec2:us-east-1:123456789012:bucket/i-1234567890abcdef2",
|
||||
name="My Bucket 3",
|
||||
region="us-east-1",
|
||||
service="ec2",
|
||||
type="test",
|
||||
)
|
||||
|
||||
tags = [
|
||||
ResourceTag.objects.create(
|
||||
tenant_id=provider.tenant_id,
|
||||
key="key3",
|
||||
value="multi word value3",
|
||||
),
|
||||
]
|
||||
resource3.upsert_or_delete_tags(tags)
|
||||
|
||||
return resource1, resource2, resource3
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def findings_fixture(scans_fixture, resources_fixture):
|
||||
scan, *_ = scans_fixture
|
||||
resource1, resource2, *_ = resources_fixture
|
||||
|
||||
finding1 = Finding.objects.create(
|
||||
tenant_id=scan.tenant_id,
|
||||
uid="test_finding_uid_1",
|
||||
scan=scan,
|
||||
delta=None,
|
||||
status=Status.FAIL,
|
||||
status_extended="test status extended ",
|
||||
impact=Severity.critical,
|
||||
impact_extended="test impact extended one",
|
||||
severity=Severity.critical,
|
||||
raw_result={
|
||||
"status": Status.FAIL,
|
||||
"impact": Severity.critical,
|
||||
"severity": Severity.critical,
|
||||
},
|
||||
tags={"test": "dev-qa"},
|
||||
check_id="test_check_id",
|
||||
check_metadata={
|
||||
"CheckId": "test_check_id",
|
||||
"Description": "test description apple sauce",
|
||||
},
|
||||
)
|
||||
|
||||
finding1.add_resources([resource1])
|
||||
|
||||
finding2 = Finding.objects.create(
|
||||
tenant_id=scan.tenant_id,
|
||||
uid="test_finding_uid_2",
|
||||
scan=scan,
|
||||
delta="new",
|
||||
status=Status.FAIL,
|
||||
status_extended="Load Balancer exposed to internet",
|
||||
impact=Severity.medium,
|
||||
impact_extended="test impact extended two",
|
||||
severity=Severity.medium,
|
||||
raw_result={
|
||||
"status": Status.FAIL,
|
||||
"impact": Severity.medium,
|
||||
"severity": Severity.medium,
|
||||
},
|
||||
tags={"test": "test"},
|
||||
check_id="test_check_id",
|
||||
check_metadata={
|
||||
"CheckId": "test_check_id",
|
||||
"Description": "test description orange juice",
|
||||
},
|
||||
)
|
||||
|
||||
finding2.add_resources([resource2])
|
||||
|
||||
return finding1, finding2
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def compliance_overviews_fixture(scans_fixture, tenants_fixture):
|
||||
tenant = tenants_fixture[0]
|
||||
scan1, scan2, scan3 = scans_fixture
|
||||
|
||||
compliance_overview1 = ComplianceOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="aws_account_security_onboarding_aws",
|
||||
framework="AWS-Account-Security-Onboarding",
|
||||
version="1.0",
|
||||
description="Description for AWS Account Security Onboarding",
|
||||
region="eu-west-1",
|
||||
requirements={
|
||||
"requirement1": {
|
||||
"name": "Requirement 1",
|
||||
"checks": {"check1.1": "PASS", "check1.2": None},
|
||||
"status": "PASS",
|
||||
"attributes": [],
|
||||
"description": "Description of requirement 1",
|
||||
"checks_status": {
|
||||
"total": 2,
|
||||
"failed": 0,
|
||||
"passed": 2,
|
||||
},
|
||||
},
|
||||
"requirement2": {
|
||||
"name": "Requirement 2",
|
||||
"checks": {"check2.1": "PASS", "check2.2": "PASS"},
|
||||
"status": "PASS",
|
||||
"attributes": [],
|
||||
"description": "Description of requirement 2",
|
||||
"checks_status": {
|
||||
"total": 2,
|
||||
"failed": 0,
|
||||
"passed": 2,
|
||||
},
|
||||
},
|
||||
"requirement3": {
|
||||
"name": "Requirement 3 - manual",
|
||||
"checks": {},
|
||||
"status": "PASS",
|
||||
"attributes": [],
|
||||
"description": "Description of requirement 2",
|
||||
"checks_status": {
|
||||
"total": 0,
|
||||
"failed": 0,
|
||||
"passed": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
requirements_passed=2,
|
||||
requirements_failed=0,
|
||||
requirements_manual=1,
|
||||
total_requirements=3,
|
||||
)
|
||||
|
||||
compliance_overview2 = ComplianceOverview.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan1,
|
||||
compliance_id="aws_account_security_onboarding_aws",
|
||||
framework="AWS-Account-Security-Onboarding",
|
||||
version="1.0",
|
||||
description="Description for AWS Account Security Onboarding",
|
||||
region="eu-west-2",
|
||||
requirements={
|
||||
"requirement1": {
|
||||
"name": "Requirement 1",
|
||||
"checks": {"check1.1": "PASS", "check1.2": None},
|
||||
"status": "PASS",
|
||||
"attributes": [],
|
||||
"description": "Description of requirement 1",
|
||||
"checks_status": {
|
||||
"total": 2,
|
||||
"failed": 0,
|
||||
"passed": 2,
|
||||
},
|
||||
},
|
||||
"requirement2": {
|
||||
"name": "Requirement 2",
|
||||
"checks": {"check2.1": "PASS", "check2.2": "FAIL"},
|
||||
"status": "FAIL",
|
||||
"attributes": [],
|
||||
"description": "Description of requirement 2",
|
||||
"checks_status": {
|
||||
"total": 2,
|
||||
"failed": 1,
|
||||
"passed": 1,
|
||||
},
|
||||
},
|
||||
"requirement3": {
|
||||
"name": "Requirement 3 - manual",
|
||||
"checks": {},
|
||||
"status": "PASS",
|
||||
"attributes": [],
|
||||
"description": "Description of requirement 2",
|
||||
"checks_status": {
|
||||
"total": 0,
|
||||
"failed": 0,
|
||||
"passed": 0,
|
||||
},
|
||||
},
|
||||
},
|
||||
requirements_passed=1,
|
||||
requirements_failed=1,
|
||||
requirements_manual=1,
|
||||
total_requirements=3,
|
||||
)
|
||||
|
||||
# Return the created compliance overviews
|
||||
return compliance_overview1, compliance_overview2
|
||||
|
||||
|
||||
def get_api_tokens(
|
||||
api_client, user_email: str, user_password: str, tenant_id: str = None
|
||||
) -> tuple[str, str]:
|
||||
json_body = {
|
||||
"data": {
|
||||
"type": "tokens",
|
||||
"attributes": {
|
||||
"email": user_email,
|
||||
"password": user_password,
|
||||
},
|
||||
}
|
||||
}
|
||||
if tenant_id is not None:
|
||||
json_body["data"]["attributes"]["tenant_id"] = tenant_id
|
||||
response = api_client.post(
|
||||
reverse("token-obtain"),
|
||||
data=json_body,
|
||||
format="vnd.api+json",
|
||||
)
|
||||
return response.json()["data"]["attributes"]["access"], response.json()["data"][
|
||||
"attributes"
|
||||
]["refresh"]
|
||||
|
||||
|
||||
def get_authorization_header(access_token: str) -> dict:
|
||||
return {"Authorization": f"Bearer {access_token}"}
|
||||
24
api/src/backend/manage.py
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env python
|
||||
"""Django's command-line utility for administrative tasks."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def main():
|
||||
"""Run administrative tasks."""
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.django.production")
|
||||
try:
|
||||
from django.core.management import execute_from_command_line
|
||||
except ImportError as exc:
|
||||
raise ImportError(
|
||||
"Couldn't import Django. Are you sure it's installed and "
|
||||
"available on your PYTHONPATH environment variable? Did you "
|
||||
"forget to activate a virtual environment?"
|
||||
) from exc
|
||||
execute_from_command_line(sys.argv)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
3
api/src/backend/pytest.ini
Normal file
@@ -0,0 +1,3 @@
|
||||
[pytest]
|
||||
DJANGO_SETTINGS_MODULE = config.django.testing
|
||||
addopts = -rP
|
||||
0
api/src/backend/tasks/__init__.py
Normal file
31
api/src/backend/tasks/beat.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import json
|
||||
|
||||
from django.utils import timezone
|
||||
from django_celery_beat.models import PeriodicTask, IntervalSchedule
|
||||
|
||||
from api.models import Provider
|
||||
|
||||
|
||||
def schedule_provider_scan(provider_instance: Provider):
|
||||
schedule, _ = IntervalSchedule.objects.get_or_create(
|
||||
every=24,
|
||||
period=IntervalSchedule.HOURS,
|
||||
)
|
||||
|
||||
# Create a unique name for the periodic task
|
||||
task_name = f"scan-perform-scheduled-{provider_instance.id}"
|
||||
|
||||
# Schedule the task
|
||||
PeriodicTask.objects.create(
|
||||
interval=schedule,
|
||||
name=task_name,
|
||||
task="scan-perform-scheduled",
|
||||
kwargs=json.dumps(
|
||||
{
|
||||
"tenant_id": str(provider_instance.tenant_id),
|
||||
"provider_id": str(provider_instance.id),
|
||||
}
|
||||
),
|
||||
start_time=provider_instance.inserted_at + timezone.timedelta(hours=24),
|
||||
one_off=False,
|
||||
)
|
||||
41
api/src/backend/tasks/jobs/connection.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from celery.utils.log import get_task_logger
|
||||
|
||||
from api.models import Provider
|
||||
from api.utils import prowler_provider_connection_test
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
def check_provider_connection(provider_id: str):
|
||||
"""
|
||||
Business logic to check the connection status of a provider.
|
||||
|
||||
Args:
|
||||
provider_id (str): The primary key of the Provider instance to check.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing:
|
||||
- 'connected' (bool): Indicates whether the provider is successfully connected.
|
||||
- 'error' (str or None): The error message if the connection failed, otherwise `None`.
|
||||
|
||||
Raises:
|
||||
ValueError: If the provider type is not supported.
|
||||
Model.DoesNotExist: If the provider does not exist.
|
||||
"""
|
||||
provider_instance = Provider.objects.get(pk=provider_id)
|
||||
try:
|
||||
connection_result = prowler_provider_connection_test(provider_instance)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f"Unexpected exception checking {provider_instance.provider} provider connection: {str(e)}"
|
||||
)
|
||||
raise e
|
||||
|
||||
provider_instance.connected = connection_result.is_connected
|
||||
provider_instance.connection_last_checked_at = datetime.now(tz=timezone.utc)
|
||||
provider_instance.save()
|
||||
|
||||
connection_error = f"{connection_result.error}" if connection_result.error else None
|
||||
return {"connected": connection_result.is_connected, "error": connection_error}
|
||||
25
api/src/backend/tasks/jobs/deletion.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from celery.utils.log import get_task_logger
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
def delete_instance(model, pk: str):
|
||||
"""
|
||||
Deletes an instance of the specified model.
|
||||
|
||||
This function retrieves an instance of the provided model using its primary key
|
||||
and deletes it from the database.
|
||||
|
||||
Args:
|
||||
model (Model): The Django model class from which to delete an instance.
|
||||
pk (str): The primary key of the instance to delete.
|
||||
|
||||
Returns:
|
||||
tuple: A tuple containing the number of objects deleted and a dictionary
|
||||
with the count of deleted objects per model,
|
||||
including related models if applicable.
|
||||
|
||||
Raises:
|
||||
model.DoesNotExist: If no instance with the provided primary key exists.
|
||||
"""
|
||||
return model.objects.get(pk=pk).delete()
|
||||
323
api/src/backend/tasks/jobs/scan.py
Normal file
@@ -0,0 +1,323 @@
import time
from copy import deepcopy
from datetime import datetime, timezone

from celery.utils.log import get_task_logger
from prowler.lib.outputs.finding import Finding as ProwlerFinding
from prowler.lib.scan.scan import Scan as ProwlerScan

from api.compliance import (
    PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
    generate_scan_compliance,
)
from api.db_utils import tenant_transaction
from api.models import (
    Provider,
    Scan,
    Finding,
    Resource,
    ResourceTag,
    StatusChoices as FindingStatus,
    StateChoices,
    ComplianceOverview,
)
from api.utils import initialize_prowler_provider
from api.v1.serializers import ScanTaskSerializer

logger = get_task_logger(__name__)


def _create_finding_delta(
    last_status: FindingStatus | None | str, new_status: FindingStatus | None
) -> Finding.DeltaChoices:
    """
    Determine the delta status of a finding based on its previous and current status.

    Args:
        last_status (FindingStatus | None | str): The previous status of the finding. Can be None or a string representation.
        new_status (FindingStatus | None): The current status of the finding.

    Returns:
        Finding.DeltaChoices: The delta status indicating if the finding is new, changed, or unchanged.
            - Returns `Finding.DeltaChoices.NEW` if `last_status` is None.
            - Returns `Finding.DeltaChoices.CHANGED` if `last_status` and `new_status` are different.
            - Returns `None` if the status hasn't changed.
    """
    if last_status is None:
        return Finding.DeltaChoices.NEW
    return Finding.DeltaChoices.CHANGED if last_status != new_status else None


def _store_resources(
    finding: ProwlerFinding, tenant_id: str, provider_instance: Provider
) -> tuple[Resource, tuple[str, str]]:
    """
    Store resource information from a finding, including tags, in the database.

    Args:
        finding (ProwlerFinding): The finding object containing resource information.
        tenant_id (str): The ID of the tenant owning the resource.
        provider_instance (Provider): The provider instance associated with the resource.

    Returns:
        tuple:
            - Resource: The resource instance created or retrieved from the database.
            - tuple[str, str]: A tuple containing the resource UID and region.

    """
    with tenant_transaction(tenant_id):
        resource_instance, created = Resource.objects.get_or_create(
            tenant_id=tenant_id,
            provider=provider_instance,
            uid=finding.resource_uid,
            defaults={
                "region": finding.region,
                "service": finding.service_name,
                "type": finding.resource_type,
            },
        )

        if not created:
            resource_instance.region = finding.region
            resource_instance.service = finding.service_name
            resource_instance.type = finding.resource_type
            resource_instance.save()
    with tenant_transaction(tenant_id):
        tags = [
            ResourceTag.objects.get_or_create(
                tenant_id=tenant_id, key=key, value=value
            )[0]
            for key, value in finding.resource_tags.items()
        ]
        resource_instance.upsert_or_delete_tags(tags=tags)
    return resource_instance, (resource_instance.uid, resource_instance.region)


def perform_prowler_scan(
    tenant_id: str, scan_id: str, provider_id: str, checks_to_execute: list[str] = None
):
    """
    Perform a scan using Prowler and store the findings and resources in the database.

    Args:
        tenant_id (str): The ID of the tenant for which the scan is performed.
        scan_id (str): The ID of the scan instance.
        provider_id (str): The ID of the provider to scan.
        checks_to_execute (list[str], optional): A list of specific checks to execute. Defaults to None.

    Returns:
        dict: Serialized data of the completed scan instance.

    Raises:
        ValueError: If the provider cannot be connected.

    """
    generate_compliance = False
    check_status_by_region = {}
    exception = None
    unique_resources = set()
    start_time = time.time()

    with tenant_transaction(tenant_id):
        provider_instance = Provider.objects.get(pk=provider_id)
        scan_instance = Scan.objects.get(pk=scan_id)
        scan_instance.state = StateChoices.EXECUTING
        scan_instance.started_at = datetime.now(tz=timezone.utc)
        scan_instance.save()

    try:
        with tenant_transaction(tenant_id):
            try:
                prowler_provider = initialize_prowler_provider(provider_instance)
                provider_instance.connected = True
            except Exception as e:
                provider_instance.connected = False
                raise ValueError(
                    f"Provider {provider_instance.provider} is not connected: {e}"
                )
            finally:
                provider_instance.connection_last_checked_at = datetime.now(
                    tz=timezone.utc
                )
                provider_instance.save()

        generate_compliance = provider_instance.provider != Provider.ProviderChoices.GCP
        prowler_scan = ProwlerScan(provider=prowler_provider, checks=checks_to_execute)

        resource_cache = {}
        tag_cache = {}
        last_status_cache = {}

        for progress, findings in prowler_scan.scan():
            with tenant_transaction(tenant_id):
                for finding in findings:
                    # Process resource
                    resource_uid = finding.resource_uid
                    if resource_uid not in resource_cache:
                        # Get or create the resource
                        resource_instance, _ = Resource.objects.get_or_create(
                            tenant_id=tenant_id,
                            provider=provider_instance,
                            uid=resource_uid,
                            defaults={
                                "region": finding.region,
                                "service": finding.service_name,
                                "type": finding.resource_type,
                                "name": finding.resource_name,
                            },
                        )
                        resource_cache[resource_uid] = resource_instance
                    else:
                        resource_instance = resource_cache[resource_uid]

                    # Update resource fields if necessary
                    updated_fields = []
                    if resource_instance.region != finding.region:
                        resource_instance.region = finding.region
                        updated_fields.append("region")
                    if resource_instance.service != finding.service_name:
                        resource_instance.service = finding.service_name
                        updated_fields.append("service")
                    if resource_instance.type != finding.resource_type:
                        resource_instance.type = finding.resource_type
                        updated_fields.append("type")
                    if updated_fields:
                        resource_instance.save(update_fields=updated_fields)

                    # Update tags
                    tags = []
                    for key, value in finding.resource_tags.items():
                        tag_key = (key, value)
                        if tag_key not in tag_cache:
                            tag_instance, _ = ResourceTag.objects.get_or_create(
                                tenant_id=tenant_id, key=key, value=value
                            )
                            tag_cache[tag_key] = tag_instance
                        else:
                            tag_instance = tag_cache[tag_key]
                        tags.append(tag_instance)
                    resource_instance.upsert_or_delete_tags(tags=tags)

                    unique_resources.add(
                        (resource_instance.uid, resource_instance.region)
                    )

                    # Process finding
                    finding_uid = finding.uid
                    if finding_uid not in last_status_cache:
                        most_recent_finding = (
                            Finding.objects.filter(uid=finding_uid)
                            .order_by("-id")
                            .values("status")
                            .first()
                        )
                        last_status = (
                            most_recent_finding["status"]
                            if most_recent_finding
                            else None
                        )
                        last_status_cache[finding_uid] = last_status
                    else:
                        last_status = last_status_cache[finding_uid]

                    status = FindingStatus[finding.status]
                    delta = _create_finding_delta(last_status, status)

                    # Create the finding
                    finding_instance = Finding.objects.create(
                        tenant_id=tenant_id,
                        uid=finding_uid,
                        delta=delta,
                        check_metadata=finding.get_metadata(),
                        status=status,
                        status_extended=finding.status_extended,
                        severity=finding.severity,
                        impact=finding.severity,
                        raw_result=finding.raw,
                        check_id=finding.check_id,
                        scan=scan_instance,
                    )
                    finding_instance.add_resources([resource_instance])

                    # Update compliance data if applicable
                    if not generate_compliance or finding.status.value == "MUTED":
                        continue

                    region_dict = check_status_by_region.setdefault(finding.region, {})
                    current_status = region_dict.get(finding.check_id)
                    if current_status == "FAIL":
                        continue
                    region_dict[finding.check_id] = finding.status.value

            # Update scan progress
            with tenant_transaction(tenant_id):
                scan_instance.progress = progress
                scan_instance.save()

        scan_instance.state = StateChoices.COMPLETED

    except Exception as e:
        logger.error(f"Error performing scan {scan_id}: {e}")
        exception = e
        scan_instance.state = StateChoices.FAILED

    finally:
        with tenant_transaction(tenant_id):
            scan_instance.duration = time.time() - start_time
            scan_instance.completed_at = datetime.now(tz=timezone.utc)
            scan_instance.unique_resource_count = len(unique_resources)
            scan_instance.save()

    if generate_compliance:
        try:
            regions = prowler_provider.get_regions()
        except AttributeError:
            regions = set()

        compliance_template = PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE[
            provider_instance.provider
        ]
        compliance_overview_by_region = {
            region: deepcopy(compliance_template) for region in regions
        }

        for region, check_status in check_status_by_region.items():
            compliance_data = compliance_overview_by_region.setdefault(
                region, deepcopy(compliance_template)
            )
            for check_name, status in check_status.items():
                generate_scan_compliance(
                    compliance_data,
                    provider_instance.provider,
                    check_name,
                    status,
                )

        # Prepare compliance overview objects
        compliance_overview_objects = []
        for region, compliance_data in compliance_overview_by_region.items():
            for compliance_id, compliance in compliance_data.items():
                compliance_overview_objects.append(
                    ComplianceOverview(
                        tenant_id=tenant_id,
                        scan=scan_instance,
                        region=region,
                        compliance_id=compliance_id,
                        framework=compliance["framework"],
                        version=compliance["version"],
                        description=compliance["description"],
                        requirements=compliance["requirements"],
                        requirements_passed=compliance["requirements_status"]["passed"],
                        requirements_failed=compliance["requirements_status"]["failed"],
                        requirements_manual=compliance["requirements_status"]["manual"],
                        total_requirements=compliance["total_requirements"],
                    )
                )
        with tenant_transaction(tenant_id):
            ComplianceOverview.objects.bulk_create(compliance_overview_objects)

    if exception is not None:
        raise exception

    serializer = ScanTaskSerializer(instance=scan_instance)
    return serializer.data
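The delta rules implemented by _create_finding_delta reduce to three cases, the same ones exercised by the parametrized test further down; a minimal sketch using the names defined in this module:

assert _create_finding_delta(None, FindingStatus.PASS) == Finding.DeltaChoices.NEW
assert _create_finding_delta(FindingStatus.PASS, FindingStatus.FAIL) == Finding.DeltaChoices.CHANGED
assert _create_finding_delta(FindingStatus.PASS, FindingStatus.PASS) is None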
112
api/src/backend/tasks/tasks.py
Normal file
@@ -0,0 +1,112 @@
from celery import shared_task

from api.db_utils import tenant_transaction
from api.decorators import set_tenant
from api.models import Provider, Scan
from config.celery import RLSTask
from tasks.jobs.connection import check_provider_connection
from tasks.jobs.deletion import delete_instance
from tasks.jobs.scan import perform_prowler_scan


@shared_task(base=RLSTask, name="provider-connection-check")
@set_tenant
def check_provider_connection_task(provider_id: str):
    """
    Task to check the connection status of a provider.

    Args:
        provider_id (str): The primary key of the Provider instance to check.

    Returns:
        dict: A dictionary containing:
            - 'connected' (bool): Indicates whether the provider is successfully connected.
            - 'error' (str or None): The error message if the connection failed, otherwise `None`.
    """
    return check_provider_connection(provider_id=provider_id)


@shared_task(base=RLSTask, name="provider-deletion")
@set_tenant
def delete_provider_task(provider_id: str):
    """
    Task to delete a specific Provider instance.

    Args:
        provider_id (str): The primary key of the `Provider` instance to be deleted.

    Returns:
        tuple: A tuple containing:
            - The number of instances deleted.
            - A dictionary with the count of deleted instances per model,
              including related models if cascading deletes were triggered.
    """
    return delete_instance(model=Provider, pk=provider_id)


@shared_task(base=RLSTask, name="scan-perform", queue="scans")
def perform_scan_task(
    tenant_id: str, scan_id: str, provider_id: str, checks_to_execute: list[str] = None
):
    """
    Task to perform a Prowler scan on a given provider.

    This task runs a Prowler scan on the provider identified by `provider_id`
    under the tenant identified by `tenant_id`. The scan will use the `scan_id`
    for tracking purposes.

    Args:
        tenant_id (str): The tenant ID under which the scan is being performed.
        scan_id (str): The ID of the scan to be performed.
        provider_id (str): The primary key of the Provider instance to scan.
        checks_to_execute (list[str], optional): A list of specific checks to perform during the scan. Defaults to None.

    Returns:
        dict: The result of the scan execution, typically including the status and results of the performed checks.
    """
    return perform_prowler_scan(
        tenant_id=tenant_id,
        scan_id=scan_id,
        provider_id=provider_id,
        checks_to_execute=checks_to_execute,
    )


@shared_task(base=RLSTask, bind=True, name="scan-perform-scheduled", queue="scans")
def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
    """
    Task to perform a scheduled Prowler scan on a given provider.

    This task creates and executes a Prowler scan for the provider identified by `provider_id`
    under the tenant identified by `tenant_id`. It initiates a new scan instance with the task ID
    for tracking purposes. This task is intended to be run on a schedule (e.g., daily) to
    automatically perform scans without manual intervention.

    Args:
        self: The task instance (automatically passed when bind=True).
        tenant_id (str): The tenant ID under which the scan is being performed.
        provider_id (str): The primary key of the Provider instance to scan.

    Returns:
        dict: The result of the scan execution, typically including the status and results
            of the performed checks.

    """
    task_id = self.request.id

    with tenant_transaction(tenant_id):
        provider_instance = Provider.objects.get(pk=provider_id)

        scan_instance = Scan.objects.create(
            tenant_id=tenant_id,
            name="Daily scheduled scan",
            provider=provider_instance,
            trigger=Scan.TriggerChoices.SCHEDULED,
            task_id=task_id,
        )

    return perform_prowler_scan(
        tenant_id=tenant_id,
        scan_id=str(scan_instance.id),
        provider_id=provider_id,
    )
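These are Celery shared tasks registered by name, so application code queues them rather than calling them inline; a hedged sketch of how the API layer might dispatch them. It assumes the @set_tenant decorator consumes a tenant_id keyword argument (its implementation is outside this diff), and all id values are placeholders:

from tasks.tasks import check_provider_connection_task, perform_scan_task

# Queue a connection check for a provider.
check_provider_connection_task.apply_async(
    kwargs={"tenant_id": "example-tenant-id", "provider_id": "example-provider-id"}
)

# Queue a scan; the task is routed to the "scans" queue by its own configuration.
perform_scan_task.apply_async(
    kwargs={
        "tenant_id": "example-tenant-id",
        "scan_id": "example-scan-id",
        "provider_id": "example-provider-id",
    }
)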
0
api/src/backend/tasks/tests/__init__.py
Normal file
72
api/src/backend/tasks/tests/test_connection.py
Normal file
@@ -0,0 +1,72 @@
from datetime import datetime, timezone
from unittest.mock import patch, MagicMock

import pytest

from api.models import Provider
from tasks.jobs.connection import check_provider_connection


@pytest.mark.parametrize(
    "provider_data",
    [
        {"provider": "aws", "uid": "123456789012", "alias": "aws"},
    ],
)
@patch("tasks.jobs.connection.prowler_provider_connection_test")
@pytest.mark.django_db
def test_check_provider_connection(
    mock_provider_connection_test, tenants_fixture, provider_data
):
    provider = Provider.objects.create(**provider_data, tenant_id=tenants_fixture[0].id)

    mock_test_connection_result = MagicMock()
    mock_test_connection_result.is_connected = True

    mock_provider_connection_test.return_value = mock_test_connection_result

    check_provider_connection(
        provider_id=str(provider.id),
    )
    provider.refresh_from_db()

    mock_provider_connection_test.assert_called_once()
    assert provider.connected is True
    assert provider.connection_last_checked_at is not None
    assert provider.connection_last_checked_at <= datetime.now(tz=timezone.utc)


@patch("tasks.jobs.connection.Provider.objects.get")
@pytest.mark.django_db
def test_check_provider_connection_unsupported_provider(mock_provider_get):
    mock_provider_instance = MagicMock()
    mock_provider_instance.provider = "UNSUPPORTED_PROVIDER"
    mock_provider_get.return_value = mock_provider_instance

    with pytest.raises(
        ValueError, match="Provider type UNSUPPORTED_PROVIDER not supported"
    ):
        check_provider_connection("provider_id")


@patch("tasks.jobs.connection.Provider.objects.get")
@patch("tasks.jobs.connection.prowler_provider_connection_test")
@pytest.mark.django_db
def test_check_provider_connection_exception(
    mock_provider_connection_test, mock_provider_get
):
    mock_provider_instance = MagicMock()
    mock_provider_instance.provider = Provider.ProviderChoices.AWS.value
    mock_provider_get.return_value = mock_provider_instance

    mock_provider_connection_test.return_value = MagicMock()
    mock_provider_connection_test.return_value.is_connected = False
    mock_provider_connection_test.return_value.error = Exception()

    result = check_provider_connection(provider_id="provider_id")

    assert result["connected"] is False
    assert result["error"] is not None

    mock_provider_instance.save.assert_called_once()
    assert mock_provider_instance.connected is False
22
api/src/backend/tasks/tests/test_deletion.py
Normal file
@@ -0,0 +1,22 @@
import pytest
from django.core.exceptions import ObjectDoesNotExist

from api.models import Provider
from tasks.jobs.deletion import delete_instance


@pytest.mark.django_db
class TestDeleteInstance:
    def test_delete_instance_success(self, providers_fixture):
        instance = providers_fixture[0]
        result = delete_instance(Provider, instance.id)

        assert result
        with pytest.raises(ObjectDoesNotExist):
            Provider.objects.get(pk=instance.id)

    def test_delete_instance_does_not_exist(self):
        non_existent_pk = "babf6796-cfcc-4fd3-9dcf-88d012247645"

        with pytest.raises(ObjectDoesNotExist):
            delete_instance(Provider, non_existent_pk)
360
api/src/backend/tasks/tests/test_scan.py
Normal file
@@ -0,0 +1,360 @@
from unittest.mock import patch, MagicMock

import pytest

from api.models import (
    StateChoices,
    Severity,
    Finding,
    Resource,
    StatusChoices,
    Provider,
)
from tasks.jobs.scan import (
    perform_prowler_scan,
    _create_finding_delta,
    _store_resources,
)


@pytest.mark.django_db
class TestPerformScan:
    def test_perform_prowler_scan_success(
        self,
        tenants_fixture,
        scans_fixture,
        providers_fixture,
    ):
        with (
            patch("api.db_utils.tenant_transaction"),
            patch(
                "tasks.jobs.scan.initialize_prowler_provider"
            ) as mock_initialize_prowler_provider,
            patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
            patch(
                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
                new_callable=dict,
            ) as mock_prowler_compliance_overview_template,
            patch(
                "api.compliance.PROWLER_CHECKS", new_callable=dict
            ) as mock_prowler_checks,
        ):
            # Set up the mock PROWLER_CHECKS
            mock_prowler_checks["aws"] = {
                "check1": {"compliance1"},
                "check2": {"compliance1", "compliance2"},
            }

            # Set up the mock PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
            mock_prowler_compliance_overview_template["aws"] = {
                "compliance1": {
                    "framework": "Framework 1",
                    "version": "1.0",
                    "provider": "aws",
                    "description": "Description of compliance1",
                    "requirements": {
                        "requirement1": {
                            "name": "Requirement 1",
                            "description": "Description of requirement 1",
                            "attributes": [],
                            "checks": {"check1": None, "check2": None},
                            "checks_status": {
                                "pass": 0,
                                "fail": 0,
                                "total": 2,
                            },
                            "status": "PASS",
                        }
                    },
                    "requirements_status": {
                        "passed": 1,
                        "failed": 0,
                        "manual": 0,
                    },
                    "total_requirements": 1,
                }
            }

            # Ensure the database is empty
            assert Finding.objects.count() == 0
            assert Resource.objects.count() == 0

            tenant = tenants_fixture[0]
            scan = scans_fixture[0]
            provider = providers_fixture[0]

            # Ensure the provider type is 'aws' to match our mocks
            provider.provider = Provider.ProviderChoices.AWS
            provider.save()

            tenant_id = str(tenant.id)
            scan_id = str(scan.id)
            provider_id = str(provider.id)
            checks_to_execute = ["check1", "check2"]

            # Mock the findings returned by the prowler scan
            finding = MagicMock()
            finding.uid = "this_is_a_test_finding_id"
            finding.status = StatusChoices.PASS
            finding.status_extended = "test status extended"
            finding.severity = Severity.medium
            finding.check_id = "check1"
            finding.get_metadata.return_value = {"key": "value"}
            finding.resource_uid = "resource_uid"
            finding.resource_name = "resource_name"
            finding.region = "region"
            finding.service_name = "service_name"
            finding.resource_type = "resource_type"
            finding.resource_tags = {"tag1": "value1", "tag2": "value2"}
            finding.raw = {}

            # Mock the ProwlerScan instance
            mock_prowler_scan_instance = MagicMock()
            mock_prowler_scan_instance.scan.return_value = [(100, [finding])]
            mock_prowler_scan_class.return_value = mock_prowler_scan_instance

            # Mock prowler_provider.get_regions()
            mock_prowler_provider_instance = MagicMock()
            mock_prowler_provider_instance.get_regions.return_value = ["region"]
            mock_initialize_prowler_provider.return_value = (
                mock_prowler_provider_instance
            )

            # Call the function under test
            perform_prowler_scan(tenant_id, scan_id, provider_id, checks_to_execute)

            # Refresh instances from the database
            scan.refresh_from_db()
            scan_finding = Finding.objects.get(scan=scan)
            scan_resource = Resource.objects.get(provider=provider)

            # Assertions
            assert scan.tenant == tenant
            assert scan.provider == provider
            assert scan.state == StateChoices.COMPLETED
            assert scan.completed_at is not None
            assert scan.duration is not None
            assert scan.started_at is not None
            assert scan.unique_resource_count == 1
            assert scan.progress == 100

            assert scan_finding.uid == finding.uid
            assert scan_finding.status == finding.status
            assert scan_finding.status_extended == finding.status_extended
            assert scan_finding.severity == finding.severity
            assert scan_finding.check_id == finding.check_id
            assert scan_finding.raw_result == finding.raw

            assert scan_resource.tenant == tenant
            assert scan_resource.uid == finding.resource_uid
            assert scan_resource.region == finding.region
            assert scan_resource.service == finding.service_name
            assert scan_resource.type == finding.resource_type
            assert scan_resource.name == finding.resource_name

            # Assert that the resource tags have been created and associated
            tags = scan_resource.tags.all()
            assert tags.count() == 2
            tag_keys = {tag.key for tag in tags}
            tag_values = {tag.value for tag in tags}
            assert tag_keys == set(finding.resource_tags.keys())
            assert tag_values == set(finding.resource_tags.values())

    @patch("tasks.jobs.scan.ProwlerScan")
    @patch(
        "tasks.jobs.scan.initialize_prowler_provider",
        side_effect=Exception("Connection error"),
    )
    @patch("api.db_utils.tenant_transaction")
    def test_perform_prowler_scan_no_connection(
        self,
        mock_tenant_transaction,
        mock_initialize_prowler_provider,
        mock_prowler_scan_class,
        tenants_fixture,
        scans_fixture,
        providers_fixture,
    ):
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        provider = providers_fixture[0]

        tenant_id = str(tenant.id)
        scan_id = str(scan.id)
        provider_id = str(provider.id)
        checks_to_execute = ["check1", "check2"]

        with pytest.raises(ValueError):
            perform_prowler_scan(tenant_id, scan_id, provider_id, checks_to_execute)

        scan.refresh_from_db()
        assert scan.state == StateChoices.FAILED

    @pytest.mark.parametrize(
        "last_status, new_status, expected_delta",
        [
            (None, None, Finding.DeltaChoices.NEW),
            (None, StatusChoices.PASS, Finding.DeltaChoices.NEW),
            (StatusChoices.PASS, StatusChoices.PASS, None),
            (StatusChoices.PASS, StatusChoices.FAIL, Finding.DeltaChoices.CHANGED),
            (StatusChoices.FAIL, StatusChoices.PASS, Finding.DeltaChoices.CHANGED),
        ],
    )
    def test_create_finding_delta(self, last_status, new_status, expected_delta):
        assert _create_finding_delta(last_status, new_status) == expected_delta

    @patch("api.models.ResourceTag.objects.get_or_create")
    @patch("api.models.Resource.objects.get_or_create")
    @patch("api.db_utils.tenant_transaction")
    def test_store_resources_new_resource(
        self,
        mock_tenant_transaction,
        mock_get_or_create_resource,
        mock_get_or_create_tag,
    ):
        tenant_id = "tenant123"
        provider_instance = MagicMock()
        provider_instance.id = "provider456"

        finding = MagicMock()
        finding.resource_uid = "resource_uid_123"
        finding.resource_name = "resource_name"
        finding.region = "us-west-1"
        finding.service_name = "service_name"
        finding.resource_type = "resource_type"
        finding.resource_tags = {"tag1": "value1", "tag2": "value2"}

        resource_instance = MagicMock()
        resource_instance.uid = finding.resource_uid
        resource_instance.region = finding.region

        mock_get_or_create_resource.return_value = (resource_instance, True)
        tag_instance = MagicMock()
        mock_get_or_create_tag.return_value = (tag_instance, True)

        resource, resource_uid_tuple = _store_resources(
            finding, tenant_id, provider_instance
        )

        mock_get_or_create_resource.assert_called_once_with(
            tenant_id=tenant_id,
            provider=provider_instance,
            uid=finding.resource_uid,
            defaults={
                "region": finding.region,
                "service": finding.service_name,
                "type": finding.resource_type,
            },
        )

        assert resource == resource_instance
        assert resource_uid_tuple == (resource_instance.uid, resource_instance.region)
        resource_instance.upsert_or_delete_tags.assert_called_once()

    @patch("api.models.ResourceTag.objects.get_or_create")
    @patch("api.models.Resource.objects.get_or_create")
    @patch("api.db_utils.tenant_transaction")
    def test_store_resources_existing_resource(
        self,
        mock_tenant_transaction,
        mock_get_or_create_resource,
        mock_get_or_create_tag,
    ):
        tenant_id = "tenant123"
        provider_instance = MagicMock()
        provider_instance.id = "provider456"

        finding = MagicMock()
        finding.resource_uid = "resource_uid_123"
        finding.resource_name = "resource_name"
        finding.region = "us-west-2"
        finding.service_name = "new_service"
        finding.resource_type = "new_type"
        finding.resource_tags = {"tag1": "value1", "tag2": "value2"}

        resource_instance = MagicMock()
        resource_instance.uid = finding.resource_uid
        resource_instance.region = "us-west-1"
        resource_instance.service = "old_service"
        resource_instance.type = "old_type"

        mock_get_or_create_resource.return_value = (resource_instance, False)

        tag_instance = MagicMock()
        mock_get_or_create_tag.return_value = (tag_instance, True)

        resource, resource_uid_tuple = _store_resources(
            finding, tenant_id, provider_instance
        )

        mock_get_or_create_resource.assert_called_once_with(
            tenant_id=tenant_id,
            provider=provider_instance,
            uid=finding.resource_uid,
            defaults={
                "region": finding.region,
                "service": finding.service_name,
                "type": finding.resource_type,
            },
        )

        # Check that resource fields were updated
        assert resource_instance.region == finding.region
        assert resource_instance.service == finding.service_name
        assert resource_instance.type == finding.resource_type
        resource_instance.save.assert_called_once()

        assert resource == resource_instance
        assert resource_uid_tuple == (resource_instance.uid, resource_instance.region)
        resource_instance.upsert_or_delete_tags.assert_called_once()

    @patch("api.models.ResourceTag.objects.get_or_create")
    @patch("api.models.Resource.objects.get_or_create")
    @patch("api.db_utils.tenant_transaction")
    def test_store_resources_with_tags(
        self,
        mock_tenant_transaction,
        mock_get_or_create_resource,
        mock_get_or_create_tag,
    ):
        tenant_id = "tenant123"
        provider_instance = MagicMock()
        provider_instance.id = "provider456"

        finding = MagicMock()
        finding.resource_uid = "resource_uid_123"
        finding.resource_name = "resource_name"
        finding.region = "us-west-1"
        finding.service_name = "service_name"
        finding.resource_type = "resource_type"
        finding.resource_tags = {"tag1": "value1", "tag2": "value2"}

        resource_instance = MagicMock()
        resource_instance.uid = finding.resource_uid
        resource_instance.region = finding.region

        mock_get_or_create_resource.return_value = (resource_instance, True)
        tag_instance_1 = MagicMock()
        tag_instance_2 = MagicMock()
        mock_get_or_create_tag.side_effect = [
            (tag_instance_1, True),
            (tag_instance_2, True),
        ]

        resource, resource_uid_tuple = _store_resources(
            finding, tenant_id, provider_instance
        )

        mock_get_or_create_tag.assert_any_call(
            tenant_id=tenant_id, key="tag1", value="value1"
        )
        mock_get_or_create_tag.assert_any_call(
            tenant_id=tenant_id, key="tag2", value="value2"
        )
        resource_instance.upsert_or_delete_tags.assert_called_once()
        tags_passed = resource_instance.upsert_or_delete_tags.call_args[1]["tags"]
        assert tag_instance_1 in tags_passed
        assert tag_instance_2 in tags_passed

        assert resource == resource_instance
        assert resource_uid_tuple == (resource_instance.uid, resource_instance.region)
0
api/tests/README.md
Normal file
Some files were not shown because too many files have changed in this diff.