Compare commits


19 Commits

Author · SHA1 · Message · Date
Pablo Lara · 2d190eb020 · chore: add manage group actions · 2024-12-18 17:38:09 +01:00
Pablo Lara · 0459a4d6f6 · Merge branch 'PRWLR-5824-Update-resource-name-and-enable-relationships-on-role-and-provider_group-create-update' into PRWLR-4669-Roles-Page-UI-with-API-changes · 2024-12-18 12:10:45 +01:00
Pablo Lara · 19df649554 · chore: add manage group actions · 2024-12-18 11:15:37 +01:00
Adrián Jesús Peña Rodríguez · 737550eb05 · ref(rbac): update spec · 2024-12-18 11:08:19 +01:00
Adrián Jesús Peña Rodríguez · 68d7d9f998 · ref(rbac): enable relationship creation when objects is created · 2024-12-18 11:05:29 +01:00
Pablo Lara · 3c9a8b3634 · chore: add manage group component · 2024-12-17 11:13:37 +01:00
Pablo Lara · 81f970f2d3 · chore: refactor updateInvite action form · 2024-12-16 13:34:16 +01:00
Pablo Lara · 3d9cd177a2 · chore: add role column to the table users · 2024-12-16 12:59:40 +01:00
Pablo Lara · c49fdc114a · chore: report an error related to RBAC API side · 2024-12-15 11:43:46 +01:00
Pablo Lara · 95fd9d6b5e · WIP: add change role to the user's invitations · 2024-12-15 10:52:41 +01:00
Pablo Lara · 6a5bc75252 · chore: add change role to the user's invitations · 2024-12-15 10:52:33 +01:00
Pablo Lara · 858c04b0b0 · chore: fix error with exports · 2024-12-15 10:52:09 +01:00
Pablo Lara · 2d6f20e84b · feat: add role when invite an user · 2024-12-15 10:51:58 +01:00
Pablo Lara · b0a98b1a87 · feat: add permission column to roles table · 2024-12-15 10:51:49 +01:00
Pablo Lara · 577530ac69 · chore: add and edit roles is working now · 2024-12-15 10:51:10 +01:00
Pablo Lara · c1a8d47e5b · feat: edit role feature · 2024-12-15 10:50:39 +01:00
Pablo Lara · e80704d6f0 · feat: add new role feature · 2024-12-15 10:50:32 +01:00
Pablo Lara · 010de4b415 · feat: add roles page · 2024-12-15 10:50:24 +01:00
Pablo Lara · 0a2b8e4315 · chore: add roles item to the sidebar · 2024-12-15 10:50:16 +01:00
3557 changed files with 47465 additions and 341667 deletions

.env (75 changed lines)

@@ -3,21 +3,16 @@
# For production, it is recommended to use a secure method to store these variables and change the default secret keys.
#### Prowler UI Configuration ####
PROWLER_UI_VERSION="stable"
AUTH_URL=http://localhost:3000
PROWLER_UI_VERSION="latest"
SITE_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
NEXT_PUBLIC_API_BASE_URL=${API_BASE_URL}
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
AUTH_TRUST_HOST=true
UI_PORT=3000
# openssl rand -base64 32
AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
# Google Tag Manager ID
NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID=""
#### Prowler API Configuration ####
PROWLER_API_VERSION="stable"
PROWLER_API_VERSION="latest"
# PostgreSQL settings
# If running Django and celery on host, use 'localhost', else use 'postgres-db'
POSTGRES_HOST=postgres-db
@@ -28,40 +23,12 @@ POSTGRES_USER=prowler
POSTGRES_PASSWORD=postgres
POSTGRES_DB=prowler_db
# Celery-Prowler task settings
TASK_RETRY_DELAY_SECONDS=0.1
TASK_RETRY_ATTEMPTS=5
# Valkey settings
# If running Valkey and celery on host, use localhost, else use 'valkey'
VALKEY_HOST=valkey
VALKEY_PORT=6379
VALKEY_DB=0
# API scan settings
# The path to the directory where scan output should be stored
DJANGO_TMP_OUTPUT_DIRECTORY="/tmp/prowler_api_output"
# The maximum number of findings to process in a single batch
DJANGO_FINDINGS_BATCH_SIZE=1000
# The AWS access key to be used when uploading scan output to an S3 bucket
# If left empty, default AWS credentials resolution behavior will be used
DJANGO_OUTPUT_S3_AWS_ACCESS_KEY_ID=""
# The AWS secret key to be used when uploading scan output to an S3 bucket
DJANGO_OUTPUT_S3_AWS_SECRET_ACCESS_KEY=""
# An optional AWS session token
DJANGO_OUTPUT_S3_AWS_SESSION_TOKEN=""
# The AWS region where your S3 bucket is located (e.g., "us-east-1")
DJANGO_OUTPUT_S3_AWS_DEFAULT_REGION=""
# The name of the S3 bucket where scan output should be stored
DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET=""
# Django settings
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api
DJANGO_BIND_ADDRESS=0.0.0.0
@@ -73,12 +40,9 @@ DJANGO_LOGGING_FORMATTER=human_readable
# Select one of [DEBUG|INFO|WARNING|ERROR|CRITICAL]
# Applies to both Django and Celery Workers
DJANGO_LOGGING_LEVEL=INFO
# Defaults to the maximum available based on CPU cores if not set.
DJANGO_WORKERS=4
# Token lifetime is in minutes
DJANGO_ACCESS_TOKEN_LIFETIME=30
# Token lifetime is in minutes
DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_WORKERS=4 # Defaults to the maximum available based on CPU cores if not set.
DJANGO_ACCESS_TOKEN_LIFETIME=30 # Token lifetime is in minutes
DJANGO_REFRESH_TOKEN_LIFETIME=1440 # Token lifetime is in minutes
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_MANAGE_DB_PARTITIONS=True
@@ -123,30 +87,3 @@ jQIDAQAB
-----END PUBLIC KEY-----"
# openssl rand -base64 32
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=
# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local
#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.7.5
# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
SOCIAL_GOOGLE_OAUTH_CLIENT_ID=""
SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET=""
SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""
# Single Sign-On (SSO)
SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"
# Lighthouse tracing
LANGSMITH_TRACING=false
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
LANGSMITH_API_KEY=""
LANGCHAIN_PROJECT=""
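
The default secrets above (AUTH_SECRET, DJANGO_SECRETS_ENCRYPTION_KEY) are placeholders; as the inline "# openssl rand -base64 32" comments and the production note at the top of the file indicate, they should be replaced. A minimal sketch of generating fresh values, assuming openssl is available on the PATH:

    # Generate fresh 32-byte base64 secrets, one per variable
    openssl rand -base64 32   # use the output as AUTH_SECRET
    openssl rand -base64 32   # use the output as DJANGO_SECRETS_ENCRYPTION_KEY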

.github/dependabot.yml (146 changed lines, vendored)

@@ -9,112 +9,96 @@ updates:
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 25
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "pip"
# Dependabot Updates are temporary disabled - 2025/03/19
# - package-ecosystem: "pip"
# directory: "/api"
# schedule:
# interval: "daily"
# open-pull-requests-limit: 10
# target-branch: master
# labels:
# - "dependencies"
# - "pip"
# - "component/api"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 25
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "github_actions"
# Dependabot Updates are temporary disabled - 2025/03/19
# - package-ecosystem: "npm"
# directory: "/ui"
# schedule:
# interval: "daily"
# open-pull-requests-limit: 10
# target-branch: master
# labels:
# - "dependencies"
# - "npm"
# - "component/ui"
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "npm"
- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 25
interval: "weekly"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "docker"
# Dependabot Updates are temporary disabled - 2025/04/15
# v4.6
# - package-ecosystem: "pip"
# directory: "/"
# schedule:
# interval: "weekly"
# open-pull-requests-limit: 10
# target-branch: v4.6
# labels:
# - "dependencies"
# - "pip"
# - "v4"
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "pip"
- "v4"
# - package-ecosystem: "github-actions"
# directory: "/"
# schedule:
# interval: "weekly"
# open-pull-requests-limit: 10
# target-branch: v4.6
# labels:
# - "dependencies"
# - "github_actions"
# - "v4"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "github_actions"
- "v4"
# - package-ecosystem: "docker"
# directory: "/"
# schedule:
# interval: "weekly"
# open-pull-requests-limit: 10
# target-branch: v4.6
# labels:
# - "dependencies"
# - "docker"
# - "v4"
- package-ecosystem: "docker"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v4.6
labels:
- "dependencies"
- "docker"
- "v4"
# Dependabot Updates are temporary disabled - 2025/03/19
# v3
# - package-ecosystem: "pip"
# directory: "/"
# schedule:
# interval: "monthly"
# open-pull-requests-limit: 10
# target-branch: v3
# labels:
# - "dependencies"
# - "pip"
# - "v3"
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "pip"
- "v3"
# - package-ecosystem: "github-actions"
# directory: "/"
# schedule:
# interval: "monthly"
# open-pull-requests-limit: 10
# target-branch: v3
# labels:
# - "dependencies"
# - "github_actions"
# - "v3"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "github_actions"
- "v3"

.github/labeler.yml (15 changed lines, vendored)

@@ -27,11 +27,6 @@ provider/github:
- any-glob-to-any-file: "prowler/providers/github/**"
- any-glob-to-any-file: "tests/providers/github/**"
provider/iac:
- changed-files:
- any-glob-to-any-file: "prowler/providers/iac/**"
- any-glob-to-any-file: "tests/providers/iac/**"
github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"
@@ -97,13 +92,3 @@ component/api:
component/ui:
- changed-files:
- any-glob-to-any-file: "ui/**"
compliance:
- changed-files:
- any-glob-to-any-file: "prowler/compliance/**"
- any-glob-to-any-file: "prowler/lib/outputs/compliance/**"
- any-glob-to-any-file: "tests/lib/outputs/compliance/**"
review-django-migrations:
- changed-files:
- any-glob-to-any-file: "api/src/backend/api/migrations/**"

View File

@@ -15,14 +15,7 @@ Please include a summary of the change and which issue is fixed. List any depend
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
- [ ] Review if is needed to change the [Readme.md](https://github.com/prowler-cloud/prowler/blob/master/README.md)
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/prowler/CHANGELOG.md), if applicable.
#### API
- [ ] Verify if API specs need to be regenerated.
- [ ] Check if version updates are required (e.g., specs, Poetry, etc.).
- [ ] Ensure new entries are added to [CHANGELOG.md](https://github.com/prowler-cloud/prowler/blob/master/api/CHANGELOG.md), if applicable.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

View File

@@ -6,7 +6,6 @@ on:
- "master"
paths:
- "api/**"
- "prowler/**"
- ".github/workflows/api-build-lint-push-containers.yml"
# Uncomment the code below to test this action on PRs
@@ -24,7 +23,6 @@ env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
STABLE_TAG: stable
WORKING_DIRECTORY: ./api
@@ -33,83 +31,51 @@ env:
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-api
jobs:
repository-check:
name: Repository check
runs-on: ubuntu-latest
outputs:
is_repo: ${{ steps.repository_check.outputs.is_repo }}
steps:
- name: Repository check
id: repository_check
working-directory: /tmp
run: |
if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
then
echo "is_repo=true" >> "${GITHUB_OUTPUT}"
else
echo "This action only runs for prowler-cloud/prowler"
echo "is_repo=false" >> "${GITHUB_OUTPUT}"
fi
# Build Prowler OSS container
container-build-push:
needs: repository-check
if: needs.repository-check.outputs.is_repo == 'true'
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ env.WORKING_DIRECTORY }}
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set short git commit SHA
id: vars
- name: Repository check
working-directory: /tmp
run: |
shortSha=$(git rev-parse --short ${{ github.sha }})
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
[[ ${{ github.repository }} != "prowler-cloud/prowler" ]] && echo "This action only runs for prowler-cloud/prowler"; exit 0
- name: Checkout
uses: actions/checkout@v4
- name: Login to DockerHub
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Trigger deployment
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}
event-type: prowler-api-deploy
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'

View File

@@ -15,12 +15,16 @@ on:
push:
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
paths:
- "api/**"
pull_request:
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
paths:
- "api/**"
@@ -44,16 +48,16 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View File

@@ -4,17 +4,15 @@ on:
push:
branches:
- "master"
- "v5.*"
paths:
- ".github/workflows/api-pull-request.yml"
- "api/**"
pull_request:
branches:
- "master"
- "v5.*"
paths:
- "api/**"
env:
POSTGRES_HOST: localhost
POSTGRES_PORT: 5432
@@ -26,12 +24,7 @@ env:
VALKEY_HOST: localhost
VALKEY_PORT: 6379
VALKEY_DB: 0
API_WORKING_DIR: ./api
IMAGE_NAME: prowler-api
IGNORE_FILES: |
api/docs/**
api/README.md
api/CHANGELOG.md
jobs:
test:
@@ -75,54 +68,39 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
uses: tj-actions/changed-files@v45
with:
files: api/**
files_ignore: ${{ env.IGNORE_FILES }}
- name: Replace @master with current branch in pyproject.toml
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
echo "Using branch: $BRANCH_NAME"
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml
files_ignore: |
api/.github/**
api/docs/**
api/permissions/**
api/README.md
api/mkdocs.yml
- name: Install poetry
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry==2.1.1
- name: Update poetry.lock after the branch name change
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry lock
pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install system dependencies for xmlsec
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
sudo apt-get update
sudo apt-get install -y libxml2-dev libxmlsec1-dev libxmlsec1-openssl pkg-config
- name: Install dependencies
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry install --no-root
poetry install
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
@@ -134,7 +112,7 @@ jobs:
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry check --lock
poetry lock --check
- name: Lint with ruff
working-directory: ./api
@@ -163,9 +141,8 @@ jobs:
- name: Safety
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
run: |
poetry run safety check --ignore 70612,66963,74429,76352,76353,77323
poetry run safety check --ignore 70612,66963
- name: Vulture
working-directory: ./api
@@ -187,32 +164,8 @@ jobs:
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
uses: codecov/codecov-action@v5
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: api
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: api/**
files_ignore: ${{ env.IGNORE_FILES }}
- name: Set up Docker Buildx
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build Container
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.API_WORKING_DIR }}
push: false
tags: ${{ env.IMAGE_NAME }}:latest
outputs: type=docker
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -23,7 +23,7 @@ jobs:
steps:
- name: Check labels
id: preview_label_check
uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
uses: docker://agilepathway/pull-request-label-checker:v1.6.55
with:
allow_failure: true
prefix_mode: true
@@ -33,7 +33,7 @@ jobs:
- name: Backport Action
if: steps.preview_label_check.outputs.label_check == 'success'
uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
uses: sorenlouv/backport-github-action@v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}

View File

@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Leave PR comment with the Prowler Documentation URI
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.PR_NUMBER }}
body: |

View File

@@ -1,23 +0,0 @@
name: Prowler - Conventional Commit
on:
pull_request:
types:
- "opened"
- "edited"
- "synchronize"
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
jobs:
conventional-commit-check:
runs-on: ubuntu-latest
steps:
- name: conventional-commit-check
id: conventional-commit-check
uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
with:
pr-title-regex: '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'
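
For reference, the pr-title-regex above matches conventional-commit style titles like the ones in the commit list (for example "feat: add roles page" or "ref(rbac): update spec"). A minimal local check, assuming GNU grep with PCRE support:

    # Exit status 0 means the PR title satisfies the conventional-commit pattern
    echo "ref(rbac): update spec" | grep -qP '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'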

View File

@@ -1,67 +0,0 @@
name: Create Backport Label
on:
release:
types: [published]
jobs:
create_label:
runs-on: ubuntu-latest
permissions:
contents: write
issues: write
steps:
- name: Create backport label
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RELEASE_TAG: ${{ github.event.release.tag_name }}
OWNER_REPO: ${{ github.repository }}
run: |
VERSION_ONLY=${RELEASE_TAG#v} # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
# Check if it's a minor version (X.Y.0)
if [[ "$VERSION_ONLY" =~ ^[0-9]+\.[0-9]+\.0$ ]]; then
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is a minor version. Proceeding to create backport label."
TWO_DIGIT_VERSION=${VERSION_ONLY%.0} # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
FINAL_LABEL_NAME="backport-to-v${TWO_DIGIT_VERSION}"
FINAL_DESCRIPTION="Backport PR to the v${TWO_DIGIT_VERSION} branch"
echo "Effective label name will be: ${FINAL_LABEL_NAME}"
echo "Effective description will be: ${FINAL_DESCRIPTION}"
# Check if the label already exists
STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" "https://api.github.com/repos/${OWNER_REPO}/labels/${FINAL_LABEL_NAME}")
if [ "${STATUS_CODE}" -eq 200 ]; then
echo "Label '${FINAL_LABEL_NAME}' already exists."
elif [ "${STATUS_CODE}" -eq 404 ]; then
echo "Label '${FINAL_LABEL_NAME}' does not exist. Creating it..."
# Prepare JSON data payload
JSON_DATA=$(printf '{"name":"%s","description":"%s","color":"B60205"}' "${FINAL_LABEL_NAME}" "${FINAL_DESCRIPTION}")
CREATE_STATUS_CODE=$(curl -s -o /tmp/curl_create_response.json -w "%{http_code}" -X POST \
-H "Accept: application/vnd.github.v3+json" \
-H "Authorization: token ${GITHUB_TOKEN}" \
--data "${JSON_DATA}" \
"https://api.github.com/repos/${OWNER_REPO}/labels")
CREATE_RESPONSE_BODY=$(cat /tmp/curl_create_response.json)
rm -f /tmp/curl_create_response.json
if [ "$CREATE_STATUS_CODE" -eq 201 ]; then
echo "Label '${FINAL_LABEL_NAME}' created successfully."
else
echo "Error creating label '${FINAL_LABEL_NAME}'. Status: $CREATE_STATUS_CODE"
echo "Response: $CREATE_RESPONSE_BODY"
exit 1
fi
else
echo "Error checking for label '${FINAL_LABEL_NAME}'. HTTP Status: ${STATUS_CODE}"
exit 1
fi
else
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is not a minor version. Skipping backport label creation."
exit 0
fi

View File

@@ -7,11 +7,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@6641d4ba5b684fffe195b9820345de1bf19f3181 # v3.89.2
uses: trufflesecurity/trufflehog@v3.86.1
with:
path: ./
base: ${{ github.event.repository.default_branch }}

View File

@@ -14,4 +14,4 @@ jobs:
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
- uses: actions/labeler@v5

View File

@@ -1,215 +0,0 @@
name: Prowler Release Preparation
run-name: Prowler Release Preparation for ${{ inputs.prowler_version }}
on:
workflow_dispatch:
inputs:
prowler_version:
description: 'Prowler version to release (e.g., 5.9.0)'
required: true
type: string
env:
PROWLER_VERSION: ${{ github.event.inputs.prowler_version }}
jobs:
prepare-release:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Parse version and determine branch
run: |
# Validate version format (reusing pattern from sdk-bump-version.yml)
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
MAJOR_VERSION=${BASH_REMATCH[1]}
MINOR_VERSION=${BASH_REMATCH[2]}
PATCH_VERSION=${BASH_REMATCH[3]}
# Export version components to environment
echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
echo "PATCH_VERSION=${PATCH_VERSION}" >> "${GITHUB_ENV}"
# Determine branch name (format: v5.9)
BRANCH_NAME="v${MAJOR_VERSION}.${MINOR_VERSION}"
echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_ENV}"
# Calculate UI version (1.X.X format - matches Prowler minor version)
UI_VERSION="1.${MINOR_VERSION}.${PATCH_VERSION}"
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
# Calculate API version (1.X.X format - one minor version ahead)
API_MINOR_VERSION=$((MINOR_VERSION + 1))
API_VERSION="1.${API_MINOR_VERSION}.${PATCH_VERSION}"
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
echo "Prowler version: $PROWLER_VERSION"
echo "Branch name: $BRANCH_NAME"
echo "UI version: $UI_VERSION"
echo "API version: $API_VERSION"
echo "Is minor release: $([ $PATCH_VERSION -eq 0 ] && echo 'true' || echo 'false')"
else
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
exit 1
fi
- name: Checkout existing branch for patch release
if: ${{ env.PATCH_VERSION != '0' }}
run: |
echo "Patch release detected, checking out existing branch $BRANCH_NAME..."
if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME"; then
echo "Branch $BRANCH_NAME exists locally, checking out..."
git checkout "$BRANCH_NAME"
elif git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
echo "Branch $BRANCH_NAME exists remotely, checking out..."
git checkout -b "$BRANCH_NAME" "origin/$BRANCH_NAME"
else
echo "ERROR: Branch $BRANCH_NAME should exist for patch release $PROWLER_VERSION"
exit 1
fi
- name: Verify version in pyproject.toml
run: |
CURRENT_VERSION=$(grep '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Version mismatch in pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
exit 1
fi
echo "✓ pyproject.toml version: $CURRENT_VERSION"
- name: Verify version in prowler/config/config.py
run: |
CURRENT_VERSION=$(grep '^prowler_version = ' prowler/config/config.py | sed -E 's/prowler_version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Version mismatch in prowler/config/config.py (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
exit 1
fi
echo "✓ prowler/config/config.py version: $CURRENT_VERSION"
- name: Verify version in api/pyproject.toml
run: |
CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
echo "ERROR: API version mismatch in api/pyproject.toml (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
exit 1
fi
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
- name: Verify prowler dependency in api/pyproject.toml
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_PROWLER_REF" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_PROWLER_REF')"
exit 1
fi
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
- name: Verify version in api/src/backend/api/v1/views.py
run: |
CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
echo "ERROR: API version mismatch in views.py (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
exit 1
fi
echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"
- name: Create release branch for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |
echo "Minor release detected (patch = 0), creating new branch $BRANCH_NAME..."
if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME" || git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
echo "ERROR: Branch $BRANCH_NAME already exists for minor release $PROWLER_VERSION"
exit 1
fi
git checkout -b "$BRANCH_NAME"
- name: Extract changelog entries
run: |
set -e
# Function to extract changelog for a specific version
extract_changelog() {
local file="$1"
local version="$2"
local output_file="$3"
if [ ! -f "$file" ]; then
echo "Warning: $file not found, skipping..."
touch "$output_file"
return
fi
# Extract changelog section for this version
awk -v version="$version" '
/^## \[v?'"$version"'\]/ { found=1; next }
found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
found && !/^## \[v?'"$version"'\]/ { print }
' "$file" > "$output_file"
# Remove --- separators
sed -i '/^---$/d' "$output_file"
# Remove trailing empty lines
sed -i '/^$/d' "$output_file"
}
# Extract changelogs
echo "Extracting changelog entries..."
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
# Combine changelogs in order: UI, API, SDK
> combined_changelog.md
if [ -s "ui_changelog.md" ]; then
echo "## UI" >> combined_changelog.md
echo "" >> combined_changelog.md
cat ui_changelog.md >> combined_changelog.md
echo "" >> combined_changelog.md
fi
if [ -s "api_changelog.md" ]; then
echo "## API" >> combined_changelog.md
echo "" >> combined_changelog.md
cat api_changelog.md >> combined_changelog.md
echo "" >> combined_changelog.md
fi
if [ -s "prowler_changelog.md" ]; then
echo "## SDK" >> combined_changelog.md
echo "" >> combined_changelog.md
cat prowler_changelog.md >> combined_changelog.md
echo "" >> combined_changelog.md
fi
echo "Combined changelog preview:"
cat combined_changelog.md
- name: Create draft release
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
with:
tag_name: ${{ env.PROWLER_VERSION }}
name: Prowler ${{ env.PROWLER_VERSION }}
body_path: combined_changelog.md
draft: true
target_commitish: ${{ env.PATCH_VERSION == '0' && 'master' || env.BRANCH_NAME }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean up temporary files
run: |
rm -f prowler_changelog.md api_changelog.md ui_changelog.md combined_changelog.md

View File

@@ -1,86 +0,0 @@
name: Check Changelog
on:
pull_request:
types: [opened, synchronize, reopened, labeled, unlabeled]
jobs:
check-changelog:
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
pull-requests: write
env:
MONITORED_FOLDERS: "api ui prowler"
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Get list of changed files
id: changed_files
run: |
git fetch origin ${{ github.base_ref }}
git diff --name-only origin/${{ github.base_ref }}...HEAD > changed_files.txt
cat changed_files.txt
- name: Check for folder changes and changelog presence
id: check_folders
run: |
missing_changelogs=""
for folder in $MONITORED_FOLDERS; do
if grep -q "^${folder}/" changed_files.txt; then
echo "Detected changes in ${folder}/"
if ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
echo "No changelog update found for ${folder}/"
missing_changelogs="${missing_changelogs}- \`${folder}\`\n"
fi
fi
done
echo "missing_changelogs<<EOF" >> $GITHUB_OUTPUT
echo -e "${missing_changelogs}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Find existing changelog comment
if: github.event.pull_request.head.repo.full_name == github.repository
id: find_comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- changelog-check -->'
- name: Comment on PR if changelog is missing
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs != ''
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.find_comment.outputs.comment-id }}
body: |
<!-- changelog-check -->
⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**
${{ steps.check_folders.outputs.missing_changelogs }}
Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.
- name: Comment on PR if all changelogs are present
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs == ''
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.find_comment.outputs.comment-id }}
body: |
<!-- changelog-check -->
✅ All necessary `CHANGELOG.md` files have been updated. Great job! 🎉
- name: Fail if changelog is missing
if: steps.check_folders.outputs.missing_changelogs != ''
run: |
echo "ERROR: Missing changelog updates in some folders."
exit 1

View File

@@ -1,37 +0,0 @@
name: Prowler - Merged Pull Request
on:
pull_request_target:
branches: ['master']
types: ['closed']
jobs:
trigger-cloud-pull-request:
name: Trigger Cloud Pull Request
if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
ref: ${{ github.event.pull_request.merge_commit_sha }}
- name: Set short git commit SHA
id: vars
run: |
shortSha=$(git rev-parse --short ${{ github.event.pull_request.merge_commit_sha }})
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
- name: Trigger pull request
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}
event-type: prowler-pull-request-merged
client-payload: '{
"PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
"PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
"PROWLER_PR_TITLE": "${{ github.event.pull_request.title }}",
"PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
"PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
"PROWLER_PR_URL":${{ toJson(github.event.pull_request.html_url) }}
}'

View File

@@ -59,16 +59,16 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Install Poetry
run: |
pipx install poetry==2.*
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Get Prowler version
@@ -108,13 +108,13 @@ jobs:
esac
- name: Login to DockerHub
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Public ECR
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
uses: docker/login-action@v3
with:
registry: public.ecr.aws
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
@@ -123,11 +123,11 @@ jobs:
AWS_REGION: ${{ env.AWS_REGION }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6
with:
push: true
tags: |
@@ -140,7 +140,7 @@ jobs:
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6
with:
# Use local context to get changes
# https://github.com/docker/build-push-action#path-context

View File

@@ -1,147 +0,0 @@
name: SDK - Bump Version
on:
release:
types: [published]
env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master
jobs:
bump-version:
name: Bump Version
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Get Prowler version
shell: bash
run: |
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
MAJOR_VERSION=${BASH_REMATCH[1]}
MINOR_VERSION=${BASH_REMATCH[2]}
FIX_VERSION=${BASH_REMATCH[3]}
# Export version components to GitHub environment
echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
echo "FIX_VERSION=${FIX_VERSION}" >> "${GITHUB_ENV}"
if (( MAJOR_VERSION == 5 )); then
if (( FIX_VERSION == 0 )); then
echo "Minor Release: $PROWLER_VERSION"
# Set up next minor version for master
BUMP_VERSION_TO=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).${FIX_VERSION}
echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
TARGET_BRANCH=${BASE_BRANCH}
echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
# Set up patch version for version branch
PATCH_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.1
echo "PATCH_VERSION_TO=${PATCH_VERSION_TO}" >> "${GITHUB_ENV}"
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
echo "Bumping to next minor version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
echo "Bumping to next patch version: ${PATCH_VERSION_TO} in branch ${VERSION_BRANCH}"
else
echo "Patch Release: $PROWLER_VERSION"
BUMP_VERSION_TO=${MAJOR_VERSION}.${MINOR_VERSION}.$((FIX_VERSION + 1))
echo "BUMP_VERSION_TO=${BUMP_VERSION_TO}" >> "${GITHUB_ENV}"
TARGET_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
echo "TARGET_BRANCH=${TARGET_BRANCH}" >> "${GITHUB_ENV}"
echo "Bumping to next patch version: ${BUMP_VERSION_TO} in branch ${TARGET_BRANCH}"
fi
else
echo "Releasing another Prowler major version, aborting..."
exit 1
fi
else
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
exit 1
fi
- name: Bump versions in files
run: |
echo "Using PROWLER_VERSION=$PROWLER_VERSION"
echo "Using BUMP_VERSION_TO=$BUMP_VERSION_TO"
set -e
echo "Bumping version in pyproject.toml ..."
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${BUMP_VERSION_TO}\"|" pyproject.toml
echo "Bumping version in prowler/config/config.py ..."
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${BUMP_VERSION_TO}\"|" prowler/config/config.py
echo "Bumping version in .env ..."
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${BUMP_VERSION_TO}|" .env
git --no-pager diff
- name: Create Pull Request
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.TARGET_BRANCH }}
commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
labels: no-changelog
body: |
### Description
Bump Prowler version to v${{ env.BUMP_VERSION_TO }}
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Handle patch version for minor release
if: env.FIX_VERSION == '0'
run: |
echo "Using PROWLER_VERSION=$PROWLER_VERSION"
echo "Using PATCH_VERSION_TO=$PATCH_VERSION_TO"
set -e
echo "Bumping version in pyproject.toml ..."
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${PATCH_VERSION_TO}\"|" pyproject.toml
echo "Bumping version in prowler/config/config.py ..."
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${PATCH_VERSION_TO}\"|" prowler/config/config.py
echo "Bumping version in .env ..."
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PATCH_VERSION_TO}|" .env
git --no-pager diff
- name: Create Pull Request for patch version
if: env.FIX_VERSION == '0'
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
labels: no-changelog
body: |
### Description
Bump Prowler version to v${{ env.PATCH_VERSION_TO }}
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

View File

@@ -17,21 +17,17 @@ on:
- "master"
- "v3"
- "v4.*"
- "v5.*"
paths-ignore:
- 'ui/**'
- 'api/**'
- '.github/**'
pull_request:
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
paths-ignore:
- 'ui/**'
- 'api/**'
- '.github/**'
schedule:
- cron: '00 12 * * *'
@@ -52,16 +48,16 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/sdk-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View File

@@ -21,11 +21,11 @@ jobs:
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
uses: tj-actions/changed-files@v45
with:
files: ./**
files_ignore: |
@@ -34,24 +34,19 @@ jobs:
permissions/**
api/**
ui/**
prowler/CHANGELOG.md
README.md
mkdocs.yml
.backportrc.json
.env
docker-compose*
examples/**
.gitignore
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry==2.1.1
pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
@@ -59,7 +54,7 @@ jobs:
- name: Install dependencies
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry install --no-root
poetry install
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
@@ -70,7 +65,7 @@ jobs:
- name: Poetry check
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry check --lock
poetry lock --check
- name: Lint with flake8
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
@@ -102,155 +97,20 @@ jobs:
run: |
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
- name: Dockerfile - Check if Dockerfile has changed
id: dockerfile-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
Dockerfile
- name: Hadolint
if: steps.dockerfile-changed-files.outputs.any_changed == 'true'
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
# Test AWS
- name: AWS - Check if any file has changed
id: aws-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/aws/**
./tests/providers/aws/**
.poetry.lock
- name: AWS - Test
if: steps.aws-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
# Test Azure
- name: Azure - Check if any file has changed
id: azure-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/azure/**
./tests/providers/azure/**
.poetry.lock
- name: Azure - Test
if: steps.azure-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure
# Test GCP
- name: GCP - Check if any file has changed
id: gcp-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/gcp/**
./tests/providers/gcp/**
.poetry.lock
- name: GCP - Test
if: steps.gcp-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp
# Test Kubernetes
- name: Kubernetes - Check if any file has changed
id: kubernetes-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/kubernetes/**
./tests/providers/kubernetes/**
.poetry.lock
- name: Kubernetes - Test
if: steps.kubernetes-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes
# Test GitHub
- name: GitHub - Check if any file has changed
id: github-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/github/**
./tests/providers/github/**
.poetry.lock
- name: GitHub - Test
if: steps.github-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/github --cov-report=xml:github_coverage.xml tests/providers/github
# Test NHN
- name: NHN - Check if any file has changed
id: nhn-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/nhn/**
./tests/providers/nhn/**
.poetry.lock
- name: NHN - Test
if: steps.nhn-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn
# Test M365
- name: M365 - Check if any file has changed
id: m365-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/m365/**
./tests/providers/m365/**
.poetry.lock
- name: M365 - Test
if: steps.m365-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
# Test IaC
- name: IaC - Check if any file has changed
id: iac-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/iac/**
./tests/providers/iac/**
.poetry.lock
- name: IaC - Test
if: steps.iac-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
# Common Tests
- name: Lib - Test
- name: Test with pytest
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib
poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
- name: Config - Test
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config
# Codecov
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
uses: codecov/codecov-action@v5
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler
files: ./aws_coverage.xml,./azure_coverage.xml,./gcp_coverage.xml,./kubernetes_coverage.xml,./github_coverage.xml,./nhn_coverage.xml,./m365_coverage.xml,./lib_coverage.xml,./config_coverage.xml

View File

@@ -7,43 +7,15 @@ on:
env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: 3.11
# CACHE: "poetry"
CACHE: "poetry"
jobs:
repository-check:
name: Repository check
runs-on: ubuntu-latest
outputs:
is_repo: ${{ steps.repository_check.outputs.is_repo }}
steps:
- name: Repository check
id: repository_check
working-directory: /tmp
run: |
if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
then
echo "is_repo=true" >> "${GITHUB_OUTPUT}"
else
echo "This action only runs for prowler-cloud/prowler"
echo "is_repo=false" >> "${GITHUB_OUTPUT}"
fi
release-prowler-job:
runs-on: ubuntu-latest
needs: repository-check
if: needs.repository-check.outputs.is_repo == 'true'
env:
POETRY_VIRTUALENVS_CREATE: "false"
name: Release Prowler to PyPI
steps:
- name: Repository check
working-directory: /tmp
run: |
if [[ "${{ github.repository }}" != "prowler-cloud/prowler" ]]; then
echo "This action only runs for prowler-cloud/prowler"
exit 1
fi
- name: Get Prowler version
run: |
PROWLER_VERSION="${{ env.RELEASE_TAG }}"
@@ -64,17 +36,17 @@ jobs:
;;
esac
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
- name: Install dependencies
run: |
pipx install poetry==2.1.1
pipx install poetry
- name: Setup Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
# cache: ${{ env.CACHE }}
cache: ${{ env.CACHE }}
- name: Build Prowler package
run: |

View File

@@ -4,7 +4,7 @@ name: SDK - Refresh AWS services' regions
on:
schedule:
- cron: "0 9 * * 1" # runs at 09:00 UTC every Monday
- cron: "0 9 * * *" #runs at 09:00 UTC everyday
env:
GITHUB_BRANCH: "master"
@@ -23,12 +23,12 @@ jobs:
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/checkout@v4
with:
ref: ${{ env.GITHUB_BRANCH }}
- name: setup python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
uses: actions/setup-python@v5
with:
python-version: 3.9 #install the python needed
@@ -38,7 +38,7 @@ jobs:
pip install boto3
- name: Configure AWS Credentials -- DEV
uses: aws-actions/configure-aws-credentials@b47578312673ae6fa5b5096b330d9fbac3d116df # v4.2.1
uses: aws-actions/configure-aws-credentials@v4
with:
aws-region: ${{ env.AWS_REGION_DEV }}
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
@@ -50,13 +50,12 @@ jobs:
# Create pull request
- name: Create Pull Request
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
uses: peter-evans/create-pull-request@v7
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services"
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low, provider/aws"
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
title: "chore(regions_update): Changes in regions for AWS services"
body: |
### Description

View File

@@ -23,99 +23,59 @@ env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
STABLE_TAG: stable
WORKING_DIRECTORY: ./ui
# Container Registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1
jobs:
repository-check:
name: Repository check
runs-on: ubuntu-latest
outputs:
is_repo: ${{ steps.repository_check.outputs.is_repo }}
steps:
- name: Repository check
id: repository_check
working-directory: /tmp
run: |
if [[ ${{ github.repository }} == "prowler-cloud/prowler" ]]
then
echo "is_repo=true" >> "${GITHUB_OUTPUT}"
else
echo "This action only runs for prowler-cloud/prowler"
echo "is_repo=false" >> "${GITHUB_OUTPUT}"
fi
# Build Prowler OSS container
container-build-push:
needs: repository-check
if: needs.repository-check.outputs.is_repo == 'true'
runs-on: ubuntu-latest
defaults:
run:
working-directory: ${{ env.WORKING_DIRECTORY }}
steps:
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set short git commit SHA
id: vars
- name: Repository check
working-directory: /tmp
run: |
shortSha=$(git rev-parse --short ${{ github.sha }})
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
[[ ${{ github.repository }} != "prowler-cloud/prowler" ]] && echo "This action only runs for prowler-cloud/prowler"; exit 0
- name: Checkout
uses: actions/checkout@v4
- name: Login to DockerHub
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
# Set push: false for testing
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.SHORT_SHA }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Trigger deployment
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}
event-type: prowler-ui-deploy
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ env.SHORT_SHA }}"}'

View File

@@ -15,12 +15,14 @@ on:
push:
branches:
- "master"
- "v4.*"
- "v5.*"
paths:
- "ui/**"
pull_request:
branches:
- "master"
- "v4.*"
- "v5.*"
paths:
- "ui/**"
@@ -44,16 +46,16 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/ui-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@181d5eefc20863364f96762470ba6f862bdef56b # v3.29.2
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"


@@ -1,22 +1,11 @@
name: UI - Pull Request
on:
push:
branches:
- "master"
- "v5.*"
paths:
- ".github/workflows/ui-pull-request.yml"
- "ui/**"
pull_request:
branches:
- master
- "v5.*"
paths:
- 'ui/**'
env:
UI_WORKING_DIR: ./ui
IMAGE_NAME: prowler-ui
jobs:
test-and-coverage:
@@ -27,85 +16,19 @@ jobs:
node-version: [20.x]
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node.js ${{ matrix.node-version }}
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
cache-dependency-path: './ui/package-lock.json'
- name: Install dependencies
working-directory: ./ui
run: npm ci
run: npm install
- name: Run Healthcheck
working-directory: ./ui
run: npm run healthcheck
- name: Build the application
working-directory: ./ui
run: npm run build
e2e-tests:
runs-on: ubuntu-latest
env:
AUTH_SECRET: 'fallback-ci-secret-for-testing'
AUTH_TRUST_HOST: true
NEXTAUTH_URL: http://localhost:3000
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
persist-credentials: false
- name: Setup Node.js
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: '20.x'
cache: 'npm'
cache-dependency-path: './ui/package-lock.json'
- name: Install dependencies
working-directory: ./ui
run: npm ci
- name: Cache Playwright browsers
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
id: playwright-cache
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
restore-keys: |
${{ runner.os }}-playwright-
- name: Install Playwright browsers
working-directory: ./ui
if: steps.playwright-cache.outputs.cache-hit != 'true'
run: npm run test:e2e:install
- name: Build the application
working-directory: ./ui
run: npm run build
- name: Run Playwright tests
working-directory: ./ui
run: npm run test:e2e
- name: Upload Playwright report
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
if: failure()
with:
name: playwright-report
path: ui/playwright-report/
retention-days: 30
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build Container
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.UI_WORKING_DIR }}
# Always build using `prod` target
target: prod
push: false
tags: ${{ env.IMAGE_NAME }}:latest
outputs: type=docker
build-args: |
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX

.gitignore

@@ -31,7 +31,7 @@ tags
*.DS_Store
# Prowler output
/output
output/
# Prowler found secrets
secrets-*/
@@ -42,28 +42,13 @@ junit-reports/
# VSCode files
.vscode/
# Cursor files
.cursorignore
.cursor/
# RooCode files
.roo/
.rooignore
.roomodes
# Cline files
.cline/
.clineignore
# Terraform
.terraform*
*.tfstate
*.tfstate.*
# .env
ui/.env*
api/.env*
.env.local
# Coverage
.coverage*


@@ -27,7 +27,6 @@ repos:
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/myint/autoflake
rev: v2.3.1
@@ -59,28 +58,11 @@ repos:
args: ["--ignore=E266,W503,E203,E501,W605"]
- repo: https://github.com/python-poetry/poetry
rev: 2.1.1
rev: 1.8.0
hooks:
- id: poetry-check
name: API - poetry-check
args: ["--directory=./api"]
pass_filenames: false
- id: poetry-lock
name: API - poetry-lock
args: ["--directory=./api"]
pass_filenames: false
- id: poetry-check
name: SDK - poetry-check
args: ["--directory=./"]
pass_filenames: false
- id: poetry-lock
name: SDK - poetry-lock
args: ["--directory=./"]
pass_filenames: false
args: ["--no-update"]
- repo: https://github.com/hadolint/hadolint
rev: v2.13.0-beta
@@ -108,19 +90,20 @@ repos:
- id: bandit
name: bandit
description: "Bandit is a tool for finding common security issues in Python code"
entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
entry: bash -c 'bandit -q -lll -x '*_test.py,./contrib/' -r .'
language: system
files: '.*\.py'
- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353'
entry: bash -c 'safety check --ignore 70612,66963'
language: system
- id: vulture
name: vulture
description: "Vulture finds unused code in Python programs."
entry: bash -c 'vulture --exclude "contrib,.venv,api/src/backend/api/tests/,api/src/backend/conftest.py,api/src/backend/tasks/tests/" --min-confidence 100 .'
entry: bash -c 'vulture --exclude "contrib" --min-confidence 100 .'
exclude: 'api/src/backend/'
language: system
files: '.*\.py'


@@ -1,61 +1,38 @@
FROM python:3.12.11-slim-bookworm AS build
FROM python:3.12.8-alpine3.20
LABEL maintainer="https://github.com/prowler-cloud/prowler"
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
ARG POWERSHELL_VERSION=7.5.0
# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
&& rm -rf /var/lib/apt/lists/*
# Install PowerShell
RUN ARCH=$(uname -m) && \
if [ "$ARCH" = "x86_64" ]; then \
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
elif [ "$ARCH" = "aarch64" ]; then \
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
else \
echo "Unsupported architecture: $ARCH" && exit 1 ; \
fi && \
mkdir -p /opt/microsoft/powershell/7 && \
tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
chmod +x /opt/microsoft/powershell/7/pwsh && \
ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
rm /tmp/powershell.tar.gz
# Add prowler user
RUN addgroup --gid 1000 prowler && \
adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
# Update system dependencies and install essential tools
#hadolint ignore=DL3018
RUN apk --no-cache upgrade && apk --no-cache add curl git
# Create non-root user
RUN mkdir -p /home/prowler && \
echo 'prowler:x:1000:1000:prowler:/home/prowler:' > /etc/passwd && \
echo 'prowler:x:1000:' > /etc/group && \
chown -R prowler:prowler /home/prowler
USER prowler
WORKDIR /home/prowler
# Copy necessary files
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY dashboard/ /home/prowler/dashboard/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler/
COPY prowler/providers/m365/lib/powershell/m365_powershell.py /home/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py
COPY README.md /home/prowler
# Install Python dependencies
ENV HOME='/home/prowler'
ENV PATH="${HOME}/.local/bin:${PATH}"
#hadolint ignore=DL3013
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry
RUN poetry install --compile && \
rm -rf ~/.cache/pip
# Install PowerShell modules
RUN poetry run python prowler/providers/m365/lib/powershell/m365_powershell.py
ENV PATH="$HOME/.local/bin:$PATH"
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
pip install --no-cache-dir .
# Remove deprecated dash dependencies
RUN pip uninstall dash-html-components -y && \
pip uninstall dash-core-components -y
# Remove Prowler directory and build files
USER 0
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
USER prowler
ENTRYPOINT ["poetry", "run", "prowler"]
ENTRYPOINT ["prowler"]

README.md

@@ -3,7 +3,7 @@
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
<b><i>Prowler</b> is the Open Cloud Security platform trusted by thousands to automate security and compliance in any cloud environment. With hundreds of ready-to-use checks and compliance frameworks, Prowler delivers real-time, customizable monitoring and seamless integrations, making cloud security simple, scalable, and cost-effective for organizations of any size.
<b><i>Prowler Open Source</b> is as dynamic and adaptable as the environment they're meant to protect. Trusted by the leaders in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
@@ -43,29 +43,15 @@
# Description
**Prowler** is an open-source security tool designed to assess and enforce security best practices across AWS, Azure, Google Cloud, and Kubernetes. It supports tasks such as security audits, incident response, continuous monitoring, system hardening, forensic readiness, and remediation processes.
Prowler includes hundreds of built-in controls to ensure compliance with standards and frameworks, including:
- **Industry Standards:** CIS, NIST 800, NIST CSF, and CISA
- **Regulatory Compliance and Governance:** RBI, FedRAMP, and PCI-DSS
- **Frameworks for Sensitive Data and Privacy:** GDPR, HIPAA, and FFIEC
- **Frameworks for Organizational Governance and Quality Control:** SOC2 and GXP
- **AWS-Specific Frameworks:** AWS Foundational Technical Review (FTR) and AWS Well-Architected Framework (Security Pillar)
- **National Security Standards:** ENS (Spanish National Security Scheme)
- **Custom Security Frameworks:** Tailored to your needs
## Prowler CLI and Prowler Cloud
Prowler offers a Command Line Interface (CLI), known as Prowler Open Source, and an additional service built on top of it, called <a href="https://prowler.com">Prowler Cloud</a>.
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler Cloud</a>.
## Prowler App
Prowler App is a web-based application that simplifies running Prowler across your cloud provider accounts. It provides a user-friendly interface to visualize the results and streamline your security assessments.
Prowler App is a web application that allows you to run Prowler in your cloud provider accounts and visualize the results in a user-friendly interface.
![Prowler App](docs/img/overview.png)
>For more details, refer to the [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
>More details at [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
## Prowler CLI
@@ -74,7 +60,6 @@ prowler <provider>
```
![Prowler CLI Execution](docs/img/short-display.png)
## Prowler Dashboard
```console
@@ -82,34 +67,22 @@ prowler dashboard
```
![Prowler Dashboard](docs/img/dashboard.png)
# Prowler at a Glance
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 567 | 82 | 36 | 10 |
| GCP | 79 | 13 | 10 | 3 |
| Azure | 142 | 18 | 10 | 3 |
| Kubernetes | 83 | 7 | 5 | 7 |
| GitHub | 16 | 2 | 1 | 0 |
| M365 | 69 | 7 | 3 | 2 |
| NHN (Unofficial) | 6 | 2 | 1 | 0 |
> [!Note]
> The numbers in the table are updated periodically.
> [!Tip]
> For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com).
> [!Note]
> Use the following commands to list Prowler's available checks, services, compliance frameworks, and categories: `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
| AWS | 561 | 81 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 3 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 139 | 18 -> `prowler azure --list-services` | 4 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
# 💻 Installation
## Prowler App
Prowler App offers flexible installation methods tailored to various environments:
Prowler App can be installed in different ways, depending on your environment:
> For detailed instructions on using Prowler App, refer to the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
> See how to use Prowler App in the [Prowler App Usage Guide](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/prowler-app/).
### Docker Compose
@@ -125,31 +98,14 @@ curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/mast
docker compose up -d
```
> Containers are built for `linux/amd64`.
### Configuring Your Workstation for Prowler App
If your workstation's architecture is incompatible, you can resolve this by:
- **Setting the environment variable**: `DOCKER_DEFAULT_PLATFORM=linux/amd64`
- **Using the following flag in your Docker command**: `--platform linux/amd64`
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
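The same architecture workaround can also be pinned per service in a Compose file via the `platform` key; a minimal sketch follows (the service and image names are illustrative, not taken from the project's compose file):

```yaml
services:
  prowler-ui:
    image: example/prowler-ui:stable  # illustrative image reference
    platform: linux/amd64             # equivalent to DOCKER_DEFAULT_PLATFORM=linux/amd64 for this service
    ports:
      - "3000:3000"
```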
### Common Issues with Docker Pull Installation
> [!Note]
If you want to use AWS role assumption (e.g., with the "Connect assuming IAM Role" option), you may need to mount your local `.aws` directory into the container as a volume (e.g., `- "${HOME}/.aws:/home/prowler/.aws:ro"`). There are several ways to configure credentials for Docker containers. See the [Troubleshooting](./docs/troubleshooting.md) section for more details and examples.
You can find more information in the [Troubleshooting](./docs/troubleshooting.md) section.
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
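As a sketch of the volume mount mentioned in the note above, a Compose override for the API container could look like this (the service name is illustrative; the `:ro` suffix keeps the mounted credentials read-only):

```yaml
services:
  prowler-api:
    volumes:
      # Expose local AWS credentials/config to the container for role assumption
      - "${HOME}/.aws:/home/prowler/.aws:ro"
```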
### From GitHub
**Requirements**
* `git` installed.
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.
@@ -159,7 +115,7 @@ You can find more information in the [Troubleshooting](./docs/troubleshooting.md
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
eval $(poetry env activate)
poetry shell
set -a
source .env
docker compose up postgres valkey -d
@@ -167,13 +123,8 @@ cd src/backend
python manage.py migrate --database admin
gunicorn -c config/guniconf.py config.wsgi:application
```
> [!IMPORTANT]
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
>
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
> For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.
> After completing the setup, access the API documentation at http://localhost:8080/api/v1/docs.
> Now, you can access the API documentation at http://localhost:8080/api/v1/docs.
**Commands to run the API Worker**
@@ -181,7 +132,7 @@ gunicorn -c config/guniconf.py config.wsgi:application
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
eval $(poetry env activate)
poetry shell
set -a
source .env
cd src/backend
@@ -194,7 +145,7 @@ python -m celery -A config.celery worker -l info -E
git clone https://github.com/prowler-cloud/prowler
cd prowler/api
poetry install
eval $(poetry env activate)
poetry shell
set -a
source .env
cd src/backend
@@ -211,31 +162,29 @@ npm run build
npm start
```
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
> Enjoy Prowler App at http://localhost:3000 by signing up with your email and password.
## Prowler CLI
### Pip package
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/). Consequently, it can be installed using pip with Python >3.9.1, <3.13:
Prowler CLI is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:
```console
pip install prowler
prowler -v
```
>For further guidance, refer to [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli-installation)
### Containers
**Available Versions of Prowler CLI**
The available versions of Prowler CLI are the following:
The following versions of Prowler CLI are available, depending on your requirements:
- `latest`: Synchronizes with the `master` branch. Note that this version is not stable.
- `v4-latest`: Synchronizes with the `v4` branch. Note that this version is not stable.
- `v3-latest`: Synchronizes with the `v3` branch. Note that this version is not stable.
- `<x.y.z>` (release): Stable releases corresponding to specific versions. You can find the complete list of releases [here](https://github.com/prowler-cloud/prowler/releases).
- `stable`: Always points to the latest release.
- `v4-stable`: Always points to the latest release for v4.
- `v3-stable`: Always points to the latest release for v3.
- `latest`: in sync with `master` branch (bear in mind that it is not a stable version)
- `v4-latest`: in sync with `v4` branch (bear in mind that it is not a stable version)
- `v3-latest`: in sync with `v3` branch (bear in mind that it is not a stable version)
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases), those are stable releases.
- `stable`: this tag always point to the latest release.
- `v4-stable`: this tag always point to the latest release for v4.
- `v3-stable`: this tag always point to the latest release for v3.
The container images are available here:
- Prowler CLI:
@@ -247,56 +196,29 @@ The container images are available here:
### From GitHub
Python >3.9.1, <3.13 is required with pip and Poetry:
Python >= 3.9, < 3.13 is required with pip and poetry:
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler
eval $(poetry env activate)
poetry shell
poetry install
python prowler-cli.py -v
python prowler.py -v
```
> [!IMPORTANT]
> To clone Prowler on Windows, configure Git to support long file paths by running the following command: `git config core.longpaths true`.
> [!IMPORTANT]
> As of Poetry v2.0.0, the `poetry shell` command has been deprecated. Use `poetry env activate` instead for environment activation.
>
> If your Poetry version is below v2.0.0, continue using `poetry shell` to activate your environment.
> For further guidance, refer to the Poetry Environment Activation Guide https://python-poetry.org/docs/managing-environments/#activating-the-environment.
# ✏️ High level architecture
> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
# 📐✏️ High level architecture
## Prowler App
**Prowler App** is composed of three key components:
The **Prowler App** consists of three main components:
- **Prowler UI**: A web-based interface, built with Next.js, providing a user-friendly experience for executing Prowler scans and visualizing results.
- **Prowler API**: A backend service, developed with Django REST Framework, responsible for running Prowler scans and storing the generated results.
- **Prowler SDK**: A Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
- **Prowler UI**: A user-friendly web interface for running Prowler and viewing results, powered by Next.js.
- **Prowler API**: The backend API that executes Prowler scans and stores the results, built with Django REST Framework.
- **Prowler SDK**: A Python SDK that integrates with the Prowler CLI for advanced functionality.
![Prowler App Architecture](docs/img/prowler-app-architecture.png)
## Prowler CLI
**Running Prowler**
Prowler can be executed across various environments, offering flexibility to meet your needs. It can be run from:
- Your own workstation
- A Kubernetes Job
- Google Compute Engine
- Azure Virtual Machines (VMs)
- Amazon EC2 instances
- AWS Fargate or other container platforms
- CloudShell
And many more environments.
You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.
![Architecture](docs/img/architecture.png)
@@ -304,36 +226,23 @@ And many more environments.
## General
- `Allowlist` now is called `Mutelist`.
- The `--quiet` option has been deprecated. Use the `--status` flag to filter findings based on their status: PASS, FAIL, or MANUAL.
- All findings with an `INFO` status have been reclassified as `MANUAL`.
- The CSV output format is standardized across all providers.
- The `--quiet` option has been deprecated, now use the `--status` flag to select the finding's status you want to get from PASS, FAIL or MANUAL.
- All `INFO` finding's status has changed to `MANUAL`.
- The CSV output format is common for all the providers.
**Deprecated Output Formats**
The following formats are now deprecated:
- Native JSON has been replaced with JSON in [OCSF](https://schema.ocsf.io/) v1.1.0 format, which is standardized across all providers.
We have deprecated some of our outputs formats:
- The native JSON is replaced for the JSON [OCSF](https://schema.ocsf.io/) v1.1.0, common for all the providers.
## AWS
**AWS Flag Deprecation**
The flag --sts-endpoint-region has been deprecated due to the adoption of AWS STS regional tokens.
**Sending FAIL Results to AWS Security Hub**
- To send only FAILS to AWS Security Hub, use one of the following options: `--send-sh-only-fails` or `--security-hub --status FAIL`.
- Deprecate the AWS flag --sts-endpoint-region since we use AWS STS regional tokens.
- To send only FAILS to AWS Security Hub, now use either `--send-sh-only-fails` or `--security-hub --status FAIL`.
# 📖 Documentation
**Documentation Resources**
For installation instructions, usage details, tutorials, and the Developer Guide, visit https://docs.prowler.com/
Install, Usage, Tutorials and Developer Guide is at https://docs.prowler.com/
# 📃 License
**Prowler License Information**
Prowler is licensed under the Apache License 2.0, as indicated in each file within the repository. Obtaining a Copy of the License
A copy of the License is available at <http://www.apache.org/licenses/LICENSE-2.0>
Prowler is licensed as Apache License 2.0 as specified in each file. You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>


@@ -22,8 +22,6 @@ DJANGO_SECRETS_ENCRYPTION_KEY=""
# Decide whether to allow Django manage database table partitions
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=
# PostgreSQL settings
# If running django and celery on host, use 'localhost', else use 'postgres-db'
@@ -40,19 +38,3 @@ POSTGRES_DB=prowler_db
VALKEY_HOST=[localhost|valkey]
VALKEY_PORT=6379
VALKEY_DB=0
# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local
# Social login credentials
DJANGO_GOOGLE_OAUTH_CLIENT_ID=""
DJANGO_GOOGLE_OAUTH_CLIENT_SECRET=""
DJANGO_GOOGLE_OAUTH_CALLBACK_URL=""
DJANGO_GITHUB_OAUTH_CLIENT_ID=""
DJANGO_GITHUB_OAUTH_CLIENT_SECRET=""
DJANGO_GITHUB_OAUTH_CALLBACK_URL=""
# Deletion Task Batch Size
DJANGO_DELETION_BATCH_SIZE=5000

api/.gitignore (new file)

@@ -0,0 +1,168 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.pyc
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
/_data/
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
*.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/
# VSCode
.vscode/


@@ -0,0 +1,91 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: check-merge-conflict
- id: check-yaml
args: ["--unsafe"]
- id: check-json
- id: end-of-file-fixer
- id: trailing-whitespace
- id: no-commit-to-branch
- id: pretty-format-json
args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
exclude: 'src/backend/api/fixtures/dev/.*\.json$'
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.13.0
hooks:
- id: pretty-format-toml
args: [--autofix]
files: pyproject.toml
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.10.0
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.5.0
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format
- repo: https://github.com/python-poetry/poetry
rev: 1.8.0
hooks:
- id: poetry-check
args: ["--directory=src"]
- id: poetry-lock
args: ["--no-update", "--directory=src"]
- repo: https://github.com/hadolint/hadolint
rev: v2.13.0-beta
hooks:
- id: hadolint
args: ["--ignore=DL3013", "Dockerfile"]
- repo: local
hooks:
- id: pylint
name: pylint
entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
language: system
files: '.*\.py'
- id: trufflehog
name: TruffleHog
description: Detect secrets in your data.
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
# For running trufflehog in docker, use the following entry instead:
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["commit", "push"]
- id: bandit
name: bandit
description: "Bandit is a tool for finding common security issues in Python code"
entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
language: system
files: '.*\.py'
- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
entry: bash -c 'poetry run safety check --ignore 70612,66963'
language: system
- id: vulture
name: vulture
description: "Vulture finds unused code in Python programs."
entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
language: system
files: '.*\.py'


@@ -1,207 +0,0 @@
# Prowler API Changelog
All notable changes to the **Prowler API** are documented in this file.
## [1.10.2] (Prowler v5.9.2)
### Changed
- Optimized queries for resources views [(#8336)](https://github.com/prowler-cloud/prowler/pull/8336)
---
## [v1.10.1] (Prowler v5.9.1)
### Fixed
- Calculate failed findings during scans to prevent heavy database queries [(#8322)](https://github.com/prowler-cloud/prowler/pull/8322)
---
## [v1.10.0] (Prowler v5.9.0)
### Added
- SSO with SAML support [(#8175)](https://github.com/prowler-cloud/prowler/pull/8175)
- `GET /resources/metadata`, `GET /resources/metadata/latest` and `GET /resources/latest` to expose resource metadata and latest scan results [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
### Changed
- `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported, allowing findings to be muted.
- Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
- Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
### Fixed
- Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
- RBAC is now applied to `GET /overviews/providers` [(#8277)](https://github.com/prowler-cloud/prowler/pull/8277)
### Changed
- `POST /schedules/daily` returns a `409 CONFLICT` if already created [(#8258)](https://github.com/prowler-cloud/prowler/pull/8258)
### Security
- Enhanced password validation to enforce 12+ character passwords with special characters, uppercase, lowercase, and numbers [(#8225)](https://github.com/prowler-cloud/prowler/pull/8225)
---
## [v1.9.1] (Prowler v5.8.1)
### Added
- Custom exception for provider connection errors during scans [(#8234)](https://github.com/prowler-cloud/prowler/pull/8234)
### Changed
- Summary and overview tasks now use a dedicated queue and no longer propagate errors to compliance tasks [(#8214)](https://github.com/prowler-cloud/prowler/pull/8214)
### Fixed
- Scan with no resources will not trigger legacy code for findings metadata [(#8183)](https://github.com/prowler-cloud/prowler/pull/8183)
- Invitation email comparison case-insensitive [(#8206)](https://github.com/prowler-cloud/prowler/pull/8206)
### Removed
- Validation of the provider's secret type during updates [(#8197)](https://github.com/prowler-cloud/prowler/pull/8197)
---
## [v1.9.0] (Prowler v5.8.0)
### Added
- Support GCP Service Account key [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
- `GET /compliance-overviews` endpoints to retrieve compliance metadata and specific requirements statuses [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
- Lighthouse configuration support [(#7848)](https://github.com/prowler-cloud/prowler/pull/7848)
### Changed
- Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
- Optional `user` and `password` for M365 provider [(#7992)](https://github.com/prowler-cloud/prowler/pull/7992)
### Fixed
- Scheduled scans are no longer deleted when their daily schedule run is disabled [(#8082)](https://github.com/prowler-cloud/prowler/pull/8082)
---
## [v1.8.5] (Prowler v5.7.5)
### Fixed
- Normalize provider UID to ensure safe and unique export directory paths [(#8007)](https://github.com/prowler-cloud/prowler/pull/8007).
- Blank resource types in `/metadata` endpoints [(#8027)](https://github.com/prowler-cloud/prowler/pull/8027)
---
## [v1.8.4] (Prowler v5.7.4)
### Removed
- Reverted RLS transaction handling and DB custom backend [(#7994)](https://github.com/prowler-cloud/prowler/pull/7994)
---
## [v1.8.3] (Prowler v5.7.3)
### Added
- Database backend to handle already closed connections [(#7935)](https://github.com/prowler-cloud/prowler/pull/7935)
### Changed
- Renamed field encrypted_password to password for M365 provider [(#7784)](https://github.com/prowler-cloud/prowler/pull/7784)
### Fixed
- Transaction persistence with RLS operations [(#7916)](https://github.com/prowler-cloud/prowler/pull/7916)
- Reverted the change `get_with_retry` to use the original `get` method for retrieving tasks [(#7932)](https://github.com/prowler-cloud/prowler/pull/7932)
---
## [v1.8.2] (Prowler v5.7.2)
### Fixed
- Task lookup to use task_kwargs instead of task_args for scan report resolution [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
- Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
- Connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)
- Race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876)
- Error when modifying or retrieving tenants due to missing user UUID in transaction context [(#7890)](https://github.com/prowler-cloud/prowler/pull/7890)
---
## [v1.8.1] (Prowler v5.7.1)
### Fixed
- Added database index to improve performance on finding lookup [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800)
---
## [v1.8.0] (Prowler v5.7.0)
### Added
- Huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
- Improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
- Queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
- New endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743)
- Export support for Prowler ThreatScore in M365 [(7783)](https://github.com/prowler-cloud/prowler/pull/7783)
---
## [v1.7.0] (Prowler v5.6.0)
### Added
- M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563)
- `compliance/` folder and ZIP export functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
- API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
---
## [v1.6.0] (Prowler v5.5.0)
### Added
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167)
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289)
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333)
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378)
- Missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318)
---
## [v1.5.4] (Prowler v5.4.4)
### Fixed
- Bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466)
---
## [v1.5.3] (Prowler v5.4.3)
### Fixed
- Duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401)
- Environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423)
---
## [v1.5.2] (Prowler v5.4.2)
### Changed
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349)
---
## [v1.5.1] (Prowler v5.4.1)
### Fixed
- Handle response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183)
- Race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172)
- Handle exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283)
---
## [v1.5.0] (Prowler v5.4.0)
### Added
- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906)
- API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878)
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874)
### Changed
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019)
---
## [v1.4.0] (Prowler v5.3.0)
### Changed
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700)
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800)
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863)
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869)
---


@@ -1,45 +1,13 @@
FROM python:3.12.10-slim-bookworm AS build
FROM python:3.12.8-alpine3.20 AS build
LABEL maintainer="https://github.com/prowler-cloud/api"
ARG POWERSHELL_VERSION=7.5.0
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}
# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
wget \
libicu72 \
gcc \
g++ \
make \
libxml2-dev \
libxmlsec1-dev \
libxmlsec1-openssl \
pkg-config \
libtool \
libxslt1-dev \
python3-dev \
&& rm -rf /var/lib/apt/lists/*
# Install PowerShell
RUN ARCH=$(uname -m) && \
if [ "$ARCH" = "x86_64" ]; then \
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-x64.tar.gz -O /tmp/powershell.tar.gz ; \
elif [ "$ARCH" = "aarch64" ]; then \
wget --progress=dot:giga https://github.com/PowerShell/PowerShell/releases/download/v${POWERSHELL_VERSION}/powershell-${POWERSHELL_VERSION}-linux-arm64.tar.gz -O /tmp/powershell.tar.gz ; \
else \
echo "Unsupported architecture: $ARCH" && exit 1 ; \
fi && \
mkdir -p /opt/microsoft/powershell/7 && \
tar zxf /tmp/powershell.tar.gz -C /opt/microsoft/powershell/7 && \
chmod +x /opt/microsoft/powershell/7/pwsh && \
ln -s /opt/microsoft/powershell/7/pwsh /usr/bin/pwsh && \
rm /tmp/powershell.tar.gz
# Add prowler user
RUN addgroup --gid 1000 prowler && \
adduser --uid 1000 --gid 1000 --disabled-password --gecos "" prowler
# hadolint ignore=DL3018
RUN apk --no-cache add gcc python3-dev musl-dev linux-headers curl-dev
RUN apk --no-cache upgrade && \
addgroup -g 1000 prowler && \
adduser -D -u 1000 -G prowler prowler
USER prowler
WORKDIR /home/prowler
@@ -49,22 +17,27 @@ COPY pyproject.toml ./
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry
COPY src/backend/ ./backend/
ENV PATH="/home/prowler/.local/bin:$PATH"
# Add `--no-root` to avoid installing the current project as a package
RUN poetry install --no-root && \
RUN poetry install && \
rm -rf ~/.cache/pip
RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"
COPY src/backend/ ./backend/
COPY docker-entrypoint.sh ./docker-entrypoint.sh
WORKDIR /home/prowler/backend
# Development image
# hadolint ignore=DL3006
FROM build AS dev
USER 0
# hadolint ignore=DL3018
RUN apk --no-cache add curl vim
USER prowler
ENTRYPOINT ["../docker-entrypoint.sh", "dev"]
# Production image


@@ -235,7 +235,6 @@ To view the logs for any component (e.g., Django, Celery worker), you can use th
```console
docker logs -f $(docker ps --format "{{.Names}}" | grep 'api-')
```
## Applying migrations
@@ -257,7 +256,7 @@ cd src/backend
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
```
> The default credentials are `dev@prowler.com:Thisisapassword123@` or `dev2@prowler.com:Thisisapassword123@`
> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`
## Run tests
@@ -270,66 +269,3 @@ poetry shell
cd src/backend
pytest
```
# Custom commands
Django provides a way to create custom commands that can be run from the command line.
> These commands can be found in: ```prowler/api/src/backend/api/management/commands```
To run a custom command, you need to be in the `prowler/api/src/backend` directory and run:
```console
poetry shell
python manage.py <command_name>
```
## Generate dummy data
```console
python manage.py findings --tenant <TENANT_ID> --findings <NUM_FINDINGS> --resources <NUM_RESOURCES> --batch <TRANSACTION_BATCH_SIZE> --alias <ALIAS>
```
This command creates, for a given tenant, a provider, a scan, and a set of related findings and resources.
> Scan progress and state are updated in real time.
> - 0-33%: Create resources.
> - 33-66%: Create findings.
> - 66%: Create resource-finding mapping.
>
> The last step is required to access the finding details, since the UI needs that mapping to display all the information.
### Example
```console
~/backend $ poetry run python manage.py findings --tenant fffb1893-3fc7-4623-a5d9-fae47da1c528 --findings 25000 --resources 1000 --batch 5000 --alias test-script
Starting data population
Tenant: fffb1893-3fc7-4623-a5d9-fae47da1c528
Alias: test-script
Resources: 1000
Findings: 25000
Batch size: 5000
Creating resources...
100%|███████████████████████| 1/1 [00:00<00:00, 7.72it/s]
Resources created successfully.
Creating findings...
100%|███████████████████████| 5/5 [00:05<00:00, 1.09s/it]
Findings created successfully.
Creating resource-finding mappings...
100%|███████████████████████| 5/5 [00:02<00:00, 1.81it/s]
Resource-finding mappings created successfully.
Successfully populated test data.
```

api/docker-compose.yml (new file)

@@ -0,0 +1,125 @@
services:
api:
build:
dockerfile: Dockerfile
image: prowler-api
env_file:
- path: ./.env
required: false
ports:
- "${DJANGO_PORT:-8000}:${DJANGO_PORT:-8000}"
profiles:
- prod
depends_on:
postgres:
condition: service_healthy
valkey:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "prod"
api-dev:
build:
dockerfile: Dockerfile
target: dev
image: prowler-api-dev
environment:
- DJANGO_SETTINGS_MODULE=config.django.devel
- DJANGO_LOGGING_FORMATTER=human_readable
env_file:
- path: ./.env
required: false
ports:
- "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
volumes:
- "./src/backend:/home/prowler/backend"
- "./pyproject.toml:/home/prowler/pyproject.toml"
profiles:
- dev
depends_on:
postgres:
condition: service_healthy
valkey:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "dev"
postgres:
image: postgres:16.3-alpine
ports:
- "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
hostname: "postgres-db"
volumes:
- ./_data/postgres:/var/lib/postgresql/data
environment:
- POSTGRES_USER=${POSTGRES_ADMIN_USER:-prowler}
- POSTGRES_PASSWORD=${POSTGRES_ADMIN_PASSWORD:-S3cret}
- POSTGRES_DB=${POSTGRES_DB:-prowler_db}
env_file:
- path: ./.env
required: false
healthcheck:
test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER:-prowler} -d ${POSTGRES_DB:-prowler_db}'"]
interval: 5s
timeout: 5s
retries: 5
valkey:
image: valkey/valkey:7-alpine3.19
ports:
- "${VALKEY_PORT:-6379}:6379"
hostname: "valkey"
volumes:
- ./_data/valkey:/data
env_file:
- path: ./.env
required: false
healthcheck:
test: ["CMD-SHELL", "sh -c 'valkey-cli ping'"]
interval: 10s
timeout: 5s
retries: 3
worker:
build:
dockerfile: Dockerfile
image: prowler-worker
environment:
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
env_file:
- path: ./.env
required: false
profiles:
- dev
- prod
depends_on:
valkey:
condition: service_healthy
postgres:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "worker"
worker-beat:
build:
dockerfile: Dockerfile
image: prowler-worker
environment:
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
env_file:
- path: ./.env
required: false
profiles:
- dev
- prod
depends_on:
valkey:
condition: service_healthy
postgres:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "beat"


@@ -3,10 +3,6 @@
apply_migrations() {
echo "Applying database migrations..."
# Fix Inconsistent migration history after adding sites app
poetry run python manage.py check_and_fix_socialaccount_sites_migration --database admin
poetry run python manage.py migrate --database admin
}
@@ -32,7 +28,7 @@ start_prod_server() {
start_worker() {
echo "Starting the worker..."
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview -E --max-tasks-per-child 1
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans -E
}
start_worker_beat() {

api/poetry.lock (generated)

File diff suppressed because it is too large.


@@ -2,54 +2,43 @@
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[project]
authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
dependencies = [
"celery[pytest] (>=5.4.0,<6.0.0)",
"dj-rest-auth[with_social,jwt] (==7.0.1)",
"django==5.1.10",
"django-allauth[saml] (>=65.8.0,<66.0.0)",
"django-celery-beat (>=2.7.0,<3.0.0)",
"django-celery-results (>=2.5.1,<3.0.0)",
"django-cors-headers==4.4.0",
"django-environ==0.11.2",
"django-filter==24.3",
"django-guid==3.5.0",
"django-postgres-extra (>=2.0.8,<3.0.0)",
"djangorestframework==3.15.2",
"djangorestframework-jsonapi==7.0.2",
"djangorestframework-simplejwt (>=5.3.1,<6.0.0)",
"drf-nested-routers (>=0.94.1,<1.0.0)",
"drf-spectacular==0.27.2",
"drf-spectacular-jsonapi==0.5.1",
"gunicorn==23.0.0",
"lxml==5.3.2",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.9",
"psycopg2-binary==2.9.9",
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
"uuid6==2024.7.10",
"openai (>=1.82.0,<2.0.0)",
"xmlsec==1.3.14"
]
[tool.poetry]
authors = ["Prowler Team"]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.10.2"
version = "1.1.0"
[project.scripts]
celery = "src.backend.config.settings.celery"
[tool.poetry.dependencies]
celery = {extras = ["pytest"], version = "^5.4.0"}
django = "5.1.1"
django-celery-beat = "^2.7.0"
django-celery-results = "^2.5.1"
django-cors-headers = "4.4.0"
django-environ = "0.11.2"
django-filter = "24.3"
django-guid = "3.5.0"
django-postgres-extra = "^2.0.8"
djangorestframework = "3.15.2"
djangorestframework-jsonapi = "7.0.2"
djangorestframework-simplejwt = "^5.3.1"
drf-nested-routers = "^0.94.1"
drf-spectacular = "0.27.2"
drf-spectacular-jsonapi = "0.5.1"
gunicorn = "23.0.0"
prowler = {git = "https://github.com/prowler-cloud/prowler.git", tag = "5.0.0"}
psycopg2-binary = "2.9.9"
pytest-celery = {extras = ["redis"], version = "^1.0.1"}
# Needed for prowler compatibility
python = ">=3.11,<3.13"
uuid6 = "2024.7.10"
[tool.poetry.group.dev.dependencies]
bandit = "1.7.9"
coverage = "7.5.4"
django-silk = "5.3.2"
docker = "7.1.0"
freezegun = "1.5.1"
marshmallow = ">=3.15.0,<4.0.0"
mypy = "1.10.1"
pylint = "3.2.5"
pytest = "8.2.2"
@@ -59,6 +48,8 @@ pytest-env = "1.1.3"
pytest-randomly = "3.15.0"
pytest-xdist = "3.6.1"
ruff = "0.5.0"
safety = "3.2.9"
tqdm = "4.67.1"
vulture = "2.14"
safety = "3.2.3"
vulture = "2.11"
[tool.poetry.scripts]
celery = "src.backend.config.settings.celery"


@@ -1,71 +0,0 @@
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from django.db import transaction
from api.db_router import MainRouter
from api.db_utils import rls_transaction
from api.models import Membership, Role, Tenant, User, UserRoleRelationship
class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
@staticmethod
def get_user_by_email(email: str):
try:
return User.objects.get(email=email)
except User.DoesNotExist:
return None
def pre_social_login(self, request, sociallogin):
# Link existing accounts with the same email address
email = sociallogin.account.extra_data.get("email")
if sociallogin.provider.id == "saml":
email = sociallogin.user.email
if email:
existing_user = self.get_user_by_email(email)
if existing_user:
sociallogin.connect(request, existing_user)
def save_user(self, request, sociallogin, form=None):
"""
Called after the user data is fully populated from the provider
and is about to be saved to the DB for the first time.
"""
with transaction.atomic(using=MainRouter.admin_db):
user = super().save_user(request, sociallogin, form)
provider = sociallogin.provider.id
extra = sociallogin.account.extra_data
if provider != "saml":
# Handle other providers (e.g., GitHub, Google)
user.save(using=MainRouter.admin_db)
social_account_name = extra.get("name")
if social_account_name:
user.name = social_account_name
user.save(using=MainRouter.admin_db)
tenant = Tenant.objects.using(MainRouter.admin_db).create(
name=f"{user.email.split('@')[0]} default tenant"
)
with rls_transaction(str(tenant.id)):
Membership.objects.using(MainRouter.admin_db).create(
user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
)
role = Role.objects.using(MainRouter.admin_db).create(
name="admin",
tenant_id=tenant.id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user,
role=role,
tenant_id=tenant.id,
)
else:
request.session["saml_user_created"] = str(user.id)
return user


@@ -109,6 +109,16 @@ class BaseTenantViewset(BaseViewSet):
pass # Tenant might not exist, handle gracefully
def initial(self, request, *args, **kwargs):
if (
request.resolver_match.url_name != "tenant-detail"
and request.method != "DELETE"
):
user_id = str(request.user.id)
with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
return super().initial(request, *args, **kwargs)
# TODO: DRY this when we have time
if request.auth is None:
raise NotAuthenticated
@@ -116,8 +126,8 @@ class BaseTenantViewset(BaseViewSet):
if tenant_id is None:
raise NotAuthenticated("Tenant ID is not present in token")
user_id = str(request.user.id)
with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
with rls_transaction(tenant_id):
self.request.tenant_id = tenant_id
return super().initial(request, *args, **kwargs)


@@ -1,38 +1,12 @@
from types import MappingProxyType
from api.models import Provider
from prowler.config.config import get_available_compliance_frameworks
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.models import CheckMetadata
from api.models import Provider
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE = {}
PROWLER_CHECKS = {}
AVAILABLE_COMPLIANCE_FRAMEWORKS = {}
def get_compliance_frameworks(provider_type: Provider.ProviderChoices) -> list[str]:
"""
Retrieve and cache the list of available compliance frameworks for a specific cloud provider.
This function lazily loads and caches the available compliance frameworks (e.g., CIS, MITRE, ISO)
for each provider type (AWS, Azure, GCP, etc.) on first access. Subsequent calls for the same
provider will return the cached result.
Args:
provider_type (Provider.ProviderChoices): The cloud provider type for which to retrieve
available compliance frameworks (e.g., "aws", "azure", "gcp", "m365").
Returns:
list[str]: A list of framework identifiers (e.g., "cis_1.4_aws", "mitre_attack_azure") available
for the given provider.
"""
global AVAILABLE_COMPLIANCE_FRAMEWORKS
if provider_type not in AVAILABLE_COMPLIANCE_FRAMEWORKS:
AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type] = (
get_available_compliance_frameworks(provider_type)
)
return AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type]
def get_prowler_provider_checks(provider_type: Provider.ProviderChoices):
@@ -190,16 +164,10 @@ def generate_compliance_overview_template(prowler_compliance: dict):
total_checks = len(requirement.Checks)
checks_dict = {check: None for check in requirement.Checks}
req_status_val = "MANUAL" if total_checks == 0 else "PASS"
# Build requirement dictionary
requirement_dict = {
"name": requirement.Name or requirement.Id,
"description": requirement.Description,
"tactics": getattr(requirement, "Tactics", []),
"subtechniques": getattr(requirement, "SubTechniques", []),
"platforms": getattr(requirement, "Platforms", []),
"technique_url": getattr(requirement, "TechniqueURL", ""),
"attributes": [
dict(attribute) for attribute in requirement.Attributes
],
@@ -210,18 +178,20 @@ def generate_compliance_overview_template(prowler_compliance: dict):
"manual": 0,
"total": total_checks,
},
"status": req_status_val,
"status": "PASS",
}
# Update requirements status counts for the framework
if req_status_val == "MANUAL":
# Update requirements status
if total_checks == 0:
requirements_status["manual"] += 1
elif req_status_val == "PASS":
requirements_status["passed"] += 1
# Add requirement to compliance requirements
compliance_requirements[requirement.Id] = requirement_dict
# Calculate pending requirements
pending_requirements = total_requirements - requirements_status["manual"]
requirements_status["passed"] = pending_requirements
# Build compliance dictionary
compliance_dict = {
"framework": compliance_data.Framework,


@@ -1,29 +1,18 @@
ALLOWED_APPS = ("django", "socialaccount", "account", "authtoken", "silk")
class MainRouter:
default_db = "default"
admin_db = "admin"
def db_for_read(self, model, **hints): # noqa: F841
model_table_name = model._meta.db_table
if model_table_name.startswith("django_") or any(
model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS
):
if model_table_name.startswith("django_"):
return self.admin_db
return None
def db_for_write(self, model, **hints): # noqa: F841
model_table_name = model._meta.db_table
if any(model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS):
if model_table_name.startswith("django_"):
return self.admin_db
return None
def allow_migrate(self, db, app_label, model_name=None, **hints): # noqa: F841
return db == self.admin_db
def allow_relation(self, obj1, obj2, **hints): # noqa: F841
# Allow relations if both objects are in either "default" or "admin" db connectors
if {obj1._state.db, obj2._state.db} <= {self.default_db, self.admin_db}:
return True
return None
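# Hedged configuration sketch: the router is wired up through DATABASE_ROUTERS (the
# settings layout is an assumption), so Django/allauth/token/silk tables read and write
# through the "admin" connection while tenant data stays on "default":
#     DATABASE_ROUTERS = ["api.db_router.MainRouter"]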

View File

@@ -1,4 +1,3 @@
import re
import secrets
import uuid
from contextlib import contextmanager
@@ -6,8 +5,8 @@ from datetime import datetime, timedelta, timezone
from django.conf import settings
from django.contrib.auth.models import BaseUserManager
from django.core.paginator import Paginator
from django.db import connection, models, transaction
from django_celery_beat.models import PeriodicTask
from psycopg2 import connect as psycopg2_connect
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
from rest_framework_json_api.serializers import ValidationError
@@ -107,12 +106,11 @@ def generate_random_token(length: int = 14, symbols: str | None = None) -> str:
return "".join(secrets.choice(symbols or _symbols) for _ in range(length))
def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_SIZE):
def batch_delete(queryset, batch_size=5000):
"""
Deletes objects in batches and returns the total number of deletions and a summary.
Args:
tenant_id (str): Tenant ID the queryset belongs to.
queryset (QuerySet): The queryset of objects to delete.
batch_size (int): The number of objects to delete in each batch.
@@ -122,82 +120,21 @@ def batch_delete(tenant_id, queryset, batch_size=settings.DJANGO_DELETION_BATCH_
total_deleted = 0
deletion_summary = {}
while True:
with rls_transaction(tenant_id, POSTGRES_TENANT_VAR):
# Get a batch of IDs to delete
batch_ids = set(
queryset.values_list("id", flat=True).order_by("id")[:batch_size]
)
if not batch_ids:
# No more objects to delete
break
paginator = Paginator(queryset.order_by("id").only("id"), batch_size)
deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
for page_num in paginator.page_range:
batch_ids = [obj.id for obj in paginator.page(page_num).object_list]
deleted_count, deleted_info = queryset.filter(id__in=batch_ids).delete()
total_deleted += deleted_count
for model_label, count in deleted_info.items():
deletion_summary[model_label] = deletion_summary.get(model_label, 0) + count
return total_deleted, deletion_summary
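# Hedged usage sketch (variable names are illustrative; pass tenant_id as well when
# using the RLS-aware signature shown above):
#     deleted, summary = batch_delete(
#         queryset=Finding.all_objects.filter(scan__in=old_scan_ids),
#         batch_size=1000,
#     )
#     # `summary` maps model labels to per-model delete counts, e.g. {"api.Finding": 1000}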
def delete_related_daily_task(provider_id: str):
"""
Deletes the periodic task associated with a specific provider.
Args:
provider_id (str): The unique identifier for the provider
whose related periodic task should be deleted.
"""
task_name = f"scan-perform-scheduled-{provider_id}"
PeriodicTask.objects.filter(name=task_name).delete()
def create_objects_in_batches(
tenant_id: str, model, objects: list, batch_size: int = 500
):
"""
Bulk-create model instances in repeated, per-tenant RLS transactions.
Each chunk executes in its own transaction, so no single transaction
grows too large.
Args:
tenant_id (str): UUID string of the tenant under which to set RLS.
model: Django model class whose `.objects.bulk_create()` will be called.
objects (list): List of model instances (unsaved) to bulk-create.
batch_size (int): Maximum number of objects per bulk_create call.
"""
total = len(objects)
for i in range(0, total, batch_size):
chunk = objects[i : i + batch_size]
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
model.objects.bulk_create(chunk, batch_size)
def update_objects_in_batches(
tenant_id: str, model, objects: list, fields: list, batch_size: int = 500
):
"""
Bulk-update model instances in repeated, per-tenant RLS transactions.
Each chunk executes in its own transaction, so no single transaction
grows too large.
Args:
tenant_id (str): UUID string of the tenant under which to set RLS.
model: Django model class whose `.objects.bulk_update()` will be called.
objects (list): List of model instances (saved) to bulk-update.
fields (list): List of field names to update.
batch_size (int): Maximum number of objects per bulk_update call.
"""
total = len(objects)
for start in range(0, total, batch_size):
chunk = objects[start : start + batch_size]
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
model.objects.bulk_update(chunk, fields, batch_size)
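# Hedged usage sketch for the two batch helpers above (model and field names assumed):
#     create_objects_in_batches(tenant_id, Resource, new_resources, batch_size=500)
#     update_objects_in_batches(tenant_id, Resource, changed_resources, ["name", "region"])
# Each chunk runs inside its own rls_transaction(tenant_id), so a failure only rolls
# back the chunk being written, not the whole import.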
# Postgres Enums
@@ -273,167 +210,6 @@ def register_enum(apps, schema_editor, enum_class): # noqa: F841
register_adapter(enum_class, enum_adapter)
def _should_create_index_on_partition(
partition_name: str, all_partitions: bool = False
) -> bool:
"""
Determine if we should create an index on this partition.
Args:
partition_name: The name of the partition (e.g., "findings_2025_aug", "findings_default")
all_partitions: If True, create on all partitions. If False, only current/future partitions.
Returns:
bool: True if index should be created on this partition, False otherwise.
"""
if all_partitions:
return True
# Extract date from partition name if it follows the pattern
# Partition names look like: findings_2025_aug, findings_2025_jul, etc.
date_pattern = r"(\d{4})_([a-z]{3})$"
match = re.search(date_pattern, partition_name)
if not match:
# If we can't parse the date, include it to be safe (e.g., default partition)
return True
try:
year_str, month_abbr = match.groups()
year = int(year_str)
# Map month abbreviations to numbers
month_map = {
"jan": 1,
"feb": 2,
"mar": 3,
"apr": 4,
"may": 5,
"jun": 6,
"jul": 7,
"aug": 8,
"sep": 9,
"oct": 10,
"nov": 11,
"dec": 12,
}
month = month_map.get(month_abbr.lower())
if month is None:
# Unknown month abbreviation, include it to be safe
return True
partition_date = datetime(year, month, 1, tzinfo=timezone.utc)
# Get current month start
now = datetime.now(timezone.utc)
current_month_start = now.replace(
day=1, hour=0, minute=0, second=0, microsecond=0
)
# Include current month and future partitions
return partition_date >= current_month_start
except (ValueError, TypeError):
# If date parsing fails, include it to be safe
return True
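# Illustrative expectations for the partition-name check above (names assumed, using
# the default all_partitions=False):
#     _should_create_index_on_partition("findings_default")         -> True   (no date suffix)
#     _should_create_index_on_partition("findings_2099_jan")        -> True   (current/future month)
#     _should_create_index_on_partition("findings_2020_feb")        -> False  (past month)
#     _should_create_index_on_partition("findings_2020_feb", True)  -> True   (all_partitions overrides)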
def create_index_on_partitions(
apps, # noqa: F841
schema_editor,
parent_table: str,
index_name: str,
columns: str,
method: str = "BTREE",
where: str = "",
all_partitions: bool = True,
):
"""
Create an index on existing partitions of `parent_table`.
Args:
parent_table: The name of the root table (e.g. "findings").
index_name: A short name for the index (will be prefixed per-partition).
columns: The parenthesized column list, e.g. "tenant_id, scan_id, status".
method: The index method (BTREE, GIN, etc.). Defaults to BTREE.
where: Optional WHERE clause (without the leading "WHERE"), e.g. "status = 'FAIL'".
all_partitions: Whether to create indexes on all partitions or just current/future ones.
Defaults to False (current/future only) to avoid maintenance overhead
on old partitions where the index may not be needed.
Examples:
# Create index only on current and future partitions (recommended for new indexes)
create_index_on_partitions(
apps, schema_editor,
parent_table="findings",
index_name="new_performance_idx",
columns="tenant_id, status, severity",
all_partitions=False # Default behavior
)
# Create index on all partitions (use when migrating existing critical indexes)
create_index_on_partitions(
apps, schema_editor,
parent_table="findings",
index_name="critical_existing_idx",
columns="tenant_id, scan_id",
all_partitions=True
)
"""
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT inhrelid::regclass::text
FROM pg_inherits
WHERE inhparent = %s::regclass
""",
[parent_table],
)
partitions = [row[0] for row in cursor.fetchall()]
where_sql = f" WHERE {where}" if where else ""
for partition in partitions:
if _should_create_index_on_partition(partition, all_partitions):
idx_name = f"{partition.replace('.', '_')}_{index_name}"
sql = (
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {idx_name} "
f"ON {partition} USING {method} ({columns})"
f"{where_sql};"
)
schema_editor.execute(sql)
def drop_index_on_partitions(
apps, # noqa: F841
schema_editor,
parent_table: str,
index_name: str,
):
"""
Drop the per-partition indexes that were created by create_index_on_partitions.
Args:
parent_table: The name of the root table (e.g. "findings").
index_name: The same short name used when creating them.
"""
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT inhrelid::regclass::text
FROM pg_inherits
WHERE inhparent = %s::regclass
""",
[parent_table],
)
partitions = [row[0] for row in cursor.fetchall()]
for partition in partitions:
idx_name = f"{partition.replace('.', '_')}_{index_name}"
sql = f"DROP INDEX CONCURRENTLY IF EXISTS {idx_name};"
schema_editor.execute(sql)
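# Hedged sketch of how these partition-index helpers are typically driven from a data
# migration (migration name, index name and columns are assumptions; atomic = False is
# required because CREATE/DROP INDEX CONCURRENTLY cannot run inside a transaction):
#     from functools import partial
#     from django.db import migrations
#
#     class Migration(migrations.Migration):
#         atomic = False
#         operations = [
#             migrations.RunPython(
#                 partial(create_index_on_partitions, parent_table="findings",
#                         index_name="tenant_status_idx", columns="tenant_id, status"),
#                 reverse_code=partial(drop_index_on_partitions, parent_table="findings",
#                                      index_name="tenant_status_idx"),
#             ),
#         ]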
# Postgres enum definition for member role
@@ -540,27 +316,3 @@ class InvitationStateEnum(EnumType):
class InvitationStateEnumField(PostgresEnumField):
def __init__(self, *args, **kwargs):
super().__init__("invitation_state", *args, **kwargs)
# Postgres enum definition for Integration type
class IntegrationTypeEnum(EnumType):
enum_type_name = "integration_type"
class IntegrationTypeEnumField(PostgresEnumField):
def __init__(self, *args, **kwargs):
super().__init__("integration_type", *args, **kwargs)
# Postgres enum definition for Processor type
class ProcessorTypeEnum(EnumType):
enum_type_name = "processor_type"
class ProcessorTypeEnumField(PostgresEnumField):
def __init__(self, *args, **kwargs):
super().__init__("processor_type", *args, **kwargs)

View File

@@ -7,7 +7,7 @@ from rest_framework_json_api.serializers import ValidationError
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
def set_tenant(func=None, *, keep_tenant=False):
def set_tenant(func):
"""
Decorator to set the tenant context for a Celery task based on the provided tenant_id.
@@ -40,29 +40,20 @@ def set_tenant(func=None, *, keep_tenant=False):
# The tenant context will be set before the task logic executes.
"""
def decorator(func):
@wraps(func)
@transaction.atomic
def wrapper(*args, **kwargs):
try:
if not keep_tenant:
tenant_id = kwargs.pop("tenant_id")
else:
tenant_id = kwargs["tenant_id"]
except KeyError:
raise KeyError("This task requires the tenant_id")
try:
uuid.UUID(tenant_id)
except ValueError:
raise ValidationError("Tenant ID must be a valid UUID")
with connection.cursor() as cursor:
cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
@wraps(func)
@transaction.atomic
def wrapper(*args, **kwargs):
try:
tenant_id = kwargs.pop("tenant_id")
except KeyError:
raise KeyError("This task requires the tenant_id")
try:
uuid.UUID(tenant_id)
except ValueError:
raise ValidationError("Tenant ID must be a valid UUID")
with connection.cursor() as cursor:
cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
return func(*args, **kwargs)
return func(*args, **kwargs)
return wrapper
if func is None:
return decorator
else:
return decorator(func)
return wrapper
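# Hedged usage sketch for the decorator above (task name and body are assumptions).
# The parameterized form keeps tenant_id in kwargs so the task body can still read it:
#     @shared_task
#     @set_tenant(keep_tenant=True)
#     def scan_provider(provider_id: str, tenant_id: str):
#         ...  # RLS for tenant_id is already set when this runs
#
#     scan_provider.delay(provider_id=str(provider.id), tenant_id=str(tenant.id))
# Without keep_tenant, tenant_id is popped from kwargs before the task body executes.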

View File

@@ -3,7 +3,7 @@ from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_json_api.exceptions import exception_handler
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken
class ModelValidationError(ValidationError):
@@ -32,36 +32,6 @@ class InvitationTokenExpiredException(APIException):
default_code = "token_expired"
# Task Management Exceptions (non-HTTP)
class TaskManagementError(Exception):
"""Base exception for task management errors."""
def __init__(self, task=None):
self.task = task
super().__init__()
class TaskFailedException(TaskManagementError):
"""Raised when a task has failed."""
class TaskNotFoundException(TaskManagementError):
"""Raised when a task is not found."""
class TaskInProgressException(TaskManagementError):
"""Raised when a task is running but there's no related Task object to return."""
def __init__(self, task_result=None):
self.task_result = task_result
super().__init__()
# Provider connection errors
class ProviderConnectionError(Exception):
"""Base exception for provider connection errors."""
def custom_exception_handler(exc, context):
if isinstance(exc, django_validation_error):
if hasattr(exc, "error_dict"):
@@ -69,30 +39,7 @@ def custom_exception_handler(exc, context):
else:
exc = ValidationError(detail=exc.messages[0], code=exc.code)
elif isinstance(exc, (TokenError, InvalidToken)):
if (
hasattr(exc, "detail")
and isinstance(exc.detail, dict)
and "messages" in exc.detail
):
exc.detail["messages"] = [
message_item["message"] for message_item in exc.detail["messages"]
]
exc.detail["messages"] = [
message_item["message"] for message_item in exc.detail["messages"]
]
return exception_handler(exc, context)
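# Illustrative before/after of the simplejwt message flattening above (payload shape
# follows simplejwt's default detail structure):
#     {"messages": [{"token_class": "AccessToken", "token_type": "access",
#                    "message": "Token is invalid or expired"}]}
#     becomes
#     {"messages": ["Token is invalid or expired"]}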
class ConflictException(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = "A conflict occurred. The resource already exists."
default_code = "conflict"
def __init__(self, detail=None, code=None, pointer=None):
error_detail = {
"detail": detail or self.default_detail,
"status": self.status_code,
"code": self.default_code,
}
if pointer:
error_detail["source"] = {"pointer": pointer}
super().__init__(detail=[error_detail])
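# Hedged usage sketch (detail text and pointer are illustrative):
#     raise ConflictException(
#         detail="A provider with this UID already exists.",
#         pointer="/data/attributes/uid",
#     )
# which serializes as a single JSON:API error object with status 409 and code "conflict".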

View File

@@ -1,6 +1,5 @@
from datetime import date, datetime, timedelta, timezone
from datetime import date, datetime, timezone
from dateutil.parser import parse
from django.conf import settings
from django.db.models import Q
from django_filters.rest_framework import (
@@ -23,13 +22,11 @@ from api.db_utils import (
StatusEnumField,
)
from api.models import (
ComplianceRequirementOverview,
ComplianceOverview,
Finding,
Integration,
Invitation,
Membership,
PermissionChoices,
Processor,
Provider,
ProviderGroup,
ProviderSecret,
@@ -83,114 +80,6 @@ class ChoiceInFilter(BaseInFilter, ChoiceFilter):
pass
class CommonFindingFilters(FilterSet):
# Provider filters on findings resolve through the scan relationship
provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
provider_type = ChoiceFilter(
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
)
provider_type__in = ChoiceInFilter(
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
)
provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
provider_uid__icontains = CharFilter(
field_name="scan__provider__uid", lookup_expr="icontains"
)
provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
provider_alias__in = CharInFilter(
field_name="scan__provider__alias", lookup_expr="in"
)
provider_alias__icontains = CharFilter(
field_name="scan__provider__alias", lookup_expr="icontains"
)
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
uid = CharFilter(field_name="uid")
delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
status = ChoiceFilter(choices=StatusChoices.choices)
severity = ChoiceFilter(choices=SeverityChoices)
impact = ChoiceFilter(choices=SeverityChoices)
muted = BooleanFilter(
help_text="If this filter is not provided, muted and non-muted findings will be returned."
)
resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")
region = CharFilter(method="filter_resource_region")
region__in = CharInFilter(field_name="resource_regions", lookup_expr="overlap")
region__icontains = CharFilter(
field_name="resource_regions", lookup_expr="icontains"
)
service = CharFilter(method="filter_resource_service")
service__in = CharInFilter(field_name="resource_services", lookup_expr="overlap")
service__icontains = CharFilter(
field_name="resource_services", lookup_expr="icontains"
)
resource_uid = CharFilter(field_name="resources__uid")
resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
resource_uid__icontains = CharFilter(
field_name="resources__uid", lookup_expr="icontains"
)
resource_name = CharFilter(field_name="resources__name")
resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
resource_name__icontains = CharFilter(
field_name="resources__name", lookup_expr="icontains"
)
resource_type = CharFilter(method="filter_resource_type")
resource_type__in = CharInFilter(field_name="resource_types", lookup_expr="overlap")
resource_type__icontains = CharFilter(
field_name="resources__type", lookup_expr="icontains"
)
# Temporarily disabled until we implement tag filtering in the UI
# resource_tag_key = CharFilter(field_name="resources__tags__key")
# resource_tag_key__in = CharInFilter(
# field_name="resources__tags__key", lookup_expr="in"
# )
# resource_tag_key__icontains = CharFilter(
# field_name="resources__tags__key", lookup_expr="icontains"
# )
# resource_tag_value = CharFilter(field_name="resources__tags__value")
# resource_tag_value__in = CharInFilter(
# field_name="resources__tags__value", lookup_expr="in"
# )
# resource_tag_value__icontains = CharFilter(
# field_name="resources__tags__value", lookup_expr="icontains"
# )
# resource_tags = CharInFilter(
# method="filter_resource_tag",
# lookup_expr="in",
# help_text="Filter by resource tags `key:value` pairs.\nMultiple values may be "
# "separated by commas.",
# )
def filter_resource_service(self, queryset, name, value):
return queryset.filter(resource_services__contains=[value])
def filter_resource_region(self, queryset, name, value):
return queryset.filter(resource_regions__contains=[value])
def filter_resource_type(self, queryset, name, value):
return queryset.filter(resource_types__contains=[value])
def filter_resource_tag(self, queryset, name, value):
overall_query = Q()
for key_value_pair in value:
tag_key, tag_value = key_value_pair.split(":", 1)
overall_query |= Q(
resources__tags__key__icontains=tag_key,
resources__tags__value__icontains=tag_value,
)
return queryset.filter(overall_query).distinct()
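# Illustrative JSON:API query-string usage for the finding filters above (endpoint
# path is assumed; dotted lookups follow the filter[...] convention used by the
# validation messages elsewhere in this module):
#     GET /api/v1/findings?filter[provider_type]=aws&filter[severity]=high
#         &filter[region.in]=eu-west-1,us-east-1&filter[muted]=false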
class TenantFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
@@ -340,8 +229,6 @@ class ResourceFilter(ProviderRelationshipFilterSet):
tags = CharFilter(method="filter_tag")
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
scan = UUIDFilter(field_name="provider__scan", lookup_expr="exact")
scan__in = UUIDInFilter(field_name="provider__scan", lookup_expr="in")
class Meta:
model = Resource
@@ -356,52 +243,6 @@ class ResourceFilter(ProviderRelationshipFilterSet):
"updated_at": ["gte", "lte"],
}
def filter_queryset(self, queryset):
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
self.data.get("updated_at")
or self.data.get("updated_at__date")
or self.data.get("updated_at__gte")
or self.data.get("updated_at__lte")
):
raise ValidationError(
[
{
"detail": "At least one date filter is required: filter[updated_at], filter[updated_at.gte], "
"or filter[updated_at.lte].",
"status": 400,
"source": {"pointer": "/data/attributes/updated_at"},
"code": "required",
}
]
)
gte_date = (
parse(self.data.get("updated_at__gte")).date()
if self.data.get("updated_at__gte")
else datetime.now(timezone.utc).date()
)
lte_date = (
parse(self.data.get("updated_at__lte")).date()
if self.data.get("updated_at__lte")
else datetime.now(timezone.utc).date()
)
if abs(lte_date - gte_date) > timedelta(
days=settings.FINDINGS_MAX_DAYS_IN_RANGE
):
raise ValidationError(
[
{
"detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
"status": 400,
"source": {"pointer": "/data/attributes/updated_at"},
"code": "invalid",
}
]
)
return super().filter_queryset(queryset)
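# Behavior sketch for the guard above (dates are illustrative):
#     filter[updated_at.gte]=2024-10-01&filter[updated_at.lte]=2024-10-05   -> accepted
#     no scan or updated_at filter at all                                   -> 400, code "required"
#     a gte/lte span wider than settings.FINDINGS_MAX_DAYS_IN_RANGE days    -> 400, code "invalid"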
def filter_tag_key(self, queryset, name, value):
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
@@ -415,50 +256,76 @@ class ResourceFilter(ProviderRelationshipFilterSet):
return queryset.filter(tags__text_search=value)
class LatestResourceFilter(ProviderRelationshipFilterSet):
tag_key = CharFilter(method="filter_tag_key")
tag_value = CharFilter(method="filter_tag_value")
tag = CharFilter(method="filter_tag")
tags = CharFilter(method="filter_tag")
class FindingFilter(FilterSet):
# Provider filters on findings resolve through the scan relationship
provider = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
provider__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
provider_type = ChoiceFilter(
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
)
provider_type__in = ChoiceInFilter(
choices=Provider.ProviderChoices.choices, field_name="scan__provider__provider"
)
provider_uid = CharFilter(field_name="scan__provider__uid", lookup_expr="exact")
provider_uid__in = CharInFilter(field_name="scan__provider__uid", lookup_expr="in")
provider_uid__icontains = CharFilter(
field_name="scan__provider__uid", lookup_expr="icontains"
)
provider_alias = CharFilter(field_name="scan__provider__alias", lookup_expr="exact")
provider_alias__in = CharInFilter(
field_name="scan__provider__alias", lookup_expr="in"
)
provider_alias__icontains = CharFilter(
field_name="scan__provider__alias", lookup_expr="icontains"
)
class Meta:
model = Resource
fields = {
"provider": ["exact", "in"],
"uid": ["exact", "icontains"],
"name": ["exact", "icontains"],
"region": ["exact", "icontains", "in"],
"service": ["exact", "icontains", "in"],
"type": ["exact", "icontains", "in"],
}
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
def filter_tag_key(self, queryset, name, value):
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
uid = CharFilter(field_name="uid")
delta = ChoiceFilter(choices=Finding.DeltaChoices.choices)
status = ChoiceFilter(choices=StatusChoices.choices)
severity = ChoiceFilter(choices=SeverityChoices)
impact = ChoiceFilter(choices=SeverityChoices)
def filter_tag_value(self, queryset, name, value):
return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))
resources = UUIDInFilter(field_name="resource__id", lookup_expr="in")
def filter_tag(self, queryset, name, value):
# We won't know what the user wants to filter on just based on the value,
# and we don't want to build special filtering logic for every possible
# provider tag spec, so we'll just do a full text search
return queryset.filter(tags__text_search=value)
region = CharFilter(field_name="resources__region")
region__in = CharInFilter(field_name="resources__region", lookup_expr="in")
region__icontains = CharFilter(
field_name="resources__region", lookup_expr="icontains"
)
service = CharFilter(field_name="resources__service")
service__in = CharInFilter(field_name="resources__service", lookup_expr="in")
service__icontains = CharFilter(
field_name="resources__service", lookup_expr="icontains"
)
resource_uid = CharFilter(field_name="resources__uid")
resource_uid__in = CharInFilter(field_name="resources__uid", lookup_expr="in")
resource_uid__icontains = CharFilter(
field_name="resources__uid", lookup_expr="icontains"
)
resource_name = CharFilter(field_name="resources__name")
resource_name__in = CharInFilter(field_name="resources__name", lookup_expr="in")
resource_name__icontains = CharFilter(
field_name="resources__name", lookup_expr="icontains"
)
resource_type = CharFilter(field_name="resources__type")
resource_type__in = CharInFilter(field_name="resources__type", lookup_expr="in")
resource_type__icontains = CharFilter(
field_name="resources__type", lookup_expr="icontains"
)
class FindingFilter(CommonFindingFilters):
scan = UUIDFilter(method="filter_scan_id")
scan__in = UUIDInFilter(method="filter_scan_id_in")
inserted_at = DateFilter(method="filter_inserted_at", lookup_expr="date")
inserted_at__date = DateFilter(method="filter_inserted_at", lookup_expr="date")
inserted_at__gte = DateFilter(
method="filter_inserted_at_gte",
help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
)
inserted_at__lte = DateFilter(
method="filter_inserted_at_lte",
help_text=f"Maximum date range is {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
)
inserted_at__gte = DateFilter(method="filter_inserted_at_gte")
inserted_at__lte = DateFilter(method="filter_inserted_at_lte")
class Meta:
model = Finding
@@ -486,61 +353,6 @@ class FindingFilter(CommonFindingFilters):
},
}
def filter_resource_type(self, queryset, name, value):
return queryset.filter(resource_types__contains=[value])
def filter_resource_region(self, queryset, name, value):
return queryset.filter(resource_regions__contains=[value])
def filter_resource_service(self, queryset, name, value):
return queryset.filter(resource_services__contains=[value])
def filter_queryset(self, queryset):
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
self.data.get("inserted_at")
or self.data.get("inserted_at__date")
or self.data.get("inserted_at__gte")
or self.data.get("inserted_at__lte")
):
raise ValidationError(
[
{
"detail": "At least one date filter is required: filter[inserted_at], filter[inserted_at.gte], "
"or filter[inserted_at.lte].",
"status": 400,
"source": {"pointer": "/data/attributes/inserted_at"},
"code": "required",
}
]
)
gte_date = (
datetime.strptime(self.data.get("inserted_at__gte"), "%Y-%m-%d").date()
if self.data.get("inserted_at__gte")
else datetime.now(timezone.utc).date()
)
lte_date = (
datetime.strptime(self.data.get("inserted_at__lte"), "%Y-%m-%d").date()
if self.data.get("inserted_at__lte")
else datetime.now(timezone.utc).date()
)
if abs(lte_date - gte_date) > timedelta(
days=settings.FINDINGS_MAX_DAYS_IN_RANGE
):
raise ValidationError(
[
{
"detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
"status": 400,
"source": {"pointer": "/data/attributes/inserted_at"},
"code": "invalid",
}
]
)
return super().filter_queryset(queryset)
# Convert filter values to UUIDv7 values for use with partitioning
def filter_scan_id(self, queryset, name, value):
try:
@@ -561,7 +373,9 @@ class FindingFilter(CommonFindingFilters):
)
return (
queryset.filter(id__gte=start).filter(id__lt=end).filter(scan_id=value_uuid)
queryset.filter(id__gte=start)
.filter(id__lt=end)
.filter(scan__id=value_uuid)
)
def filter_scan_id_in(self, queryset, name, value):
@@ -586,32 +400,31 @@ class FindingFilter(CommonFindingFilters):
]
)
if start == end:
return queryset.filter(id__gte=start).filter(scan_id__in=uuid_list)
return queryset.filter(id__gte=start).filter(scan__id__in=uuid_list)
else:
return (
queryset.filter(id__gte=start)
.filter(id__lt=end)
.filter(scan_id__in=uuid_list)
.filter(scan__id__in=uuid_list)
)
def filter_inserted_at(self, queryset, name, value):
datetime_value = self.maybe_date_to_datetime(value)
start = uuid7_start(datetime_to_uuid7(datetime_value))
end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
value = self.maybe_date_to_datetime(value)
start = uuid7_start(datetime_to_uuid7(value))
return queryset.filter(id__gte=start, id__lt=end)
return queryset.filter(id__gte=start).filter(inserted_at__date=value)
def filter_inserted_at_gte(self, queryset, name, value):
datetime_value = self.maybe_date_to_datetime(value)
start = uuid7_start(datetime_to_uuid7(datetime_value))
value = self.maybe_date_to_datetime(value)
start = uuid7_start(datetime_to_uuid7(value))
return queryset.filter(id__gte=start)
return queryset.filter(id__gte=start).filter(inserted_at__gte=value)
def filter_inserted_at_lte(self, queryset, name, value):
datetime_value = self.maybe_date_to_datetime(value)
end = uuid7_start(datetime_to_uuid7(datetime_value + timedelta(days=1)))
value = self.maybe_date_to_datetime(value)
end = uuid7_start(datetime_to_uuid7(value))
return queryset.filter(id__lt=end)
return queryset.filter(id__lte=end).filter(inserted_at__lte=value)
@staticmethod
def maybe_date_to_datetime(value):
@@ -621,31 +434,6 @@ class FindingFilter(CommonFindingFilters):
return dt
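# Sketch of the UUIDv7 time-window trick used by the filters above (helper names are
# the ones already used in this module; the date is illustrative):
#     dt = datetime(2024, 10, 18, tzinfo=timezone.utc)
#     start = uuid7_start(datetime_to_uuid7(dt))                      # lowest UUIDv7 id for that instant
#     end = uuid7_start(datetime_to_uuid7(dt + timedelta(days=1)))    # lowest UUIDv7 id for the next day
#     Finding.objects.filter(id__gte=start, id__lt=end)               # lets Postgres prune id-range partitions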
class LatestFindingFilter(CommonFindingFilters):
class Meta:
model = Finding
fields = {
"id": ["exact", "in"],
"uid": ["exact", "in"],
"delta": ["exact", "in"],
"status": ["exact", "in"],
"severity": ["exact", "in"],
"impact": ["exact", "in"],
"check_id": ["exact", "in", "icontains"],
}
filter_overrides = {
FindingDeltaEnumField: {
"filter_class": CharFilter,
},
StatusEnumField: {
"filter_class": CharFilter,
},
SeverityEnumField: {
"filter_class": CharFilter,
},
}
class ProviderSecretFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
@@ -717,11 +505,12 @@ class RoleFilter(FilterSet):
class ComplianceOverviewFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
scan_id = UUIDFilter(field_name="scan_id")
region = CharFilter(field_name="region")
provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
provider_type__in = ChoiceInFilter(choices=Provider.ProviderChoices.choices)
scan_id = UUIDFilter(field_name="scan__id")
class Meta:
model = ComplianceRequirementOverview
model = ComplianceOverview
fields = {
"inserted_at": ["date", "gte", "lte"],
"compliance_id": ["exact", "icontains"],
@@ -741,6 +530,12 @@ class ScanSummaryFilter(FilterSet):
field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
)
region = CharFilter(field_name="region")
muted_findings = BooleanFilter(method="filter_muted_findings")
def filter_muted_findings(self, queryset, name, value):
if not value:
return queryset.exclude(muted__gt=0)
return queryset
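    # Behavior sketch for the muted_findings flag above:
    #     filter[muted_findings]=false -> scan summary rows containing muted findings are excluded
    #     filter[muted_findings]=true  -> no extra filtering; rows with muted findings are kept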
class Meta:
model = ScanSummary
@@ -751,6 +546,8 @@ class ScanSummaryFilter(FilterSet):
class ServiceOverviewFilter(ScanSummaryFilter):
muted_findings = None
def is_valid(self):
# Check if at least one of the inserted_at filters is present
inserted_at_filters = [
@@ -768,28 +565,3 @@ class ServiceOverviewFilter(ScanSummaryFilter):
}
)
return super().is_valid()
class IntegrationFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
integration_type = ChoiceFilter(choices=Integration.IntegrationChoices.choices)
integration_type__in = ChoiceInFilter(
choices=Integration.IntegrationChoices.choices,
field_name="integration_type",
lookup_expr="in",
)
class Meta:
model = Integration
fields = {
"inserted_at": ["date", "gte", "lte"],
}
class ProcessorFilter(FilterSet):
processor_type = ChoiceFilter(choices=Processor.ProcessorChoices.choices)
processor_type__in = ChoiceInFilter(
choices=Processor.ProcessorChoices.choices,
field_name="processor_type",
lookup_expr="in",
)

View File

@@ -3,7 +3,7 @@
"model": "api.user",
"pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"fields": {
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
"last_login": null,
"name": "Devie Prowlerson",
"email": "dev@prowler.com",
@@ -16,7 +16,7 @@
"model": "api.user",
"pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
"fields": {
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
"last_login": null,
"name": "Devietoo Prowlerson",
"email": "dev2@prowler.com",

View File

@@ -122,22 +122,6 @@
"scanner_args": {}
}
},
{
"model": "api.provider",
"pk": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:45:26.352Z",
"updated_at": "2024-10-18T11:16:23.533Z",
"provider": "kubernetes",
"uid": "gke_lucky-coast-419309_us-central1_autopilot-cluster-2",
"alias": "k8s_testing_2",
"connected": true,
"connection_last_checked_at": "2024-10-18T11:16:23.503Z",
"metadata": {},
"scanner_args": {}
}
},
{
"model": "api.providersecret",
"pk": "11491b47-75ae-4f71-ad8d-3e630a72182e",

View File

@@ -11,7 +11,9 @@
"unique_resource_count": 1,
"duration": 5,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"inserted_at": "2024-09-01T17:25:27.050Z",
"started_at": "2024-09-01T17:25:27.050Z",
@@ -31,7 +33,9 @@
"unique_resource_count": 1,
"duration": 20,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"inserted_at": "2024-09-02T17:24:27.050Z",
"started_at": "2024-09-02T17:24:27.050Z",
@@ -51,7 +55,9 @@
"unique_resource_count": 10,
"duration": 10,
"scanner_args": {
"checks_to_execute": ["cloudsql_instance_automated_backups"]
"checks_to_execute": [
"cloudsql_instance_automated_backups"
]
},
"inserted_at": "2024-09-02T19:26:27.050Z",
"started_at": "2024-09-02T19:26:27.050Z",
@@ -71,7 +77,9 @@
"unique_resource_count": 1,
"duration": 35,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"inserted_at": "2024-09-02T19:27:27.050Z",
"started_at": "2024-09-02T19:27:27.050Z",
@@ -89,7 +97,9 @@
"name": "test scheduled aws scan",
"state": "available",
"scanner_args": {
"checks_to_execute": ["cloudformation_stack_outputs_find_secrets"]
"checks_to_execute": [
"cloudformation_stack_outputs_find_secrets"
]
},
"scheduled_at": "2030-09-02T19:20:27.050Z",
"inserted_at": "2024-09-02T19:24:27.050Z",
@@ -168,7 +178,9 @@
"unique_resource_count": 19,
"progress": 100,
"scanner_args": {
"checks_to_execute": ["accessanalyzer_enabled"]
"checks_to_execute": [
"accessanalyzer_enabled"
]
},
"duration": 7,
"scheduled_at": null,
@@ -178,56 +190,6 @@
"completed_at": "2024-10-18T10:46:05.127Z"
}
},
{
"model": "api.scan",
"pk": "6dd8925f-a52d-48de-a546-d2d90db30ab1",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan azure",
"provider": "1b59e032-3eb6-4694-93a5-df84cd9b3ce2",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 20,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"account_security_contact_information_is_registered"
]
},
"duration": 4,
"scheduled_at": null,
"inserted_at": "2024-10-18T11:16:21.358Z",
"updated_at": "2024-10-18T11:16:26.060Z",
"started_at": "2024-10-18T11:16:21.593Z",
"completed_at": "2024-10-18T11:16:26.060Z"
}
},
{
"model": "api.scan",
"pk": "4ca7ce89-3236-41a8-a369-8937bc152af5",
"fields": {
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"name": "real scan k8s",
"provider": "7791914f-d646-4fe2-b2ed-73f2c6499a36",
"trigger": "manual",
"state": "completed",
"unique_resource_count": 20,
"progress": 100,
"scanner_args": {
"checks_to_execute": [
"accessanalyzer_enabled",
"account_security_contact_information_is_registered"
]
},
"duration": 4,
"scheduled_at": null,
"inserted_at": "2024-10-18T11:16:21.358Z",
"updated_at": "2024-10-18T11:16:26.060Z",
"started_at": "2024-10-18T11:16:21.593Z",
"completed_at": "2024-10-18T11:16:26.060Z"
}
},
{
"model": "api.scan",
"pk": "01929f57-c0ee-7553-be0b-cbde006fb6f7",

View File

@@ -6,7 +6,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.823Z",
"updated_at": "2024-10-18T10:46:04.841Z",
"first_seen_at": "2024-10-18T10:46:04.823Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -62,7 +61,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.855Z",
"updated_at": "2024-10-18T10:46:04.858Z",
"first_seen_at": "2024-10-18T10:46:04.855Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
"delta": "new",
"status": "FAIL",
@@ -118,7 +116,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.869Z",
"updated_at": "2024-10-18T10:46:04.876Z",
"first_seen_at": "2024-10-18T10:46:04.869Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -174,7 +171,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.888Z",
"updated_at": "2024-10-18T10:46:04.892Z",
"first_seen_at": "2024-10-18T10:46:04.888Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -230,7 +226,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.901Z",
"updated_at": "2024-10-18T10:46:04.905Z",
"first_seen_at": "2024-10-18T10:46:04.901Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -286,7 +281,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.915Z",
"updated_at": "2024-10-18T10:46:04.919Z",
"first_seen_at": "2024-10-18T10:46:04.915Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -342,7 +336,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.929Z",
"updated_at": "2024-10-18T10:46:04.934Z",
"first_seen_at": "2024-10-18T10:46:04.929Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -398,7 +391,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.944Z",
"updated_at": "2024-10-18T10:46:04.947Z",
"first_seen_at": "2024-10-18T10:46:04.944Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -454,7 +446,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.957Z",
"updated_at": "2024-10-18T10:46:04.962Z",
"first_seen_at": "2024-10-18T10:46:04.957Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
"delta": "new",
"status": "PASS",
@@ -510,7 +501,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.971Z",
"updated_at": "2024-10-18T10:46:04.975Z",
"first_seen_at": "2024-10-18T10:46:04.971Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -566,7 +556,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.984Z",
"updated_at": "2024-10-18T10:46:04.989Z",
"first_seen_at": "2024-10-18T10:46:04.984Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -622,7 +611,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:04.999Z",
"updated_at": "2024-10-18T10:46:05.003Z",
"first_seen_at": "2024-10-18T10:46:04.999Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -678,7 +666,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.013Z",
"updated_at": "2024-10-18T10:46:05.018Z",
"first_seen_at": "2024-10-18T10:46:05.013Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -734,7 +721,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.029Z",
"updated_at": "2024-10-18T10:46:05.033Z",
"first_seen_at": "2024-10-18T10:46:05.029Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -790,7 +776,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.045Z",
"updated_at": "2024-10-18T10:46:05.050Z",
"first_seen_at": "2024-10-18T10:46:05.045Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -846,7 +831,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.061Z",
"updated_at": "2024-10-18T10:46:05.065Z",
"first_seen_at": "2024-10-18T10:46:05.061Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
"delta": "new",
"status": "FAIL",
@@ -902,7 +886,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.080Z",
"updated_at": "2024-10-18T10:46:05.085Z",
"first_seen_at": "2024-10-18T10:46:05.080Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -958,7 +941,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.099Z",
"updated_at": "2024-10-18T10:46:05.104Z",
"first_seen_at": "2024-10-18T10:46:05.099Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
"delta": "new",
"status": "FAIL",
@@ -1014,7 +996,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T10:46:05.115Z",
"updated_at": "2024-10-18T10:46:05.121Z",
"first_seen_at": "2024-10-18T10:46:05.115Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
"delta": "new",
"status": "FAIL",
@@ -1070,7 +1051,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.489Z",
"updated_at": "2024-10-18T11:16:24.506Z",
"first_seen_at": "2024-10-18T10:46:04.823Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-south-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1126,7 +1106,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.518Z",
"updated_at": "2024-10-18T11:16:24.521Z",
"first_seen_at": "2024-10-18T10:46:04.855Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-3-112233445566",
"delta": null,
"status": "FAIL",
@@ -1182,7 +1161,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.526Z",
"updated_at": "2024-10-18T11:16:24.529Z",
"first_seen_at": "2024-10-18T10:46:04.869Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1238,7 +1216,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.535Z",
"updated_at": "2024-10-18T11:16:24.538Z",
"first_seen_at": "2024-10-18T10:46:04.888Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1294,7 +1271,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.544Z",
"updated_at": "2024-10-18T11:16:24.546Z",
"first_seen_at": "2024-10-18T10:46:04.901Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1350,7 +1326,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.551Z",
"updated_at": "2024-10-18T11:16:24.554Z",
"first_seen_at": "2024-10-18T10:46:04.915Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-south-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1406,7 +1381,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.560Z",
"updated_at": "2024-10-18T11:16:24.562Z",
"first_seen_at": "2024-10-18T10:46:04.929Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1462,7 +1436,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.567Z",
"updated_at": "2024-10-18T11:16:24.569Z",
"first_seen_at": "2024-10-18T10:46:04.944Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ca-central-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1518,7 +1491,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.573Z",
"updated_at": "2024-10-18T11:16:24.575Z",
"first_seen_at": "2024-10-18T10:46:04.957Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-east-1-ConsoleAnalyzer-83b66ad7-d024-454e-b851-52d11cc1cf7c",
"delta": null,
"status": "PASS",
@@ -1574,7 +1546,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.580Z",
"updated_at": "2024-10-18T11:16:24.582Z",
"first_seen_at": "2024-10-18T10:46:04.971Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-west-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1630,7 +1601,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.587Z",
"updated_at": "2024-10-18T11:16:24.589Z",
"first_seen_at": "2024-10-18T10:46:04.984Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-sa-east-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1686,7 +1656,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.595Z",
"updated_at": "2024-10-18T11:16:24.597Z",
"first_seen_at": "2024-10-18T10:46:04.999Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-north-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1742,7 +1711,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.602Z",
"updated_at": "2024-10-18T11:16:24.604Z",
"first_seen_at": "2024-10-18T10:46:05.013Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-us-west-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -1798,7 +1766,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.610Z",
"updated_at": "2024-10-18T11:16:24.612Z",
"first_seen_at": "2024-10-18T10:46:05.029Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1854,7 +1821,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.617Z",
"updated_at": "2024-10-18T11:16:24.620Z",
"first_seen_at": "2024-10-18T10:46:05.045Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-eu-central-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1910,7 +1876,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.625Z",
"updated_at": "2024-10-18T11:16:24.627Z",
"first_seen_at": "2024-10-18T10:46:05.061Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-1-112233445566",
"delta": null,
"status": "FAIL",
@@ -1966,7 +1931,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.632Z",
"updated_at": "2024-10-18T11:16:24.634Z",
"first_seen_at": "2024-10-18T10:46:05.080Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-southeast-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -2022,7 +1986,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.639Z",
"updated_at": "2024-10-18T11:16:24.642Z",
"first_seen_at": "2024-10-18T10:46:05.099Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-2-112233445566",
"delta": null,
"status": "FAIL",
@@ -2078,7 +2041,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:24.646Z",
"updated_at": "2024-10-18T11:16:24.648Z",
"first_seen_at": "2024-10-18T10:46:05.115Z",
"uid": "prowler-aws-accessanalyzer_enabled-112233445566-ap-northeast-3-112233445566",
"delta": null,
"status": "FAIL",
@@ -2134,7 +2096,6 @@
"tenant": "12646005-9067-4d2a-a098-8bb378604362",
"inserted_at": "2024-10-18T11:16:26.033Z",
"updated_at": "2024-10-18T11:16:26.045Z",
"first_seen_at": "2024-10-18T11:16:26.033Z",
"uid": "prowler-aws-account_security_contact_information_is_registered-112233445566-us-east-1-112233445566",
"delta": "new",
"status": "MANUAL",

View File

@@ -1,80 +0,0 @@
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.db.migrations.recorder import MigrationRecorder
def table_exists(table_name):
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT EXISTS (
SELECT 1 FROM information_schema.tables
WHERE table_name = %s
)
""",
[table_name],
)
return cursor.fetchone()[0]
class Command(BaseCommand):
help = "Fix migration inconsistency between socialaccount and sites"
def add_arguments(self, parser):
parser.add_argument(
"--database",
default=DEFAULT_DB_ALIAS,
help="Specifies the database to operate on.",
)
def handle(self, *args, **options):
db = options["database"]
connection = connections[db]
recorder = MigrationRecorder(connection)
applied = set(recorder.applied_migrations())
has_social = ("socialaccount", "0001_initial") in applied
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_name = 'django_site'
);
"""
)
site_table_exists = cursor.fetchone()[0]
if has_social and not site_table_exists:
self.stdout.write(
f"Detected inconsistency in '{db}'. Creating 'django_site' table manually..."
)
with transaction.atomic(using=db):
with connection.schema_editor() as schema_editor:
schema_editor.create_model(Site)
recorder.record_applied("sites", "0001_initial")
recorder.record_applied("sites", "0002_alter_domain_unique")
self.stdout.write(
"Fixed: 'django_site' table created and migrations registered."
)
# Ensure the relationship table also exists
if not table_exists("socialaccount_socialapp_sites"):
self.stdout.write(
"Detected missing 'socialaccount_socialapp_sites' table. Creating manually..."
)
with connection.schema_editor() as schema_editor:
from allauth.socialaccount.models import SocialApp
schema_editor.create_model(
SocialApp._meta.get_field("sites").remote_field.through
)
self.stdout.write(
"Fixed: 'socialaccount_socialapp_sites' table created."
)
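# Hedged usage note: the command's module name is not visible in this diff, so the
# name below is a placeholder; --database accepts any configured alias (e.g. the
# "admin" connection used by MainRouter):
#     python manage.py <fix_socialaccount_sites_migrations> --database admin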

View File

@@ -1,285 +0,0 @@
import random
from datetime import datetime, timezone
from math import ceil
from uuid import uuid4
from django.core.management.base import BaseCommand
from tqdm import tqdm
from api.db_utils import rls_transaction
from api.models import (
Finding,
Provider,
Resource,
ResourceFindingMapping,
ResourceScanSummary,
Scan,
StatusChoices,
)
from prowler.lib.check.models import CheckMetadata
class Command(BaseCommand):
help = "Populates the database with test data for performance testing."
def add_arguments(self, parser):
parser.add_argument(
"--tenant",
type=str,
required=True,
help="Tenant id for which the data will be populated.",
)
parser.add_argument(
"--resources",
type=int,
required=True,
help="The number of resources to create.",
)
parser.add_argument(
"--findings",
type=int,
required=True,
help="The number of findings to create.",
)
parser.add_argument(
"--batch", type=int, required=True, help="The batch size for bulk creation."
)
parser.add_argument(
"--alias",
type=str,
required=False,
help="Optional alias for the provider and scan",
)
def handle(self, *args, **options):
tenant_id = options["tenant"]
num_resources = options["resources"]
num_findings = options["findings"]
batch_size = options["batch"]
alias = options["alias"] or "Testing"
uid_token = str(uuid4())
self.stdout.write(self.style.NOTICE("Starting data population"))
self.stdout.write(self.style.NOTICE(f"\tTenant: {tenant_id}"))
self.stdout.write(self.style.NOTICE(f"\tAlias: {alias}"))
self.stdout.write(self.style.NOTICE(f"\tResources: {num_resources}"))
self.stdout.write(self.style.NOTICE(f"\tFindings: {num_findings}"))
self.stdout.write(self.style.NOTICE(f"\tBatch size: {batch_size}\n\n"))
# Resource metadata
possible_regions = [
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2",
"ca-central-1",
"eu-central-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"ap-southeast-1",
"ap-southeast-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-south-1",
"sa-east-1",
]
possible_services = []
possible_types = []
bulk_check_metadata = CheckMetadata.get_bulk(provider="aws")
for check_metadata in bulk_check_metadata.values():
if check_metadata.ServiceName not in possible_services:
possible_services.append(check_metadata.ServiceName)
if (
check_metadata.ResourceType
and check_metadata.ResourceType not in possible_types
):
possible_types.append(check_metadata.ResourceType)
with rls_transaction(tenant_id):
provider, _ = Provider.all_objects.get_or_create(
tenant_id=tenant_id,
provider="aws",
connected=True,
uid=str(random.randint(100000000000, 999999999999)),
defaults={
"alias": alias,
},
)
with rls_transaction(tenant_id):
scan = Scan.all_objects.create(
tenant_id=tenant_id,
provider=provider,
name=alias,
trigger="manual",
state="executing",
progress=0,
started_at=datetime.now(timezone.utc),
)
scan_state = "completed"
try:
# Create resources
resources = []
for i in range(num_resources):
resources.append(
Resource(
tenant_id=tenant_id,
provider_id=provider.id,
uid=f"testing-{uid_token}-{i}",
name=f"Testing {uid_token}-{i}",
region=random.choice(possible_regions),
service=random.choice(possible_services),
type=random.choice(possible_types),
inserted_at="2024-10-01T00:00:00Z",
)
)
num_batches = ceil(len(resources) / batch_size)
self.stdout.write(self.style.WARNING("Creating resources..."))
for i in tqdm(range(0, len(resources), batch_size), total=num_batches):
with rls_transaction(tenant_id):
Resource.all_objects.bulk_create(resources[i : i + batch_size])
self.stdout.write(self.style.SUCCESS("Resources created successfully.\n\n"))
with rls_transaction(tenant_id):
scan.progress = 33
scan.save()
# Create Findings
findings = []
possible_deltas = ["new", "changed", None]
possible_severities = ["critical", "high", "medium", "low"]
findings_resources_mapping = []
for i in range(num_findings):
severity = random.choice(possible_severities)
check_id = random.randint(1, 1000)
assigned_resource_num = random.randint(0, len(resources) - 1)
assigned_resource = resources[assigned_resource_num]
findings_resources_mapping.append(assigned_resource_num)
findings.append(
Finding(
tenant_id=tenant_id,
scan=scan,
uid=f"testing-{uid_token}-{i}",
delta=random.choice(possible_deltas),
check_id=f"check-{check_id}",
status=random.choice(list(StatusChoices)),
severity=severity,
impact=severity,
raw_result={},
check_metadata={
"checktitle": f"Test title for check {check_id}",
"risk": f"Testing risk {uid_token}-{i}",
"provider": "aws",
"severity": severity,
"categories": ["category1", "category2", "category3"],
"description": "This is a random description that should not matter for testing purposes.",
"servicename": assigned_resource.service,
"resourcetype": assigned_resource.type,
},
resource_types=[assigned_resource.type],
resource_regions=[assigned_resource.region],
resource_services=[assigned_resource.service],
inserted_at="2024-10-01T00:00:00Z",
)
)
num_batches = ceil(len(findings) / batch_size)
self.stdout.write(self.style.WARNING("Creating findings..."))
for i in tqdm(range(0, len(findings), batch_size), total=num_batches):
with rls_transaction(tenant_id):
Finding.all_objects.bulk_create(findings[i : i + batch_size])
self.stdout.write(self.style.SUCCESS("Findings created successfully.\n\n"))
with rls_transaction(tenant_id):
scan.progress = 66
scan.save()
# Create ResourceFindingMapping
mappings = []
scan_resource_cache: set[tuple] = set()
for index, finding_instance in enumerate(findings):
resource_instance = resources[findings_resources_mapping[index]]
mappings.append(
ResourceFindingMapping(
tenant_id=tenant_id,
resource=resource_instance,
finding=finding_instance,
)
)
scan_resource_cache.add(
(
str(resource_instance.id),
resource_instance.service,
resource_instance.region,
resource_instance.type,
)
)
num_batches = ceil(len(mappings) / batch_size)
self.stdout.write(
self.style.WARNING("Creating resource-finding mappings...")
)
for i in tqdm(range(0, len(mappings), batch_size), total=num_batches):
with rls_transaction(tenant_id):
ResourceFindingMapping.objects.bulk_create(
mappings[i : i + batch_size]
)
self.stdout.write(
self.style.SUCCESS(
"Resource-finding mappings created successfully.\n\n"
)
)
with rls_transaction(tenant_id):
scan.progress = 99
scan.save()
self.stdout.write(self.style.WARNING("Creating finding filter values..."))
resource_scan_summaries = [
ResourceScanSummary(
tenant_id=tenant_id,
scan_id=str(scan.id),
resource_id=resource_id,
service=service,
region=region,
resource_type=resource_type,
)
for resource_id, service, region, resource_type in scan_resource_cache
]
num_batches = ceil(len(resource_scan_summaries) / batch_size)
with rls_transaction(tenant_id):
for i in tqdm(
range(0, len(resource_scan_summaries), batch_size),
total=num_batches,
):
with rls_transaction(tenant_id):
ResourceScanSummary.objects.bulk_create(
resource_scan_summaries[i : i + batch_size],
ignore_conflicts=True,
)
self.stdout.write(
self.style.SUCCESS("Finding filter values created successfully.\n\n")
)
except Exception as e:
self.stdout.write(self.style.ERROR(f"Failed to populate test data: {e}"))
scan_state = "failed"
finally:
scan.completed_at = datetime.now(timezone.utc)
scan.duration = int(
(datetime.now(timezone.utc) - scan.started_at).total_seconds()
)
scan.progress = 100
scan.state = scan_state
scan.unique_resource_count = num_resources
with rls_transaction(tenant_id):
scan.save()
self.stdout.write(self.style.NOTICE("Successfully populated test data."))
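# Hedged usage sketch (the command's module name is not shown in this diff and the
# argument values are illustrative; the tenant id matches the fixture tenant used above):
#     python manage.py <populate_test_data> \
#         --tenant 12646005-9067-4d2a-a098-8bb378604362 \
#         --resources 1000 --findings 50000 --batch 500 --alias "Perf testing"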

View File

@@ -1,17 +1,15 @@
# Generated by Django 5.1.1 on 2024-12-05 12:29
import uuid
import api.rls
import django.db.models.deletion
import uuid
from django.conf import settings
from django.db import migrations, models
import api.rls
class Migration(migrations.Migration):
dependencies = [
("api", "0003_update_provider_unique_constraint_with_is_deleted"),
("api", "0002_token_migrations"),
]
operations = [

View File

@@ -1,23 +0,0 @@
# Generated by Django 5.1.1 on 2024-12-20 13:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0002_token_migrations"),
]
operations = [
migrations.RemoveConstraint(
model_name="provider",
name="unique_provider_uids",
),
migrations.AddConstraint(
model_name="provider",
constraint=models.UniqueConstraint(
fields=("tenant_id", "provider", "uid", "is_deleted"),
name="unique_provider_uids",
),
),
]

View File

@@ -1,5 +1,4 @@
from django.db import migrations
from api.db_router import MainRouter
@@ -36,7 +35,7 @@ def create_admin_role(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("api", "0004_rbac"),
("api", "0003_rbac"),
]
operations = [

View File

@@ -1,15 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0005_rbac_missing_admin_roles"),
]
operations = [
migrations.AddField(
model_name="finding",
name="first_seen_at",
field=models.DateTimeField(editable=False, null=True),
),
]

View File

@@ -1,25 +0,0 @@
# Generated by Django 5.1.5 on 2025-01-28 15:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0006_findings_first_seen"),
]
operations = [
migrations.AddIndex(
model_name="scan",
index=models.Index(
fields=["tenant_id", "provider_id", "state", "inserted_at"],
name="scans_prov_state_insert_idx",
),
),
migrations.AddIndex(
model_name="scansummary",
index=models.Index(
fields=["tenant_id", "scan_id"], name="scan_summaries_tenant_scan_idx"
),
),
]

View File

@@ -1,64 +0,0 @@
import json
from datetime import datetime, timedelta, timezone
import django.db.models.deletion
from django.db import migrations, models
from django_celery_beat.models import PeriodicTask
from api.db_utils import rls_transaction
from api.models import Scan, StateChoices
def migrate_daily_scheduled_scan_tasks(apps, schema_editor):
for daily_scheduled_scan_task in PeriodicTask.objects.filter(
task="scan-perform-scheduled"
):
task_kwargs = json.loads(daily_scheduled_scan_task.kwargs)
tenant_id = task_kwargs["tenant_id"]
provider_id = task_kwargs["provider_id"]
current_time = datetime.now(timezone.utc)
scheduled_time_today = datetime.combine(
current_time.date(),
daily_scheduled_scan_task.start_time.time(),
tzinfo=timezone.utc,
)
if current_time < scheduled_time_today:
next_scan_date = scheduled_time_today
else:
next_scan_date = scheduled_time_today + timedelta(days=1)
with rls_transaction(tenant_id):
Scan.objects.create(
tenant_id=tenant_id,
name="Daily scheduled scan",
provider_id=provider_id,
trigger=Scan.TriggerChoices.SCHEDULED,
state=StateChoices.SCHEDULED,
scheduled_at=next_scan_date,
scheduler_task_id=daily_scheduled_scan_task.id,
)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0007_scan_and_scan_summaries_indexes"),
("django_celery_beat", "0019_alter_periodictasks_options"),
]
operations = [
migrations.AddField(
model_name="scan",
name="scheduler_task",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="django_celery_beat.periodictask",
),
),
migrations.RunPython(migrate_daily_scheduled_scan_tasks),
]
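The next_scan_date computation above picks today's scheduled time if it has not yet passed, otherwise the same time tomorrow. A small worked example, assuming a task scheduled daily at 03:00 UTC (all times illustrative):

from datetime import datetime, timedelta, timezone

scheduled_time_today = datetime(2025, 1, 10, 3, 0, tzinfo=timezone.utc)

# Case 1: it is 01:30 UTC, before today's slot -> run today at 03:00.
current_time = datetime(2025, 1, 10, 1, 30, tzinfo=timezone.utc)
next_scan_date = (
    scheduled_time_today
    if current_time < scheduled_time_today
    else scheduled_time_today + timedelta(days=1)
)
assert next_scan_date == datetime(2025, 1, 10, 3, 0, tzinfo=timezone.utc)

# Case 2: it is 09:00 UTC, past today's slot -> run tomorrow at 03:00.
current_time = datetime(2025, 1, 10, 9, 0, tzinfo=timezone.utc)
next_scan_date = (
    scheduled_time_today
    if current_time < scheduled_time_today
    else scheduled_time_today + timedelta(days=1)
)
assert next_scan_date == datetime(2025, 1, 11, 3, 0, tzinfo=timezone.utc)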

View File

@@ -1,22 +0,0 @@
# Generated by Django 5.1.5 on 2025-02-07 09:42
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0008_daily_scheduled_tasks_update"),
]
operations = [
migrations.AlterField(
model_name="provider",
name="uid",
field=models.CharField(
max_length=250,
validators=[django.core.validators.MinLengthValidator(3)],
verbose_name="Unique identifier for the provider, set by the provider",
),
),
]

View File

@@ -1,109 +0,0 @@
from functools import partial
from django.db import connection, migrations
def create_index_on_partitions(
apps, schema_editor, parent_table: str, index_name: str, index_details: str
):
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT inhrelid::regclass::text
FROM pg_inherits
WHERE inhparent = %s::regclass;
""",
[parent_table],
)
partitions = [row[0] for row in cursor.fetchall()]
# Iterate over partitions and create index concurrently.
# Note: PostgreSQL does not allow CONCURRENTLY inside a transaction,
# so we need atomic = False for this migration.
for partition in partitions:
sql = (
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition.replace('.', '_')}_{index_name} ON {partition} "
f"{index_details};"
)
schema_editor.execute(sql)
def drop_index_on_partitions(apps, schema_editor, parent_table: str, index_name: str):
with schema_editor.connection.cursor() as cursor:
cursor.execute(
"""
SELECT inhrelid::regclass::text
FROM pg_inherits
WHERE inhparent = %s::regclass;
""",
[parent_table],
)
partitions = [row[0] for row in cursor.fetchall()]
# Iterate over partitions and drop index concurrently.
for partition in partitions:
partition_index = f"{partition.replace('.', '_')}_{index_name}"
sql = f"DROP INDEX CONCURRENTLY IF EXISTS {partition_index};"
schema_editor.execute(sql)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0009_increase_provider_uid_maximum_length"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="findings_tenant_and_id_idx",
index_details="(tenant_id, id)",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="findings_tenant_and_id_idx",
),
),
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_idx",
index_details="(tenant_id, scan_id)",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_idx",
),
),
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_id_idx",
index_details="(tenant_id, scan_id, id)",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_id_idx",
),
),
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="find_delta_new_idx",
index_details="(tenant_id, id) where delta = 'new'",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="find_delta_new_idx",
),
),
]
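For reference, create_index_on_partitions emits one CONCURRENTLY statement per child partition, which is why the migration sets atomic = False. A sketch of the SQL it would build for the findings_tenant_and_id_idx index above, using a hypothetical partition name:

partition = "findings_default"  # hypothetical child partition of "findings"
index_name = "findings_tenant_and_id_idx"
index_details = "(tenant_id, id)"

sql = (
    f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {partition.replace('.', '_')}_{index_name} ON {partition} "
    f"{index_details};"
)
# -> CREATE INDEX CONCURRENTLY IF NOT EXISTS
#    findings_default_findings_tenant_and_id_idx ON findings_default (tenant_id, id);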

View File

@@ -1,49 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0010_findings_performance_indexes_partitions"),
]
operations = [
migrations.AddIndex(
model_name="finding",
index=models.Index(
fields=["tenant_id", "id"], name="findings_tenant_and_id_idx"
),
),
migrations.AddIndex(
model_name="finding",
index=models.Index(
fields=["tenant_id", "scan_id"], name="find_tenant_scan_idx"
),
),
migrations.AddIndex(
model_name="finding",
index=models.Index(
fields=["tenant_id", "scan_id", "id"], name="find_tenant_scan_id_idx"
),
),
migrations.AddIndex(
model_name="finding",
index=models.Index(
condition=models.Q(("delta", "new")),
fields=["tenant_id", "id"],
name="find_delta_new_idx",
),
),
migrations.AddIndex(
model_name="resourcetagmapping",
index=models.Index(
fields=["tenant_id", "resource_id"], name="resource_tag_tenant_idx"
),
),
migrations.AddIndex(
model_name="resource",
index=models.Index(
fields=["tenant_id", "service", "region", "type"],
name="resource_tenant_metadata_idx",
),
),
]

View File

@@ -1,15 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0011_findings_performance_indexes_parent"),
]
operations = [
migrations.AddField(
model_name="scan",
name="output_location",
field=models.CharField(blank=True, max_length=200, null=True),
),
]

View File

@@ -1,35 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46
from functools import partial
from django.db import migrations
from api.db_utils import IntegrationTypeEnum, PostgresEnumMigration, register_enum
from api.models import Integration
IntegrationTypeEnumMigration = PostgresEnumMigration(
enum_name="integration_type",
enum_values=tuple(
integration_type[0]
for integration_type in Integration.IntegrationChoices.choices
),
)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0012_scan_report_output"),
]
operations = [
migrations.RunPython(
IntegrationTypeEnumMigration.create_enum_type,
reverse_code=IntegrationTypeEnumMigration.drop_enum_type,
),
migrations.RunPython(
partial(register_enum, enum_class=IntegrationTypeEnum),
reverse_code=migrations.RunPython.noop,
),
]

View File

@@ -1,131 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46
import uuid
import django.db.models.deletion
from django.db import migrations, models
import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint
class Migration(migrations.Migration):
dependencies = [
("api", "0013_integrations_enum"),
]
operations = [
migrations.CreateModel(
name="Integration",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
("enabled", models.BooleanField(default=False)),
("connected", models.BooleanField(blank=True, null=True)),
(
"connection_last_checked_at",
models.DateTimeField(blank=True, null=True),
),
(
"integration_type",
api.db_utils.IntegrationTypeEnumField(
choices=[
("amazon_s3", "Amazon S3"),
("saml", "SAML"),
("aws_security_hub", "AWS Security Hub"),
("jira", "JIRA"),
("slack", "Slack"),
]
),
),
("configuration", models.JSONField(default=dict)),
("_credentials", models.BinaryField(db_column="credentials")),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={"db_table": "integrations", "abstract": False},
),
migrations.AddConstraint(
model_name="integration",
constraint=RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_integration",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.CreateModel(
name="IntegrationProviderRelationship",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
(
"integration",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="api.integration",
),
),
(
"provider",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.provider"
),
),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "integration_provider_mappings",
"constraints": [
models.UniqueConstraint(
fields=("integration_id", "provider_id"),
name="unique_integration_provider_rel",
),
],
},
),
migrations.AddConstraint(
model_name="IntegrationProviderRelationship",
constraint=RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_integrationproviderrelationship",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddField(
model_name="integration",
name="providers",
field=models.ManyToManyField(
blank=True,
related_name="integrations",
through="api.IntegrationProviderRelationship",
to="api.provider",
),
),
]

View File

@@ -1,26 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-25 11:29
from django.db import migrations, models
import api.db_utils
class Migration(migrations.Migration):
dependencies = [
("api", "0014_integrations"),
]
operations = [
migrations.AddField(
model_name="finding",
name="muted",
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name="finding",
name="status",
field=api.db_utils.StatusEnumField(
choices=[("FAIL", "Fail"), ("PASS", "Pass"), ("MANUAL", "Manual")]
),
),
]

View File

@@ -1,32 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-31 10:46
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0015_finding_muted"),
]
operations = [
migrations.AddField(
model_name="finding",
name="compliance",
field=models.JSONField(blank=True, default=dict, null=True),
),
migrations.AddField(
model_name="resource",
name="details",
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name="resource",
name="metadata",
field=models.TextField(blank=True, null=True),
),
migrations.AddField(
model_name="resource",
name="partition",
field=models.TextField(blank=True, null=True),
),
]

View File

@@ -1,32 +0,0 @@
# Generated by Django 5.1.7 on 2025-04-16 08:47
from django.db import migrations
import api.db_utils
class Migration(migrations.Migration):
dependencies = [
("api", "0016_finding_compliance_resource_details_and_more"),
]
operations = [
migrations.AlterField(
model_name="provider",
name="provider",
field=api.db_utils.ProviderEnumField(
choices=[
("aws", "AWS"),
("azure", "Azure"),
("gcp", "GCP"),
("kubernetes", "Kubernetes"),
("m365", "M365"),
],
default="aws",
),
),
migrations.RunSQL(
"ALTER TYPE provider ADD VALUE IF NOT EXISTS 'm365';",
reverse_sql=migrations.RunSQL.noop,
),
]

View File

@@ -1,81 +0,0 @@
# Generated by Django 5.1.7 on 2025-05-05 10:01
import uuid
import django.db.models.deletion
import uuid6
from django.db import migrations, models
import api.rls
class Migration(migrations.Migration):
dependencies = [
("api", "0017_m365_provider"),
]
operations = [
migrations.CreateModel(
name="ResourceScanSummary",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("scan_id", models.UUIDField(db_index=True, default=uuid6.uuid7)),
("resource_id", models.UUIDField(db_index=True, default=uuid.uuid4)),
("service", models.CharField(max_length=100)),
("region", models.CharField(max_length=100)),
("resource_type", models.CharField(max_length=100)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "resource_scan_summaries",
"indexes": [
models.Index(
fields=["tenant_id", "scan_id", "service"],
name="rss_tenant_scan_svc_idx",
),
models.Index(
fields=["tenant_id", "scan_id", "region"],
name="rss_tenant_scan_reg_idx",
),
models.Index(
fields=["tenant_id", "scan_id", "resource_type"],
name="rss_tenant_scan_type_idx",
),
models.Index(
fields=["tenant_id", "scan_id", "region", "service"],
name="rss_tenant_scan_reg_svc_idx",
),
models.Index(
fields=["tenant_id", "scan_id", "service", "resource_type"],
name="rss_tenant_scan_svc_type_idx",
),
models.Index(
fields=["tenant_id", "scan_id", "region", "resource_type"],
name="rss_tenant_scan_reg_type_idx",
),
],
"unique_together": {("tenant_id", "scan_id", "resource_id")},
},
),
migrations.AddConstraint(
model_name="resourcescansummary",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_resourcescansummary",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
]

View File

@@ -1,42 +0,0 @@
import django.contrib.postgres.fields
import django.contrib.postgres.indexes
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0018_resource_scan_summaries"),
]
operations = [
migrations.AddField(
model_name="finding",
name="resource_regions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(max_length=100),
blank=True,
null=True,
size=None,
),
),
migrations.AddField(
model_name="finding",
name="resource_services",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(max_length=100),
blank=True,
null=True,
size=None,
),
),
migrations.AddField(
model_name="finding",
name="resource_types",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(max_length=100),
blank=True,
null=True,
size=None,
),
),
]

View File

@@ -1,86 +0,0 @@
from functools import partial
from django.db import migrations
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0019_finding_denormalize_resource_fields"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="gin_find_service_idx",
columns="resource_services",
method="GIN",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="gin_find_service_idx",
),
),
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="gin_find_region_idx",
columns="resource_regions",
method="GIN",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="gin_find_region_idx",
),
),
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="gin_find_rtype_idx",
columns="resource_types",
method="GIN",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="gin_find_rtype_idx",
),
),
migrations.RunPython(
partial(
drop_index_on_partitions,
parent_table="findings",
index_name="findings_uid_idx",
),
reverse_code=partial(
create_index_on_partitions,
parent_table="findings",
index_name="findings_uid_idx",
columns="uid",
method="BTREE",
),
),
migrations.RunPython(
partial(
drop_index_on_partitions,
parent_table="findings",
index_name="findings_filter_idx",
),
reverse_code=partial(
create_index_on_partitions,
parent_table="findings",
index_name="findings_filter_idx",
columns="scan_id, impact, severity, status, check_id, delta",
method="BTREE",
),
),
]

View File

@@ -1,37 +0,0 @@
import django.contrib.postgres.indexes
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("api", "0020_findings_new_performance_indexes_partitions"),
]
operations = [
migrations.AddIndex(
model_name="finding",
index=django.contrib.postgres.indexes.GinIndex(
fields=["resource_services"], name="gin_find_service_idx"
),
),
migrations.AddIndex(
model_name="finding",
index=django.contrib.postgres.indexes.GinIndex(
fields=["resource_regions"], name="gin_find_region_idx"
),
),
migrations.AddIndex(
model_name="finding",
index=django.contrib.postgres.indexes.GinIndex(
fields=["resource_types"], name="gin_find_rtype_idx"
),
),
migrations.RemoveIndex(
model_name="finding",
name="findings_uid_idx",
),
migrations.RemoveIndex(
model_name="finding",
name="findings_filter_idx",
),
]

View File

@@ -1,38 +0,0 @@
# Generated by Django 5.1.8 on 2025-05-12 10:04
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0021_findings_new_performance_indexes_parent"),
("django_celery_beat", "0019_alter_periodictasks_options"),
]
operations = [
AddIndexConcurrently(
model_name="scan",
index=models.Index(
condition=models.Q(("state", "completed")),
fields=["tenant_id", "provider_id", "state", "-inserted_at"],
name="scans_prov_state_ins_desc_idx",
),
),
AddIndexConcurrently(
model_name="scansummary",
index=models.Index(
fields=["tenant_id", "scan_id", "service"],
name="ss_tenant_scan_service_idx",
),
),
AddIndexConcurrently(
model_name="scansummary",
index=models.Index(
fields=["tenant_id", "scan_id", "severity"],
name="ss_tenant_scan_severity_idx",
),
),
]

View File

@@ -1,28 +0,0 @@
# Generated by Django 5.1.8 on 2025-05-12 10:18
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0022_scan_summaries_performance_indexes"),
]
operations = [
AddIndexConcurrently(
model_name="resource",
index=models.Index(
fields=["tenant_id", "id"], name="resources_tenant_id_idx"
),
),
AddIndexConcurrently(
model_name="resource",
index=models.Index(
fields=["tenant_id", "provider_id"],
name="resources_tenant_provider_idx",
),
),
]

View File

@@ -1,29 +0,0 @@
from functools import partial
from django.db import migrations
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0023_resources_lookup_optimization"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="find_tenant_uid_inserted_idx",
columns="tenant_id, uid, inserted_at DESC",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="find_tenant_uid_inserted_idx",
),
)
]

View File

@@ -1,17 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0024_findings_uid_index_partitions"),
]
operations = [
migrations.AddIndex(
model_name="finding",
index=models.Index(
fields=["tenant_id", "uid", "-inserted_at"],
name="find_tenant_uid_inserted_idx",
),
),
]

View File

@@ -1,14 +0,0 @@
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("api", "0025_findings_uid_index_parent"),
]
operations = [
migrations.RunSQL(
"ALTER TYPE provider_secret_type ADD VALUE IF NOT EXISTS 'service_account';",
reverse_sql=migrations.RunSQL.noop,
),
]

View File

@@ -1,124 +0,0 @@
# Generated by Django 5.1.8 on 2025-05-21 11:37
import uuid
import django.db.models.deletion
from django.db import migrations, models
import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint
class Migration(migrations.Migration):
dependencies = [
("api", "0026_provider_secret_gcp_service_account"),
]
operations = [
migrations.CreateModel(
name="ComplianceRequirementOverview",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("compliance_id", models.TextField(blank=False)),
("framework", models.TextField(blank=False)),
("version", models.TextField(blank=True)),
("description", models.TextField(blank=True)),
("region", models.TextField(blank=False)),
("requirement_id", models.TextField(blank=False)),
(
"requirement_status",
api.db_utils.StatusEnumField(
choices=[
("FAIL", "Fail"),
("PASS", "Pass"),
("MANUAL", "Manual"),
]
),
),
("passed_checks", models.IntegerField(default=0)),
("failed_checks", models.IntegerField(default=0)),
("total_checks", models.IntegerField(default=0)),
(
"scan",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="compliance_requirements_overviews",
related_query_name="compliance_requirements_overview",
to="api.scan",
),
),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "compliance_requirements_overviews",
"abstract": False,
"indexes": [
models.Index(
fields=["tenant_id", "scan_id"], name="cro_tenant_scan_idx"
),
models.Index(
fields=["tenant_id", "scan_id", "compliance_id"],
name="cro_scan_comp_idx",
),
models.Index(
fields=["tenant_id", "scan_id", "compliance_id", "region"],
name="cro_scan_comp_reg_idx",
),
models.Index(
fields=[
"tenant_id",
"scan_id",
"compliance_id",
"requirement_id",
],
name="cro_scan_comp_req_idx",
),
models.Index(
fields=[
"tenant_id",
"scan_id",
"compliance_id",
"requirement_id",
"region",
],
name="cro_scan_comp_req_reg_idx",
),
],
"constraints": [
models.UniqueConstraint(
fields=(
"tenant_id",
"scan_id",
"compliance_id",
"requirement_id",
"region",
),
name="unique_tenant_compliance_requirement_overview",
)
],
},
),
migrations.AddConstraint(
model_name="ComplianceRequirementOverview",
constraint=RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_compliancerequirementoverview",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
]

View File

@@ -1,29 +0,0 @@
from functools import partial
from django.db import migrations
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0027_compliance_requirement_overviews"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_check_idx",
columns="tenant_id, scan_id, check_id",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_check_idx",
),
)
]

View File

@@ -1,17 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0028_findings_check_index_partitions"),
]
operations = [
migrations.AddIndex(
model_name="finding",
index=models.Index(
fields=["tenant_id", "scan_id", "check_id"],
name="find_tenant_scan_check_idx",
),
),
]

View File

@@ -1,107 +0,0 @@
# Generated by Django 5.1.10 on 2025-06-12 12:45
import uuid
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
import api.rls
class Migration(migrations.Migration):
dependencies = [
("api", "0029_findings_check_index_parent"),
]
operations = [
migrations.CreateModel(
name="LighthouseConfiguration",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"name",
models.CharField(
help_text="Name of the configuration",
max_length=100,
validators=[django.core.validators.MinLengthValidator(3)],
),
),
(
"api_key",
models.BinaryField(
help_text="Encrypted API key for the LLM service"
),
),
(
"model",
models.CharField(
choices=[
("gpt-4o-2024-11-20", "GPT-4o v2024-11-20"),
("gpt-4o-2024-08-06", "GPT-4o v2024-08-06"),
("gpt-4o-2024-05-13", "GPT-4o v2024-05-13"),
("gpt-4o", "GPT-4o Default"),
("gpt-4o-mini-2024-07-18", "GPT-4o Mini v2024-07-18"),
("gpt-4o-mini", "GPT-4o Mini Default"),
],
default="gpt-4o-2024-08-06",
help_text="Must be one of the supported model names",
max_length=50,
),
),
(
"temperature",
models.FloatField(default=0, help_text="Must be between 0 and 1"),
),
(
"max_tokens",
models.IntegerField(
default=4000, help_text="Must be between 500 and 5000"
),
),
(
"business_context",
models.TextField(
blank=True,
default="",
help_text="Additional business context for this AI model configuration",
),
),
("is_active", models.BooleanField(default=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "lighthouse_configurations",
"abstract": False,
"constraints": [
models.UniqueConstraint(
fields=("tenant_id",),
name="unique_lighthouse_config_per_tenant",
),
],
},
),
migrations.AddConstraint(
model_name="lighthouseconfiguration",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_lighthouseconfiguration",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
]

View File

@@ -1,24 +0,0 @@
# Generated by Django 5.1.10 on 2025-06-23 10:04
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0030_lighthouseconfiguration"),
("django_celery_beat", "0019_alter_periodictasks_options"),
]
operations = [
migrations.AlterField(
model_name="scan",
name="scheduler_task",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="django_celery_beat.periodictask",
),
),
]

View File

@@ -1,150 +0,0 @@
# Generated by Django 5.1.10 on 2025-07-02 15:47
import uuid
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import api.db_utils
import api.rls
class Migration(migrations.Migration):
dependencies = [
("api", "0031_scan_disable_on_cascade_periodic_tasks"),
]
operations = [
migrations.AlterField(
model_name="integration",
name="integration_type",
field=api.db_utils.IntegrationTypeEnumField(
choices=[
("amazon_s3", "Amazon S3"),
("aws_security_hub", "AWS Security Hub"),
("jira", "JIRA"),
("slack", "Slack"),
]
),
),
migrations.CreateModel(
name="SAMLToken",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
("expires_at", models.DateTimeField(editable=False)),
("token", models.JSONField(unique=True)),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "saml_tokens",
},
),
migrations.CreateModel(
name="SAMLConfiguration",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
(
"email_domain",
models.CharField(
help_text="Email domain used to identify the tenant, e.g. prowlerdemo.com",
max_length=254,
unique=True,
),
),
(
"metadata_xml",
models.TextField(
help_text="Raw IdP metadata XML to configure SingleSignOnService, certificates, etc."
),
),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "saml_configurations",
},
),
migrations.AddConstraint(
model_name="samlconfiguration",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_samlconfiguration",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddConstraint(
model_name="samlconfiguration",
constraint=models.UniqueConstraint(
fields=("tenant",), name="unique_samlconfig_per_tenant"
),
),
migrations.CreateModel(
name="SAMLDomainIndex",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("email_domain", models.CharField(max_length=254, unique=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "saml_domain_index",
},
),
migrations.AddConstraint(
model_name="samldomainindex",
constraint=models.UniqueConstraint(
fields=("email_domain", "tenant"),
name="unique_resources_by_email_domain",
),
),
migrations.AddConstraint(
model_name="samldomainindex",
constraint=api.rls.BaseSecurityConstraint(
name="statements_on_samldomainindex",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
]

View File

@@ -1,34 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46
from functools import partial
from django.db import migrations
from api.db_utils import PostgresEnumMigration, ProcessorTypeEnum, register_enum
from api.models import Processor
ProcessorTypeEnumMigration = PostgresEnumMigration(
enum_name="processor_type",
enum_values=tuple(
processor_type[0] for processor_type in Processor.ProcessorChoices.choices
),
)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0032_saml"),
]
operations = [
migrations.RunPython(
ProcessorTypeEnumMigration.create_enum_type,
reverse_code=ProcessorTypeEnumMigration.drop_enum_type,
),
migrations.RunPython(
partial(register_enum, enum_class=ProcessorTypeEnum),
reverse_code=migrations.RunPython.noop,
),
]

View File

@@ -1,88 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-26 13:04
import uuid
import django.db.models.deletion
from django.db import migrations, models
import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint
class Migration(migrations.Migration):
dependencies = [
("api", "0033_processors_enum"),
]
operations = [
migrations.CreateModel(
name="Processor",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"processor_type",
api.db_utils.ProcessorTypeEnumField(
choices=[("mutelist", "Mutelist")]
),
),
("configuration", models.JSONField(default=dict)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "processors",
"abstract": False,
"indexes": [
models.Index(
fields=["tenant_id", "id"], name="processor_tenant_id_idx"
),
models.Index(
fields=["tenant_id", "processor_type"],
name="processor_tenant_type_idx",
),
],
},
),
migrations.AddConstraint(
model_name="processor",
constraint=models.UniqueConstraint(
fields=("tenant_id", "processor_type"),
name="unique_processor_types_tenant",
),
),
migrations.AddConstraint(
model_name="processor",
constraint=RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_processor",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddField(
model_name="scan",
name="processor",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="scans",
related_query_name="scan",
to="api.processor",
),
),
]

View File

@@ -1,22 +0,0 @@
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0034_processors"),
]
operations = [
migrations.AddField(
model_name="finding",
name="muted_reason",
field=models.TextField(
blank=True,
max_length=500,
null=True,
validators=[django.core.validators.MinLengthValidator(3)],
),
),
]

View File

@@ -1,30 +0,0 @@
from functools import partial
from django.db import migrations
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0035_finding_muted_reason"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="resource_finding_mappings",
index_name="rfm_tenant_finding_idx",
columns="tenant_id, finding_id",
method="BTREE",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="resource_finding_mappings",
index_name="rfm_tenant_finding_idx",
),
),
]

View File

@@ -1,17 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0036_rfm_tenant_finding_index_partitions"),
]
operations = [
migrations.AddIndex(
model_name="resourcefindingmapping",
index=models.Index(
fields=["tenant_id", "finding_id"],
name="rfm_tenant_finding_idx",
),
),
]

View File

@@ -1,15 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0037_rfm_tenant_finding_index_parent"),
]
operations = [
migrations.AddField(
model_name="resource",
name="failed_findings_count",
field=models.IntegerField(default=0),
)
]

View File

@@ -1,20 +0,0 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0038_resource_failed_findings_count"),
]
operations = [
AddIndexConcurrently(
model_name="resource",
index=models.Index(
fields=["tenant_id", "-failed_findings_count", "id"],
name="resources_failed_findings_idx",
),
),
]

View File

@@ -1,30 +0,0 @@
from functools import partial
from django.db import migrations
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0039_resource_resources_failed_findings_idx"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="resource_finding_mappings",
index_name="rfm_tenant_resource_idx",
columns="tenant_id, resource_id",
method="BTREE",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="resource_finding_mappings",
index_name="rfm_tenant_resource_idx",
),
),
]

View File

@@ -1,17 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0040_rfm_tenant_resource_index_partitions"),
]
operations = [
migrations.AddIndex(
model_name="resourcefindingmapping",
index=models.Index(
fields=["tenant_id", "resource_id"],
name="rfm_tenant_resource_idx",
),
),
]

View File

@@ -1,23 +0,0 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0041_rfm_tenant_resource_parent_partitions"),
("django_celery_beat", "0019_alter_periodictasks_options"),
]
operations = [
AddIndexConcurrently(
model_name="scan",
index=models.Index(
condition=models.Q(("state", "completed")),
fields=["tenant_id", "provider_id", "-inserted_at"],
include=("id",),
name="scans_prov_ins_desc_idx",
),
),
]

File diff suppressed because it is too large.

View File

@@ -1,4 +1,4 @@
from drf_spectacular_jsonapi.schemas.pagination import JsonApiPageNumberPagination
from rest_framework_json_api.pagination import JsonApiPageNumberPagination
class ComplianceOverviewPagination(JsonApiPageNumberPagination):

View File

@@ -1,11 +1,9 @@
from enum import Enum
from typing import Optional
from django.db.models import QuerySet
from rest_framework.permissions import BasePermission
from api.db_router import MainRouter
from api.models import Provider, Role, User
from api.db_router import MainRouter
from typing import Optional
from django.db.models import QuerySet
class Permissions(Enum):
@@ -65,11 +63,8 @@ def get_providers(role: Role) -> QuerySet[Provider]:
A QuerySet of Provider objects filtered by the role's provider groups.
If the role has no provider groups, returns an empty queryset.
"""
tenant = role.tenant
provider_groups = role.provider_groups.all()
if not provider_groups.exists():
return Provider.objects.none()
return Provider.objects.filter(
tenant=tenant, provider_groups__in=provider_groups
).distinct()
return Provider.objects.filter(provider_groups__in=provider_groups).distinct()
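A hypothetical call site for get_providers, assuming a Role instance is already loaded (the role name used in the lookup is illustrative only):

role = Role.objects.get(name="limited_auditor")  # illustrative lookup
providers = get_providers(role)
if not providers.exists():
    # The role has no provider groups assigned, so it sees no providers.
    ...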

View File

@@ -2,7 +2,8 @@ from typing import Any
from uuid import uuid4
from django.core.exceptions import ValidationError
from django.db import DEFAULT_DB_ALIAS, models
from django.db import DEFAULT_DB_ALIAS
from django.db import models
from django.db.backends.ddl_references import Statement, Table
from api.db_utils import DB_USER, POSTGRES_TENANT_VAR
@@ -58,11 +59,11 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
drop_sql_query = """
ALTER TABLE %(table_name)s NO FORCE ROW LEVEL SECURITY;
ALTER TABLE %(table_name)s DISABLE ROW LEVEL SECURITY;
REVOKE ALL ON TABLE %(table_name)s FROM %(db_user)s;
REVOKE ALL ON TABLE %(table_name) TO %(db_user)s;
"""
drop_policy_sql_query = """
DROP POLICY IF EXISTS %(db_user)s_%(raw_table_name)s_{statement} ON %(table_name)s;
DROP POLICY IF EXISTS %(db_user)s_%(table_name)s_{statement} on %(table_name)s;
"""
def __init__(
@@ -87,7 +88,9 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
f"{grant_queries}{self.grant_sql_query.format(statement=statement)}"
)
full_create_sql_query = f"{self.rls_sql_query}{policy_queries}{grant_queries}"
full_create_sql_query = (
f"{self.rls_sql_query}" f"{policy_queries}" f"{grant_queries}"
)
table_name = model._meta.db_table
if self.partition_name:
@@ -104,20 +107,16 @@ class RowLevelSecurityConstraint(models.BaseConstraint):
def remove_sql(self, model: Any, schema_editor: Any) -> Any:
field_column = schema_editor.quote_name(self.target_field)
raw_table_name = model._meta.db_table
table_name = raw_table_name
if self.partition_name:
raw_table_name = f"{raw_table_name}_{self.partition_name}"
table_name = raw_table_name
full_drop_sql_query = (
f"{self.drop_sql_query}"
f"{''.join([self.drop_policy_sql_query.format(statement=statement) for statement in self.statements])}"
f"{''.join([self.drop_policy_sql_query.format(statement) for statement in self.statements])}"
)
table_name = model._meta.db_table
if self.partition_name:
table_name = f"{table_name}_{self.partition_name}"
return Statement(
full_drop_sql_query,
table_name=Table(table_name, schema_editor.quote_name),
raw_table_name=raw_table_name,
field_column=field_column,
db_user=DB_USER,
partition_name=self.partition_name,

View File

@@ -1,12 +1,12 @@
from celery import states
from celery.signals import before_task_publish
from config.celery import celery_app
from django.db.models.signals import post_delete
from django.dispatch import receiver
from django_celery_beat.models import PeriodicTask
from django_celery_results.backends.database import DatabaseBackend
from api.db_utils import delete_related_daily_task
from api.models import Provider
from config.celery import celery_app
def create_task_result_on_publish(sender=None, headers=None, **kwargs): # noqa: F841
@@ -31,4 +31,5 @@ before_task_publish.connect(
@receiver(post_delete, sender=Provider)
def delete_provider_scan_task(sender, instance, **kwargs): # noqa: F841
# Delete the associated periodic task when the provider is deleted
delete_related_daily_task(instance.id)
task_name = f"scan-perform-scheduled-{instance.id}"
PeriodicTask.objects.filter(name=task_name).delete()

File diff suppressed because it is too large.

View File

@@ -1,9 +1,8 @@
import pytest
from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
from django.urls import reverse
from rest_framework.test import APIClient
from api.models import Membership, User
from conftest import TEST_PASSWORD, get_api_tokens, get_authorization_header
@pytest.mark.django_db
@@ -11,7 +10,7 @@ def test_basic_authentication():
client = APIClient()
test_user = "test_email@prowler.com"
test_password = "Test_password@1"
test_password = "test_password"
# Check that a 401 is returned when no basic authentication is provided
no_auth_response = client.get(reverse("provider-list"))
@@ -97,204 +96,3 @@ def test_refresh_token(create_test_user, tenants_fixture):
format="vnd.api+json",
)
assert new_refresh_response.status_code == 200
@pytest.mark.django_db
def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fixture):
client = APIClient()
role = roles_fixture[0]
user1_email = "user1@testing.com"
user2_email = "user2@testing.com"
password = "Thisisapassword123@"
user1_response = client.post(
reverse("user-list"),
data={
"data": {
"type": "users",
"attributes": {
"name": "user1",
"email": user1_email,
"password": password,
},
}
},
format="vnd.api+json",
)
assert user1_response.status_code == 201
user1_access_token, _ = get_api_tokens(client, user1_email, password)
user1_headers = get_authorization_header(user1_access_token)
user2_invitation = client.post(
reverse("invitation-list"),
data={
"data": {
"type": "invitations",
"attributes": {"email": user2_email},
"relationships": {
"roles": {
"data": [
{
"type": "roles",
"id": str(role.id),
}
]
}
},
}
},
format="vnd.api+json",
headers=user1_headers,
)
assert user2_invitation.status_code == 201
invitation_token = user2_invitation.json()["data"]["attributes"]["token"]
user2_response = client.post(
reverse("user-list") + f"?invitation_token={invitation_token}",
data={
"data": {
"type": "users",
"attributes": {
"name": "user2",
"email": user2_email,
"password": password,
},
}
},
format="vnd.api+json",
)
assert user2_response.status_code == 201
user2_access_token, _ = get_api_tokens(client, user2_email, password)
user2_headers = get_authorization_header(user2_access_token)
user1_me = client.get(reverse("user-me"), headers=user1_headers)
assert user1_me.status_code == 200
assert user1_me.json()["data"]["attributes"]["email"] == user1_email
user2_me = client.get(reverse("user-me"), headers=user2_headers)
assert user2_me.status_code == 200
assert user2_me.json()["data"]["attributes"]["email"] == user2_email
@pytest.mark.django_db
class TestTokenSwitchTenant:
def test_switch_tenant_with_valid_token(self, tenants_fixture, providers_fixture):
client = APIClient()
test_user = "test_email@prowler.com"
test_password = "Test_password1@"
# Check that we can create a new user without any kind of authentication
user_creation_response = client.post(
reverse("user-list"),
data={
"data": {
"type": "users",
"attributes": {
"name": "test",
"email": test_user,
"password": test_password,
},
}
},
format="vnd.api+json",
)
assert user_creation_response.status_code == 201
# Create a new relationship between this user and another tenant
tenant_id = tenants_fixture[0].id
user_instance = User.objects.get(email=test_user)
Membership.objects.create(user=user_instance, tenant_id=tenant_id)
# Check that using our new user's credentials we can authenticate and get the providers
access_token, _ = get_api_tokens(client, test_user, test_password)
auth_headers = get_authorization_header(access_token)
user_me_response = client.get(
reverse("user-me"),
headers=auth_headers,
)
assert user_me_response.status_code == 200
# Assert this user belongs to two tenants
assert (
user_me_response.json()["data"]["relationships"]["memberships"]["meta"][
"count"
]
== 2
)
provider_response = client.get(
reverse("provider-list"),
headers=auth_headers,
)
assert provider_response.status_code == 200
# Empty response since there are no providers in this tenant
assert not provider_response.json()["data"]
switch_tenant_response = client.post(
reverse("token-switch"),
data={
"data": {
"type": "tokens-switch-tenant",
"attributes": {"tenant_id": tenant_id},
}
},
headers=auth_headers,
)
assert switch_tenant_response.status_code == 200
new_access_token = switch_tenant_response.json()["data"]["attributes"]["access"]
new_auth_headers = get_authorization_header(new_access_token)
provider_response = client.get(
reverse("provider-list"),
headers=new_auth_headers,
)
assert provider_response.status_code == 200
# Now it must be data because we switched to another tenant with providers
assert provider_response.json()["data"]
def test_switch_tenant_with_invalid_token(self, create_test_user, tenants_fixture):
client = APIClient()
access_token, refresh_token = get_api_tokens(
client, create_test_user.email, TEST_PASSWORD
)
auth_headers = get_authorization_header(access_token)
invalid_token_response = client.post(
reverse("token-switch"),
data={
"data": {
"type": "tokens-switch-tenant",
"attributes": {"tenant_id": "invalid_tenant_id"},
}
},
headers=auth_headers,
)
assert invalid_token_response.status_code == 400
assert invalid_token_response.json()["errors"][0]["code"] == "invalid"
assert (
invalid_token_response.json()["errors"][0]["detail"]
== "Must be a valid UUID."
)
invalid_tenant_response = client.post(
reverse("token-switch"),
data={
"data": {
"type": "tokens-switch-tenant",
"attributes": {"tenant_id": tenants_fixture[-1].id},
}
},
headers=auth_headers,
)
assert invalid_tenant_response.status_code == 400
assert invalid_tenant_response.json()["errors"][0]["code"] == "invalid"
assert invalid_tenant_response.json()["errors"][0]["detail"] == (
"Tenant does not exist or user is not a " "member."
)

Some files were not shown because too many files have changed in this diff.