Compare commits

..

4 Commits

| Author | SHA1 | Message | Date |
|---|---|---|---|
| Chandrapal Badshah | 8b41dceb1c | Revert changes to package-lock | 2025-06-04 15:03:04 +05:30 |
| Chandrapal Badshah | 15f98d79e0 | Integrate Prowler Hub | 2025-06-04 13:44:53 +05:30 |
| Chandrapal Badshah | 67fe87cfd4 | Move layout to lighthouse config page | 2025-06-04 13:18:42 +05:30 |
| Chandrapal Badshah | 9e62a5398f | Add Lighthouse chat interface | 2025-06-02 13:50:28 +05:30 |
1354 changed files with 24259 additions and 108241 deletions
+1 -12
@@ -6,17 +6,11 @@
PROWLER_UI_VERSION="stable"
AUTH_URL=http://localhost:3000
API_BASE_URL=http://prowler-api:8080/api/v1
NEXT_PUBLIC_API_BASE_URL=${API_BASE_URL}
NEXT_PUBLIC_API_DOCS_URL=http://prowler-api:8080/api/v1/docs
AUTH_TRUST_HOST=true
UI_PORT=3000
# Temporary URL for RSS feeds; replace with the actual one
RSS_FEED_URL=https://prowler.com/blog/rss
# openssl rand -base64 32
AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
# Google Tag Manager ID
NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID=""
#### Prowler API Configuration ####
PROWLER_API_VERSION="stable"
@@ -127,14 +121,13 @@ jQIDAQAB
DJANGO_SECRETS_ENCRYPTION_KEY="oE/ltOhp/n1TdbHjVmzcjDPLcLA41CVI/4Rk+UB5ESc="
DJANGO_BROKER_VISIBILITY_TIMEOUT=86400
DJANGO_SENTRY_DSN=
DJANGO_THROTTLE_TOKEN_OBTAIN=50/minute
# Sentry settings
SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local
#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.10.0
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.6.0
# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
@@ -145,10 +138,6 @@ SOCIAL_GITHUB_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/github"
SOCIAL_GITHUB_OAUTH_CLIENT_ID=""
SOCIAL_GITHUB_OAUTH_CLIENT_SECRET=""
# Single Sign-On (SSO)
SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"
# Lighthouse tracing
LANGSMITH_TRACING=false
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
LANGSMITH_API_KEY=""
-17
@@ -22,26 +22,11 @@ provider/kubernetes:
- any-glob-to-any-file: "prowler/providers/kubernetes/**"
- any-glob-to-any-file: "tests/providers/kubernetes/**"
provider/m365:
- changed-files:
- any-glob-to-any-file: "prowler/providers/m365/**"
- any-glob-to-any-file: "tests/providers/m365/**"
provider/github:
- changed-files:
- any-glob-to-any-file: "prowler/providers/github/**"
- any-glob-to-any-file: "tests/providers/github/**"
provider/iac:
- changed-files:
- any-glob-to-any-file: "prowler/providers/iac/**"
- any-glob-to-any-file: "tests/providers/iac/**"
provider/mongodbatlas:
- changed-files:
- any-glob-to-any-file: "prowler/providers/mongodbatlas/**"
- any-glob-to-any-file: "tests/providers/mongodbatlas/**"
github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"
@@ -57,13 +42,11 @@ mutelist:
- any-glob-to-any-file: "prowler/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/kubernetes/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/mongodbatlas/lib/mutelist/**"
- any-glob-to-any-file: "tests/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/mongodbatlas/lib/mutelist/**"
integration/s3:
- changed-files:
-4
@@ -8,10 +8,6 @@ If fixes an issue please add it with `Fix #XXXX`
Please include a summary of the change and which issue is fixed. List any dependencies that are required for this change.
### Steps to review
Please add a detailed description of how to review this PR.
### Checklist
- Are there new checks included in this PR? Yes / No
@@ -6,7 +6,6 @@ on:
- "master"
paths:
- "api/**"
- "prowler/**"
- ".github/workflows/api-build-lint-push-containers.yml"
# Uncomment the code below to test this action on PRs
@@ -77,12 +76,12 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: ${{ env.WORKING_DIRECTORY }}
# Set push: false for testing
@@ -95,7 +94,7 @@ jobs:
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
+2 -2
@@ -48,12 +48,12 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
with:
category: "/language:${{matrix.language}}"
+13 -49
@@ -13,7 +13,6 @@ on:
- "master"
- "v5.*"
paths:
- ".github/workflows/api-pull-request.yml"
- "api/**"
env:
@@ -29,10 +28,6 @@ env:
VALKEY_DB: 0
API_WORKING_DIR: ./api
IMAGE_NAME: prowler-api
IGNORE_FILES: |
api/docs/**
api/README.md
api/CHANGELOG.md
jobs:
test:
@@ -82,14 +77,17 @@ jobs:
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
api/**
.github/workflows/api-pull-request.yml
files_ignore: ${{ env.IGNORE_FILES }}
files: api/**
files_ignore: |
api/.github/**
api/docs/**
api/permissions/**
api/README.md
api/mkdocs.yml
- name: Replace @master with current branch in pyproject.toml - Only for pull requests to `master`
- name: Replace @master with current branch in pyproject.toml
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'pull_request' && github.base_ref == 'master'
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
echo "Using branch: $BRANCH_NAME"
@@ -108,23 +106,6 @@ jobs:
run: |
poetry lock
- name: Update SDK's poetry.lock resolved_reference to latest commit - Only for push events to `master`
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/master'
run: |
# Get the latest commit hash from the prowler-cloud/prowler repository
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
echo "Latest commit hash: $LATEST_COMMIT"
# Update the resolved_reference specifically for prowler-cloud/prowler repository
sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
}' poetry.lock
# Verify the change was made
echo "Updated resolved_reference:"
grep -A2 -B2 "resolved_reference" poetry.lock
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
@@ -132,12 +113,6 @@ jobs:
python-version: ${{ matrix.python-version }}
cache: "poetry"
- name: Install system dependencies for xmlsec
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
sudo apt-get update
sudo apt-get install -y libxml2-dev libxmlsec1-dev libxmlsec1-openssl pkg-config
- name: Install dependencies
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
@@ -183,10 +158,8 @@ jobs:
- name: Safety
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
run: |
poetry run safety check --ignore 70612,66963,74429,76352,76353,77323,77744,77745
poetry run safety check --ignore 70612,66963,74429
- name: Vulture
working-directory: ./api
@@ -208,7 +181,7 @@ jobs:
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
@@ -217,19 +190,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: api/**
files_ignore: ${{ env.IGNORE_FILES }}
- name: Set up Docker Buildx
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Build Container
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: ${{ env.API_WORKING_DIR }}
push: false
@@ -7,7 +7,6 @@ on:
- 'v3'
paths:
- 'docs/**'
- '.github/workflows/build-documentation-on-pr.yml'
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
@@ -17,20 +16,9 @@ jobs:
name: Documentation Link
runs-on: ubuntu-latest
steps:
- name: Find existing documentation comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
id: find-comment
with:
issue-number: ${{ env.PR_NUMBER }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- prowler-docs-link -->'
- name: Create or update PR comment with the Prowler Documentation URI
- name: Leave PR comment with the Prowler Documentation URI
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ env.PR_NUMBER }}
body: |
<!-- prowler-docs-link -->
You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
edit-mode: replace
+1 -1
@@ -1,4 +1,4 @@
name: Prowler - Create Backport Label
name: Create Backport Label
on:
release:
+1 -1
@@ -11,7 +11,7 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@a05cf0859455b5b16317ee22d809887a4043cdf0 # v3.90.2
uses: trufflesecurity/trufflehog@b06f6d72a3791308bb7ba59c2b8cb7a083bd17e4 # v3.88.26
with:
path: ./
base: ${{ github.event.repository.default_branch }}
-167
@@ -1,167 +0,0 @@
name: Prowler - PR Conflict Checker
on:
pull_request:
types:
- opened
- synchronize
- reopened
branches:
- "master"
- "v5.*"
pull_request_target:
types:
- opened
- synchronize
- reopened
branches:
- "master"
- "v5.*"
jobs:
conflict-checker:
runs-on: ubuntu-latest
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
**
- name: Check for conflict markers
id: conflict-check
run: |
echo "Checking for conflict markers in changed files..."
CONFLICT_FILES=""
HAS_CONFLICTS=false
# Check each changed file for conflict markers
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
if [ -f "$file" ]; then
echo "Checking file: $file"
# Look for conflict markers
if grep -l "^<<<<<<<\|^=======\|^>>>>>>>" "$file" 2>/dev/null; then
echo "Conflict markers found in: $file"
CONFLICT_FILES="$CONFLICT_FILES$file "
HAS_CONFLICTS=true
fi
fi
done
if [ "$HAS_CONFLICTS" = true ]; then
echo "has_conflicts=true" >> $GITHUB_OUTPUT
echo "conflict_files=$CONFLICT_FILES" >> $GITHUB_OUTPUT
echo "Conflict markers detected in files: $CONFLICT_FILES"
else
echo "has_conflicts=false" >> $GITHUB_OUTPUT
echo "No conflict markers found in changed files"
fi
- name: Add conflict label
if: steps.conflict-check.outputs.has_conflicts == 'true'
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
script: |
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
});
const hasConflictLabel = labels.some(label => label.name === 'has-conflicts');
if (!hasConflictLabel) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
labels: ['has-conflicts']
});
console.log('Added has-conflicts label');
} else {
console.log('has-conflicts label already exists');
}
- name: Remove conflict label
if: steps.conflict-check.outputs.has_conflicts == 'false'
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
script: |
try {
await github.rest.issues.removeLabel({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
name: 'has-conflicts'
});
console.log('Removed has-conflicts label');
} catch (error) {
if (error.status === 404) {
console.log('has-conflicts label was not present');
} else {
throw error;
}
}
- name: Find existing conflict comment
if: steps.conflict-check.outputs.has_conflicts == 'true'
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
id: find-comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-regex: '(⚠️ \*\*Conflict Markers Detected\*\*|✅ \*\*Conflict Markers Resolved\*\*)'
- name: Create or update conflict comment
if: steps.conflict-check.outputs.has_conflicts == 'true'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
⚠️ **Conflict Markers Detected**
This pull request contains unresolved conflict markers in the following files:
```
${{ steps.conflict-check.outputs.conflict_files }}
```
Please resolve these conflicts by:
1. Locating the conflict markers: `<<<<<<<`, `=======`, and `>>>>>>>`
2. Manually editing the files to resolve the conflicts
3. Removing all conflict markers
4. Committing and pushing the changes
- name: Find existing conflict comment when resolved
if: steps.conflict-check.outputs.has_conflicts == 'false'
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
id: find-resolved-comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-regex: '(⚠️ \*\*Conflict Markers Detected\*\*|✅ \*\*Conflict Markers Resolved\*\*)'
- name: Update comment when conflicts resolved
if: steps.conflict-check.outputs.has_conflicts == 'false' && steps.find-resolved-comment.outputs.comment-id != ''
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
comment-id: ${{ steps.find-resolved-comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
✅ **Conflict Markers Resolved**
All conflict markers have been successfully resolved in this pull request.
@@ -1,300 +0,0 @@
name: Prowler - Release Preparation
run-name: Prowler Release Preparation for ${{ inputs.prowler_version }}
on:
workflow_dispatch:
inputs:
prowler_version:
description: 'Prowler version to release (e.g., 5.9.0)'
required: true
type: string
env:
PROWLER_VERSION: ${{ github.event.inputs.prowler_version }}
jobs:
prepare-release:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: '3.12'
- name: Install Poetry
run: |
python3 -m pip install --user poetry
echo "$HOME/.local/bin" >> $GITHUB_PATH
- name: Configure Git
run: |
git config --global user.name "prowler-bot"
git config --global user.email "179230569+prowler-bot@users.noreply.github.com"
- name: Parse version and determine branch
run: |
# Validate version format (reusing pattern from sdk-bump-version.yml)
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
MAJOR_VERSION=${BASH_REMATCH[1]}
MINOR_VERSION=${BASH_REMATCH[2]}
PATCH_VERSION=${BASH_REMATCH[3]}
# Export version components to environment
echo "MAJOR_VERSION=${MAJOR_VERSION}" >> "${GITHUB_ENV}"
echo "MINOR_VERSION=${MINOR_VERSION}" >> "${GITHUB_ENV}"
echo "PATCH_VERSION=${PATCH_VERSION}" >> "${GITHUB_ENV}"
# Determine branch name (format: v5.9)
BRANCH_NAME="v${MAJOR_VERSION}.${MINOR_VERSION}"
echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_ENV}"
# Calculate UI version (1.X.X format - matches Prowler minor version)
UI_VERSION="1.${MINOR_VERSION}.${PATCH_VERSION}"
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
# Calculate API version (1.X.X format - one minor version ahead)
API_MINOR_VERSION=$((MINOR_VERSION + 1))
API_VERSION="1.${API_MINOR_VERSION}.${PATCH_VERSION}"
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
echo "Prowler version: $PROWLER_VERSION"
echo "Branch name: $BRANCH_NAME"
echo "UI version: $UI_VERSION"
echo "API version: $API_VERSION"
echo "Is minor release: $([ $PATCH_VERSION -eq 0 ] && echo 'true' || echo 'false')"
else
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
exit 1
fi
- name: Checkout existing branch for patch release
if: ${{ env.PATCH_VERSION != '0' }}
run: |
echo "Patch release detected, checking out existing branch $BRANCH_NAME..."
if git show-ref --verify --quiet "refs/heads/$BRANCH_NAME"; then
echo "Branch $BRANCH_NAME exists locally, checking out..."
git checkout "$BRANCH_NAME"
elif git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
echo "Branch $BRANCH_NAME exists remotely, checking out..."
git checkout -b "$BRANCH_NAME" "origin/$BRANCH_NAME"
else
echo "ERROR: Branch $BRANCH_NAME should exist for patch release $PROWLER_VERSION"
exit 1
fi
- name: Verify version in pyproject.toml
run: |
CURRENT_VERSION=$(grep '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Version mismatch in pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
exit 1
fi
echo "✓ pyproject.toml version: $CURRENT_VERSION"
- name: Verify version in prowler/config/config.py
run: |
CURRENT_VERSION=$(grep '^prowler_version = ' prowler/config/config.py | sed -E 's/prowler_version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_VERSION" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Version mismatch in prowler/config/config.py (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_VERSION')"
exit 1
fi
echo "✓ prowler/config/config.py version: $CURRENT_VERSION"
- name: Verify version in api/pyproject.toml
run: |
CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
echo "ERROR: API version mismatch in api/pyproject.toml (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
exit 1
fi
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
- name: Verify prowler dependency in api/pyproject.toml
if: ${{ env.PATCH_VERSION != '0' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
if [ "$CURRENT_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$BRANCH_NAME_TRIMMED', found: '$CURRENT_PROWLER_REF')"
exit 1
fi
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
- name: Verify version in api/src/backend/api/v1/views.py
run: |
CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
echo "ERROR: API version mismatch in views.py (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
exit 1
fi
echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"
- name: Checkout existing release branch for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |
echo "Minor release detected (patch = 0), checking out existing branch $BRANCH_NAME..."
if git show-ref --verify --quiet "refs/remotes/origin/$BRANCH_NAME"; then
echo "Branch $BRANCH_NAME exists remotely, checking out..."
git checkout -b "$BRANCH_NAME" "origin/$BRANCH_NAME"
else
echo "ERROR: Branch $BRANCH_NAME should exist for minor release $PROWLER_VERSION. Please create it manually first."
exit 1
fi
- name: Prepare prowler dependency update for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
# Create a temporary branch for the PR
TEMP_BRANCH="update-api-dependency-$BRANCH_NAME_TRIMMED-$(date +%s)"
echo "TEMP_BRANCH=$TEMP_BRANCH" >> $GITHUB_ENV
# Switch back to master and create temp branch
git checkout master
git checkout -b "$TEMP_BRANCH"
# Minor release: update the dependency to use the release branch
echo "Updating prowler dependency from '$CURRENT_PROWLER_REF' to '$BRANCH_NAME_TRIMMED'"
sed -i "s|prowler @ git+https://github.com/prowler-cloud/prowler.git@[^\"]*\"|prowler @ git+https://github.com/prowler-cloud/prowler.git@$BRANCH_NAME_TRIMMED\"|" api/pyproject.toml
# Verify the change was made
UPDATED_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
if [ "$UPDATED_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
echo "ERROR: Failed to update prowler dependency in api/pyproject.toml"
exit 1
fi
# Update poetry lock file
echo "Updating poetry.lock file..."
cd api
poetry lock
cd ..
# Commit and push the temporary branch
git add api/pyproject.toml api/poetry.lock
git commit -m "chore(api): update prowler dependency to $BRANCH_NAME_TRIMMED for release $PROWLER_VERSION"
git push origin "$TEMP_BRANCH"
echo "✓ Prepared prowler dependency update to: $UPDATED_PROWLER_REF"
- name: Create Pull Request against release branch
if: ${{ env.PATCH_VERSION == '0' }}
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
branch: ${{ env.TEMP_BRANCH }}
base: ${{ env.BRANCH_NAME }}
title: "chore(api): Update prowler dependency to ${{ env.BRANCH_NAME }} for release ${{ env.PROWLER_VERSION }}"
body: |
### Description
Updates the API prowler dependency for release ${{ env.PROWLER_VERSION }}.
**Changes:**
- Updates `api/pyproject.toml` prowler dependency from `@master` to `@${{ env.BRANCH_NAME }}`
- Updates `api/poetry.lock` file with resolved dependencies
This PR should be merged into the `${{ env.BRANCH_NAME }}` release branch.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
labels: |
component/api
no-changelog
- name: Extract changelog entries
run: |
set -e
# Function to extract changelog for a specific version
extract_changelog() {
local file="$1"
local version="$2"
local output_file="$3"
if [ ! -f "$file" ]; then
echo "Warning: $file not found, skipping..."
touch "$output_file"
return
fi
# Extract changelog section for this version
awk -v version="$version" '
/^## \[v?'"$version"'\]/ { found=1; next }
found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
found && !/^## \[v?'"$version"'\]/ { print }
' "$file" > "$output_file"
# Remove --- separators
sed -i '/^---$/d' "$output_file"
# Remove trailing empty lines
sed -i '/^$/d' "$output_file"
}
# Extract changelogs
echo "Extracting changelog entries..."
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
# Combine changelogs in order: UI, API, SDK
> combined_changelog.md
if [ -s "ui_changelog.md" ]; then
echo "## UI" >> combined_changelog.md
echo "" >> combined_changelog.md
cat ui_changelog.md >> combined_changelog.md
echo "" >> combined_changelog.md
fi
if [ -s "api_changelog.md" ]; then
echo "## API" >> combined_changelog.md
echo "" >> combined_changelog.md
cat api_changelog.md >> combined_changelog.md
echo "" >> combined_changelog.md
fi
if [ -s "prowler_changelog.md" ]; then
echo "## SDK" >> combined_changelog.md
echo "" >> combined_changelog.md
cat prowler_changelog.md >> combined_changelog.md
echo "" >> combined_changelog.md
fi
echo "Combined changelog preview:"
cat combined_changelog.md
- name: Create draft release
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
with:
tag_name: ${{ env.PROWLER_VERSION }}
name: Prowler ${{ env.PROWLER_VERSION }}
body_path: combined_changelog.md
draft: true
target_commitish: ${{ env.PATCH_VERSION == '0' && 'master' || env.BRANCH_NAME }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Clean up temporary files
run: |
rm -f prowler_changelog.md api_changelog.md ui_changelog.md combined_changelog.md
@@ -1,77 +0,0 @@
name: Prowler - Check Changelog
on:
pull_request:
types: [opened, synchronize, reopened, labeled, unlabeled]
jobs:
check-changelog:
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
pull-requests: write
env:
MONITORED_FOLDERS: "api ui prowler"
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Get list of changed files
id: changed_files
run: |
git fetch origin ${{ github.base_ref }}
git diff --name-only origin/${{ github.base_ref }}...HEAD > changed_files.txt
cat changed_files.txt
- name: Check for folder changes and changelog presence
id: check_folders
run: |
missing_changelogs=""
for folder in $MONITORED_FOLDERS; do
if grep -q "^${folder}/" changed_files.txt; then
echo "Detected changes in ${folder}/"
if ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
echo "No changelog update found for ${folder}/"
missing_changelogs="${missing_changelogs}- \`${folder}\`\n"
fi
fi
done
echo "missing_changelogs<<EOF" >> $GITHUB_OUTPUT
echo -e "${missing_changelogs}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Find existing changelog comment
if: github.event.pull_request.head.repo.full_name == github.repository
id: find_comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- changelog-check -->'
- name: Update PR comment with changelog status
if: github.event.pull_request.head.repo.full_name == github.repository
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.find_comment.outputs.comment-id }}
edit-mode: replace
body: |
<!-- changelog-check -->
${{ steps.check_folders.outputs.missing_changelogs != '' && format('⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**
{0}
Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.', steps.check_folders.outputs.missing_changelogs) || '✅ All necessary `CHANGELOG.md` files have been updated. Great job! 🎉' }}
- name: Fail if changelog is missing
if: steps.check_folders.outputs.missing_changelogs != ''
run: |
echo "ERROR: Missing changelog updates in some folders."
exit 1
+8 -9
@@ -27,12 +27,11 @@ jobs:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}
event-type: prowler-pull-request-merged
client-payload: |
{
"PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
"PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
"PROWLER_PR_TITLE": ${{ toJson(github.event.pull_request.title) }},
"PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
"PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
"PROWLER_PR_URL": ${{ toJson(github.event.pull_request.html_url) }}
}
client-payload: '{
"PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
"PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
"PROWLER_PR_TITLE": "${{ github.event.pull_request.title }}",
"PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
"PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
"PROWLER_PR_URL":${{ toJson(github.event.pull_request.html_url) }}
}'
@@ -123,11 +123,11 @@ jobs:
AWS_REGION: ${{ env.AWS_REGION }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Build and push container image (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
push: true
tags: |
@@ -140,7 +140,7 @@ jobs:
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
# Use local context to get changes
# https://github.com/docker/build-push-action#path-context
@@ -157,22 +157,6 @@ jobs:
cache-from: type=gha
cache-to: type=gha,mode=max
# - name: Push README to Docker Hub (toniblyx)
# uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
# with:
# username: ${{ secrets.DOCKERHUB_USERNAME }}
# password: ${{ secrets.DOCKERHUB_TOKEN }}
# repository: ${{ env.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}
# readme-filepath: ./README.md
#
# - name: Push README to Docker Hub (prowlercloud)
# uses: peter-evans/dockerhub-description@432a30c9e07499fd01da9f8a49f0faf9e0ca5b77 # v4.0.2
# with:
# username: ${{ secrets.DOCKERHUB_USERNAME }}
# password: ${{ secrets.DOCKERHUB_TOKEN }}
# repository: ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}
# readme-filepath: ./README.md
dispatch-action:
needs: container-build-push
runs-on: ubuntu-latest
+1 -2
@@ -12,6 +12,7 @@ env:
jobs:
bump-version:
name: Bump Version
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -96,7 +97,6 @@ jobs:
commit-message: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
branch: "version-bump-to-v${{ env.BUMP_VERSION_TO }}"
title: "chore(release): Bump version to v${{ env.BUMP_VERSION_TO }}"
labels: no-changelog
body: |
### Description
@@ -135,7 +135,6 @@ jobs:
commit-message: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
branch: "version-bump-to-v${{ env.PATCH_VERSION_TO }}"
title: "chore(release): Bump version to v${{ env.PATCH_VERSION_TO }}"
labels: no-changelog
body: |
### Description
+2 -2
@@ -56,12 +56,12 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/sdk-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
with:
category: "/language:${{matrix.language}}"
+2 -39
@@ -102,15 +102,8 @@ jobs:
run: |
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
- name: Dockerfile - Check if Dockerfile has changed
id: dockerfile-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
Dockerfile
- name: Hadolint
if: steps.dockerfile-changed-files.outputs.any_changed == 'true'
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
@@ -219,36 +212,6 @@ jobs:
run: |
poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
# Test IaC
- name: IaC - Check if any file has changed
id: iac-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/iac/**
./tests/providers/iac/**
.poetry.lock
- name: IaC - Test
if: steps.iac-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
# Test MongoDB Atlas
- name: MongoDB Atlas - Check if any file has changed
id: mongodb-atlas-changed-files
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
with:
files: |
./prowler/providers/mongodbatlas/**
./tests/providers/mongodbatlas/**
.poetry.lock
- name: MongoDB Atlas - Test
if: steps.mongodb-atlas-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/mongodbatlas --cov-report=xml:mongodb_atlas_coverage.xml tests/providers/mongodbatlas
# Common Tests
- name: Lib - Test
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
@@ -263,7 +226,7 @@ jobs:
# Codecov
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
uses: codecov/codecov-action@ad3126e916f78f00edff4ed0317cf185271ccc2d # v5.4.2
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
@@ -38,7 +38,7 @@ jobs:
pip install boto3
- name: Configure AWS Credentials -- DEV
uses: aws-actions/configure-aws-credentials@b47578312673ae6fa5b5096b330d9fbac3d116df # v4.2.1
uses: aws-actions/configure-aws-credentials@ececac1a45f3b08a01d2dd070d28d111c5fe6722 # v4.1.0
with:
aws-region: ${{ env.AWS_REGION_DEV }}
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
@@ -30,7 +30,6 @@ env:
# Container Registries
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-ui
NEXT_PUBLIC_API_BASE_URL: http://prowler-api:8080/api/v1
jobs:
repository-check:
@@ -77,17 +76,16 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Build and push container image (latest)
# Comment the following line for testing
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ env.SHORT_SHA }}
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
# Set push: false for testing
push: true
tags: |
@@ -98,12 +96,11 @@ jobs:
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${{ env.RELEASE_TAG }}
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
+2 -2
@@ -48,12 +48,12 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
uses: github/codeql-action/init@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/ui-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
uses: github/codeql-action/analyze@28deaeda66b76a05916b6923827895f2b14ab387 # v3.28.16
with:
category: "/language:${{matrix.language}}"
-98
@@ -1,98 +0,0 @@
name: UI - E2E Tests
on:
pull_request:
branches:
- master
- "v5.*"
paths:
- '.github/workflows/ui-e2e-tests.yml'
- 'ui/**'
jobs:
e2e-tests:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
env:
AUTH_SECRET: 'fallback-ci-secret-for-testing'
AUTH_TRUST_HOST: true
NEXTAUTH_URL: 'http://localhost:3000'
NEXT_PUBLIC_API_BASE_URL: 'http://localhost:8080/api/v1'
steps:
- name: Checkout repository
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Start API services
run: |
# Override docker-compose image tag to use latest instead of stable
# This overrides any PROWLER_API_VERSION set in .env file
export PROWLER_API_VERSION=latest
echo "Using PROWLER_API_VERSION=${PROWLER_API_VERSION}"
docker compose up -d api worker worker-beat
- name: Wait for API to be ready
run: |
echo "Waiting for prowler-api..."
timeout=150 # 5 minutes max
elapsed=0
while [ $elapsed -lt $timeout ]; do
if curl -s ${NEXT_PUBLIC_API_BASE_URL}/docs >/dev/null 2>&1; then
echo "Prowler API is ready!"
exit 0
fi
echo "Waiting for prowler-api... (${elapsed}s elapsed)"
sleep 5
elapsed=$((elapsed + 5))
done
echo "Timeout waiting for prowler-api to start"
exit 1
- name: Load database fixtures for E2E tests
run: |
docker compose exec -T api sh -c '
echo "Loading all fixtures from api/fixtures/dev/..."
for fixture in api/fixtures/dev/*.json; do
if [ -f "$fixture" ]; then
echo "Loading $fixture"
poetry run python manage.py loaddata "$fixture" --database admin
fi
done
echo "All database fixtures loaded successfully!"
'
- name: Setup Node.js environment
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: '20.x'
cache: 'npm'
cache-dependency-path: './ui/package-lock.json'
- name: Install UI dependencies
working-directory: ./ui
run: npm ci
- name: Build UI application
working-directory: ./ui
run: npm run build
- name: Cache Playwright browsers
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
id: playwright-cache
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
restore-keys: |
${{ runner.os }}-playwright-
- name: Install Playwright browsers
working-directory: ./ui
if: steps.playwright-cache.outputs.cache-hit != 'true'
run: npm run test:e2e:install
- name: Run E2E tests
working-directory: ./ui
run: npm run test:e2e
- name: Upload test reports
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: failure()
with:
name: playwright-report
path: ui/playwright-report/
retention-days: 30
- name: Cleanup services
if: always()
run: |
echo "Shutting down services..."
docker compose down -v || true
echo "Cleanup completed"
+3 -6
@@ -34,26 +34,23 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
cache-dependency-path: './ui/package-lock.json'
- name: Install dependencies
working-directory: ./ui
run: npm ci
run: npm install
- name: Run Healthcheck
working-directory: ./ui
run: npm run healthcheck
- name: Build the application
working-directory: ./ui
run: npm run build
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
- name: Build Container
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@14487ce63c7a62a4a324b0bfb37086795e31c6c1 # v6.16.0
with:
context: ${{ env.UI_WORKING_DIR }}
# Always build using `prod` target
-16
@@ -44,16 +44,6 @@ junit-reports/
# Cursor files
.cursorignore
.cursor/
# RooCode files
.roo/
.rooignore
.roomodes
# Cline files
.cline/
.clineignore
# Terraform
.terraform*
@@ -75,9 +65,3 @@ node_modules
# Persistent data
_data/
# Claude
CLAUDE.md
# LLM's (Until we have a standard one)
AGENTS.md
+1 -2
@@ -115,8 +115,7 @@ repos:
- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
entry: bash -c 'safety check --ignore 70612,66963,74429,76352,76353,77744,77745'
entry: bash -c 'safety check --ignore 70612,66963,74429'
language: system
- id: vulture
+6 -3
@@ -1,4 +1,4 @@
FROM python:3.12.11-slim-bookworm AS build
FROM python:3.12.10-slim-bookworm AS build
LABEL maintainer="https://github.com/prowler-cloud/prowler"
LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
@@ -6,8 +6,7 @@ LABEL org.opencontainers.image.source="https://github.com/prowler-cloud/prowler"
ARG POWERSHELL_VERSION=7.5.0
# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
&& rm -rf /var/lib/apt/lists/*
# Install PowerShell
@@ -47,6 +46,10 @@ ENV PATH="${HOME}/.local/bin:${PATH}"
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry
# By default poetry does not compile Python source files to bytecode during installation.
# This speeds up the installation process, but the first execution may take a little more
# time because Python then compiles source files to bytecode automatically. If you want to
# compile source files to bytecode during installation, you can use the --compile option
RUN poetry install --compile && \
rm -rf ~/.cache/pip
+26 -35
@@ -19,16 +19,19 @@
<a href="https://goto.prowler.com/slack"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://pypi.org/project/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=downloads"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/cloud/build/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/image-size/toniblyx/prowler"></a>
<a href="https://gallery.ecr.aws/prowler-cloud/prowler"><img width="120" height=19" alt="AWS ECR Gallery" src="https://user-images.githubusercontent.com/3985464/151531396-b6535a68-c907-44eb-95a1-a09508178616.png"></a>
<a href="https://codecov.io/gh/prowler-cloud/prowler"><img src="https://codecov.io/gh/prowler-cloud/prowler/graph/badge.svg?token=OflBGsdpDl"/></a>
</p>
<p align="center">
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/release-date/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="Contributors" src="https://img.shields.io/github/contributors-anon/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler/issues"><img alt="Issues" src="https://img.shields.io/github/issues/prowler-cloud/prowler"></a>
<a href="https://github.com/prowler-cloud/prowler"><img alt="License" src="https://img.shields.io/github/license/prowler-cloud/prowler"></a>
<a href="https://twitter.com/ToniBlyx"><img alt="Twitter" src="https://img.shields.io/twitter/follow/toniblyx?style=social"></a>
<a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
@@ -52,11 +55,15 @@ Prowler includes hundreds of built-in controls to ensure compliance with standar
- **National Security Standards:** ENS (Spanish National Security Scheme)
- **Custom Security Frameworks:** Tailored to your needs
## Prowler CLI and Prowler Cloud
Prowler offers a Command Line Interface (CLI), known as Prowler Open Source, and an additional service built on top of it, called <a href="https://prowler.com">Prowler Cloud</a>.
## Prowler App
Prowler App is a web-based application that simplifies running Prowler across your cloud provider accounts. It provides a user-friendly interface to visualize the results and streamline your security assessments.
![Prowler App](docs/products/img/overview.png)
![Prowler App](docs/img/overview.png)
>For more details, refer to the [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
@@ -73,36 +80,28 @@ prowler <provider>
```console
prowler dashboard
```
![Prowler Dashboard](docs/products/img/dashboard.png)
![Prowler Dashboard](docs/img/dashboard.png)
# Prowler at a Glance
> [!Tip]
> For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com).
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) | Support | Stage | Interface |
|---|---|---|---|---|---|---|---|
| AWS | 576 | 82 | 36 | 10 | Official | Stable | UI, API, CLI |
| GCP | 79 | 13 | 10 | 3 | Official | Stable | UI, API, CLI |
| Azure | 162 | 19 | 11 | 4 | Official | Stable | UI, API, CLI |
| Kubernetes | 83 | 7 | 5 | 7 | Official | Stable | UI, API, CLI |
| GitHub | 17 | 2 | 1 | 0 | Official | Stable | UI, API, CLI |
| M365 | 70 | 7 | 3 | 2 | Official | Stable | UI, API, CLI |
| IaC | [See `trivy` docs.](https://trivy.dev/latest/docs/coverage/iac/) | N/A | N/A | N/A | Official | Beta | CLI |
| MongoDB Atlas | 10 | 3 | 0 | 0 | Official | Beta | CLI |
| NHN | 6 | 2 | 1 | 0 | Unofficial | Beta | CLI |
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 567 | 82 | 36 | 10 |
| GCP | 79 | 13 | 9 | 3 |
| Azure | 142 | 18 | 10 | 3 |
| Kubernetes | 83 | 7 | 5 | 7 |
| GitHub | 16 | 2 | 1 | 0 |
| M365 | 69 | 7 | 2 | 2 |
| NHN (Unofficial) | 6 | 2 | 1 | 0 |
> [!Note]
> The numbers in the table are updated periodically.
> [!Tip]
> For the most accurate and up-to-date information about checks, services, frameworks, and categories, visit [**Prowler Hub**](https://hub.prowler.com).
> [!Note]
> Use the following commands to list Prowler's available checks, services, compliance frameworks, and categories:
> - `prowler <provider> --list-checks`
> - `prowler <provider> --list-services`
> - `prowler <provider> --list-compliance`
> - `prowler <provider> --list-categories`
> Use the following commands to list Prowler's available checks, services, compliance frameworks, and categories: `prowler <provider> --list-checks`, `prowler <provider> --list-services`, `prowler <provider> --list-compliance` and `prowler <provider> --list-categories`.
# 💻 Installation
@@ -137,14 +136,6 @@ If your workstation's architecture is incompatible, you can resolve this by:
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
### Common Issues with Docker Pull Installation
> [!Note]
If you want to use AWS role assumption (e.g., with the "Connect assuming IAM Role" option), you may need to mount your local `.aws` directory into the container as a volume (e.g., `- "${HOME}/.aws:/home/prowler/.aws:ro"`). There are several ways to configure credentials for Docker containers. See the [Troubleshooting](./docs/troubleshooting.md) section for more details and examples.
You can find more information in the [Troubleshooting](./docs/troubleshooting.md) section.
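A minimal sketch of how that volume mount could look in a Compose override (the service name and image tag below are illustrative assumptions, not taken verbatim from the repository's `docker-compose.yml`):

```yaml
# Hypothetical Compose snippet: mount the host's AWS config/credentials read-only
# so the containerized Prowler CLI can assume IAM roles using local credentials.
services:
  prowler:                                      # service name is an assumption
    image: toniblyx/prowler:latest              # image tag is an assumption
    volumes:
      - "${HOME}/.aws:/home/prowler/.aws:ro"    # read-only mount, as described above
```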
### From GitHub
**Requirements**
@@ -240,7 +231,7 @@ The following versions of Prowler CLI are available, depending on your requireme
The container images are available here:
- Prowler CLI:
- [DockerHub](https://hub.docker.com/r/prowlercloud/prowler/tags)
- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
- Prowler App:
- [DockerHub - Prowler UI](https://hub.docker.com/r/prowlercloud/prowler-ui/tags)
@@ -275,7 +266,7 @@ python prowler-cli.py -v
- **Prowler API**: A backend service, developed with Django REST Framework, responsible for running Prowler scans and storing the generated results.
- **Prowler SDK**: A Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
![Prowler App Architecture](docs/products/img/prowler-app-architecture.png)
![Prowler App Architecture](docs/img/prowler-app-architecture.png)
## Prowler CLI
+15 -57
@@ -1,65 +1,23 @@
# Security
# Security Policy
## Reporting Vulnerabilities
## Software Security
As an **AWS Partner**, we have passed the [AWS Foundation Technical Review (FTR)](https://aws.amazon.com/partners/foundational-technical-review/) and use the following tools and automation to make sure our code is secure and our dependencies are up to date:
At Prowler, we consider the security of our open source software and systems a top priority. But no matter how much effort we put into system security, there can still be vulnerabilities present.
- `bandit` for code security review.
- `safety` and `dependabot` for dependencies.
- `hadolint` and `dockle` for our containers security.
- `snyk` in Docker Hub.
- `clair` in Amazon ECR.
- `vulture`, `flake8`, `black` and `pylint` for formatting and best practices.
If you discover a vulnerability, we would like to know about it so we can take steps to address it as quickly as possible. We would like to ask you to help us better protect our users, our clients and our systems.
## Reporting a Vulnerability
When reporting vulnerabilities, please consider (1) attack scenario / exploitability, and (2) the security impact of the bug. The following issues are considered out of scope:
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or the ProwlerPro service, please submit the information by contacting https://support.prowler.com.
- Social engineering support or attacks requiring social engineering.
- Clickjacking on pages with no sensitive actions.
- Cross-Site Request Forgery (CSRF) on unauthenticated forms or forms with no sensitive actions.
- Attacks requiring Man-In-The-Middle (MITM) or physical access to a user's device.
- Previously known vulnerable libraries without a working Proof of Concept (PoC).
- Comma Separated Values (CSV) injection without demonstrating a vulnerability.
- Missing best practices in SSL/TLS configuration.
- Any activity that could lead to the disruption of service (DoS).
- Rate limiting or brute force issues on non-authentication endpoints.
- Missing best practices in Content Security Policy (CSP).
- Missing HttpOnly or Secure flags on cookies.
- Configuration of or missing security headers.
- Missing email best practices, such as invalid, incomplete, or missing SPF/DKIM/DMARC records.
- Vulnerabilities only affecting users of outdated or unpatched browsers (less than two stable versions behind).
- Software version disclosure, banner identification issues, or descriptive error messages.
- Tabnabbing.
- Issues that require unlikely user interaction.
- Improper logout functionality and improper session timeout.
- CORS misconfiguration without an exploitation scenario.
- Broken link hijacking.
- Automated scanning results (e.g., sqlmap, Burp active scanner) that have not been manually verified.
- Content spoofing and text injection issues without a clear attack vector.
- Email spoofing without exploiting security flaws.
- Dead links or broken links.
- User enumeration.
The information you share with ProwlerPro as part of this process is kept confidential within ProwlerPro. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
Testing guidelines:
- Do not run automated scanners on other customer projects. Running automated scanners can run up costs for our users. Aggressively configured scanners might inadvertently disrupt services, exploit vulnerabilities, lead to system instability or breaches and violate Terms of Service from our upstream providers. Our own security systems won't be able to distinguish hostile reconnaissance from whitehat research. If you wish to run an automated scanner, notify us at support@prowler.com and only run it on your own Prowler app project. Do NOT attack Prowler projects in use by other customers.
- Do not take advantage of the vulnerability or problem you have discovered, for example by downloading more data than necessary to demonstrate the vulnerability or deleting or modifying other people's data.
We will review the submitted report, and assign it a tracking number. We will then respond to you, acknowledging receipt of the report, and outline the next steps in the process.
Reporting guidelines:
- File a report through our Support Desk at https://support.prowler.com
- If your report concerns missing security functionality, please file a feature request instead at https://github.com/prowler-cloud/prowler/issues
- Provide sufficient information to reproduce the problem so that we can resolve it as quickly as possible.
- If you have further questions or want direct interaction with the Prowler team, please contact us via our Community Slack at goto.prowler.com/slack.
You will receive a non-automated response to your initial contact within 24 hours, confirming receipt of your reported vulnerability.
Disclosure guidelines:
- In order to protect our users and customers, do not reveal the problem to others until we have researched, addressed and informed our affected customers.
- If you want to publicly share your research about Prowler at a conference, in a blog or any other public forum, you should share a draft with us for review and approval at least 30 days prior to the publication date. Please note that the following should not be included:
- Data regarding any Prowler user or customer projects.
- Prowler customers' data.
- Information about Prowler employees, contractors or partners.
What we promise:
- We will respond to your report within 5 business days with our evaluation of the report and an expected resolution date.
- If you have followed the instructions above, we will not take any legal action against you in regard to the report.
- We will handle your report with strict confidentiality, and not pass on your personal details to third parties without your permission.
- We will keep you informed of the progress towards resolving the problem.
- In the public information concerning the problem reported, we will give your name as the discoverer of the problem (unless you desire otherwise).
We strive to resolve all problems as quickly as possible, and we would like to play an active role in the ultimate publication on the problem after it is resolved.
---
For more information about our security policies, please refer to our [Security](https://docs.prowler.com/projects/prowler-open-source/en/latest/security/) section in our documentation.
We will coordinate public notification of any validated vulnerability with you. Where possible, we prefer that our respective public disclosures be posted simultaneously.
-2
View File
@@ -19,8 +19,6 @@ DJANGO_REFRESH_TOKEN_LIFETIME=1440
DJANGO_CACHE_MAX_AGE=3600
DJANGO_STALE_WHILE_REVALIDATE=60
DJANGO_SECRETS_ENCRYPTION_KEY=""
# Throttle, two options: Empty means no throttle; or if desired use one in DRF format: https://www.django-rest-framework.org/api-guide/throttling/#setting-the-throttling-policy
DJANGO_THROTTLE_TOKEN_OBTAIN=50/minute
# Decide whether to allow Django manage database table partitions
DJANGO_MANAGE_DB_PARTITIONS=[True|False]
DJANGO_CELERY_DEADLOCK_ATTEMPTS=5
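For context on the throttle variable above: DRF rates take the form `<number>/<period>`, e.g. `50/minute` or `1000/day`, and an empty value disables the throttle. Below is a minimal sketch of how such a value is commonly wired into Django REST Framework; the `token_obtain` scope name and this exact environment-variable plumbing are assumptions for illustration, not the Prowler API's actual settings code.

```python
# Illustrative sketch only: feeding a DRF-style rate (e.g. "50/minute") from an
# environment variable into a scoped throttle. The "token_obtain" scope and this
# plumbing are assumptions, not Prowler's actual configuration.
import os

THROTTLE_TOKEN_OBTAIN = os.environ.get("DJANGO_THROTTLE_TOKEN_OBTAIN", "")

REST_FRAMEWORK = {
    "DEFAULT_THROTTLE_CLASSES": ["rest_framework.throttling.ScopedRateThrottle"],
    # An empty variable maps to None, and DRF skips throttling when the rate is None.
    "DEFAULT_THROTTLE_RATES": {"token_obtain": THROTTLE_TOKEN_OBTAIN or None},
}
```

A view opts in by declaring `throttle_scope = "token_obtain"` on the view class.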
+168
View File
@@ -0,0 +1,168 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.pyc
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
/_data/
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
*.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/
# VSCode
.vscode/
+91
View File
@@ -0,0 +1,91 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: check-merge-conflict
- id: check-yaml
args: ["--unsafe"]
- id: check-json
- id: end-of-file-fixer
- id: trailing-whitespace
- id: no-commit-to-branch
- id: pretty-format-json
args: ["--autofix", "--no-sort-keys", "--no-ensure-ascii"]
exclude: 'src/backend/api/fixtures/dev/.*\.json$'
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.13.0
hooks:
- id: pretty-format-toml
args: [--autofix]
files: pyproject.toml
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.10.0
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.5.0
hooks:
# Run the linter.
- id: ruff
args: [ --fix ]
# Run the formatter.
- id: ruff-format
- repo: https://github.com/python-poetry/poetry
rev: 1.8.0
hooks:
- id: poetry-check
args: ["--directory=src"]
- id: poetry-lock
args: ["--no-update", "--directory=src"]
- repo: https://github.com/hadolint/hadolint
rev: v2.13.0-beta
hooks:
- id: hadolint
args: ["--ignore=DL3013", "Dockerfile"]
- repo: local
hooks:
- id: pylint
name: pylint
entry: bash -c 'poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/'
language: system
files: '.*\.py'
- id: trufflehog
name: TruffleHog
description: Detect secrets in your data.
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
# For running trufflehog in docker, use the following entry instead:
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["commit", "push"]
- id: bandit
name: bandit
description: "Bandit is a tool for finding common security issues in Python code"
entry: bash -c 'poetry run bandit -q -lll -x '*_test.py,./contrib/,./.venv/' -r .'
language: system
files: '.*\.py'
- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
entry: bash -c 'poetry run safety check --ignore 70612,66963,74429'
language: system
- id: vulture
name: vulture
description: "Vulture finds unused code in Python programs."
entry: bash -c 'poetry run vulture --exclude "contrib,.venv,tests,conftest.py" --min-confidence 100 .'
language: system
files: '.*\.py'
+34 -171
View File
@@ -2,181 +2,43 @@
All notable changes to the **Prowler API** are documented in this file.
## [1.13.2] (Prowler 5.12.3)
### Fixed
- 500 error when deleting user [(#8731)](https://github.com/prowler-cloud/prowler/pull/8731)
---
## [1.13.1] (Prowler 5.12.2)
### Changed
- Renamed compliance overview task queue to `compliance` [(#8755)](https://github.com/prowler-cloud/prowler/pull/8755)
### Security
- Django updated to the latest 5.1 security release, 5.1.12, due to [problems](https://www.djangoproject.com/weblog/2025/sep/03/security-releases/) with potential SQL injection in FilteredRelation column aliases [(#8693)](https://github.com/prowler-cloud/prowler/pull/8693)
---
## [1.13.0] (Prowler 5.12.0)
## [v1.9.0] (Prowler UNRELEASED)
### Added
- Integration with JIRA, enabling sending findings to a JIRA project [(#8622)](https://github.com/prowler-cloud/prowler/pull/8622), [(#8637)](https://github.com/prowler-cloud/prowler/pull/8637)
- `GET /overviews/findings_severity` now supports `filter[status]` and `filter[status__in]` to aggregate by specific statuses (`FAIL`, `PASS`)[(#8186)](https://github.com/prowler-cloud/prowler/pull/8186)
- Throttling options for `/api/v1/tokens` using the `DJANGO_THROTTLE_TOKEN_OBTAIN` environment variable [(#8647)](https://github.com/prowler-cloud/prowler/pull/8647)
---
## [1.12.0] (Prowler 5.11.0)
### Added
- Lighthouse support for OpenAI GPT-5 [(#8527)](https://github.com/prowler-cloud/prowler/pull/8527)
- Integration with Amazon Security Hub, enabling sending findings to Security Hub [(#8365)](https://github.com/prowler-cloud/prowler/pull/8365)
- Generate ASFF output for AWS providers with SecurityHub integration enabled [(#8569)](https://github.com/prowler-cloud/prowler/pull/8569)
### Fixed
- GitHub provider always scans user instead of organization when using provider UID [(#8587)](https://github.com/prowler-cloud/prowler/pull/8587)
---
## [1.11.0] (Prowler 5.10.0)
### Added
- Github provider support [(#8271)](https://github.com/prowler-cloud/prowler/pull/8271)
- Integration with Amazon S3, enabling storage and retrieval of scan data via S3 buckets [(#8056)](https://github.com/prowler-cloud/prowler/pull/8056)
### Fixed
- Avoid sending errors to Sentry in M365 provider when user authentication fails [(#8420)](https://github.com/prowler-cloud/prowler/pull/8420)
---
## [1.10.2] (Prowler v5.9.2)
### Changed
- Optimized queries for resources views [(#8336)](https://github.com/prowler-cloud/prowler/pull/8336)
---
## [v1.10.1] (Prowler v5.9.1)
### Fixed
- Calculate failed findings during scans to prevent heavy database queries [(#8322)](https://github.com/prowler-cloud/prowler/pull/8322)
---
## [v1.10.0] (Prowler v5.9.0)
### Added
- SSO with SAML support [(#8175)](https://github.com/prowler-cloud/prowler/pull/8175)
- `GET /resources/metadata`, `GET /resources/metadata/latest` and `GET /resources/latest` to expose resource metadata and latest scan results [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
### Changed
- `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported to allow to mute findings.
- Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
- Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
### Fixed
- Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
- RBAC is now applied to `GET /overviews/providers` [(#8277)](https://github.com/prowler-cloud/prowler/pull/8277)
### Changed
- `POST /schedules/daily` returns a `409 CONFLICT` if already created [(#8258)](https://github.com/prowler-cloud/prowler/pull/8258)
### Security
- Enhanced password validation to enforce 12+ character passwords with special characters, uppercase, lowercase, and numbers [(#8225)](https://github.com/prowler-cloud/prowler/pull/8225)
---
## [v1.9.1] (Prowler v5.8.1)
### Added
- Custom exception for provider connection errors during scans [(#8234)](https://github.com/prowler-cloud/prowler/pull/8234)
### Changed
- Summary and overview tasks now use a dedicated queue and no longer propagate errors to compliance tasks [(#8214)](https://github.com/prowler-cloud/prowler/pull/8214)
### Fixed
- Scan with no resources will not trigger legacy code for findings metadata [(#8183)](https://github.com/prowler-cloud/prowler/pull/8183)
- Invitation email comparison case-insensitive [(#8206)](https://github.com/prowler-cloud/prowler/pull/8206)
### Removed
- Validation of the provider's secret type during updates [(#8197)](https://github.com/prowler-cloud/prowler/pull/8197)
---
## [v1.9.0] (Prowler v5.8.0)
### Added
- Support GCP Service Account key [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
- `GET /compliance-overviews` endpoints to retrieve compliance metadata and specific requirements statuses [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
- Lighthouse configuration support [(#7848)](https://github.com/prowler-cloud/prowler/pull/7848)
### Changed
- Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
- Optional `user` and `password` for M365 provider [(#7992)](https://github.com/prowler-cloud/prowler/pull/7992)
### Fixed
- Scheduled scans are no longer deleted when their daily schedule run is disabled [(#8082)](https://github.com/prowler-cloud/prowler/pull/8082)
---
## [v1.8.5] (Prowler v5.7.5)
### Fixed
- Normalize provider UID to ensure safe and unique export directory paths [(#8007)](https://github.com/prowler-cloud/prowler/pull/8007).
- Blank resource types in `/metadata` endpoints [(#8027)](https://github.com/prowler-cloud/prowler/pull/8027)
---
## [v1.8.4] (Prowler v5.7.4)
### Removed
- Reverted RLS transaction handling and DB custom backend [(#7994)](https://github.com/prowler-cloud/prowler/pull/7994)
---
## [v1.8.3] (Prowler v5.7.3)
### Added
- Database backend to handle already closed connections [(#7935)](https://github.com/prowler-cloud/prowler/pull/7935)
- Support GCP Service Account key. [(#7824)](https://github.com/prowler-cloud/prowler/pull/7824)
### Changed
- Renamed field encrypted_password to password for M365 provider [(#7784)](https://github.com/prowler-cloud/prowler/pull/7784)
### Fixed
- Transaction persistence with RLS operations [(#7916)](https://github.com/prowler-cloud/prowler/pull/7916)
- Reverted the change `get_with_retry` to use the original `get` method for retrieving tasks [(#7932)](https://github.com/prowler-cloud/prowler/pull/7932)
- Fixed the connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)
---
## [v1.8.2] (Prowler v5.7.2)
### Fixed
- Task lookup to use task_kwargs instead of task_args for scan report resolution [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
- Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
- Connection status verification before launching a scan [(#7831)](https://github.com/prowler-cloud/prowler/pull/7831)
- Race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876)
- Error when modifying or retrieving tenants due to missing user UUID in transaction context [(#7890)](https://github.com/prowler-cloud/prowler/pull/7890)
- Fixed task lookup to use task_kwargs instead of task_args for scan report resolution. [(#7830)](https://github.com/prowler-cloud/prowler/pull/7830)
- Fixed Kubernetes UID validation to allow valid context names [(#7871)](https://github.com/prowler-cloud/prowler/pull/7871)
- Fixed a race condition when creating background tasks [(#7876)](https://github.com/prowler-cloud/prowler/pull/7876).
---
## [v1.8.1] (Prowler v5.7.1)
### Fixed
- Added database index to improve performance on finding lookup [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800)
- Added database index to improve performance on finding lookup. [(#7800)](https://github.com/prowler-cloud/prowler/pull/7800)
---
## [v1.8.0] (Prowler v5.7.0)
### Added
- Huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
- Improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
- Queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690)
- New endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743)
- Export support for Prowler ThreatScore in M365 [(7783)](https://github.com/prowler-cloud/prowler/pull/7783)
- Added huge improvements to `/findings/metadata` and resource related filters for findings [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
- Added improvements to `/overviews` endpoints [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
- Added new queue to perform backfill background tasks [(#7690)](https://github.com/prowler-cloud/prowler/pull/7690).
- Added new endpoints to retrieve latest findings and metadata [(#7743)](https://github.com/prowler-cloud/prowler/pull/7743).
- Added export support for Prowler ThreatScore in M365 [(7783)](https://github.com/prowler-cloud/prowler/pull/7783)
---
@@ -184,9 +46,9 @@ All notable changes to the **Prowler API** are documented in this file.
### Added
- M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563)
- `compliance/` folder and ZIPexport functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
- API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653)
- Added M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563).
- Added a `compliance/` folder and ZIPexport functionality for all compliance reports.[(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).
- Added a new API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).
---
@@ -194,42 +56,43 @@ All notable changes to the **Prowler API** are documented in this file.
### Added
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167)
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289)
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333)
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378)
- Missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318)
- Support for developing new integrations [(#7167)](https://github.com/prowler-cloud/prowler/pull/7167).
- HTTP Security Headers [(#7289)](https://github.com/prowler-cloud/prowler/pull/7289).
- New endpoint to get the compliance overviews metadata [(#7333)](https://github.com/prowler-cloud/prowler/pull/7333).
- Support for muted findings [(#7378)](https://github.com/prowler-cloud/prowler/pull/7378).
- Added missing fields to API findings and resources [(#7318)](https://github.com/prowler-cloud/prowler/pull/7318).
---
## [v1.5.4] (Prowler v5.4.4)
### Fixed
- Bug with periodic tasks when trying to delete a provider [(#7466)](https://github.com/prowler-cloud/prowler/pull/7466)
- Fixed a bug with periodic tasks when trying to delete a provider ([#7466])(https://github.com/prowler-cloud/prowler/pull/7466).
---
## [v1.5.3] (Prowler v5.4.3)
### Fixed
- Duplicated scheduled scans handling [(#7401)](https://github.com/prowler-cloud/prowler/pull/7401)
- Environment variable to configure the deletion task batch size [(#7423)](https://github.com/prowler-cloud/prowler/pull/7423)
- Added duplicated scheduled scans handling ([#7401])(https://github.com/prowler-cloud/prowler/pull/7401).
- Added environment variable to configure the deletion task batch size ([#7423])(https://github.com/prowler-cloud/prowler/pull/7423).
---
## [v1.5.2] (Prowler v5.4.2)
### Changed
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349)
- Refactored deletion logic and implemented retry mechanism for deletion tasks [(#7349)](https://github.com/prowler-cloud/prowler/pull/7349).
---
## [v1.5.1] (Prowler v5.4.1)
### Fixed
- Handle response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183)
- Race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172)
- Handle exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283)
- Added a handled response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183).
- Fixed a race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172).
- Handled exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283).
---
@@ -237,20 +100,20 @@ All notable changes to the **Prowler API** are documented in this file.
### Added
- Social login integration with Google and GitHub [(#6906)](https://github.com/prowler-cloud/prowler/pull/6906)
- API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878)
- Add API scan report system, now all scans launched from the API will generate a compressed file with the report in OCSF, CSV and HTML formats [(#6878)](https://github.com/prowler-cloud/prowler/pull/6878).
- Configurable Sentry integration [(#6874)](https://github.com/prowler-cloud/prowler/pull/6874)
### Changed
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019)
- Optimized `GET /findings` endpoint to improve response time and size [(#7019)](https://github.com/prowler-cloud/prowler/pull/7019).
---
## [v1.4.0] (Prowler v5.3.0)
### Changed
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700)
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800)
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863)
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869)
- Daily scheduled scan instances are now created beforehand with `SCHEDULED` state [(#6700)](https://github.com/prowler-cloud/prowler/pull/6700).
- Findings endpoints now require at least one date filter [(#6800)](https://github.com/prowler-cloud/prowler/pull/6800).
- Findings metadata endpoint received a performance improvement [(#6863)](https://github.com/prowler-cloud/prowler/pull/6863).
- Increased the allowed length of the provider UID for Kubernetes providers [(#6869)](https://github.com/prowler-cloud/prowler/pull/6869).
---
+5 -19
View File
@@ -6,19 +6,7 @@ ARG POWERSHELL_VERSION=7.5.0
ENV POWERSHELL_VERSION=${POWERSHELL_VERSION}
# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
wget \
libicu72 \
gcc \
g++ \
make \
libxml2-dev \
libxmlsec1-dev \
libxmlsec1-openssl \
pkg-config \
libtool \
libxslt1-dev \
python3-dev \
RUN apt-get update && apt-get install -y --no-install-recommends wget libicu72 \
&& rm -rf /var/lib/apt/lists/*
# Install PowerShell
@@ -44,25 +32,23 @@ USER prowler
WORKDIR /home/prowler
# Ensure output directory exists
RUN mkdir -p /tmp/prowler_api_output
COPY pyproject.toml ./
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir poetry
COPY src/backend/ ./backend/
ENV PATH="/home/prowler/.local/bin:$PATH"
# Add `--no-root` to avoid installing the current project as a package
RUN poetry install --no-root && \
rm -rf ~/.cache/pip
RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"
COPY src/backend/ ./backend/
COPY docker-entrypoint.sh ./docker-entrypoint.sh
RUN poetry run python "$(poetry env info --path)/src/prowler/prowler/providers/m365/lib/powershell/m365_powershell.py"
WORKDIR /home/prowler/backend
# Development image
+1 -1
View File
@@ -257,7 +257,7 @@ cd src/backend
python manage.py loaddata api/fixtures/0_dev_users.json --database admin
```
> The default credentials are `dev@prowler.com:Thisisapassword123@` or `dev2@prowler.com:Thisisapassword123@`
> The default credentials are `dev@prowler.com:thisisapassword123` or `dev2@prowler.com:thisisapassword123`
## Run tests
+125
View File
@@ -0,0 +1,125 @@
services:
api:
build:
dockerfile: Dockerfile
image: prowler-api
env_file:
- path: ./.env
required: false
ports:
- "${DJANGO_PORT:-8000}:${DJANGO_PORT:-8000}"
profiles:
- prod
depends_on:
postgres:
condition: service_healthy
valkey:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "prod"
api-dev:
build:
dockerfile: Dockerfile
target: dev
image: prowler-api-dev
environment:
- DJANGO_SETTINGS_MODULE=config.django.devel
- DJANGO_LOGGING_FORMATTER=human_readable
env_file:
- path: ./.env
required: false
ports:
- "${DJANGO_PORT:-8080}:${DJANGO_PORT:-8080}"
volumes:
- "./src/backend:/home/prowler/backend"
- "./pyproject.toml:/home/prowler/pyproject.toml"
profiles:
- dev
depends_on:
postgres:
condition: service_healthy
valkey:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "dev"
postgres:
image: postgres:16.3-alpine
ports:
- "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
hostname: "postgres-db"
volumes:
- ./_data/postgres:/var/lib/postgresql/data
environment:
- POSTGRES_USER=${POSTGRES_ADMIN_USER:-prowler}
- POSTGRES_PASSWORD=${POSTGRES_ADMIN_PASSWORD:-S3cret}
- POSTGRES_DB=${POSTGRES_DB:-prowler_db}
env_file:
- path: ./.env
required: false
healthcheck:
test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER:-prowler} -d ${POSTGRES_DB:-prowler_db}'"]
interval: 5s
timeout: 5s
retries: 5
valkey:
image: valkey/valkey:7-alpine3.19
ports:
- "${VALKEY_PORT:-6379}:6379"
hostname: "valkey"
volumes:
- ./_data/valkey:/data
env_file:
- path: ./.env
required: false
healthcheck:
test: ["CMD-SHELL", "sh -c 'valkey-cli ping'"]
interval: 10s
timeout: 5s
retries: 3
worker:
build:
dockerfile: Dockerfile
image: prowler-worker
environment:
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
env_file:
- path: ./.env
required: false
profiles:
- dev
- prod
depends_on:
valkey:
condition: service_healthy
postgres:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "worker"
worker-beat:
build:
dockerfile: Dockerfile
image: prowler-worker
environment:
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-config.django.production}
env_file:
- path: ./.env
required: false
profiles:
- dev
- prod
depends_on:
valkey:
condition: service_healthy
postgres:
condition: service_healthy
entrypoint:
- "../docker-entrypoint.sh"
- "beat"
+1 -5
View File
@@ -3,10 +3,6 @@
apply_migrations() {
echo "Applying database migrations..."
# Fix Inconsistent migration history after adding sites app
poetry run python manage.py check_and_fix_socialaccount_sites_migration --database admin
poetry run python manage.py migrate --database admin
}
@@ -32,7 +28,7 @@ start_prod_server() {
start_worker() {
echo "Starting the worker..."
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview,integrations,compliance -E --max-tasks-per-child 1
poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill -E --max-tasks-per-child 1
}
start_worker_beat() {
+1241 -1979
View File
File diff suppressed because it is too large
+5 -9
View File
@@ -7,8 +7,8 @@ authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
dependencies = [
"celery[pytest] (>=5.4.0,<6.0.0)",
"dj-rest-auth[with_social,jwt] (==7.0.1)",
"django (==5.1.12)",
"django-allauth[saml] (>=65.8.0,<66.0.0)",
"django==5.1.8",
"django-allauth==65.4.1",
"django-celery-beat (>=2.7.0,<3.0.0)",
"django-celery-results (>=2.5.1,<3.0.0)",
"django-cors-headers==4.4.0",
@@ -23,15 +23,11 @@ dependencies = [
"drf-spectacular==0.27.2",
"drf-spectacular-jsonapi==0.5.1",
"gunicorn==23.0.0",
"lxml==5.3.2",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.12",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
"psycopg2-binary==2.9.9",
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
"uuid6==2024.7.10",
"openai (>=1.82.0,<2.0.0)",
"xmlsec==1.3.14",
"h2 (==4.3.0)"
"uuid6==2024.7.10"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
@@ -39,7 +35,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.13.2"
version = "1.9.0"
[project.scripts]
celery = "src.backend.config.settings.celery"
+26 -36
View File
@@ -17,8 +17,6 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
def pre_social_login(self, request, sociallogin):
# Link existing accounts with the same email address
email = sociallogin.account.extra_data.get("email")
if sociallogin.provider.id == "saml":
email = sociallogin.user.email
if email:
existing_user = self.get_user_by_email(email)
if existing_user:
@@ -31,41 +29,33 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
"""
with transaction.atomic(using=MainRouter.admin_db):
user = super().save_user(request, sociallogin, form)
provider = sociallogin.provider.id
extra = sociallogin.account.extra_data
if provider != "saml":
# Handle other providers (e.g., GitHub, Google)
user.save(using=MainRouter.admin_db)
social_account_name = sociallogin.account.extra_data.get("name")
if social_account_name:
user.name = social_account_name
user.save(using=MainRouter.admin_db)
social_account_name = extra.get("name")
if social_account_name:
user.name = social_account_name
user.save(using=MainRouter.admin_db)
tenant = Tenant.objects.using(MainRouter.admin_db).create(
name=f"{user.email.split('@')[0]} default tenant"
tenant = Tenant.objects.using(MainRouter.admin_db).create(
name=f"{user.email.split('@')[0]} default tenant"
)
with rls_transaction(str(tenant.id)):
Membership.objects.using(MainRouter.admin_db).create(
user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
)
role = Role.objects.using(MainRouter.admin_db).create(
name="admin",
tenant_id=tenant.id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user,
role=role,
tenant_id=tenant.id,
)
with rls_transaction(str(tenant.id)):
Membership.objects.using(MainRouter.admin_db).create(
user=user, tenant=tenant, role=Membership.RoleChoices.OWNER
)
role = Role.objects.using(MainRouter.admin_db).create(
name="admin",
tenant_id=tenant.id,
manage_users=True,
manage_account=True,
manage_billing=True,
manage_providers=True,
manage_integrations=True,
manage_scans=True,
unlimited_visibility=True,
)
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
user=user,
role=role,
tenant_id=tenant.id,
)
else:
request.session["saml_user_created"] = str(user.id)
return user
+12 -2
View File
@@ -109,6 +109,16 @@ class BaseTenantViewset(BaseViewSet):
pass # Tenant might not exist, handle gracefully
def initial(self, request, *args, **kwargs):
if (
request.resolver_match.url_name != "tenant-detail"
and request.method != "DELETE"
):
user_id = str(request.user.id)
with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
return super().initial(request, *args, **kwargs)
# TODO: DRY this when we have time
if request.auth is None:
raise NotAuthenticated
@@ -116,8 +126,8 @@ class BaseTenantViewset(BaseViewSet):
if tenant_id is None:
raise NotAuthenticated("Tenant ID is not present in token")
user_id = str(request.user.id)
with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
with rls_transaction(tenant_id):
self.request.tenant_id = tenant_id
return super().initial(request, *args, **kwargs)
+7 -11
View File
@@ -190,16 +190,10 @@ def generate_compliance_overview_template(prowler_compliance: dict):
total_checks = len(requirement.Checks)
checks_dict = {check: None for check in requirement.Checks}
req_status_val = "MANUAL" if total_checks == 0 else "PASS"
# Build requirement dictionary
requirement_dict = {
"name": requirement.Name or requirement.Id,
"description": requirement.Description,
"tactics": getattr(requirement, "Tactics", []),
"subtechniques": getattr(requirement, "SubTechniques", []),
"platforms": getattr(requirement, "Platforms", []),
"technique_url": getattr(requirement, "TechniqueURL", ""),
"attributes": [
dict(attribute) for attribute in requirement.Attributes
],
@@ -210,18 +204,20 @@ def generate_compliance_overview_template(prowler_compliance: dict):
"manual": 0,
"total": total_checks,
},
"status": req_status_val,
"status": "PASS",
}
# Update requirements status counts for the framework
if req_status_val == "MANUAL":
# Update requirements status
if total_checks == 0:
requirements_status["manual"] += 1
elif req_status_val == "PASS":
requirements_status["passed"] += 1
# Add requirement to compliance requirements
compliance_requirements[requirement.Id] = requirement_dict
# Calculate pending requirements
pending_requirements = total_requirements - requirements_status["manual"]
requirements_status["passed"] = pending_requirements
# Build compliance dictionary
compliance_dict = {
"framework": compliance_data.Framework,
+11 -159
View File
@@ -1,4 +1,3 @@
import re
import secrets
import uuid
from contextlib import contextmanager
@@ -153,51 +152,6 @@ def delete_related_daily_task(provider_id: str):
PeriodicTask.objects.filter(name=task_name).delete()
def create_objects_in_batches(
tenant_id: str, model, objects: list, batch_size: int = 500
):
"""
Bulk-create model instances in repeated, per-tenant RLS transactions.
All chunks execute in their own transaction, so no single transaction
grows too large.
Args:
tenant_id (str): UUID string of the tenant under which to set RLS.
model: Django model class whose `.objects.bulk_create()` will be called.
objects (list): List of model instances (unsaved) to bulk-create.
batch_size (int): Maximum number of objects per bulk_create call.
"""
total = len(objects)
for i in range(0, total, batch_size):
chunk = objects[i : i + batch_size]
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
model.objects.bulk_create(chunk, batch_size)
def update_objects_in_batches(
tenant_id: str, model, objects: list, fields: list, batch_size: int = 500
):
"""
Bulk-update model instances in repeated, per-tenant RLS transactions.
All chunks execute in their own transaction, so no single transaction
grows too large.
Args:
tenant_id (str): UUID string of the tenant under which to set RLS.
model: Django model class whose `.objects.bulk_update()` will be called.
objects (list): List of model instances (saved) to bulk-update.
fields (list): List of field names to update.
batch_size (int): Maximum number of objects per bulk_update call.
"""
total = len(objects)
for start in range(0, total, batch_size):
chunk = objects[start : start + batch_size]
with rls_transaction(value=tenant_id, parameter=POSTGRES_TENANT_VAR):
model.objects.bulk_update(chunk, fields, batch_size)
# Postgres Enums
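As a quick illustration of the two batched helpers in the hunk above, a hypothetical usage sketch follows; `Finding` and its fields are illustrative placeholders rather than the exact Prowler schema, and `tenant_id` is assumed to be the tenant's UUID string.

```python
# Hypothetical usage sketch of create_objects_in_batches / update_objects_in_batches
# from the diff above. `Finding` and its fields are placeholders, not the real schema.
findings = [
    Finding(tenant_id=tenant_id, uid=f"resource-{i}", status="FAIL")
    for i in range(2000)
]

# Four bulk_create calls of 500 rows each, every chunk inside its own per-tenant
# RLS transaction instead of one long-lived transaction.
create_objects_in_batches(tenant_id=tenant_id, model=Finding, objects=findings)

for finding in findings:
    finding.status = "PASS"

# Persist only the changed column, chunked the same way.
update_objects_in_batches(
    tenant_id=tenant_id, model=Finding, objects=findings, fields=["status"]
)
```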
@@ -273,72 +227,6 @@ def register_enum(apps, schema_editor, enum_class): # noqa: F841
register_adapter(enum_class, enum_adapter)
def _should_create_index_on_partition(
partition_name: str, all_partitions: bool = False
) -> bool:
"""
Determine if we should create an index on this partition.
Args:
partition_name: The name of the partition (e.g., "findings_2025_aug", "findings_default")
all_partitions: If True, create on all partitions. If False, only current/future partitions.
Returns:
bool: True if index should be created on this partition, False otherwise.
"""
if all_partitions:
return True
# Extract date from partition name if it follows the pattern
# Partition names look like: findings_2025_aug, findings_2025_jul, etc.
date_pattern = r"(\d{4})_([a-z]{3})$"
match = re.search(date_pattern, partition_name)
if not match:
# If we can't parse the date, include it to be safe (e.g., default partition)
return True
try:
year_str, month_abbr = match.groups()
year = int(year_str)
# Map month abbreviations to numbers
month_map = {
"jan": 1,
"feb": 2,
"mar": 3,
"apr": 4,
"may": 5,
"jun": 6,
"jul": 7,
"aug": 8,
"sep": 9,
"oct": 10,
"nov": 11,
"dec": 12,
}
month = month_map.get(month_abbr.lower())
if month is None:
# Unknown month abbreviation, include it to be safe
return True
partition_date = datetime(year, month, 1, tzinfo=timezone.utc)
# Get current month start
now = datetime.now(timezone.utc)
current_month_start = now.replace(
day=1, hour=0, minute=0, second=0, microsecond=0
)
# Include current month and future partitions
return partition_date >= current_month_start
except (ValueError, TypeError):
# If date parsing fails, include it to be safe
return True
def create_index_on_partitions(
apps, # noqa: F841
schema_editor,
@@ -347,39 +235,16 @@ def create_index_on_partitions(
columns: str,
method: str = "BTREE",
where: str = "",
all_partitions: bool = True,
):
"""
Create an index on existing partitions of `parent_table`.
Create an index on every existing partition of `parent_table`.
Args:
parent_table: The name of the root table (e.g. "findings").
index_name: A short name for the index (will be prefixed per-partition).
columns: The parenthesized column list, e.g. "tenant_id, scan_id, status".
method: The index method—BTREE, GIN, etc. Defaults to BTREE.
where: Optional WHERE clause (without the leading "WHERE"), e.g. "status = 'FAIL'".
all_partitions: Whether to create indexes on all partitions or just current/future ones.
Defaults to False (current/future only) to avoid maintenance overhead
on old partitions where the index may not be needed.
Examples:
# Create index only on current and future partitions (recommended for new indexes)
create_index_on_partitions(
apps, schema_editor,
parent_table="findings",
index_name="new_performance_idx",
columns="tenant_id, status, severity",
all_partitions=False # Default behavior
)
# Create index on all partitions (use when migrating existing critical indexes)
create_index_on_partitions(
apps, schema_editor,
parent_table="findings",
index_name="critical_existing_idx",
columns="tenant_id, scan_id",
all_partitions=True
)
method: The index method—BTREE, GIN, etc. Defaults to BTREE.
where: Optional WHERE clause (without the leading "WHERE"), e.g. "status = 'FAIL'".
"""
with connection.cursor() as cursor:
cursor.execute(
@@ -394,14 +259,13 @@ def create_index_on_partitions(
where_sql = f" WHERE {where}" if where else ""
for partition in partitions:
if _should_create_index_on_partition(partition, all_partitions):
idx_name = f"{partition.replace('.', '_')}_{index_name}"
sql = (
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {idx_name} "
f"ON {partition} USING {method} ({columns})"
f"{where_sql};"
)
schema_editor.execute(sql)
idx_name = f"{partition.replace('.', '_')}_{index_name}"
sql = (
f"CREATE INDEX CONCURRENTLY IF NOT EXISTS {idx_name} "
f"ON {partition} USING {method} ({columns})"
f"{where_sql};"
)
schema_editor.execute(sql)
def drop_index_on_partitions(
@@ -415,7 +279,7 @@ def drop_index_on_partitions(
Args:
parent_table: The name of the root table (e.g. "findings").
index_name: The same short name used when creating them.
index_name: The same short name used when creating them.
"""
with connection.cursor() as cursor:
cursor.execute(
@@ -552,15 +416,3 @@ class IntegrationTypeEnum(EnumType):
class IntegrationTypeEnumField(PostgresEnumField):
def __init__(self, *args, **kwargs):
super().__init__("integration_type", *args, **kwargs)
# Postgres enum definition for Processor type
class ProcessorTypeEnum(EnumType):
enum_type_name = "processor_type"
class ProcessorTypeEnumField(PostgresEnumField):
def __init__(self, *args, **kwargs):
super().__init__("processor_type", *args, **kwargs)
+4 -57
View File
@@ -3,7 +3,7 @@ from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework_json_api.exceptions import exception_handler
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
from rest_framework_simplejwt.exceptions import TokenError, InvalidToken
class ModelValidationError(ValidationError):
@@ -32,36 +32,6 @@ class InvitationTokenExpiredException(APIException):
default_code = "token_expired"
# Task Management Exceptions (non-HTTP)
class TaskManagementError(Exception):
"""Base exception for task management errors."""
def __init__(self, task=None):
self.task = task
super().__init__()
class TaskFailedException(TaskManagementError):
"""Raised when a task has failed."""
class TaskNotFoundException(TaskManagementError):
"""Raised when a task is not found."""
class TaskInProgressException(TaskManagementError):
"""Raised when a task is running but there's no related Task object to return."""
def __init__(self, task_result=None):
self.task_result = task_result
super().__init__()
# Provider connection errors
class ProviderConnectionError(Exception):
"""Base exception for provider connection errors."""
def custom_exception_handler(exc, context):
if isinstance(exc, django_validation_error):
if hasattr(exc, "error_dict"):
@@ -69,30 +39,7 @@ def custom_exception_handler(exc, context):
else:
exc = ValidationError(detail=exc.messages[0], code=exc.code)
elif isinstance(exc, (TokenError, InvalidToken)):
if (
hasattr(exc, "detail")
and isinstance(exc.detail, dict)
and "messages" in exc.detail
):
exc.detail["messages"] = [
message_item["message"] for message_item in exc.detail["messages"]
]
exc.detail["messages"] = [
message_item["message"] for message_item in exc.detail["messages"]
]
return exception_handler(exc, context)
class ConflictException(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = "A conflict occurred. The resource already exists."
default_code = "conflict"
def __init__(self, detail=None, code=None, pointer=None):
error_detail = {
"detail": detail or self.default_detail,
"status": self.status_code,
"code": self.default_code,
}
if pointer:
error_detail["source"] = {"pointer": pointer}
super().__init__(detail=[error_detail])
+6 -181
View File
@@ -1,8 +1,7 @@
from datetime import date, datetime, timedelta, timezone
from dateutil.parser import parse
from django.conf import settings
from django.db.models import F, Q
from django.db.models import Q
from django_filters.rest_framework import (
BaseInFilter,
BooleanFilter,
@@ -23,14 +22,12 @@ from api.db_utils import (
StatusEnumField,
)
from api.models import (
ComplianceRequirementOverview,
ComplianceOverview,
Finding,
Integration,
Invitation,
Membership,
OverviewStatusChoices,
PermissionChoices,
Processor,
Provider,
ProviderGroup,
ProviderSecret,
@@ -341,8 +338,6 @@ class ResourceFilter(ProviderRelationshipFilterSet):
tags = CharFilter(method="filter_tag")
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
scan = UUIDFilter(field_name="provider__scan", lookup_expr="exact")
scan__in = UUIDInFilter(field_name="provider__scan", lookup_expr="in")
class Meta:
model = Resource
@@ -357,82 +352,6 @@ class ResourceFilter(ProviderRelationshipFilterSet):
"updated_at": ["gte", "lte"],
}
def filter_queryset(self, queryset):
if not (self.data.get("scan") or self.data.get("scan__in")) and not (
self.data.get("updated_at")
or self.data.get("updated_at__date")
or self.data.get("updated_at__gte")
or self.data.get("updated_at__lte")
):
raise ValidationError(
[
{
"detail": "At least one date filter is required: filter[updated_at], filter[updated_at.gte], "
"or filter[updated_at.lte].",
"status": 400,
"source": {"pointer": "/data/attributes/updated_at"},
"code": "required",
}
]
)
gte_date = (
parse(self.data.get("updated_at__gte")).date()
if self.data.get("updated_at__gte")
else datetime.now(timezone.utc).date()
)
lte_date = (
parse(self.data.get("updated_at__lte")).date()
if self.data.get("updated_at__lte")
else datetime.now(timezone.utc).date()
)
if abs(lte_date - gte_date) > timedelta(
days=settings.FINDINGS_MAX_DAYS_IN_RANGE
):
raise ValidationError(
[
{
"detail": f"The date range cannot exceed {settings.FINDINGS_MAX_DAYS_IN_RANGE} days.",
"status": 400,
"source": {"pointer": "/data/attributes/updated_at"},
"code": "invalid",
}
]
)
return super().filter_queryset(queryset)
def filter_tag_key(self, queryset, name, value):
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
def filter_tag_value(self, queryset, name, value):
return queryset.filter(Q(tags__value=value) | Q(tags__value__icontains=value))
def filter_tag(self, queryset, name, value):
# We won't know what the user wants to filter on just based on the value,
# and we don't want to build special filtering logic for every possible
# provider tag spec, so we'll just do a full text search
return queryset.filter(tags__text_search=value)
class LatestResourceFilter(ProviderRelationshipFilterSet):
tag_key = CharFilter(method="filter_tag_key")
tag_value = CharFilter(method="filter_tag_value")
tag = CharFilter(method="filter_tag")
tags = CharFilter(method="filter_tag")
class Meta:
model = Resource
fields = {
"provider": ["exact", "in"],
"uid": ["exact", "icontains"],
"name": ["exact", "icontains"],
"region": ["exact", "icontains", "in"],
"service": ["exact", "icontains", "in"],
"type": ["exact", "icontains", "in"],
}
def filter_tag_key(self, queryset, name, value):
return queryset.filter(Q(tags__key=value) | Q(tags__key__icontains=value))
@@ -718,11 +637,12 @@ class RoleFilter(FilterSet):
class ComplianceOverviewFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
scan_id = UUIDFilter(field_name="scan_id")
region = CharFilter(field_name="region")
provider_type = ChoiceFilter(choices=Provider.ProviderChoices.choices)
provider_type__in = ChoiceInFilter(choices=Provider.ProviderChoices.choices)
scan_id = UUIDFilter(field_name="scan__id")
class Meta:
model = ComplianceRequirementOverview
model = ComplianceOverview
fields = {
"inserted_at": ["date", "gte", "lte"],
"compliance_id": ["exact", "icontains"],
@@ -751,72 +671,6 @@ class ScanSummaryFilter(FilterSet):
}
class ScanSummarySeverityFilter(ScanSummaryFilter):
"""Filter for findings_severity ScanSummary endpoint - includes status filters"""
# Custom status filters - only for severity grouping endpoint
status = ChoiceFilter(method="filter_status", choices=OverviewStatusChoices.choices)
status__in = CharInFilter(method="filter_status_in", lookup_expr="in")
def filter_status(self, queryset, name, value):
# Validate the status value
if value not in [choice[0] for choice in OverviewStatusChoices.choices]:
raise ValidationError(f"Invalid status value: {value}")
# Apply the filter by annotating the queryset with the status field
if value == OverviewStatusChoices.FAIL:
return queryset.annotate(status_count=F("fail"))
elif value == OverviewStatusChoices.PASS:
return queryset.annotate(status_count=F("_pass"))
else:
return queryset.annotate(status_count=F("total"))
def filter_status_in(self, queryset, name, value):
# Validate the status values
valid_statuses = [choice[0] for choice in OverviewStatusChoices.choices]
for status_val in value:
if status_val not in valid_statuses:
raise ValidationError(f"Invalid status value: {status_val}")
# If all statuses or no valid statuses, use total
if (
set(value)
>= {
OverviewStatusChoices.FAIL,
OverviewStatusChoices.PASS,
}
or not value
):
return queryset.annotate(status_count=F("total"))
# Build the sum expression based on status values
sum_expression = None
for status in value:
if status == OverviewStatusChoices.FAIL:
field_expr = F("fail")
elif status == OverviewStatusChoices.PASS:
field_expr = F("_pass")
else:
continue
if sum_expression is None:
sum_expression = field_expr
else:
sum_expression = sum_expression + field_expr
if sum_expression is None:
return queryset.annotate(status_count=F("total"))
return queryset.annotate(status_count=sum_expression)
class Meta:
model = ScanSummary
fields = {
"inserted_at": ["date", "gte", "lte"],
"region": ["exact", "icontains", "in"],
}
class ServiceOverviewFilter(ScanSummaryFilter):
def is_valid(self):
# Check if at least one of the inserted_at filters is present
@@ -851,32 +705,3 @@ class IntegrationFilter(FilterSet):
fields = {
"inserted_at": ["date", "gte", "lte"],
}
class ProcessorFilter(FilterSet):
processor_type = ChoiceFilter(choices=Processor.ProcessorChoices.choices)
processor_type__in = ChoiceInFilter(
choices=Processor.ProcessorChoices.choices,
field_name="processor_type",
lookup_expr="in",
)
class IntegrationJiraFindingsFilter(FilterSet):
# To be expanded as needed
finding_id = UUIDFilter(field_name="id", lookup_expr="exact")
finding_id__in = UUIDInFilter(field_name="id", lookup_expr="in")
class Meta:
model = Finding
fields = {}
def filter_queryset(self, queryset):
# Validate that there is at least one filter provided
if not self.data:
raise ValidationError(
{
"findings": "No finding filters provided. At least one filter is required."
}
)
return super().filter_queryset(queryset)
@@ -3,7 +3,7 @@
"model": "api.user",
"pk": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"fields": {
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
"last_login": null,
"name": "Devie Prowlerson",
"email": "dev@prowler.com",
@@ -16,7 +16,7 @@
"model": "api.user",
"pk": "b6493a3a-c997-489b-8b99-278bf74de9f6",
"fields": {
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
"password": "pbkdf2_sha256$720000$vA62S78kog2c2ytycVQdke$Fp35GVLLMyy5fUq3krSL9I02A+ocQ+RVa4S22LIAO5s=",
"last_login": null,
"name": "Devietoo Prowlerson",
"email": "dev2@prowler.com",
@@ -24,18 +24,5 @@
"is_active": true,
"date_joined": "2024-09-18T09:04:20.850Z"
}
},
{
"model": "api.user",
"pk": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
"fields": {
"password": "pbkdf2_sha256$870000$Z63pGJ7nre48hfcGbk5S0O$rQpKczAmijs96xa+gPVJifpT3Fetb8DOusl5Eq6gxac=",
"last_login": null,
"name": "E2E Test User",
"email": "e2e@prowler.com",
"company_name": "Prowler E2E Tests",
"is_active": true,
"date_joined": "2024-01-01T00:00:00.850Z"
}
}
]
@@ -46,24 +46,5 @@
"role": "member",
"date_joined": "2024-09-19T11:03:59.712Z"
}
},
{
"model": "api.tenant",
"pk": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
"fields": {
"inserted_at": "2024-01-01T00:00:00Z",
"updated_at": "2024-01-01T00:00:00Z",
"name": "E2E Test Tenant"
}
},
{
"model": "api.membership",
"pk": "9b1a2c3d-4e5f-6789-abc1-23456789def0",
"fields": {
"user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
"role": "owner",
"date_joined": "2024-01-01T00:00:00.000Z"
}
}
]
@@ -149,32 +149,5 @@
"user": "8b38e2eb-6689-4f1e-a4ba-95b275130200",
"inserted_at": "2024-11-20T15:36:14.302Z"
}
},
{
"model": "api.role",
"pk": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
"fields": {
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
"name": "e2e_admin",
"manage_users": true,
"manage_account": true,
"manage_billing": true,
"manage_providers": true,
"manage_integrations": true,
"manage_scans": true,
"unlimited_visibility": true,
"inserted_at": "2024-01-01T00:00:00.000Z",
"updated_at": "2024-01-01T00:00:00.000Z"
}
},
{
"model": "api.userrolerelationship",
"pk": "f1e2d3c4-b5a6-9876-5432-10fedcba9876",
"fields": {
"tenant": "7c8f94a3-e2d1-4b3a-9f87-2c4d5e6f1a2b",
"role": "a5b6c7d8-9e0f-1234-5678-90abcdef1234",
"user": "6d4f8a91-3c2e-4b5a-8f7d-1e9c5b2a4d6f",
"inserted_at": "2024-01-01T00:00:00.000Z"
}
}
]
File diff suppressed because one or more lines are too long
@@ -1,80 +0,0 @@
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand
from django.db import DEFAULT_DB_ALIAS, connection, connections, transaction
from django.db.migrations.recorder import MigrationRecorder
def table_exists(table_name):
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT EXISTS (
SELECT 1 FROM information_schema.tables
WHERE table_name = %s
)
""",
[table_name],
)
return cursor.fetchone()[0]
class Command(BaseCommand):
help = "Fix migration inconsistency between socialaccount and sites"
def add_arguments(self, parser):
parser.add_argument(
"--database",
default=DEFAULT_DB_ALIAS,
help="Specifies the database to operate on.",
)
def handle(self, *args, **options):
db = options["database"]
connection = connections[db]
recorder = MigrationRecorder(connection)
applied = set(recorder.applied_migrations())
has_social = ("socialaccount", "0001_initial") in applied
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_name = 'django_site'
);
"""
)
site_table_exists = cursor.fetchone()[0]
if has_social and not site_table_exists:
self.stdout.write(
f"Detected inconsistency in '{db}'. Creating 'django_site' table manually..."
)
with transaction.atomic(using=db):
with connection.schema_editor() as schema_editor:
schema_editor.create_model(Site)
recorder.record_applied("sites", "0001_initial")
recorder.record_applied("sites", "0002_alter_domain_unique")
self.stdout.write(
"Fixed: 'django_site' table created and migrations registered."
)
# Ensure the relationship table also exists
if not table_exists("socialaccount_socialapp_sites"):
self.stdout.write(
"Detected missing 'socialaccount_socialapp_sites' table. Creating manually..."
)
with connection.schema_editor() as schema_editor:
from allauth.socialaccount.models import SocialApp
schema_editor.create_model(
SocialApp._meta.get_field("sites").remote_field.through
)
self.stdout.write(
"Fixed: 'socialaccount_socialapp_sites' table created."
)
@@ -1,124 +0,0 @@
# Generated by Django 5.1.8 on 2025-05-21 11:37
import uuid
import django.db.models.deletion
from django.db import migrations, models
import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint
class Migration(migrations.Migration):
dependencies = [
("api", "0026_provider_secret_gcp_service_account"),
]
operations = [
migrations.CreateModel(
name="ComplianceRequirementOverview",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("compliance_id", models.TextField(blank=False)),
("framework", models.TextField(blank=False)),
("version", models.TextField(blank=True)),
("description", models.TextField(blank=True)),
("region", models.TextField(blank=False)),
("requirement_id", models.TextField(blank=False)),
(
"requirement_status",
api.db_utils.StatusEnumField(
choices=[
("FAIL", "Fail"),
("PASS", "Pass"),
("MANUAL", "Manual"),
]
),
),
("passed_checks", models.IntegerField(default=0)),
("failed_checks", models.IntegerField(default=0)),
("total_checks", models.IntegerField(default=0)),
(
"scan",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="compliance_requirements_overviews",
related_query_name="compliance_requirements_overview",
to="api.scan",
),
),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "compliance_requirements_overviews",
"abstract": False,
"indexes": [
models.Index(
fields=["tenant_id", "scan_id"], name="cro_tenant_scan_idx"
),
models.Index(
fields=["tenant_id", "scan_id", "compliance_id"],
name="cro_scan_comp_idx",
),
models.Index(
fields=["tenant_id", "scan_id", "compliance_id", "region"],
name="cro_scan_comp_reg_idx",
),
models.Index(
fields=[
"tenant_id",
"scan_id",
"compliance_id",
"requirement_id",
],
name="cro_scan_comp_req_idx",
),
models.Index(
fields=[
"tenant_id",
"scan_id",
"compliance_id",
"requirement_id",
"region",
],
name="cro_scan_comp_req_reg_idx",
),
],
"constraints": [
models.UniqueConstraint(
fields=(
"tenant_id",
"scan_id",
"compliance_id",
"requirement_id",
"region",
),
name="unique_tenant_compliance_requirement_overview",
)
],
},
),
migrations.AddConstraint(
model_name="ComplianceRequirementOverview",
constraint=RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_compliancerequirementoverview",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
]
@@ -1,29 +0,0 @@
from functools import partial
from django.db import migrations
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0027_compliance_requirement_overviews"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_check_idx",
columns="tenant_id, scan_id, check_id",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="findings",
index_name="find_tenant_scan_check_idx",
),
)
]
@@ -1,17 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0028_findings_check_index_partitions"),
]
operations = [
migrations.AddIndex(
model_name="finding",
index=models.Index(
fields=["tenant_id", "scan_id", "check_id"],
name="find_tenant_scan_check_idx",
),
),
]
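The two migrations above follow the usual Postgres recipe for indexing a partitioned table: first build the index on every existing child partition (in a non-atomic migration), then declare it on the parent so Django's model state matches; Postgres attaches the already-present partition indexes instead of rebuilding them. The sketch below only illustrates that idea and is not the actual implementation of api.db_utils.create_index_on_partitions.
from django.db import connection

def create_index_on_each_partition(parent_table: str, index_name: str, columns: str) -> None:
    # Illustrative sketch only; the real helper lives in api.db_utils.
    with connection.cursor() as cursor:
        # List the child partitions of the parent table.
        cursor.execute(
            "SELECT inhrelid::regclass::text FROM pg_inherits WHERE inhparent = %s::regclass",
            [parent_table],
        )
        partitions = [row[0] for row in cursor.fetchall()]
        for partition in partitions:
            # One index per partition; when the parent-level index is added later,
            # Postgres attaches these instead of rebuilding them.
            cursor.execute(
                f'CREATE INDEX IF NOT EXISTS "{partition}_{index_name}" '
                f"ON {partition} ({columns})"
            )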
@@ -1,107 +0,0 @@
# Generated by Django 5.1.10 on 2025-06-12 12:45
import uuid
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
import api.rls
class Migration(migrations.Migration):
dependencies = [
("api", "0029_findings_check_index_parent"),
]
operations = [
migrations.CreateModel(
name="LighthouseConfiguration",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"name",
models.CharField(
help_text="Name of the configuration",
max_length=100,
validators=[django.core.validators.MinLengthValidator(3)],
),
),
(
"api_key",
models.BinaryField(
help_text="Encrypted API key for the LLM service"
),
),
(
"model",
models.CharField(
choices=[
("gpt-4o-2024-11-20", "GPT-4o v2024-11-20"),
("gpt-4o-2024-08-06", "GPT-4o v2024-08-06"),
("gpt-4o-2024-05-13", "GPT-4o v2024-05-13"),
("gpt-4o", "GPT-4o Default"),
("gpt-4o-mini-2024-07-18", "GPT-4o Mini v2024-07-18"),
("gpt-4o-mini", "GPT-4o Mini Default"),
],
default="gpt-4o-2024-08-06",
help_text="Must be one of the supported model names",
max_length=50,
),
),
(
"temperature",
models.FloatField(default=0, help_text="Must be between 0 and 1"),
),
(
"max_tokens",
models.IntegerField(
default=4000, help_text="Must be between 500 and 5000"
),
),
(
"business_context",
models.TextField(
blank=True,
default="",
help_text="Additional business context for this AI model configuration",
),
),
("is_active", models.BooleanField(default=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "lighthouse_configurations",
"abstract": False,
"constraints": [
models.UniqueConstraint(
fields=("tenant_id",),
name="unique_lighthouse_config_per_tenant",
),
],
},
),
migrations.AddConstraint(
model_name="lighthouseconfiguration",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_lighthouseconfiguration",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
]
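The api_key column above is a BinaryField that stores a Fernet-encrypted secret rather than plaintext. A minimal sketch of the round trip, assuming the same SECRETS_ENCRYPTION_KEY setting the API models use (the LighthouseConfiguration model exposes this through its api_key_decoded property later in this diff).
from cryptography.fernet import Fernet
from django.conf import settings

fernet = Fernet(settings.SECRETS_ENCRYPTION_KEY.encode())

def encrypt_api_key(plaintext: str) -> bytes:
    # The returned ciphertext is what ends up in the api_key BinaryField.
    return fernet.encrypt(plaintext.encode())

def decrypt_api_key(ciphertext: bytes) -> str:
    # Raises cryptography.fernet.InvalidToken if the key or payload is wrong.
    return fernet.decrypt(bytes(ciphertext)).decode()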
@@ -1,24 +0,0 @@
# Generated by Django 5.1.10 on 2025-06-23 10:04
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0030_lighthouseconfiguration"),
("django_celery_beat", "0019_alter_periodictasks_options"),
]
operations = [
migrations.AlterField(
model_name="scan",
name="scheduler_task",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="django_celery_beat.periodictask",
),
),
]
-150
View File
@@ -1,150 +0,0 @@
# Generated by Django 5.1.10 on 2025-07-02 15:47
import uuid
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import api.db_utils
import api.rls
class Migration(migrations.Migration):
dependencies = [
("api", "0031_scan_disable_on_cascade_periodic_tasks"),
]
operations = [
migrations.AlterField(
model_name="integration",
name="integration_type",
field=api.db_utils.IntegrationTypeEnumField(
choices=[
("amazon_s3", "Amazon S3"),
("aws_security_hub", "AWS Security Hub"),
("jira", "JIRA"),
("slack", "Slack"),
]
),
),
migrations.CreateModel(
name="SAMLToken",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
("expires_at", models.DateTimeField(editable=False)),
("token", models.JSONField(unique=True)),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"db_table": "saml_tokens",
},
),
migrations.CreateModel(
name="SAMLConfiguration",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
(
"email_domain",
models.CharField(
help_text="Email domain used to identify the tenant, e.g. prowlerdemo.com",
max_length=254,
unique=True,
),
),
(
"metadata_xml",
models.TextField(
help_text="Raw IdP metadata XML to configure SingleSignOnService, certificates, etc."
),
),
("created_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "saml_configurations",
},
),
migrations.AddConstraint(
model_name="samlconfiguration",
constraint=api.rls.RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_samlconfiguration",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddConstraint(
model_name="samlconfiguration",
constraint=models.UniqueConstraint(
fields=("tenant",), name="unique_samlconfig_per_tenant"
),
),
migrations.CreateModel(
name="SAMLDomainIndex",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("email_domain", models.CharField(max_length=254, unique=True)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "saml_domain_index",
},
),
migrations.AddConstraint(
model_name="samldomainindex",
constraint=models.UniqueConstraint(
fields=("email_domain", "tenant"),
name="unique_resources_by_email_domain",
),
),
migrations.AddConstraint(
model_name="samldomainindex",
constraint=api.rls.BaseSecurityConstraint(
name="statements_on_samldomainindex",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
]
@@ -1,34 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-03 15:46
from functools import partial
from django.db import migrations
from api.db_utils import PostgresEnumMigration, ProcessorTypeEnum, register_enum
from api.models import Processor
ProcessorTypeEnumMigration = PostgresEnumMigration(
enum_name="processor_type",
enum_values=tuple(
processor_type[0] for processor_type in Processor.ProcessorChoices.choices
),
)
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0032_saml"),
]
operations = [
migrations.RunPython(
ProcessorTypeEnumMigration.create_enum_type,
reverse_code=ProcessorTypeEnumMigration.drop_enum_type,
),
migrations.RunPython(
partial(register_enum, enum_class=ProcessorTypeEnum),
reverse_code=migrations.RunPython.noop,
),
]
@@ -1,88 +0,0 @@
# Generated by Django 5.1.5 on 2025-03-26 13:04
import uuid
import django.db.models.deletion
from django.db import migrations, models
import api.db_utils
import api.rls
from api.rls import RowLevelSecurityConstraint
class Migration(migrations.Migration):
dependencies = [
("api", "0033_processors_enum"),
]
operations = [
migrations.CreateModel(
name="Processor",
fields=[
(
"id",
models.UUIDField(
default=uuid.uuid4,
editable=False,
primary_key=True,
serialize=False,
),
),
("inserted_at", models.DateTimeField(auto_now_add=True)),
("updated_at", models.DateTimeField(auto_now=True)),
(
"processor_type",
api.db_utils.ProcessorTypeEnumField(
choices=[("mutelist", "Mutelist")]
),
),
("configuration", models.JSONField(default=dict)),
(
"tenant",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
),
),
],
options={
"db_table": "processors",
"abstract": False,
"indexes": [
models.Index(
fields=["tenant_id", "id"], name="processor_tenant_id_idx"
),
models.Index(
fields=["tenant_id", "processor_type"],
name="processor_tenant_type_idx",
),
],
},
),
migrations.AddConstraint(
model_name="processor",
constraint=models.UniqueConstraint(
fields=("tenant_id", "processor_type"),
name="unique_processor_types_tenant",
),
),
migrations.AddConstraint(
model_name="processor",
constraint=RowLevelSecurityConstraint(
"tenant_id",
name="rls_on_processor",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
),
migrations.AddField(
model_name="scan",
name="processor",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
related_name="scans",
related_query_name="scan",
to="api.processor",
),
),
]
@@ -1,22 +0,0 @@
import django.core.validators
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0034_processors"),
]
operations = [
migrations.AddField(
model_name="finding",
name="muted_reason",
field=models.TextField(
blank=True,
max_length=500,
null=True,
validators=[django.core.validators.MinLengthValidator(3)],
),
),
]
@@ -1,30 +0,0 @@
from functools import partial
from django.db import migrations
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0035_finding_muted_reason"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="resource_finding_mappings",
index_name="rfm_tenant_finding_idx",
columns="tenant_id, finding_id",
method="BTREE",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="resource_finding_mappings",
index_name="rfm_tenant_finding_idx",
),
),
]
@@ -1,17 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0036_rfm_tenant_finding_index_partitions"),
]
operations = [
migrations.AddIndex(
model_name="resourcefindingmapping",
index=models.Index(
fields=["tenant_id", "finding_id"],
name="rfm_tenant_finding_idx",
),
),
]
@@ -1,15 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0037_rfm_tenant_finding_index_parent"),
]
operations = [
migrations.AddField(
model_name="resource",
name="failed_findings_count",
field=models.IntegerField(default=0),
)
]
@@ -1,20 +0,0 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
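# AddIndexConcurrently cannot run inside a transaction, hence atomic = False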
dependencies = [
("api", "0038_resource_failed_findings_count"),
]
operations = [
AddIndexConcurrently(
model_name="resource",
index=models.Index(
fields=["tenant_id", "-failed_findings_count", "id"],
name="resources_failed_findings_idx",
),
),
]
@@ -1,30 +0,0 @@
from functools import partial
from django.db import migrations
from api.db_utils import create_index_on_partitions, drop_index_on_partitions
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0039_resource_resources_failed_findings_idx"),
]
operations = [
migrations.RunPython(
partial(
create_index_on_partitions,
parent_table="resource_finding_mappings",
index_name="rfm_tenant_resource_idx",
columns="tenant_id, resource_id",
method="BTREE",
),
reverse_code=partial(
drop_index_on_partitions,
parent_table="resource_finding_mappings",
index_name="rfm_tenant_resource_idx",
),
),
]
@@ -1,17 +0,0 @@
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0040_rfm_tenant_resource_index_partitions"),
]
operations = [
migrations.AddIndex(
model_name="resourcefindingmapping",
index=models.Index(
fields=["tenant_id", "resource_id"],
name="rfm_tenant_resource_idx",
),
),
]
@@ -1,23 +0,0 @@
from django.contrib.postgres.operations import AddIndexConcurrently
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
("api", "0041_rfm_tenant_resource_parent_partitions"),
("django_celery_beat", "0019_alter_periodictasks_options"),
]
operations = [
AddIndexConcurrently(
model_name="scan",
index=models.Index(
condition=models.Q(("state", "completed")),
fields=["tenant_id", "provider_id", "-inserted_at"],
include=("id",),
name="scans_prov_ins_desc_idx",
),
),
]
@@ -1,33 +0,0 @@
# Generated by Django 5.1.7 on 2025-07-09 14:44
from django.db import migrations
import api.db_utils
class Migration(migrations.Migration):
dependencies = [
("api", "0042_scan_scans_prov_ins_desc_idx"),
]
operations = [
migrations.AlterField(
model_name="provider",
name="provider",
field=api.db_utils.ProviderEnumField(
choices=[
("aws", "AWS"),
("azure", "Azure"),
("gcp", "GCP"),
("kubernetes", "Kubernetes"),
("m365", "M365"),
("github", "GitHub"),
],
default="aws",
),
),
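# Postgres cannot drop a value from an enum type, so the reverse of this RunSQL is a no-op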
migrations.RunSQL(
"ALTER TYPE provider ADD VALUE IF NOT EXISTS 'github';",
reverse_sql=migrations.RunSQL.noop,
),
]
@@ -1,19 +0,0 @@
# Generated by Django 5.1.10 on 2025-07-17 11:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0043_github_provider"),
]
operations = [
migrations.AddConstraint(
model_name="integration",
constraint=models.UniqueConstraint(
fields=("configuration", "tenant"),
name="unique_configuration_per_tenant",
),
),
]
@@ -1,17 +0,0 @@
# Generated by Django 5.1.10 on 2025-07-21 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0044_integration_unique_configuration_per_tenant"),
]
operations = [
migrations.AlterField(
model_name="scan",
name="output_location",
field=models.CharField(blank=True, max_length=4096, null=True),
),
]
@@ -1,33 +0,0 @@
# Generated by Django 5.1.10 on 2025-08-20 09:04
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0045_alter_scan_output_location"),
]
operations = [
migrations.AlterField(
model_name="lighthouseconfiguration",
name="model",
field=models.CharField(
choices=[
("gpt-4o-2024-11-20", "GPT-4o v2024-11-20"),
("gpt-4o-2024-08-06", "GPT-4o v2024-08-06"),
("gpt-4o-2024-05-13", "GPT-4o v2024-05-13"),
("gpt-4o", "GPT-4o Default"),
("gpt-4o-mini-2024-07-18", "GPT-4o Mini v2024-07-18"),
("gpt-4o-mini", "GPT-4o Mini Default"),
("gpt-5-2025-08-07", "GPT-5 v2025-08-07"),
("gpt-5", "GPT-5 Default"),
("gpt-5-mini-2025-08-07", "GPT-5 Mini v2025-08-07"),
("gpt-5-mini", "GPT-5 Mini Default"),
],
default="gpt-4o-2024-08-06",
help_text="Must be one of the supported model names",
max_length=50,
),
),
]
@@ -1,16 +0,0 @@
# Generated by Django 5.1.10 on 2025-08-20 08:24
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("api", "0046_lighthouse_gpt5"),
]
operations = [
migrations.RemoveConstraint(
model_name="integration",
name="unique_configuration_per_tenant",
),
]
+61 -602
View File
@@ -1,21 +1,15 @@
import json
import logging
import re
import xml.etree.ElementTree as ET
from datetime import datetime, timedelta, timezone
import time
from uuid import UUID, uuid4
from allauth.socialaccount.models import SocialApp
from config.custom_logging import BackendLogger
from config.settings.social_login import SOCIALACCOUNT_PROVIDERS
from cryptography.fernet import Fernet, InvalidToken
from config.env import env
from cryptography.fernet import Fernet
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.indexes import GinIndex
from django.contrib.postgres.search import SearchVector, SearchVectorField
from django.contrib.sites.models import Site
from django.core.exceptions import ValidationError
from django.core.validators import MinLengthValidator
from django.db import models
from django.db.models import Q
@@ -27,14 +21,12 @@ from psqlextra.models import PostgresPartitionedModel
from psqlextra.types import PostgresPartitioningMethod
from uuid6 import uuid7
from api.db_router import MainRouter
from api.db_utils import (
CustomUserManager,
FindingDeltaEnumField,
IntegrationTypeEnumField,
InvitationStateEnumField,
MemberRoleEnumField,
ProcessorTypeEnumField,
ProviderEnumField,
ProviderSecretTypeEnumField,
ScanTriggerEnumField,
@@ -59,8 +51,6 @@ fernet = Fernet(settings.SECRETS_ENCRYPTION_KEY.encode())
# Convert Prowler Severity enum to Django TextChoices
SeverityChoices = enum_to_choices(Severity)
logger = logging.getLogger(BackendLogger.API)
class StatusChoices(models.TextChoices):
"""
@@ -74,15 +64,6 @@ class StatusChoices(models.TextChoices):
MANUAL = "MANUAL", _("Manual")
class OverviewStatusChoices(models.TextChoices):
"""
Status filters allowed in overview/severity endpoints.
"""
FAIL = "FAIL", _("Fail")
PASS = "PASS", _("Pass")
class StateChoices(models.TextChoices):
AVAILABLE = "available", _("Available")
SCHEDULED = "scheduled", _("Scheduled")
@@ -214,7 +195,6 @@ class Provider(RowLevelSecurityProtectedModel):
GCP = "gcp", _("GCP")
KUBERNETES = "kubernetes", _("Kubernetes")
M365 = "m365", _("M365")
GITHUB = "github", _("GitHub")
@staticmethod
def validate_aws_uid(value):
@@ -275,16 +255,6 @@ class Provider(RowLevelSecurityProtectedModel):
pointer="/data/attributes/uid",
)
@staticmethod
def validate_github_uid(value):
if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9-]{0,38}$", value):
raise ModelValidationError(
detail="GitHub provider ID must be a valid GitHub username or organization name (1-39 characters, "
"starting with alphanumeric, containing only alphanumeric characters and hyphens).",
code="github-uid",
pointer="/data/attributes/uid",
)
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
@@ -384,6 +354,42 @@ class ProviderGroupMembership(RowLevelSecurityProtectedModel):
resource_name = "provider_groups-provider"
class TaskManager(models.Manager):
def get_with_retry(
self,
id: str,
max_retries: int = None,
delay_seconds: float = None,
):
"""
Retry fetching a Task by ID in case it hasn't been created yet.
Args:
id (str): The Celery task ID (expected to match Task model PK).
max_retries (int, optional): Number of retry attempts. Defaults to env TASK_RETRY_ATTEMPTS or 5.
delay_seconds (float, optional): Delay between retries in seconds. Defaults to env TASK_RETRY_DELAY_SECONDS or 0.1.
Returns:
Task: The retrieved Task instance.
Raises:
Task.DoesNotExist: If the task is not found after all retries.
"""
max_retries = max_retries or env.int("TASK_RETRY_ATTEMPTS", default=5)
delay_seconds = delay_seconds or env.float(
"TASK_RETRY_DELAY_SECONDS", default=0.1
)
for _attempt in range(max_retries):
try:
return self.get(id=id)
except self.model.DoesNotExist:
time.sleep(delay_seconds)
raise self.model.DoesNotExist(
f"Task with ID {id} not found after {max_retries} retries."
)
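A minimal usage sketch for the retry helper above, assuming it is attached to Task as the default manager (which happens a few lines below with objects = TaskManager()); the Celery task id is illustrative.
from api.models import Task

def wait_for_task_row(celery_task_id: str) -> Task:
    # Fetch the DB row for a freshly queued Celery task, retrying briefly
    # because the row may not have been committed yet.
    return Task.objects.get_with_retry(id=celery_task_id)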
class Task(RowLevelSecurityProtectedModel):
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
@@ -396,6 +402,8 @@ class Task(RowLevelSecurityProtectedModel):
blank=True,
)
objects = TaskManager()
class Meta(RowLevelSecurityProtectedModel.Meta):
db_table = "tasks"
@@ -430,6 +438,20 @@ class Scan(RowLevelSecurityProtectedModel):
name = models.CharField(
blank=True, null=True, max_length=100, validators=[MinLengthValidator(3)]
)
provider = models.ForeignKey(
Provider,
on_delete=models.CASCADE,
related_name="scans",
related_query_name="scan",
)
task = models.ForeignKey(
Task,
on_delete=models.CASCADE,
related_name="scans",
related_query_name="scan",
null=True,
blank=True,
)
trigger = ScanTriggerEnumField(
choices=TriggerChoices.choices,
)
@@ -445,31 +467,11 @@ class Scan(RowLevelSecurityProtectedModel):
completed_at = models.DateTimeField(null=True, blank=True)
next_scan_at = models.DateTimeField(null=True, blank=True)
scheduler_task = models.ForeignKey(
PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
)
output_location = models.CharField(blank=True, null=True, max_length=4096)
provider = models.ForeignKey(
Provider,
on_delete=models.CASCADE,
related_name="scans",
related_query_name="scan",
)
task = models.ForeignKey(
Task,
on_delete=models.CASCADE,
related_name="scans",
related_query_name="scan",
null=True,
blank=True,
)
processor = models.ForeignKey(
"Processor",
on_delete=models.SET_NULL,
related_name="scans",
related_query_name="scan",
null=True,
blank=True,
PeriodicTask, on_delete=models.CASCADE, null=True, blank=True
)
output_location = models.CharField(blank=True, null=True, max_length=200)
# TODO: mutelist foreign key
class Meta(RowLevelSecurityProtectedModel.Meta):
db_table = "scans"
@@ -496,13 +498,6 @@ class Scan(RowLevelSecurityProtectedModel):
condition=Q(state=StateChoices.COMPLETED),
name="scans_prov_state_ins_desc_idx",
),
# TODO This might replace `scans_prov_state_ins_desc_idx` completely. Review usage
models.Index(
fields=["tenant_id", "provider_id", "-inserted_at"],
condition=Q(state=StateChoices.COMPLETED),
include=["id"],
name="scans_prov_ins_desc_idx",
),
]
class JSONAPIMeta:
@@ -588,8 +583,6 @@ class Resource(RowLevelSecurityProtectedModel):
details = models.TextField(blank=True, null=True)
partition = models.TextField(blank=True, null=True)
failed_findings_count = models.IntegerField(default=0)
# Relationships
tags = models.ManyToManyField(
ResourceTag,
@@ -636,10 +629,6 @@ class Resource(RowLevelSecurityProtectedModel):
fields=["tenant_id", "provider_id"],
name="resources_tenant_provider_idx",
),
models.Index(
fields=["tenant_id", "-failed_findings_count", "id"],
name="resources_failed_findings_idx",
),
]
constraints = [
@@ -738,9 +727,6 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
check_id = models.CharField(max_length=100, blank=False, null=False)
check_metadata = models.JSONField(default=dict, null=False)
muted = models.BooleanField(default=False, null=False)
muted_reason = models.TextField(
blank=True, null=True, validators=[MinLengthValidator(3)], max_length=500
)
compliance = models.JSONField(default=dict, null=True, blank=True)
# Denormalize resource data for performance
@@ -816,10 +802,6 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
GinIndex(fields=["resource_services"], name="gin_find_service_idx"),
GinIndex(fields=["resource_regions"], name="gin_find_region_idx"),
GinIndex(fields=["resource_types"], name="gin_find_rtype_idx"),
models.Index(
fields=["tenant_id", "scan_id", "check_id"],
name="find_tenant_scan_check_idx",
),
]
class JSONAPIMeta:
@@ -882,16 +864,6 @@ class ResourceFindingMapping(PostgresPartitionedModel, RowLevelSecurityProtected
# - tenant_id
# - id
indexes = [
models.Index(
fields=["tenant_id", "finding_id"],
name="rfm_tenant_finding_idx",
),
models.Index(
fields=["tenant_id", "resource_id"],
name="rfm_tenant_resource_idx",
),
]
constraints = [
models.UniqueConstraint(
fields=("tenant_id", "resource_id", "finding_id"),
@@ -996,11 +968,6 @@ class Invitation(RowLevelSecurityProtectedModel):
null=True,
)
def save(self, *args, **kwargs):
if self.email:
self.email = self.email.strip().lower()
super().save(*args, **kwargs)
class Meta(RowLevelSecurityProtectedModel.Meta):
db_table = "invitations"
@@ -1216,78 +1183,6 @@ class ComplianceOverview(RowLevelSecurityProtectedModel):
resource_name = "compliance-overviews"
class ComplianceRequirementOverview(RowLevelSecurityProtectedModel):
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
compliance_id = models.TextField(blank=False)
framework = models.TextField(blank=False)
version = models.TextField(blank=True)
description = models.TextField(blank=True)
region = models.TextField(blank=False)
requirement_id = models.TextField(blank=False)
requirement_status = StatusEnumField(choices=StatusChoices)
passed_checks = models.IntegerField(default=0)
failed_checks = models.IntegerField(default=0)
total_checks = models.IntegerField(default=0)
scan = models.ForeignKey(
Scan,
on_delete=models.CASCADE,
related_name="compliance_requirements_overviews",
related_query_name="compliance_requirements_overview",
)
class Meta(RowLevelSecurityProtectedModel.Meta):
db_table = "compliance_requirements_overviews"
constraints = [
models.UniqueConstraint(
fields=(
"tenant_id",
"scan_id",
"compliance_id",
"requirement_id",
"region",
),
name="unique_tenant_compliance_requirement_overview",
),
RowLevelSecurityConstraint(
field="tenant_id",
name="rls_on_%(class)s",
statements=["SELECT", "INSERT", "DELETE"],
),
]
indexes = [
models.Index(fields=["tenant_id", "scan_id"], name="cro_tenant_scan_idx"),
models.Index(
fields=["tenant_id", "scan_id", "compliance_id"],
name="cro_scan_comp_idx",
),
models.Index(
fields=["tenant_id", "scan_id", "compliance_id", "region"],
name="cro_scan_comp_reg_idx",
),
models.Index(
fields=["tenant_id", "scan_id", "compliance_id", "requirement_id"],
name="cro_scan_comp_req_idx",
),
models.Index(
fields=[
"tenant_id",
"scan_id",
"compliance_id",
"requirement_id",
"region",
],
name="cro_scan_comp_req_reg_idx",
),
]
class JSONAPIMeta:
resource_name = "compliance-requirements-overviews"
class ScanSummary(RowLevelSecurityProtectedModel):
objects = ActiveProviderManager()
all_objects = models.Manager()
@@ -1355,7 +1250,8 @@ class ScanSummary(RowLevelSecurityProtectedModel):
class Integration(RowLevelSecurityProtectedModel):
class IntegrationChoices(models.TextChoices):
AMAZON_S3 = "amazon_s3", _("Amazon S3")
S3 = "amazon_s3", _("Amazon S3")
SAML = "saml", _("SAML")
AWS_SECURITY_HUB = "aws_security_hub", _("AWS Security Hub")
JIRA = "jira", _("JIRA")
SLACK = "slack", _("Slack")
@@ -1429,273 +1325,6 @@ class IntegrationProviderRelationship(RowLevelSecurityProtectedModel):
]
class SAMLToken(models.Model):
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
expires_at = models.DateTimeField(editable=False)
token = models.JSONField(unique=True)
user = models.ForeignKey(User, on_delete=models.CASCADE)
class Meta:
db_table = "saml_tokens"
def save(self, *args, **kwargs):
if not self.expires_at:
self.expires_at = datetime.now(timezone.utc) + timedelta(seconds=15)
super().save(*args, **kwargs)
def is_expired(self) -> bool:
return datetime.now(timezone.utc) >= self.expires_at
class SAMLDomainIndex(models.Model):
"""
Public index of SAML domains. No RLS. Used for fast lookup in SAML login flow.
"""
email_domain = models.CharField(max_length=254, unique=True)
tenant = models.ForeignKey("Tenant", on_delete=models.CASCADE)
class Meta:
db_table = "saml_domain_index"
constraints = [
models.UniqueConstraint(
fields=("email_domain", "tenant"),
name="unique_resources_by_email_domain",
),
BaseSecurityConstraint(
name="statements_on_%(class)s",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
]
class SAMLConfiguration(RowLevelSecurityProtectedModel):
"""
Stores per-tenant SAML settings, including email domain and IdP metadata.
Automatically syncs to a SocialApp instance on save.
Note:
This model exists to provide a tenant-aware abstraction over SAML configuration.
It supports row-level security, custom validation, and metadata parsing, enabling
Prowler to expose a clean API and admin interface for managing SAML integrations.
Although Django Allauth uses the SocialApp model to store provider configuration,
it is not designed for multi-tenant use. SocialApp lacks support for tenant scoping,
email domain mapping, and structured metadata handling.
By managing SAMLConfiguration separately, we ensure:
- Strong isolation between tenants via RLS.
- Ownership of raw IdP metadata and its validation.
- An explicit link between SAML config and business-level identifiers (e.g. email domain).
- Programmatic transformation into the SocialApp format used by Allauth.
In short, this model acts as a secure and user-friendly layer over Allauth's lower-level primitives.
"""
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
email_domain = models.CharField(
max_length=254,
unique=True,
help_text="Email domain used to identify the tenant, e.g. prowlerdemo.com",
)
metadata_xml = models.TextField(
help_text="Raw IdP metadata XML to configure SingleSignOnService, certificates, etc."
)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class JSONAPIMeta:
resource_name = "saml-configurations"
class Meta:
db_table = "saml_configurations"
constraints = [
RowLevelSecurityConstraint(
field="tenant_id",
name="rls_on_%(class)s",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
# 1 config per tenant
models.UniqueConstraint(
fields=["tenant"],
name="unique_samlconfig_per_tenant",
),
]
def clean(self, old_email_domain=None, is_create=False):
# Domain must not contain @
if "@" in self.email_domain:
raise ValidationError({"email_domain": "Domain must not contain @"})
# Enforce at most one config per tenant
qs = SAMLConfiguration.objects.filter(tenant=self.tenant)
# Exclude ourselves in case of update
if self.pk:
qs = qs.exclude(pk=self.pk)
if qs.exists():
raise ValidationError(
{"tenant": "A SAML configuration already exists for this tenant."}
)
# The email domain must be unique in the entire system
qs = SAMLConfiguration.objects.using(MainRouter.admin_db).filter(
email_domain__iexact=self.email_domain
)
if qs.exists() and old_email_domain != self.email_domain:
raise ValidationError(
{"tenant": "There is a problem with your email domain."}
)
# The entityID must be unique in the system
idp_settings = self._parsed_metadata
entity_id = idp_settings.get("entity_id")
if entity_id:
# Find any SocialApp with this entityID
q = SocialApp.objects.filter(provider="saml", provider_id=entity_id)
# If updating, exclude our own SocialApp from the check
if not is_create:
q = q.exclude(client_id=old_email_domain)
else:
q = q.exclude(client_id=self.email_domain)
if q.exists():
raise ValidationError(
{"metadata_xml": "There is a problem with your metadata."}
)
def save(self, *args, **kwargs):
self.email_domain = self.email_domain.strip().lower()
is_create = not SAMLConfiguration.objects.filter(pk=self.pk).exists()
if not is_create:
old = SAMLConfiguration.objects.get(pk=self.pk)
old_email_domain = old.email_domain
old_metadata_xml = old.metadata_xml
else:
old_email_domain = None
old_metadata_xml = None
self._parsed_metadata = self._parse_metadata()
self.clean(old_email_domain, is_create)
super().save(*args, **kwargs)
if is_create or (
old_email_domain != self.email_domain
or old_metadata_xml != self.metadata_xml
):
self._sync_social_app(old_email_domain)
# Sync the public index
if not is_create and old_email_domain and old_email_domain != self.email_domain:
SAMLDomainIndex.objects.filter(email_domain=old_email_domain).delete()
# Create/update the new domain index
SAMLDomainIndex.objects.update_or_create(
email_domain=self.email_domain, defaults={"tenant": self.tenant}
)
def delete(self, *args, **kwargs):
super().delete(*args, **kwargs)
SocialApp.objects.filter(provider="saml", client_id=self.email_domain).delete()
SAMLDomainIndex.objects.filter(email_domain=self.email_domain).delete()
def _parse_metadata(self):
"""
Parse the raw IdP metadata XML and extract:
- entity_id
- sso_url
- slo_url (may be None)
- x509cert (required)
"""
ns = {
"md": "urn:oasis:names:tc:SAML:2.0:metadata",
"ds": "http://www.w3.org/2000/09/xmldsig#",
}
try:
root = ET.fromstring(self.metadata_xml)
except ET.ParseError as e:
raise ValidationError({"metadata_xml": f"Invalid XML: {e}"})
# Entity ID
entity_id = root.attrib.get("entityID")
if not entity_id:
raise ValidationError({"metadata_xml": "Missing entityID in metadata."})
# SSO endpoint (must exist)
sso = root.find(".//md:IDPSSODescriptor/md:SingleSignOnService", ns)
if sso is None or "Location" not in sso.attrib:
raise ValidationError(
{"metadata_xml": "Missing SingleSignOnService in metadata."}
)
sso_url = sso.attrib["Location"]
# SLO endpoint (optional)
slo = root.find(".//md:IDPSSODescriptor/md:SingleLogoutService", ns)
slo_url = slo.attrib.get("Location") if slo is not None else None
# X.509 certificate (required)
cert = root.find(
'.//md:KeyDescriptor[@use="signing"]/ds:KeyInfo/ds:X509Data/ds:X509Certificate',
ns,
)
if cert is None or not cert.text or not cert.text.strip():
raise ValidationError(
{
"metadata_xml": 'Metadata must include a <ds:X509Certificate> under <KeyDescriptor use="signing">.'
}
)
x509cert = cert.text.strip()
return {
"entity_id": entity_id,
"sso_url": sso_url,
"slo_url": slo_url,
"x509cert": x509cert,
}
def _sync_social_app(self, previous_email_domain=None):
"""
Create or update the corresponding SocialApp based on email_domain.
If the domain changed, update the matching SocialApp.
"""
settings_dict = SOCIALACCOUNT_PROVIDERS["saml"].copy()
settings_dict["idp"] = self._parsed_metadata
current_site = Site.objects.get(id=settings.SITE_ID)
social_app_qs = SocialApp.objects.filter(
provider="saml", client_id=previous_email_domain or self.email_domain
)
client_id = self.email_domain[:191]
name = f"SAML-{self.email_domain}"[:40]
if social_app_qs.exists():
social_app = social_app_qs.first()
social_app.client_id = client_id
social_app.name = name
social_app.settings = settings_dict
social_app.provider_id = self._parsed_metadata["entity_id"]
social_app.save()
social_app.sites.set([current_site])
else:
social_app = SocialApp.objects.create(
provider="saml",
client_id=client_id,
name=name,
settings=settings_dict,
provider_id=self._parsed_metadata["entity_id"],
)
social_app.sites.set([current_site])
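A minimal sketch of the input _parse_metadata expects, expressed as a configuration object a tenant admin would save; every value below is a placeholder, but the element layout (EntityDescriptor entityID, an IDPSSODescriptor with a SingleSignOnService, and a signing KeyDescriptor carrying an X509Certificate) mirrors what the parser above requires.
EXAMPLE_IDP_METADATA = """\
<md:EntityDescriptor xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata"
                     xmlns:ds="http://www.w3.org/2000/09/xmldsig#"
                     entityID="https://idp.example.com/metadata">
  <md:IDPSSODescriptor>
    <md:KeyDescriptor use="signing">
      <ds:KeyInfo><ds:X509Data><ds:X509Certificate>MIIC...placeholder...</ds:X509Certificate></ds:X509Data></ds:KeyInfo>
    </md:KeyDescriptor>
    <md:SingleSignOnService Location="https://idp.example.com/sso"/>
    <md:SingleLogoutService Location="https://idp.example.com/slo"/>
  </md:IDPSSODescriptor>
</md:EntityDescriptor>
"""

def configure_saml(tenant, metadata_xml: str = EXAMPLE_IDP_METADATA) -> "SAMLConfiguration":
    # save() parses and validates the metadata, syncs the SocialApp and
    # keeps the public SAMLDomainIndex in step with the email domain.
    config = SAMLConfiguration(
        tenant=tenant,  # assumed to already exist
        email_domain="prowlerdemo.com",
        metadata_xml=metadata_xml,
    )
    config.save()
    return config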
class ResourceScanSummary(RowLevelSecurityProtectedModel):
scan_id = models.UUIDField(default=uuid7, db_index=True)
resource_id = models.UUIDField(default=uuid4, db_index=True)
@@ -1743,173 +1372,3 @@ class ResourceScanSummary(RowLevelSecurityProtectedModel):
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
]
class LighthouseConfiguration(RowLevelSecurityProtectedModel):
"""
Stores configuration and API keys for LLM services.
"""
class ModelChoices(models.TextChoices):
GPT_4O_2024_11_20 = "gpt-4o-2024-11-20", _("GPT-4o v2024-11-20")
GPT_4O_2024_08_06 = "gpt-4o-2024-08-06", _("GPT-4o v2024-08-06")
GPT_4O_2024_05_13 = "gpt-4o-2024-05-13", _("GPT-4o v2024-05-13")
GPT_4O = "gpt-4o", _("GPT-4o Default")
GPT_4O_MINI_2024_07_18 = "gpt-4o-mini-2024-07-18", _("GPT-4o Mini v2024-07-18")
GPT_4O_MINI = "gpt-4o-mini", _("GPT-4o Mini Default")
GPT_5_2025_08_07 = "gpt-5-2025-08-07", _("GPT-5 v2025-08-07")
GPT_5 = "gpt-5", _("GPT-5 Default")
GPT_5_MINI_2025_08_07 = "gpt-5-mini-2025-08-07", _("GPT-5 Mini v2025-08-07")
GPT_5_MINI = "gpt-5-mini", _("GPT-5 Mini Default")
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
name = models.CharField(
max_length=100,
validators=[MinLengthValidator(3)],
blank=False,
null=False,
help_text="Name of the configuration",
)
api_key = models.BinaryField(
blank=False, null=False, help_text="Encrypted API key for the LLM service"
)
model = models.CharField(
max_length=50,
choices=ModelChoices.choices,
blank=False,
null=False,
default=ModelChoices.GPT_4O_2024_08_06,
help_text="Must be one of the supported model names",
)
temperature = models.FloatField(default=0, help_text="Must be between 0 and 1")
max_tokens = models.IntegerField(
default=4000, help_text="Must be between 500 and 5000"
)
business_context = models.TextField(
blank=True,
null=False,
default="",
help_text="Additional business context for this AI model configuration",
)
is_active = models.BooleanField(default=True)
def __str__(self):
return self.name
def clean(self):
super().clean()
# Validate temperature
if not 0 <= self.temperature <= 1:
raise ModelValidationError(
detail="Temperature must be between 0 and 1",
code="invalid_temperature",
pointer="/data/attributes/temperature",
)
# Validate max_tokens
if not 500 <= self.max_tokens <= 5000:
raise ModelValidationError(
detail="Max tokens must be between 500 and 5000",
code="invalid_max_tokens",
pointer="/data/attributes/max_tokens",
)
@property
def api_key_decoded(self):
"""Return the decrypted API key, or None if unavailable or invalid."""
if not self.api_key:
return None
try:
decrypted_key = fernet.decrypt(bytes(self.api_key))
return decrypted_key.decode()
except InvalidToken:
logger.warning("Invalid token while decrypting API key.")
except Exception as e:
logger.exception("Unexpected error while decrypting API key: %s", e)
@api_key_decoded.setter
def api_key_decoded(self, value):
"""Store the encrypted API key."""
if not value:
raise ModelValidationError(
detail="API key is required",
code="invalid_api_key",
pointer="/data/attributes/api_key",
)
# Validate OpenAI API key format
openai_key_pattern = r"^sk-[\w-]+T3BlbkFJ[\w-]+$"
if not re.match(openai_key_pattern, value):
raise ModelValidationError(
detail="Invalid OpenAI API key format.",
code="invalid_api_key",
pointer="/data/attributes/api_key",
)
self.api_key = fernet.encrypt(value.encode())
def save(self, *args, **kwargs):
self.full_clean()
super().save(*args, **kwargs)
class Meta(RowLevelSecurityProtectedModel.Meta):
db_table = "lighthouse_configurations"
constraints = [
RowLevelSecurityConstraint(
field="tenant_id",
name="rls_on_%(class)s",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
# Only one Lighthouse configuration is allowed per tenant
models.UniqueConstraint(
fields=["tenant_id"], name="unique_lighthouse_config_per_tenant"
),
]
class JSONAPIMeta:
resource_name = "lighthouse-configurations"
class Processor(RowLevelSecurityProtectedModel):
class ProcessorChoices(models.TextChoices):
MUTELIST = "mutelist", _("Mutelist")
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
updated_at = models.DateTimeField(auto_now=True, editable=False)
processor_type = ProcessorTypeEnumField(choices=ProcessorChoices.choices)
configuration = models.JSONField(default=dict)
class Meta(RowLevelSecurityProtectedModel.Meta):
db_table = "processors"
constraints = [
models.UniqueConstraint(
fields=("tenant_id", "processor_type"),
name="unique_processor_types_tenant",
),
RowLevelSecurityConstraint(
field="tenant_id",
name="rls_on_%(class)s",
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
),
]
indexes = [
models.Index(
fields=["tenant_id", "id"],
name="processor_tenant_id_idx",
),
models.Index(
fields=["tenant_id", "processor_type"],
name="processor_tenant_type_idx",
),
]
class JSONAPIMeta:
resource_name = "processors"
+1 -1
View File
@@ -1,4 +1,4 @@
from drf_spectacular_jsonapi.schemas.pagination import JsonApiPageNumberPagination
from rest_framework_json_api.pagination import JsonApiPageNumberPagination
class ComplianceOverviewPagination(JsonApiPageNumberPagination):
-95
View File
@@ -1,95 +0,0 @@
def _pick_task_response_component(components):
schemas = components.get("schemas", {}) or {}
for candidate in ("TaskResponse",):
if candidate in schemas:
return candidate
return None
def _extract_task_example_from_components(components):
schemas = components.get("schemas", {}) or {}
candidate = "TaskResponse"
doc = schemas.get(candidate)
if isinstance(doc, dict) and "example" in doc:
return doc["example"]
res = schemas.get(candidate)
if isinstance(res, dict) and "example" in res:
example = res["example"]
return example if "data" in example else {"data": example}
# Fallback
return {
"data": {
"type": "tasks",
"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08",
"attributes": {
"inserted_at": "2019-08-24T14:15:22Z",
"completed_at": "2019-08-24T14:15:22Z",
"name": "string",
"state": "available",
"result": None,
"task_args": None,
"metadata": None,
},
}
}
def attach_task_202_examples(result, generator, request, public): # noqa: F841
if not isinstance(result, dict):
return result
components = result.get("components", {}) or {}
task_resp_component = _pick_task_response_component(components)
task_example = _extract_task_example_from_components(components)
paths = result.get("paths", {}) or {}
for path_item in paths.values():
if not isinstance(path_item, dict):
continue
for method_obj in path_item.values():
if not isinstance(method_obj, dict):
continue
responses = method_obj.get("responses", {}) or {}
resp_202 = responses.get("202")
if not isinstance(resp_202, dict):
continue
content = resp_202.get("content", {}) or {}
jsonapi = content.get("application/vnd.api+json")
if not isinstance(jsonapi, dict):
continue
# Inject example if missing
if "examples" not in jsonapi and "example" not in jsonapi:
jsonapi["examples"] = {
"Task queued": {
"summary": "Task queued",
"value": task_example,
}
}
# Rewrite schema $ref if needed
if task_resp_component:
schema = jsonapi.get("schema")
must_replace = False
if not isinstance(schema, dict):
must_replace = True
else:
ref = schema.get("$ref")
if not ref:
must_replace = True
else:
current = ref.split("/")[-1]
if current != task_resp_component:
must_replace = True
if must_replace:
jsonapi["schema"] = {
"$ref": f"#/components/schemas/{task_resp_component}"
}
return result
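For context, a function with this (result, generator, request, public) signature is a drf-spectacular post-processing hook and gets wired in through settings; a minimal registration sketch follows, where the dotted path to attach_task_202_examples is an assumption.
SPECTACULAR_SETTINGS = {
    "POSTPROCESSING_HOOKS": [
        "drf_spectacular.hooks.postprocess_schema_enums",  # keep the default enum hook
        "api.v1.schema_hooks.attach_task_202_examples",  # dotted path is hypothetical
    ],
}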
File diff suppressed because it is too large
@@ -11,7 +11,7 @@ def test_basic_authentication():
client = APIClient()
test_user = "test_email@prowler.com"
test_password = "Test_password@1"
test_password = "test_password"
# Check that a 401 is returned when no basic authentication is provided
no_auth_response = client.get(reverse("provider-list"))
@@ -108,7 +108,7 @@ def test_user_me_when_inviting_users(create_test_user, tenants_fixture, roles_fi
user1_email = "user1@testing.com"
user2_email = "user2@testing.com"
password = "Thisisapassword123@"
password = "thisisapassword123"
user1_response = client.post(
reverse("user-list"),
@@ -187,7 +187,7 @@ class TestTokenSwitchTenant:
client = APIClient()
test_user = "test_email@prowler.com"
test_password = "Test_password1@"
test_password = "test_password"
# Check that we can create a new user without any kind of authentication
user_creation_response = client.post(
@@ -17,7 +17,7 @@ def test_delete_provider_without_executing_task(
client = APIClient()
test_user = "test_email@prowler.com"
test_password = "Test_password1@"
test_password = "test_password"
prowler_task = tasks_fixture[0]
task_mock = Mock()
@@ -1,77 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from allauth.socialaccount.models import SocialLogin
from django.contrib.auth import get_user_model
from api.adapters import ProwlerSocialAccountAdapter
User = get_user_model()
@pytest.mark.django_db
class TestProwlerSocialAccountAdapter:
def test_get_user_by_email_returns_user(self, create_test_user):
adapter = ProwlerSocialAccountAdapter()
user = adapter.get_user_by_email(create_test_user.email)
assert user == create_test_user
def test_get_user_by_email_returns_none_for_unknown_email(self):
adapter = ProwlerSocialAccountAdapter()
assert adapter.get_user_by_email("notfound@example.com") is None
def test_pre_social_login_links_existing_user(self, create_test_user, rf):
adapter = ProwlerSocialAccountAdapter()
sociallogin = MagicMock(spec=SocialLogin)
sociallogin.account = MagicMock()
sociallogin.provider = MagicMock()
sociallogin.provider.id = "saml"
sociallogin.account.extra_data = {}
sociallogin.user = create_test_user
sociallogin.connect = MagicMock()
adapter.pre_social_login(rf.get("/"), sociallogin)
call_args = sociallogin.connect.call_args
assert call_args is not None
called_request, called_user = call_args[0]
assert called_request.path == "/"
assert called_user.email == create_test_user.email
def test_pre_social_login_no_link_if_email_missing(self, rf):
adapter = ProwlerSocialAccountAdapter()
sociallogin = MagicMock(spec=SocialLogin)
sociallogin.account = MagicMock()
sociallogin.provider = MagicMock()
sociallogin.user = MagicMock()
sociallogin.provider.id = "saml"
sociallogin.account.extra_data = {}
sociallogin.connect = MagicMock()
adapter.pre_social_login(rf.get("/"), sociallogin)
sociallogin.connect.assert_not_called()
def test_save_user_saml_sets_session_flag(self, rf):
adapter = ProwlerSocialAccountAdapter()
request = rf.get("/")
request.session = {}
sociallogin = MagicMock(spec=SocialLogin)
sociallogin.provider = MagicMock()
sociallogin.provider.id = "saml"
sociallogin.account = MagicMock()
sociallogin.account.extra_data = {}
mock_user = MagicMock()
mock_user.id = 123
with patch("api.adapters.super") as mock_super:
with patch("api.adapters.transaction"):
with patch("api.adapters.MainRouter"):
mock_super.return_value.save_user.return_value = mock_user
adapter.save_user(request, sociallogin)
assert request.session["saml_user_created"] == "123"
+6 -22
View File
@@ -1,12 +1,12 @@
from unittest.mock import MagicMock, patch
from unittest.mock import patch, MagicMock
from api.compliance import (
generate_compliance_overview_template,
generate_scan_compliance,
get_prowler_provider_checks,
get_prowler_provider_compliance,
load_prowler_checks,
load_prowler_compliance,
load_prowler_checks,
generate_scan_compliance,
generate_compliance_overview_template,
)
from api.models import Provider
@@ -69,7 +69,7 @@ class TestCompliance:
load_prowler_compliance()
from api.compliance import PROWLER_CHECKS, PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE, PROWLER_CHECKS
assert PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE == {
"template_key": "template_value"
@@ -218,10 +218,6 @@ class TestCompliance:
Description="Description of requirement 1",
Attributes=[],
Checks=["check1", "check2"],
Tactics=["tactic1"],
SubTechniques=["subtechnique1"],
Platforms=["platform1"],
TechniqueURL="https://example.com",
)
requirement2 = MagicMock(
Id="requirement2",
@@ -229,10 +225,6 @@ class TestCompliance:
Description="Description of requirement 2",
Attributes=[],
Checks=[],
Tactics=[],
SubTechniques=[],
Platforms=[],
TechniqueURL="",
)
compliance1 = MagicMock(
Requirements=[requirement1, requirement2],
@@ -255,10 +247,6 @@ class TestCompliance:
"requirement1": {
"name": "Requirement 1",
"description": "Description of requirement 1",
"tactics": ["tactic1"],
"subtechniques": ["subtechnique1"],
"platforms": ["platform1"],
"technique_url": "https://example.com",
"attributes": [],
"checks": {"check1": None, "check2": None},
"checks_status": {
@@ -272,10 +260,6 @@ class TestCompliance:
"requirement2": {
"name": "Requirement 2",
"description": "Description of requirement 2",
"tactics": [],
"subtechniques": [],
"platforms": [],
"technique_url": "",
"attributes": [],
"checks": {},
"checks_status": {
@@ -284,7 +268,7 @@ class TestCompliance:
"manual": 0,
"total": 0,
},
"status": "MANUAL",
"status": "PASS",
},
},
"requirements_status": {
-175
View File
@@ -3,17 +3,12 @@ from enum import Enum
from unittest.mock import patch
import pytest
from django.conf import settings
from freezegun import freeze_time
from api.db_utils import (
_should_create_index_on_partition,
batch_delete,
create_objects_in_batches,
enum_to_choices,
generate_random_token,
one_week_from_now,
update_objects_in_batches,
)
from api.models import Provider
@@ -143,173 +138,3 @@ class TestBatchDelete:
)
assert Provider.objects.all().count() == 0
assert summary == {"api.Provider": create_test_providers}
class TestShouldCreateIndexOnPartition:
@freeze_time("2025-05-15 00:00:00Z")
@pytest.mark.parametrize(
"partition_name, all_partitions, expected",
[
("any_name", True, True),
("findings_default", True, True),
("findings_2022_jan", True, True),
("foo_bar", False, True),
("findings_2025_MAY", False, True),
("findings_2025_may", False, True),
("findings_2025_jun", False, True),
("findings_2025_apr", False, False),
("findings_2025_xyz", False, True),
],
)
def test_partition_inclusion_logic(self, partition_name, all_partitions, expected):
assert (
_should_create_index_on_partition(partition_name, all_partitions)
is expected
)
@freeze_time("2025-05-15 00:00:00Z")
def test_invalid_date_components(self):
# even if the regex matches but int conversion fails, we fall back to True
# (e.g. the year is too large, or the month fails to parse)
bad_name = "findings_99999_jan"
assert _should_create_index_on_partition(bad_name, False) is True
bad_name2 = "findings_2025_abc"
# "abc" is not in month_map, so we fall back to True
assert _should_create_index_on_partition(bad_name2, False) is True
@pytest.mark.django_db
class TestCreateObjectsInBatches:
@pytest.fixture
def tenant(self, tenants_fixture):
return tenants_fixture[0]
def make_provider_instances(self, tenant, count):
"""
Return a list of `count` unsaved Provider instances for the given tenant.
"""
base_uid = 1000
return [
Provider(
tenant=tenant,
uid=str(base_uid + i),
provider=Provider.ProviderChoices.AWS,
)
for i in range(count)
]
def test_exact_multiple_of_batch(self, tenant):
total = 6
batch_size = 3
objs = self.make_provider_instances(tenant, total)
create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)
qs = Provider.objects.filter(tenant=tenant)
assert qs.count() == total
def test_non_multiple_of_batch(self, tenant):
total = 7
batch_size = 3
objs = self.make_provider_instances(tenant, total)
create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)
qs = Provider.objects.filter(tenant=tenant)
assert qs.count() == total
def test_batch_size_default(self, tenant):
default_size = settings.DJANGO_DELETION_BATCH_SIZE
total = default_size + 2
objs = self.make_provider_instances(tenant, total)
create_objects_in_batches(str(tenant.id), Provider, objs)
qs = Provider.objects.filter(tenant=tenant)
assert qs.count() == total
@pytest.mark.django_db
class TestUpdateObjectsInBatches:
@pytest.fixture
def tenant(self, tenants_fixture):
return tenants_fixture[0]
def make_provider_instances(self, tenant, count):
"""
Return a list of `count` unsaved Provider instances for the given tenant.
"""
base_uid = 2000
return [
Provider(
tenant=tenant,
uid=str(base_uid + i),
provider=Provider.ProviderChoices.AWS,
)
for i in range(count)
]
def test_exact_multiple_of_batch(self, tenant):
total = 6
batch_size = 3
objs = self.make_provider_instances(tenant, total)
create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)
# Fetch them back, mutate the `uid` field, then update in batches
providers = list(Provider.objects.filter(tenant=tenant))
for p in providers:
p.uid = f"{p.uid}_upd"
update_objects_in_batches(
tenant_id=str(tenant.id),
model=Provider,
objects=providers,
fields=["uid"],
batch_size=batch_size,
)
qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
assert qs.count() == total
def test_non_multiple_of_batch(self, tenant):
total = 7
batch_size = 3
objs = self.make_provider_instances(tenant, total)
create_objects_in_batches(str(tenant.id), Provider, objs, batch_size=batch_size)
providers = list(Provider.objects.filter(tenant=tenant))
for p in providers:
p.uid = f"{p.uid}_upd"
update_objects_in_batches(
tenant_id=str(tenant.id),
model=Provider,
objects=providers,
fields=["uid"],
batch_size=batch_size,
)
qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
assert qs.count() == total
def test_batch_size_default(self, tenant):
default_size = settings.DJANGO_DELETION_BATCH_SIZE
total = default_size + 2
objs = self.make_provider_instances(tenant, total)
create_objects_in_batches(str(tenant.id), Provider, objs)
providers = list(Provider.objects.filter(tenant=tenant))
for p in providers:
p.uid = f"{p.uid}_upd"
# Update without specifying batch_size (uses default)
update_objects_in_batches(
tenant_id=str(tenant.id),
model=Provider,
objects=providers,
fields=["uid"],
)
qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
assert qs.count() == total
-379
View File
@@ -1,379 +0,0 @@
import json
from uuid import uuid4
import pytest
from django_celery_results.models import TaskResult
from rest_framework import status
from rest_framework.response import Response
from api.exceptions import (
TaskFailedException,
TaskInProgressException,
TaskNotFoundException,
)
from api.models import Task, User
from api.rls import Tenant
from api.v1.mixins import PaginateByPkMixin, TaskManagementMixin
@pytest.mark.django_db
class TestPaginateByPkMixin:
@pytest.fixture
def tenant(self):
return Tenant.objects.create(name="Test Tenant")
@pytest.fixture
def users(self, tenant):
# Create 5 users with proper email field
users = []
for i in range(5):
user = User.objects.create(email=f"user{i}@example.com", name=f"User {i}")
users.append(user)
return users
class DummyView(PaginateByPkMixin):
def __init__(self, page):
self._page = page
def paginate_queryset(self, qs):
return self._page
def get_serializer(self, queryset, many):
class S:
def __init__(self, data):
# serialize to list of ids
self.data = [obj.id for obj in data] if many else queryset.id
return S(queryset)
def get_paginated_response(self, data):
return Response({"results": data}, status=status.HTTP_200_OK)
def test_no_pagination(self, users):
base_qs = User.objects.all().order_by("id")
view = self.DummyView(page=None)
resp = view.paginate_by_pk(
request=None, base_queryset=base_qs, manager=User.objects
)
# since no pagination, should return all ids in order
expected = [u.id for u in base_qs]
assert isinstance(resp, Response)
assert resp.data == expected
def test_with_pagination(self, users):
base_qs = User.objects.all().order_by("id")
# simulate a page containing two of the user ids
page = [base_qs[1].id, base_qs[3].id]
view = self.DummyView(page=page)
resp = view.paginate_by_pk(
request=None, base_queryset=base_qs, manager=User.objects
)
# should fetch only those two users, in the same order as page
assert resp.status_code == status.HTTP_200_OK
assert resp.data == {"results": page}
@pytest.mark.django_db
class TestTaskManagementMixin:
class DummyView(TaskManagementMixin):
pass
@pytest.fixture
def tenant(self):
return Tenant.objects.create(name="Test Tenant")
@pytest.fixture(autouse=True)
def cleanup(self):
Task.objects.all().delete()
TaskResult.objects.all().delete()
def test_no_task_and_no_taskresult_raises_not_found(self):
view = self.DummyView()
with pytest.raises(TaskNotFoundException):
view.check_task_status("task_xyz", {"foo": "bar"})
def test_no_task_and_no_taskresult_returns_none_when_not_raising(self):
view = self.DummyView()
result = view.check_task_status(
"task_xyz", {"foo": "bar"}, raise_on_not_found=False
)
assert result is None
def test_taskresult_pending_raises_in_progress(self):
task_kwargs = {"foo": "bar"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="task_xyz",
task_kwargs=json.dumps(task_kwargs),
status="PENDING",
)
view = self.DummyView()
with pytest.raises(TaskInProgressException) as excinfo:
view.check_task_status("task_xyz", task_kwargs, raise_on_not_found=False)
assert hasattr(excinfo.value, "task_result")
assert excinfo.value.task_result == tr
def test_taskresult_started_raises_in_progress(self):
task_kwargs = {"foo": "bar"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="task_xyz",
task_kwargs=json.dumps(task_kwargs),
status="STARTED",
)
view = self.DummyView()
with pytest.raises(TaskInProgressException) as excinfo:
view.check_task_status("task_xyz", task_kwargs, raise_on_not_found=False)
assert hasattr(excinfo.value, "task_result")
assert excinfo.value.task_result == tr
def test_taskresult_progress_raises_in_progress(self):
task_kwargs = {"foo": "bar"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="task_xyz",
task_kwargs=json.dumps(task_kwargs),
status="PROGRESS",
)
view = self.DummyView()
with pytest.raises(TaskInProgressException) as excinfo:
view.check_task_status("task_xyz", task_kwargs, raise_on_not_found=False)
assert hasattr(excinfo.value, "task_result")
assert excinfo.value.task_result == tr
def test_taskresult_failure_raises_failed(self):
task_kwargs = {"a": 1}
TaskResult.objects.create(
task_id=str(uuid4()),
task_name="task_fail",
task_kwargs=json.dumps(task_kwargs),
status="FAILURE",
)
view = self.DummyView()
with pytest.raises(TaskFailedException):
view.check_task_status("task_fail", task_kwargs, raise_on_not_found=False)
def test_taskresult_failure_returns_none_when_not_raising(self):
task_kwargs = {"a": 1}
TaskResult.objects.create(
task_id=str(uuid4()),
task_name="task_fail",
task_kwargs=json.dumps(task_kwargs),
status="FAILURE",
)
view = self.DummyView()
result = view.check_task_status(
"task_fail", task_kwargs, raise_on_failed=False, raise_on_not_found=False
)
assert result is None
def test_taskresult_success_returns_none(self):
task_kwargs = {"x": 2}
TaskResult.objects.create(
task_id=str(uuid4()),
task_name="task_ok",
task_kwargs=json.dumps(task_kwargs),
status="SUCCESS",
)
view = self.DummyView()
# should not raise and should return None
assert (
view.check_task_status("task_ok", task_kwargs, raise_on_not_found=False)
is None
)
def test_taskresult_revoked_returns_none(self):
task_kwargs = {"x": 2}
TaskResult.objects.create(
task_id=str(uuid4()),
task_name="task_revoked",
task_kwargs=json.dumps(task_kwargs),
status="REVOKED",
)
view = self.DummyView()
# should not raise and should return None
assert (
view.check_task_status(
"task_revoked", task_kwargs, raise_on_not_found=False
)
is None
)
def test_task_with_failed_status_raises_failed(self, tenant):
task_kwargs = {"provider_id": "test"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs),
status="FAILURE",
)
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
view = self.DummyView()
with pytest.raises(TaskFailedException) as excinfo:
view.check_task_status("scan_task", task_kwargs)
# Check that the exception contains the expected task
assert hasattr(excinfo.value, "task")
assert excinfo.value.task == task
def test_task_with_cancelled_status_raises_failed(self, tenant):
task_kwargs = {"provider_id": "test"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs),
status="REVOKED",
)
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
view = self.DummyView()
with pytest.raises(TaskFailedException) as excinfo:
view.check_task_status("scan_task", task_kwargs)
# Check that the exception contains the expected task
assert hasattr(excinfo.value, "task")
assert excinfo.value.task == task
def test_task_with_failed_status_returns_task_when_not_raising(self, tenant):
task_kwargs = {"provider_id": "test"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs),
status="FAILURE",
)
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
view = self.DummyView()
result = view.check_task_status("scan_task", task_kwargs, raise_on_failed=False)
assert result == task
def test_task_with_completed_status_returns_none(self, tenant):
task_kwargs = {"provider_id": "test"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs),
status="SUCCESS",
)
Task.objects.create(tenant=tenant, task_runner_task=tr)
view = self.DummyView()
result = view.check_task_status("scan_task", task_kwargs)
assert result is None
def test_task_with_executing_status_returns_task(self, tenant):
task_kwargs = {"provider_id": "test"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs),
status="STARTED",
)
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
view = self.DummyView()
result = view.check_task_status("scan_task", task_kwargs)
assert result is not None
assert result.pk == task.pk
def test_task_with_pending_status_returns_task(self, tenant):
task_kwargs = {"provider_id": "test"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs),
status="PENDING",
)
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
view = self.DummyView()
result = view.check_task_status("scan_task", task_kwargs)
assert result is not None
assert result.pk == task.pk
def test_get_task_response_if_running_returns_none_for_completed_task(self, tenant):
task_kwargs = {"provider_id": "test"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs),
status="SUCCESS",
)
Task.objects.create(tenant=tenant, task_runner_task=tr)
view = self.DummyView()
result = view.get_task_response_if_running("scan_task", task_kwargs)
assert result is None
def test_get_task_response_if_running_returns_none_for_no_task(self):
view = self.DummyView()
result = view.get_task_response_if_running(
"nonexistent", {"foo": "bar"}, raise_on_not_found=False
)
assert result is None
def test_get_task_response_if_running_returns_202_for_executing_task(self, tenant):
task_kwargs = {"provider_id": "test"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs),
status="STARTED",
)
task = Task.objects.create(tenant=tenant, task_runner_task=tr)
view = self.DummyView()
result = view.get_task_response_if_running("scan_task", task_kwargs)
assert isinstance(result, Response)
assert result.status_code == status.HTTP_202_ACCEPTED
assert "Content-Location" in result.headers
# The response should contain the serialized task data
assert result.data is not None
assert "id" in result.data
assert str(result.data["id"]) == str(task.id)
def test_get_task_response_if_running_returns_none_for_available_task(self, tenant):
task_kwargs = {"provider_id": "test"}
tr = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs),
status="PENDING",
)
Task.objects.create(tenant=tenant, task_runner_task=tr)
view = self.DummyView()
result = view.get_task_response_if_running("scan_task", task_kwargs)
# PENDING maps to AVAILABLE, which is not EXECUTING, so should return None
assert result is None
def test_kwargs_filtering_works_correctly(self, tenant):
# Create tasks with different kwargs
task_kwargs_1 = {"provider_id": "test1", "scan_type": "full"}
task_kwargs_2 = {"provider_id": "test2", "scan_type": "quick"}
tr1 = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs_1),
status="STARTED",
)
tr2 = TaskResult.objects.create(
task_id=str(uuid4()),
task_name="scan_task",
task_kwargs=json.dumps(task_kwargs_2),
status="STARTED",
)
task1 = Task.objects.create(tenant=tenant, task_runner_task=tr1)
task2 = Task.objects.create(tenant=tenant, task_runner_task=tr2)
view = self.DummyView()
# Should find task1 when searching for its kwargs
result1 = view.check_task_status("scan_task", {"provider_id": "test1"})
assert result1 is not None
assert result1.pk == task1.pk
# Should find task2 when searching for its kwargs
result2 = view.check_task_status("scan_task", {"provider_id": "test2"})
assert result2 is not None
assert result2.pk == task2.pk
# Should not find anything when searching for non-existent kwargs
result3 = view.check_task_status(
"scan_task", {"provider_id": "test3"}, raise_on_not_found=False
)
assert result3 is None
+28 -197
View File
@@ -1,9 +1,9 @@
import pytest
from allauth.socialaccount.models import SocialApp
from django.core.exceptions import ValidationError
import uuid
from unittest import mock
from api.db_router import MainRouter
from api.models import Resource, ResourceTag, SAMLConfiguration, SAMLDomainIndex
import pytest
from api.models import Resource, ResourceTag, Task
@pytest.mark.django_db
@@ -126,201 +126,32 @@ class TestResourceModel:
@pytest.mark.django_db
class TestSAMLConfigurationModel:
VALID_METADATA = """<?xml version='1.0' encoding='UTF-8'?>
<md:EntityDescriptor entityID='TEST' xmlns:md='urn:oasis:names:tc:SAML:2.0:metadata'>
<md:IDPSSODescriptor WantAuthnRequestsSigned='false' protocolSupportEnumeration='urn:oasis:names:tc:SAML:2.0:protocol'>
<md:KeyDescriptor use='signing'>
<ds:KeyInfo xmlns:ds='http://www.w3.org/2000/09/xmldsig#'>
<ds:X509Data>
<ds:X509Certificate>FAKECERTDATA</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</md:KeyDescriptor>
<md:SingleSignOnService Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST' Location='https://idp.test/sso'/>
</md:IDPSSODescriptor>
</md:EntityDescriptor>
"""
class TestTaskManager:
def test_get_with_retry_success(self):
task_id = uuid.uuid4()
call_counter = {"count": 0}
def test_creates_valid_configuration(self, tenants_fixture):
tenant = tenants_fixture[0]
config = SAMLConfiguration.objects.using(MainRouter.admin_db).create(
email_domain="ssoexample.com",
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
tenant=tenant,
)
def side_effect(*args, **kwargs):
if call_counter["count"] < 2:
call_counter["count"] += 1
raise Task.DoesNotExist()
return Task(id=task_id)
assert config.email_domain == "ssoexample.com"
assert SocialApp.objects.filter(client_id="ssoexample.com").exists()
with mock.patch.object(Task.objects, "get", side_effect=side_effect):
task = Task.objects.get_with_retry(
task_id, max_retries=5, delay_seconds=0.01
)
def test_email_domain_with_at_symbol_fails(self, tenants_fixture):
tenant = tenants_fixture[0]
config = SAMLConfiguration(
email_domain="invalid@domain.com",
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
tenant=tenant,
)
assert task.id == task_id
assert call_counter["count"] == 2
with pytest.raises(ValidationError) as exc_info:
config.clean()
def test_get_with_retry_fail(self):
non_existent_id = uuid.uuid4()
errors = exc_info.value.message_dict
assert "email_domain" in errors
assert "Domain must not contain @" in errors["email_domain"][0]
with mock.patch.object(Task.objects, "get", side_effect=Task.DoesNotExist):
with pytest.raises(Task.DoesNotExist) as excinfo:
Task.objects.get_with_retry(
non_existent_id, max_retries=3, delay_seconds=0.01
)
def test_duplicate_email_domain_fails(self, tenants_fixture):
tenant1, tenant2, *_ = tenants_fixture
SAMLConfiguration.objects.using(MainRouter.admin_db).create(
email_domain="duplicate.com",
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
tenant=tenant1,
)
config = SAMLConfiguration(
email_domain="duplicate.com",
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
tenant=tenant2,
)
with pytest.raises(ValidationError) as exc_info:
config.clean()
errors = exc_info.value.message_dict
assert "tenant" in errors
assert "There is a problem with your email domain." in errors["tenant"][0]
def test_duplicate_tenant_config_fails(self, tenants_fixture):
tenant = tenants_fixture[0]
SAMLConfiguration.objects.using(MainRouter.admin_db).create(
email_domain="unique1.com",
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
tenant=tenant,
)
config = SAMLConfiguration(
email_domain="unique2.com",
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
tenant=tenant,
)
with pytest.raises(ValidationError) as exc_info:
config.clean()
errors = exc_info.value.message_dict
assert "tenant" in errors
assert (
"A SAML configuration already exists for this tenant."
in errors["tenant"][0]
)
def test_invalid_metadata_xml_fails(self, tenants_fixture):
tenant = tenants_fixture[0]
config = SAMLConfiguration(
email_domain="brokenxml.com",
metadata_xml="<bad<xml>",
tenant=tenant,
)
with pytest.raises(ValidationError) as exc_info:
config._parse_metadata()
errors = exc_info.value.message_dict
assert "metadata_xml" in errors
assert "Invalid XML" in errors["metadata_xml"][0]
assert "not well-formed" in errors["metadata_xml"][0]
def test_metadata_missing_sso_fails(self, tenants_fixture):
tenant = tenants_fixture[0]
xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
<md:IDPSSODescriptor></md:IDPSSODescriptor>
</md:EntityDescriptor>"""
config = SAMLConfiguration(
email_domain="nosso.com",
metadata_xml=xml,
tenant=tenant,
)
with pytest.raises(ValidationError) as exc_info:
config._parse_metadata()
errors = exc_info.value.message_dict
assert "metadata_xml" in errors
assert "Missing SingleSignOnService" in errors["metadata_xml"][0]
def test_metadata_missing_certificate_fails(self, tenants_fixture):
tenant = tenants_fixture[0]
xml = """<md:EntityDescriptor entityID="x" xmlns:md="urn:oasis:names:tc:SAML:2.0:metadata">
<md:IDPSSODescriptor>
<md:SingleSignOnService Binding="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" Location="https://example.com/sso"/>
</md:IDPSSODescriptor>
</md:EntityDescriptor>"""
config = SAMLConfiguration(
email_domain="nocert.com",
metadata_xml=xml,
tenant=tenant,
)
with pytest.raises(ValidationError) as exc_info:
config._parse_metadata()
errors = exc_info.value.message_dict
assert "metadata_xml" in errors
assert "X509Certificate" in errors["metadata_xml"][0]
def test_deletes_saml_configuration_and_related_objects(self, tenants_fixture):
tenant = tenants_fixture[0]
email_domain = "deleteme.com"
# Create the configuration
config = SAMLConfiguration.objects.using(MainRouter.admin_db).create(
email_domain=email_domain,
metadata_xml=TestSAMLConfigurationModel.VALID_METADATA,
tenant=tenant,
)
# Verify that the SocialApp and SAMLDomainIndex exist
assert SocialApp.objects.filter(client_id=email_domain).exists()
assert (
SAMLDomainIndex.objects.using(MainRouter.admin_db)
.filter(email_domain=email_domain)
.exists()
)
# Delete the configuration
config.delete()
# Verify that the configuration and its related objects are deleted
assert (
not SAMLConfiguration.objects.using(MainRouter.admin_db)
.filter(pk=config.pk)
.exists()
)
assert not SocialApp.objects.filter(client_id=email_domain).exists()
assert (
not SAMLDomainIndex.objects.using(MainRouter.admin_db)
.filter(email_domain=email_domain)
.exists()
)
def test_duplicate_entity_id_fails_on_creation(self, tenants_fixture):
tenant1, tenant2, *_ = tenants_fixture
SAMLConfiguration.objects.using(MainRouter.admin_db).create(
email_domain="first.com",
metadata_xml=self.VALID_METADATA,
tenant=tenant1,
)
config = SAMLConfiguration(
email_domain="second.com",
metadata_xml=self.VALID_METADATA,
tenant=tenant2,
)
with pytest.raises(ValidationError) as exc_info:
config.save()
errors = exc_info.value.message_dict
assert "metadata_xml" in errors
assert "There is a problem with your metadata." in errors["metadata_xml"][0]
assert str(non_existent_id) in str(excinfo.value)
+3 -88
View File
@@ -1,7 +1,6 @@
from unittest.mock import ANY, Mock, patch
import pytest
from conftest import TODAY
from django.urls import reverse
from rest_framework import status
@@ -61,7 +60,7 @@ class TestUserViewSet:
def test_create_user_with_all_permissions(self, authenticated_client_rbac):
valid_user_payload = {
"name": "test",
"password": "Newpassword123@",
"password": "newpassword123",
"email": "new_user@test.com",
}
response = authenticated_client_rbac.post(
@@ -75,7 +74,7 @@ class TestUserViewSet:
):
valid_user_payload = {
"name": "test",
"password": "Newpassword123@",
"password": "newpassword123",
"email": "new_user@test.com",
}
response = authenticated_client_no_permissions_rbac.post(
@@ -322,7 +321,7 @@ class TestProviderViewSet:
@pytest.mark.django_db
class TestLimitedVisibility:
TEST_EMAIL = "rbac@rbac.com"
TEST_PASSWORD = "Thisisapassword123@"
TEST_PASSWORD = "thisisapassword123"
@pytest.fixture
def limited_admin_user(
@@ -410,87 +409,3 @@ class TestLimitedVisibility:
assert (
response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
)
def test_overviews_providers(
self,
authenticated_client_rbac_limited,
scan_summaries_fixture,
providers_fixture,
):
# By default, the associated provider is the one which has the overview data
response = authenticated_client_rbac_limited.get(reverse("overview-providers"))
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) > 0
# After changing the provider visibility, no data should be returned
# Only the provider associated with that group is changed
new_provider = providers_fixture[1]
ProviderGroupMembership.objects.all().update(provider=new_provider)
response = authenticated_client_rbac_limited.get(reverse("overview-providers"))
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == 0
@pytest.mark.parametrize(
"endpoint_name",
[
"findings",
"findings_severity",
],
)
def test_overviews_findings(
self,
endpoint_name,
authenticated_client_rbac_limited,
scan_summaries_fixture,
providers_fixture,
):
# By default, the associated provider is the one which has the overview data
response = authenticated_client_rbac_limited.get(
reverse(f"overview-{endpoint_name}")
)
assert response.status_code == status.HTTP_200_OK
values = response.json()["data"]["attributes"].values()
assert any(value > 0 for value in values)
# After changing the provider visibility, no data should be returned
# Only the provider associated with that group is changed
new_provider = providers_fixture[1]
ProviderGroupMembership.objects.all().update(provider=new_provider)
response = authenticated_client_rbac_limited.get(
reverse(f"overview-{endpoint_name}")
)
assert response.status_code == status.HTTP_200_OK
data = response.json()["data"]["attributes"].values()
assert all(value == 0 for value in data)
def test_overviews_services(
self,
authenticated_client_rbac_limited,
scan_summaries_fixture,
providers_fixture,
):
# By default, the associated provider is the one which has the overview data
response = authenticated_client_rbac_limited.get(
reverse("overview-services"), {"filter[inserted_at]": TODAY}
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) > 0
# After changing the provider visibility, no data should be returned
# Only the provider associated with that group is changed
new_provider = providers_fixture[1]
ProviderGroupMembership.objects.all().update(provider=new_provider)
response = authenticated_client_rbac_limited.get(
reverse("overview-services"), {"filter[inserted_at]": TODAY}
)
assert response.status_code == status.HTTP_200_OK
assert len(response.json()["data"]) == 0
-80
View File
@@ -1,80 +0,0 @@
import logging
from unittest.mock import MagicMock
from config.settings.sentry import before_send
def test_before_send_ignores_log_with_ignored_exception():
"""Test that before_send ignores logs containing ignored exceptions."""
log_record = MagicMock()
log_record.msg = "Provider kubernetes is not connected"
log_record.levelno = logging.ERROR # 40
hint = {"log_record": log_record}
event = MagicMock()
result = before_send(event, hint)
# Assert that the event was dropped (None returned)
assert result is None
def test_before_send_ignores_exception_with_ignored_exception():
"""Test that before_send ignores exceptions containing ignored exceptions."""
exc_info = (Exception, Exception("Provider kubernetes is not connected"), None)
hint = {"exc_info": exc_info}
event = MagicMock()
result = before_send(event, hint)
# Assert that the event was dropped (None returned)
assert result is None
def test_before_send_passes_through_non_ignored_log():
"""Test that before_send passes through logs that don't contain ignored exceptions."""
log_record = MagicMock()
log_record.msg = "Some other error message"
log_record.levelno = logging.ERROR # 40
hint = {"log_record": log_record}
event = MagicMock()
result = before_send(event, hint)
# Assert that the event was passed through
assert result == event
def test_before_send_passes_through_non_ignored_exception():
"""Test that before_send passes through exceptions that don't contain ignored exceptions."""
exc_info = (Exception, Exception("Some other error message"), None)
hint = {"exc_info": exc_info}
event = MagicMock()
result = before_send(event, hint)
# Assert that the event was passed through
assert result == event
def test_before_send_handles_warning_level():
"""Test that before_send handles warning level logs."""
log_record = MagicMock()
log_record.msg = "Provider kubernetes is not connected"
log_record.levelno = logging.WARNING # 30
hint = {"log_record": log_record}
event = MagicMock()
result = before_send(event, hint)
# Assert that the event was dropped (None returned)
assert result is None
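# The tests above imply a filter roughly like the following sketch; the real
# implementation in config/settings/sentry.py may differ, and the set of ignored
# messages below is an assumption used for illustration only.
_IGNORED_MESSAGES = ("is not connected",)

def _example_before_send(event, hint):
    """Drop Sentry events whose log record or exception matches an ignored message."""
    if "log_record" in hint:
        message = str(hint["log_record"].msg)
    elif "exc_info" in hint:
        message = str(hint["exc_info"][1])
    else:
        return event
    if any(ignored in message for ignored in _IGNORED_MESSAGES):
        return None  # drop the event
    return event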
@@ -1,100 +0,0 @@
import pytest
from rest_framework.exceptions import ValidationError
from api.v1.serializer_utils.integrations import S3ConfigSerializer
class TestS3ConfigSerializer:
"""Test cases for S3ConfigSerializer validation."""
def test_validate_output_directory_valid_paths(self):
"""Test that valid output directory paths are accepted."""
serializer = S3ConfigSerializer()
# Test normal paths
assert serializer.validate_output_directory("test") == "test"
assert serializer.validate_output_directory("test/folder") == "test/folder"
assert serializer.validate_output_directory("my-folder_123") == "my-folder_123"
# Test paths with leading slashes (should be normalized)
assert serializer.validate_output_directory("/test") == "test"
assert serializer.validate_output_directory("/test/folder") == "test/folder"
# Test paths with excessive slashes (should be normalized)
assert serializer.validate_output_directory("///test") == "test"
assert serializer.validate_output_directory("///////test") == "test"
assert serializer.validate_output_directory("test//folder") == "test/folder"
assert serializer.validate_output_directory("test///folder") == "test/folder"
def test_validate_output_directory_empty_values(self):
"""Test that empty values raise validation errors."""
serializer = S3ConfigSerializer()
with pytest.raises(
ValidationError, match="Output directory cannot be empty or just"
):
serializer.validate_output_directory(".")
with pytest.raises(
ValidationError, match="Output directory cannot be empty or just"
):
serializer.validate_output_directory("/")
def test_validate_output_directory_invalid_characters(self):
"""Test that invalid characters are rejected."""
serializer = S3ConfigSerializer()
invalid_chars = ["<", ">", ":", '"', "|", "?", "*"]
for char in invalid_chars:
with pytest.raises(
ValidationError, match="Output directory contains invalid characters"
):
serializer.validate_output_directory(f"test{char}folder")
def test_validate_output_directory_too_long(self):
"""Test that paths that are too long are rejected."""
serializer = S3ConfigSerializer()
# Create a path longer than 900 characters
long_path = "a" * 901
with pytest.raises(ValidationError, match="Output directory path is too long"):
serializer.validate_output_directory(long_path)
def test_validate_output_directory_edge_cases(self):
"""Test edge cases for output directory validation."""
serializer = S3ConfigSerializer()
# Test path at the limit (900 characters)
path_at_limit = "a" * 900
assert serializer.validate_output_directory(path_at_limit) == path_at_limit
# Test complex normalization
assert serializer.validate_output_directory("//test/../folder//") == "folder"
assert serializer.validate_output_directory("/test/./folder/") == "test/folder"
def test_s3_config_serializer_full_validation(self):
"""Test the full S3ConfigSerializer with valid data."""
data = {
"bucket_name": "my-test-bucket",
"output_directory": "///////test", # This should be normalized
}
serializer = S3ConfigSerializer(data=data)
assert serializer.is_valid()
validated_data = serializer.validated_data
assert validated_data["bucket_name"] == "my-test-bucket"
assert validated_data["output_directory"] == "test" # Normalized
def test_s3_config_serializer_invalid_data(self):
"""Test the full S3ConfigSerializer with invalid data."""
data = {
"bucket_name": "my-test-bucket",
"output_directory": "test<invalid", # Contains invalid character
}
serializer = S3ConfigSerializer(data=data)
assert not serializer.is_valid()
assert "output_directory" in serializer.errors
+5 -322
View File
@@ -6,18 +6,16 @@ from rest_framework.exceptions import NotFound, ValidationError
from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Integration, Invitation, Provider
from api.models import Invitation, Provider
from api.utils import (
get_prowler_provider_kwargs,
initialize_prowler_provider,
merge_dicts,
prowler_integration_connection_test,
prowler_provider_connection_test,
return_prowler_provider,
validate_invitation,
)
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHubConnection
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
@@ -133,21 +131,6 @@ class TestInitializeProwlerProvider:
initialize_prowler_provider(provider)
mock_return_prowler_provider.return_value.assert_called_once_with(key="value")
@patch("api.utils.return_prowler_provider")
def test_initialize_prowler_provider_with_mutelist(
self, mock_return_prowler_provider
):
provider = MagicMock()
provider.secret.secret = {"key": "value"}
mutelist_processor = MagicMock()
mutelist_processor.configuration = {"Mutelist": {"key": "value"}}
mock_return_prowler_provider.return_value = MagicMock()
initialize_prowler_provider(provider, mutelist_processor)
mock_return_prowler_provider.return_value.assert_called_once_with(
key="value", mutelist_content={"key": "value"}
)
class TestProwlerProviderConnectionTest:
@patch("api.utils.return_prowler_provider")
@@ -199,10 +182,6 @@ class TestGetProwlerProviderKwargs:
Provider.ProviderChoices.M365.value,
{},
),
(
Provider.ProviderChoices.GITHUB.value,
{"organizations": ["provider_uid"]},
),
],
)
def test_get_prowler_provider_kwargs(self, provider_type, expected_extra_kwargs):
@@ -221,25 +200,6 @@ class TestGetProwlerProviderKwargs:
expected_result = {**secret_dict, **expected_extra_kwargs}
assert result == expected_result
def test_get_prowler_provider_kwargs_with_mutelist(self):
provider_uid = "provider_uid"
secret_dict = {"key": "value"}
secret_mock = MagicMock()
secret_mock.secret = secret_dict
mutelist_processor = MagicMock()
mutelist_processor.configuration = {"Mutelist": {"key": "value"}}
provider = MagicMock()
provider.provider = Provider.ProviderChoices.AWS.value
provider.secret = secret_mock
provider.uid = provider_uid
result = get_prowler_provider_kwargs(provider, mutelist_processor)
expected_result = {**secret_dict, "mutelist_content": {"key": "value"}}
assert result == expected_result
def test_get_prowler_provider_kwargs_unsupported_provider(self):
# Setup
provider_uid = "provider_uid"
@@ -294,7 +254,7 @@ class TestValidateInvitation:
assert result == invitation
mock_db.get.assert_called_once_with(
token="VALID_TOKEN", email__iexact="user@example.com"
token="VALID_TOKEN", email="user@example.com"
)
def test_invitation_not_found_raises_validation_error(self):
@@ -309,7 +269,7 @@ class TestValidateInvitation:
"invitation_token": "Invalid invitation code."
}
mock_db.get.assert_called_once_with(
token="INVALID_TOKEN", email__iexact="user@example.com"
token="INVALID_TOKEN", email="user@example.com"
)
def test_invitation_not_found_raises_not_found(self):
@@ -324,7 +284,7 @@ class TestValidateInvitation:
assert exc_info.value.detail == "Invitation is not valid."
mock_db.get.assert_called_once_with(
token="INVALID_TOKEN", email__iexact="user@example.com"
token="INVALID_TOKEN", email="user@example.com"
)
def test_invitation_expired(self, invitation):
@@ -372,282 +332,5 @@ class TestValidateInvitation:
"invitation_token": "Invalid invitation code."
}
mock_db.get.assert_called_once_with(
token="VALID_TOKEN", email__iexact="different@example.com"
token="VALID_TOKEN", email="different@example.com"
)
def test_valid_invitation_uppercase_email(self):
"""Test that validate_invitation works with case-insensitive email lookup."""
uppercase_email = "USER@example.com"
invitation = MagicMock(spec=Invitation)
invitation.token = "VALID_TOKEN"
invitation.email = uppercase_email
invitation.expires_at = datetime.now(timezone.utc) + timedelta(days=1)
invitation.state = Invitation.State.PENDING
invitation.tenant = MagicMock()
with patch("api.utils.Invitation.objects.using") as mock_using:
mock_db = mock_using.return_value
mock_db.get.return_value = invitation
result = validate_invitation("VALID_TOKEN", "user@example.com")
assert result == invitation
mock_db.get.assert_called_once_with(
token="VALID_TOKEN", email__iexact="user@example.com"
)
class TestProwlerIntegrationConnectionTest:
"""Test prowler_integration_connection_test function for SecurityHub regions reset."""
@patch("api.utils.SecurityHub")
def test_security_hub_connection_failure_resets_regions(
self, mock_security_hub_class
):
"""Test that SecurityHub connection failure resets regions to empty dict."""
# Create integration with existing regions configuration
integration = MagicMock()
integration.integration_type = Integration.IntegrationChoices.AWS_SECURITY_HUB
integration.credentials = {
"aws_access_key_id": "test_key",
"aws_secret_access_key": "test_secret",
}
integration.configuration = {
"send_only_fails": True,
"regions": {
"us-east-1": True,
"us-west-2": True,
"eu-west-1": False,
"ap-south-1": False,
},
}
# Mock provider relationship
mock_provider = MagicMock()
mock_provider.uid = "123456789012"
mock_relationship = MagicMock()
mock_relationship.provider = mock_provider
integration.integrationproviderrelationship_set.first.return_value = (
mock_relationship
)
# Mock failed SecurityHub connection
mock_connection = SecurityHubConnection(
is_connected=False,
error=Exception("SecurityHub testing"),
enabled_regions=set(),
disabled_regions=set(),
)
mock_security_hub_class.test_connection.return_value = mock_connection
# Call the function
result = prowler_integration_connection_test(integration)
# Assertions
assert result.is_connected is False
assert str(result.error) == "SecurityHub testing"
# Verify regions were completely reset to empty dict
assert integration.configuration["regions"] == {}
# Verify save was called to persist the change
integration.save.assert_called_once()
# Verify test_connection was called with correct parameters
mock_security_hub_class.test_connection.assert_called_once_with(
aws_account_id="123456789012",
raise_on_exception=False,
aws_access_key_id="test_key",
aws_secret_access_key="test_secret",
)
@patch("api.utils.SecurityHub")
def test_security_hub_connection_success_saves_regions(
self, mock_security_hub_class
):
"""Test that successful SecurityHub connection saves regions correctly."""
integration = MagicMock()
integration.integration_type = Integration.IntegrationChoices.AWS_SECURITY_HUB
integration.credentials = {
"aws_access_key_id": "valid_key",
"aws_secret_access_key": "valid_secret",
}
integration.configuration = {"send_only_fails": False}
# Mock provider relationship
mock_provider = MagicMock()
mock_provider.uid = "123456789012"
mock_relationship = MagicMock()
mock_relationship.provider = mock_provider
integration.integrationproviderrelationship_set.first.return_value = (
mock_relationship
)
# Mock successful SecurityHub connection with regions
mock_connection = SecurityHubConnection(
is_connected=True,
error=None,
enabled_regions={"us-east-1", "eu-west-1"},
disabled_regions={"ap-south-1"},
)
mock_security_hub_class.test_connection.return_value = mock_connection
result = prowler_integration_connection_test(integration)
assert result.is_connected is True
# Verify regions were saved correctly
assert integration.configuration["regions"]["us-east-1"] is True
assert integration.configuration["regions"]["eu-west-1"] is True
assert integration.configuration["regions"]["ap-south-1"] is False
integration.save.assert_called_once()
@patch("api.utils.rls_transaction")
@patch("api.utils.Jira")
def test_jira_connection_success_basic_auth(
self, mock_jira_class, mock_rls_transaction
):
integration = MagicMock()
integration.integration_type = Integration.IntegrationChoices.JIRA
integration.tenant_id = "test-tenant-id"
integration.credentials = {
"user_mail": "test@example.com",
"api_token": "test_api_token",
"domain": "example.atlassian.net",
}
integration.configuration = {}
# Mock successful JIRA connection with projects
mock_connection = MagicMock()
mock_connection.is_connected = True
mock_connection.error = None
mock_connection.projects = {"PROJ1": "Project 1", "PROJ2": "Project 2"}
mock_jira_class.test_connection.return_value = mock_connection
# Mock rls_transaction context manager
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
result = prowler_integration_connection_test(integration)
assert result.is_connected is True
assert result.error is None
# Verify JIRA connection was called with correct parameters including domain from credentials
mock_jira_class.test_connection.assert_called_once_with(
user_mail="test@example.com",
api_token="test_api_token",
domain="example.atlassian.net",
raise_on_exception=False,
)
# Verify rls_transaction was called with correct tenant_id
mock_rls_transaction.assert_called_once_with("test-tenant-id")
# Verify projects were saved to integration configuration
assert integration.configuration["projects"] == {
"PROJ1": "Project 1",
"PROJ2": "Project 2",
}
# Verify integration.save() was called
integration.save.assert_called_once()
@patch("api.utils.rls_transaction")
@patch("api.utils.Jira")
def test_jira_connection_failure_invalid_credentials(
self, mock_jira_class, mock_rls_transaction
):
integration = MagicMock()
integration.integration_type = Integration.IntegrationChoices.JIRA
integration.tenant_id = "test-tenant-id"
integration.credentials = {
"user_mail": "invalid@example.com",
"api_token": "invalid_token",
"domain": "invalid.atlassian.net",
}
integration.configuration = {}
# Mock failed JIRA connection
mock_connection = MagicMock()
mock_connection.is_connected = False
mock_connection.error = Exception("Authentication failed: Invalid credentials")
mock_connection.projects = {} # Empty projects when connection fails
mock_jira_class.test_connection.return_value = mock_connection
# Mock rls_transaction context manager
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
result = prowler_integration_connection_test(integration)
assert result.is_connected is False
assert "Authentication failed: Invalid credentials" in str(result.error)
# Verify JIRA connection was called with correct parameters
mock_jira_class.test_connection.assert_called_once_with(
user_mail="invalid@example.com",
api_token="invalid_token",
domain="invalid.atlassian.net",
raise_on_exception=False,
)
# Verify rls_transaction was called even on failure
mock_rls_transaction.assert_called_once_with("test-tenant-id")
# Verify empty projects dict was saved to integration configuration
assert integration.configuration["projects"] == {}
# Verify integration.save() was called even on connection failure
integration.save.assert_called_once()
@patch("api.utils.rls_transaction")
@patch("api.utils.Jira")
def test_jira_connection_projects_update_with_existing_configuration(
self, mock_jira_class, mock_rls_transaction
):
"""Test that projects are properly updated when integration already has configuration data"""
integration = MagicMock()
integration.integration_type = Integration.IntegrationChoices.JIRA
integration.tenant_id = "test-tenant-id"
integration.credentials = {
"user_mail": "test@example.com",
"api_token": "test_api_token",
"domain": "example.atlassian.net",
}
integration.configuration = {
"issue_types": ["Task"], # Existing configuration
"projects": {"OLD_PROJ": "Old Project"}, # Will be overwritten
}
# Mock successful JIRA connection with new projects
mock_connection = MagicMock()
mock_connection.is_connected = True
mock_connection.error = None
mock_connection.projects = {
"NEW_PROJ1": "New Project 1",
"NEW_PROJ2": "New Project 2",
}
mock_jira_class.test_connection.return_value = mock_connection
# Mock rls_transaction context manager
mock_rls_transaction.return_value.__enter__ = MagicMock()
mock_rls_transaction.return_value.__exit__ = MagicMock()
result = prowler_integration_connection_test(integration)
assert result.is_connected is True
assert result.error is None
# Verify projects were updated (old projects replaced with new ones)
assert integration.configuration["projects"] == {
"NEW_PROJ1": "New Project 1",
"NEW_PROJ2": "New Project 2",
}
# Verify other configuration fields were preserved
assert integration.configuration["issue_types"] == ["Task"]
# Verify integration.save() was called
integration.save.assert_called_once()
File diff suppressed because it is too large
+9 -132
View File
@@ -6,18 +6,13 @@ from django.db.models import Subquery
from rest_framework.exceptions import NotFound, ValidationError
from api.db_router import MainRouter
from api.db_utils import rls_transaction
from api.exceptions import InvitationTokenExpiredException
from api.models import Integration, Invitation, Processor, Provider, Resource
from api.models import Invitation, Provider, Resource
from api.v1.serializers import FindingMetadataSerializer
from prowler.lib.outputs.jira.jira import Jira, JiraBasicAuthError
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.common.models import Connection
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.github.github_provider import GithubProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from prowler.providers.m365.m365_provider import M365Provider
@@ -60,21 +55,14 @@ def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:
def return_prowler_provider(
provider: Provider,
) -> [
AwsProvider
| AzureProvider
| GcpProvider
| GithubProvider
| KubernetesProvider
| M365Provider
]:
) -> [AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider]:
"""Return the Prowler provider class based on the given provider type.
Args:
provider (Provider): The provider object containing the provider type and associated secrets.
Returns:
AwsProvider | AzureProvider | GcpProvider | GithubProvider | KubernetesProvider | M365Provider: The corresponding provider class.
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: The corresponding provider class.
Raises:
ValueError: If the provider type specified in `provider.provider` is not supported.
@@ -90,21 +78,16 @@ def return_prowler_provider(
prowler_provider = KubernetesProvider
case Provider.ProviderChoices.M365.value:
prowler_provider = M365Provider
case Provider.ProviderChoices.GITHUB.value:
prowler_provider = GithubProvider
case _:
raise ValueError(f"Provider type {provider.provider} not supported")
return prowler_provider
def get_prowler_provider_kwargs(
provider: Provider, mutelist_processor: Processor | None = None
) -> dict:
def get_prowler_provider_kwargs(provider: Provider) -> dict:
"""Get the Prowler provider kwargs based on the given provider type.
Args:
provider (Provider): The provider object containing the provider type and associated secret.
mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.
Returns:
dict: The provider kwargs for the corresponding provider class.
@@ -122,45 +105,24 @@ def get_prowler_provider_kwargs(
}
elif provider.provider == Provider.ProviderChoices.KUBERNETES.value:
prowler_provider_kwargs = {**prowler_provider_kwargs, "context": provider.uid}
elif provider.provider == Provider.ProviderChoices.GITHUB.value:
if provider.uid:
prowler_provider_kwargs = {
**prowler_provider_kwargs,
"organizations": [provider.uid],
}
if mutelist_processor:
mutelist_content = mutelist_processor.configuration.get("Mutelist", {})
if mutelist_content:
prowler_provider_kwargs["mutelist_content"] = mutelist_content
return prowler_provider_kwargs
def initialize_prowler_provider(
provider: Provider,
mutelist_processor: Processor | None = None,
) -> (
AwsProvider
| AzureProvider
| GcpProvider
| GithubProvider
| KubernetesProvider
| M365Provider
):
) -> AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider:
"""Initialize a Prowler provider instance based on the given provider type.
Args:
provider (Provider): The provider object containing the provider type and associated secrets.
mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.
Returns:
AwsProvider | AzureProvider | GcpProvider | GithubProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
(`AwsProvider`, `AzureProvider`, `GcpProvider`, `GithubProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
AwsProvider | AzureProvider | GcpProvider | KubernetesProvider | M365Provider: An instance of the corresponding provider class
(`AwsProvider`, `AzureProvider`, `GcpProvider`, `KubernetesProvider` or `M365Provider`) initialized with the
provider's secrets.
"""
prowler_provider = return_prowler_provider(provider)
prowler_provider_kwargs = get_prowler_provider_kwargs(provider, mutelist_processor)
prowler_provider_kwargs = get_prowler_provider_kwargs(provider)
return prowler_provider(**prowler_provider_kwargs)
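# A minimal sketch of how these helpers compose; it assumes the provider's stored
# secret already holds valid credentials (the function below is illustrative, not
# part of the API):
def _example_build_provider(provider: Provider):
    provider_class = return_prowler_provider(provider)  # e.g. AwsProvider for an AWS provider
    provider_kwargs = get_prowler_provider_kwargs(provider)  # stored secret plus per-provider extras
    # Equivalent to initialize_prowler_provider(provider)
    return provider_class(**provider_kwargs)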
@@ -185,77 +147,6 @@ def prowler_provider_connection_test(provider: Provider) -> Connection:
)
def prowler_integration_connection_test(integration: Integration) -> Connection:
"""Test the connection to a Prowler integration based on the given integration type.
Args:
integration (Integration): The integration object containing the integration type and associated credentials.
Returns:
Connection: A connection object representing the result of the connection test for the specified integration.
"""
if integration.integration_type == Integration.IntegrationChoices.AMAZON_S3:
return S3.test_connection(
**integration.credentials,
bucket_name=integration.configuration["bucket_name"],
raise_on_exception=False,
)
# TODO: The connection tests could be unified across integrations, but that requires
# refactoring to avoid code duplication. The AWS integrations are similar enough that
# SecurityHub and S3 could share a single path with some changes in the SDK.
elif (
integration.integration_type == Integration.IntegrationChoices.AWS_SECURITY_HUB
):
# Get the provider associated with this integration
provider_relationship = integration.integrationproviderrelationship_set.first()
if not provider_relationship:
return Connection(
is_connected=False, error="No provider associated with this integration"
)
credentials = (
integration.credentials
if integration.credentials
else provider_relationship.provider.secret.secret
)
connection = SecurityHub.test_connection(
aws_account_id=provider_relationship.provider.uid,
raise_on_exception=False,
**credentials,
)
# Only save regions if connection is successful
if connection.is_connected:
regions_status = {r: True for r in connection.enabled_regions}
regions_status.update({r: False for r in connection.disabled_regions})
# Save regions information in the integration configuration
integration.configuration["regions"] = regions_status
integration.save()
else:
# Reset regions information if connection fails
integration.configuration["regions"] = {}
integration.save()
return connection
elif integration.integration_type == Integration.IntegrationChoices.JIRA:
jira_connection = Jira.test_connection(
**integration.credentials,
raise_on_exception=False,
)
project_keys = jira_connection.projects if jira_connection.is_connected else {}
with rls_transaction(str(integration.tenant_id)):
integration.configuration["projects"] = project_keys
integration.save()
return jira_connection
elif integration.integration_type == Integration.IntegrationChoices.SLACK:
pass
else:
raise ValueError(
f"Integration type {integration.integration_type} not supported"
)
def validate_invitation(
invitation_token: str, email: str, raise_not_found=False
) -> Invitation:
@@ -296,7 +187,7 @@ def validate_invitation(
# Admin DB connector is used to bypass RLS protection since the invitation belongs to a tenant the user
# is not a member of yet
invitation = Invitation.objects.using(MainRouter.admin_db).get(
token=invitation_token, email__iexact=email
token=invitation_token, email=email
)
except Invitation.DoesNotExist:
if raise_not_found:
@@ -347,17 +238,3 @@ def get_findings_metadata_no_aggregations(tenant_id: str, filtered_queryset):
serializer.is_valid(raise_exception=True)
return serializer.data
def initialize_prowler_integration(integration: Integration) -> Jira:
# TODO Refactor other integrations to use this function
if integration.integration_type == Integration.IntegrationChoices.JIRA:
try:
return Jira(**integration.credentials)
except JiraBasicAuthError as jira_auth_error:
with rls_transaction(str(integration.tenant_id)):
integration.configuration["projects"] = {}
integration.connected = False
integration.connection_last_checked_at = datetime.now(tz=timezone.utc)
integration.save()
raise jira_auth_error
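# Usage sketch (illustrative caller): obtain a JIRA client while surfacing
# authentication failures as "no client available".
def _example_get_jira_client(integration: Integration) -> Jira | None:
    try:
        return initialize_prowler_integration(integration)
    except JiraBasicAuthError:
        # initialize_prowler_integration has already cleared the stored projects
        # and marked the integration as disconnected before re-raising.
        return None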
+2 -203
View File
@@ -1,16 +1,5 @@
from django.urls import reverse
from django_celery_results.models import TaskResult
from rest_framework import status
from rest_framework.response import Response
from api.exceptions import (
TaskFailedException,
TaskInProgressException,
TaskNotFoundException,
)
from api.models import StateChoices, Task
from api.v1.serializers import TaskSerializer
class PaginateByPkMixin:
"""
@@ -24,211 +13,21 @@ class PaginateByPkMixin:
request, # noqa: F841
base_queryset,
manager,
select_related: list | None = None,
prefetch_related: list | None = None,
select_related: list[str] | None = None,
prefetch_related: list[str] | None = None,
) -> Response:
"""
Paginate a queryset by primary key.
This method is useful when you want to paginate a queryset that has been
filtered or annotated in a way that would be lost if you used the default
pagination method.
"""
pk_list = base_queryset.values_list("id", flat=True)
page = self.paginate_queryset(pk_list)
if page is None:
return Response(self.get_serializer(base_queryset, many=True).data)
queryset = manager.filter(id__in=page)
if select_related:
queryset = queryset.select_related(*select_related)
if prefetch_related:
queryset = queryset.prefetch_related(*prefetch_related)
# Optimize tags loading, if applicable
if hasattr(self, "_optimize_tags_loading"):
queryset = self._optimize_tags_loading(queryset)
queryset = sorted(queryset, key=lambda obj: page.index(obj.id))
serialized = self.get_serializer(queryset, many=True).data
return self.get_paginated_response(serialized)
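# A minimal usage sketch of this mixin; the viewset name, the inline import and
# the "tags" prefetch are illustrative, not part of the API code:
from rest_framework import viewsets  # inline import for the sketch only

class _ExamplePaginatedViewSet(PaginateByPkMixin, viewsets.ModelViewSet):
    def list(self, request, *args, **kwargs):
        base_queryset = self.filter_queryset(self.get_queryset())
        return self.paginate_by_pk(
            request,
            base_queryset,
            manager=base_queryset.model.objects,
            prefetch_related=["tags"],
        )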
class TaskManagementMixin:
"""
Mixin to manage task status checking.
This mixin provides functionality to check if a task with specific parameters
is running, completed, failed, or doesn't exist. It returns the task when running
and raises specific exceptions for failed/not found scenarios that can be handled
at the view level.
"""
def check_task_status(
self,
task_name: str,
task_kwargs: dict,
raise_on_failed: bool = True,
raise_on_not_found: bool = True,
) -> Task | None:
"""
Check the status of a task with given name and kwargs.
This method first checks for a related Task object, and if not found,
checks TaskResult directly. If a TaskResult is found and running but
there's no related Task, it raises TaskInProgressException.
Args:
task_name (str): The name of the task to check
task_kwargs (dict): The kwargs to match against the task
raise_on_failed (bool): Whether to raise exception if task failed
raise_on_not_found (bool): Whether to raise exception if task not found
Returns:
Task | None: The task instance if found (regardless of state), None if not found and raise_on_not_found=False
Raises:
TaskFailedException: If task failed and raise_on_failed=True
TaskNotFoundException: If task not found and raise_on_not_found=True
TaskInProgressException: If task is running but no related Task object exists
"""
# First, try to find a Task object with related TaskResult
try:
# Build the filter for task kwargs
task_filter = {
"task_runner_task__task_name": task_name,
}
# Add kwargs filters: match the given values against the serialized task kwargs
# (the same lookup key is reused, so only the last value is effectively applied)
for key, value in task_kwargs.items():
task_filter["task_runner_task__task_kwargs__contains"] = str(value)
task = (
Task.objects.filter(**task_filter)
.select_related("task_runner_task")
.order_by("-inserted_at")
.first()
)
if task:
# Get task state using the same logic as TaskSerializer
task_state_mapping = {
"PENDING": StateChoices.AVAILABLE,
"STARTED": StateChoices.EXECUTING,
"PROGRESS": StateChoices.EXECUTING,
"SUCCESS": StateChoices.COMPLETED,
"FAILURE": StateChoices.FAILED,
"REVOKED": StateChoices.CANCELLED,
}
celery_status = (
task.task_runner_task.status if task.task_runner_task else None
)
task_state = task_state_mapping.get(
celery_status or "", StateChoices.AVAILABLE
)
# Check task state and raise exceptions accordingly
if task_state in (StateChoices.FAILED, StateChoices.CANCELLED):
if raise_on_failed:
raise TaskFailedException(task=task)
return task
elif task_state == StateChoices.COMPLETED:
return None
return task
except Task.DoesNotExist:
pass
# If no Task found, check TaskResult directly
try:
# Build the filter for TaskResult
task_result_filter = {
"task_name": task_name,
}
# Add kwargs filters: match the given values against the serialized task kwargs
# (as above, only the last value is effectively applied)
for key, value in task_kwargs.items():
task_result_filter["task_kwargs__contains"] = str(value)
task_result = (
TaskResult.objects.filter(**task_result_filter)
.order_by("-date_created")
.first()
)
if task_result:
# Check if the TaskResult indicates a running task
if task_result.status in ["PENDING", "STARTED", "PROGRESS"]:
# Task is running but no related Task object exists
raise TaskInProgressException(task_result=task_result)
elif task_result.status == "FAILURE":
if raise_on_failed:
raise TaskFailedException(task=None)
# For other statuses (SUCCESS, REVOKED), we don't have a Task to return,
# so we treat it as not found
except TaskResult.DoesNotExist:
pass
# No task found at all
if raise_on_not_found:
raise TaskNotFoundException()
return None
def get_task_response_if_running(
self,
task_name: str,
task_kwargs: dict,
raise_on_failed: bool = True,
raise_on_not_found: bool = True,
) -> Response | None:
"""
Get a 202 response with task details if the task is currently running.
This method is useful for endpoints that should return task status when
a background task is in progress, similar to the compliance overview endpoints.
Args:
task_name (str): The name of the task to check
task_kwargs (dict): The kwargs to match against the task
Returns:
Response | None: 202 response with task details if running, None otherwise
"""
task = self.check_task_status(
task_name=task_name,
task_kwargs=task_kwargs,
raise_on_failed=raise_on_failed,
raise_on_not_found=raise_on_not_found,
)
if not task:
return None
# Get task state
task_state_mapping = {
"PENDING": StateChoices.AVAILABLE,
"STARTED": StateChoices.EXECUTING,
"PROGRESS": StateChoices.EXECUTING,
"SUCCESS": StateChoices.COMPLETED,
"FAILURE": StateChoices.FAILED,
"REVOKED": StateChoices.CANCELLED,
}
celery_status = task.task_runner_task.status if task.task_runner_task else None
task_state = task_state_mapping.get(celery_status or "", StateChoices.AVAILABLE)
if task_state == StateChoices.EXECUTING:
self.response_serializer_class = TaskSerializer
serializer = TaskSerializer(task)
return Response(
data=serializer.data,
status=status.HTTP_202_ACCEPTED,
headers={
"Content-Location": reverse("task-detail", kwargs={"pk": task.id})
},
)
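# Usage sketch (illustrative view, hypothetical task name): return 202 while a
# background task is still executing, otherwise fall through to the normal response.
class _ExampleTaskAwareView(TaskManagementMixin):
    def _overview_response(self, scan_id):
        running = self.get_task_response_if_running(
            task_name="perform_scan_summary_task",
            task_kwargs={"scan_id": str(scan_id)},
            raise_on_not_found=False,
        )
        if running is not None:
            return running  # 202 Accepted with the serialized task
        return Response({"detail": "ready"}, status=status.HTTP_200_OK)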
@@ -1,23 +0,0 @@
import yaml
from rest_framework_json_api import serializers
from rest_framework_json_api.serializers import ValidationError
class BaseValidateSerializer(serializers.Serializer):
def validate(self, data):
if hasattr(self, "initial_data"):
initial_data = set(self.initial_data.keys()) - {"id", "type"}
unknown_keys = initial_data - set(self.fields.keys())
if unknown_keys:
raise ValidationError(f"Invalid fields: {unknown_keys}")
return data
class YamlOrJsonField(serializers.JSONField):
def to_internal_value(self, data):
if isinstance(data, str):
try:
data = yaml.safe_load(data)
except yaml.YAMLError as exc:
raise serializers.ValidationError("Invalid YAML format") from exc
return super().to_internal_value(data)
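# Behavior sketch; the serializer below is illustrative. A YAML string and an
# already-parsed mapping both validate to the same Python structure:
class _ExampleConfigSerializer(serializers.Serializer):
    configuration = YamlOrJsonField()

# _ExampleConfigSerializer(data={"configuration": "Mutelist:\n  Accounts: {}"}).is_valid()
# _ExampleConfigSerializer(data={"configuration": {"Mutelist": {"Accounts": {}}}}).is_valid()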
@@ -1,78 +1,24 @@
import os
import re
from drf_spectacular.utils import extend_schema_field
from rest_framework_json_api import serializers
from rest_framework_json_api.serializers import ValidationError
from api.v1.serializer_utils.base import BaseValidateSerializer
class BaseValidateSerializer(serializers.Serializer):
def validate(self, data):
if hasattr(self, "initial_data"):
initial_data = set(self.initial_data.keys()) - {"id", "type"}
unknown_keys = initial_data - set(self.fields.keys())
if unknown_keys:
raise ValidationError(f"Invalid fields: {unknown_keys}")
return data
# Integrations
class S3ConfigSerializer(BaseValidateSerializer):
bucket_name = serializers.CharField()
output_directory = serializers.CharField(allow_blank=True)
def validate_output_directory(self, value):
"""
Validate the output_directory field to ensure it's a properly formatted path.
Prevents paths with excessive slashes like "///////test".
If empty, sets a default value.
"""
# If empty or None, set default value
if not value:
return "output"
# Normalize the path to remove excessive slashes
normalized_path = os.path.normpath(value)
# Remove leading slashes for S3 paths
if normalized_path.startswith("/"):
normalized_path = normalized_path.lstrip("/")
# Check for invalid characters or patterns
if re.search(r'[<>:"|?*]', normalized_path):
raise serializers.ValidationError(
'Output directory contains invalid characters. Avoid: < > : " | ? *'
)
# Check for empty path after normalization
if not normalized_path or normalized_path == ".":
raise serializers.ValidationError(
"Output directory cannot be empty or just '.' or '/'."
)
# Check for paths that are too long (S3 key limit is 1024 characters, leave some room for filename)
if len(normalized_path) > 900:
raise serializers.ValidationError(
"Output directory path is too long (max 900 characters)."
)
return normalized_path
class Meta:
resource_name = "integrations"
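# Quick illustration of the normalization above (values are examples):
#   S3ConfigSerializer().validate_output_directory("///reports//2024/") -> "reports/2024"
#   S3ConfigSerializer().validate_output_directory("")                  -> "output"
#   S3ConfigSerializer().validate_output_directory(".")                 -> raises ValidationError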
class SecurityHubConfigSerializer(BaseValidateSerializer):
send_only_fails = serializers.BooleanField(default=False)
archive_previous_findings = serializers.BooleanField(default=False)
regions = serializers.DictField(default=dict, read_only=True)
def to_internal_value(self, data):
validated_data = super().to_internal_value(data)
# Always initialize regions as empty dict
validated_data["regions"] = {}
return validated_data
class Meta:
resource_name = "integrations"
class JiraConfigSerializer(BaseValidateSerializer):
domain = serializers.CharField(read_only=True)
issue_types = serializers.ListField(
read_only=True, child=serializers.CharField(), default=["Task"]
)
projects = serializers.DictField(read_only=True)
output_directory = serializers.CharField()
class Meta:
resource_name = "integrations"
@@ -93,15 +39,6 @@ class AWSCredentialSerializer(BaseValidateSerializer):
resource_name = "integrations"
class JiraCredentialSerializer(BaseValidateSerializer):
user_mail = serializers.EmailField(required=True)
api_token = serializers.CharField(required=True)
domain = serializers.CharField(required=True)
class Meta:
resource_name = "integrations"
@extend_schema_field(
{
"oneOf": [
@@ -153,27 +90,6 @@ class JiraCredentialSerializer(BaseValidateSerializer):
},
},
},
{
"type": "object",
"title": "JIRA Credentials",
"properties": {
"user_mail": {
"type": "string",
"format": "email",
"description": "The email address of the JIRA user account.",
},
"api_token": {
"type": "string",
"description": "The API token for authentication with JIRA. This can be generated from your "
"Atlassian account settings.",
},
"domain": {
"type": "string",
"description": "The JIRA domain/instance URL (e.g., 'your-domain.atlassian.net').",
},
},
"required": ["user_mail", "api_token", "domain"],
},
]
}
)
@@ -194,40 +110,10 @@ class IntegrationCredentialField(serializers.JSONField):
},
"output_directory": {
"type": "string",
"description": "The directory path within the bucket where files will be saved. Optional - "
'defaults to "output" if not provided. Path will be normalized to remove '
'excessive slashes and invalid characters are not allowed (< > : " | ? *). '
"Maximum length is 900 characters.",
"maxLength": 900,
"pattern": '^[^<>:"|?*]+$',
"default": "output",
"description": "The directory path within the bucket where files will be saved.",
},
},
"required": ["bucket_name"],
},
{
"type": "object",
"title": "AWS Security Hub",
"properties": {
"send_only_fails": {
"type": "boolean",
"default": False,
"description": "If true, only findings with status 'FAIL' will be sent to Security Hub.",
},
"archive_previous_findings": {
"type": "boolean",
"default": False,
"description": "If true, archives findings that are not present in the current execution.",
},
},
},
{
"type": "object",
"title": "JIRA",
"description": "JIRA integration does not accept any configuration in the payload. Leave it as an "
"empty JSON object (`{}`).",
"properties": {},
"additionalProperties": False,
"required": ["bucket_name", "output_directory"],
},
]
}
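Illustrative payloads (not part of the diff) matching the three configuration shapes documented in the schema above; bucket and directory values are placeholders.

s3_configuration = {"bucket_name": "my-findings-bucket", "output_directory": "output"}
security_hub_configuration = {"send_only_fails": True, "archive_previous_findings": False}
jira_configuration = {}  # JIRA accepts no configuration in the payload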
@@ -1,21 +0,0 @@
from drf_spectacular.utils import extend_schema_field
from api.v1.serializer_utils.base import YamlOrJsonField
from prowler.lib.mutelist.mutelist import mutelist_schema
@extend_schema_field(
{
"oneOf": [
{
"type": "object",
"title": "Mutelist",
"properties": {"Mutelist": mutelist_schema},
"additionalProperties": False,
},
]
}
)
class ProcessorConfigField(YamlOrJsonField):
pass
@@ -176,43 +176,6 @@ from rest_framework_json_api import serializers
},
"required": ["kubeconfig_content"],
},
{
"type": "object",
"title": "GitHub Personal Access Token",
"properties": {
"personal_access_token": {
"type": "string",
"description": "GitHub personal access token for authentication.",
}
},
"required": ["personal_access_token"],
},
{
"type": "object",
"title": "GitHub OAuth App Token",
"properties": {
"oauth_app_token": {
"type": "string",
"description": "GitHub OAuth App token for authentication.",
}
},
"required": ["oauth_app_token"],
},
{
"type": "object",
"title": "GitHub App Credentials",
"properties": {
"github_app_id": {
"type": "integer",
"description": "GitHub App ID for authentication.",
},
"github_app_key": {
"type": "string",
"description": "Path to the GitHub App private key file.",
},
},
"required": ["github_app_id", "github_app_key"],
},
]
}
)
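Illustrative payloads (not part of the diff) matching the GitHub credential shapes described in the schema above; token and path values are placeholders.

github_pat_credentials = {"personal_access_token": "<personal-access-token>"}
github_oauth_credentials = {"oauth_app_token": "<oauth-app-token>"}
github_app_credentials = {"github_app_id": 123456, "github_app_key": "/path/to/private-key.pem"}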
+134 -587
@@ -7,24 +7,20 @@ from django.contrib.auth.models import update_last_login
from django.contrib.auth.password_validation import validate_password
from drf_spectacular.utils import extend_schema_field
from jwt.exceptions import InvalidKeyError
from rest_framework.validators import UniqueTogetherValidator
from rest_framework_json_api import serializers
from rest_framework_json_api.relations import SerializerMethodResourceRelatedField
from rest_framework_json_api.serializers import ValidationError
from rest_framework_simplejwt.exceptions import TokenError
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer
from rest_framework_simplejwt.tokens import RefreshToken
from api.exceptions import ConflictException
from api.models import (
ComplianceOverview,
Finding,
Integration,
IntegrationProviderRelationship,
Invitation,
InvitationRoleRelationship,
LighthouseConfiguration,
Membership,
Processor,
Provider,
ProviderGroup,
ProviderGroupMembership,
@@ -33,10 +29,8 @@ from api.models import (
ResourceTag,
Role,
RoleProviderGroupRelationship,
SAMLConfiguration,
Scan,
StateChoices,
StatusChoices,
Task,
User,
UserRoleRelationship,
@@ -46,14 +40,9 @@ from api.v1.serializer_utils.integrations import (
AWSCredentialSerializer,
IntegrationConfigField,
IntegrationCredentialField,
JiraConfigSerializer,
JiraCredentialSerializer,
S3ConfigSerializer,
SecurityHubConfigSerializer,
)
from api.v1.serializer_utils.processors import ProcessorConfigField
from api.v1.serializer_utils.providers import ProviderSecretField
from prowler.lib.mutelist.mutelist import Mutelist
# Tokens
@@ -139,12 +128,6 @@ class TokenSerializer(BaseTokenSerializer):
class TokenSocialLoginSerializer(BaseTokenSerializer):
email = serializers.EmailField(write_only=True)
tenant_id = serializers.UUIDField(
write_only=True,
required=False,
help_text="If not provided, the tenant ID of the first membership that was added"
" to the user will be used.",
)
# Output tokens
refresh = serializers.CharField(read_only=True)
@@ -866,7 +849,6 @@ class ScanSerializer(RLSSerializer):
"completed_at",
"scheduled_at",
"next_scan_at",
"processor",
"url",
]
@@ -1004,12 +986,8 @@ class ResourceSerializer(RLSSerializer):
tags = serializers.SerializerMethodField()
type_ = serializers.CharField(read_only=True)
failed_findings_count = serializers.IntegerField(read_only=True)
findings = SerializerMethodResourceRelatedField(
many=True,
read_only=True,
)
findings = serializers.ResourceRelatedField(many=True, read_only=True)
class Meta:
model = Resource
@@ -1025,7 +1003,6 @@ class ResourceSerializer(RLSSerializer):
"tags",
"provider",
"findings",
"failed_findings_count",
"url",
]
extra_kwargs = {
@@ -1035,8 +1012,8 @@ class ResourceSerializer(RLSSerializer):
}
included_serializers = {
"findings": "api.v1.serializers.FindingIncludeSerializer",
"provider": "api.v1.serializers.ProviderIncludeSerializer",
"findings": "api.v1.serializers.FindingSerializer",
"provider": "api.v1.serializers.ProviderSerializer",
}
@extend_schema_field(
@@ -1047,10 +1024,6 @@ class ResourceSerializer(RLSSerializer):
}
)
def get_tags(self, obj):
# Use prefetched tags if available to avoid N+1 queries
if hasattr(obj, "prefetched_tags"):
return {tag.key: tag.value for tag in obj.prefetched_tags}
# Fallback to the original method if prefetch is not available
return obj.get_tags(self.context.get("tenant_id"))
def get_fields(self):
@@ -1060,17 +1033,10 @@ class ResourceSerializer(RLSSerializer):
fields["type"] = type_
return fields
def get_findings(self, obj):
return (
obj.latest_findings
if hasattr(obj, "latest_findings")
else obj.findings.all()
)
class ResourceIncludeSerializer(RLSSerializer):
"""
Serializer for the included Resource model.
Serializer for the Resource model.
"""
tags = serializers.SerializerMethodField()
@@ -1103,10 +1069,6 @@ class ResourceIncludeSerializer(RLSSerializer):
}
)
def get_tags(self, obj):
# Use prefetched tags if available to avoid N+1 queries
if hasattr(obj, "prefetched_tags"):
return {tag.key: tag.value for tag in obj.prefetched_tags}
# Fallback to the original method if prefetch is not available
return obj.get_tags(self.context.get("tenant_id"))
def get_fields(self):
@@ -1117,17 +1079,6 @@ class ResourceIncludeSerializer(RLSSerializer):
return fields
class ResourceMetadataSerializer(serializers.Serializer):
services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
types = serializers.ListField(child=serializers.CharField(), allow_empty=True)
# Temporarily disabled until we implement tag filtering in the UI
# tags = serializers.JSONField(help_text="Tags are described as key-value pairs.")
class Meta:
resource_name = "resources-metadata"
class FindingSerializer(RLSSerializer):
"""
Serializer for the Finding model.
@@ -1151,7 +1102,6 @@ class FindingSerializer(RLSSerializer):
"updated_at",
"first_seen_at",
"muted",
"muted_reason",
"url",
# Relationships
"scan",
@@ -1164,28 +1114,6 @@ class FindingSerializer(RLSSerializer):
}
class FindingIncludeSerializer(RLSSerializer):
"""
Serializer for the included Finding model.
"""
class Meta:
model = Finding
fields = [
"id",
"uid",
"status",
"severity",
"check_id",
"check_metadata",
"inserted_at",
"updated_at",
"first_seen_at",
"muted",
"muted_reason",
]
# To be removed when the related endpoint is removed as well
class FindingDynamicFilterSerializer(serializers.Serializer):
services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
@@ -1221,8 +1149,6 @@ class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
serializer = AzureProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.GCP.value:
serializer = GCPProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.GITHUB.value:
serializer = GithubProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.KUBERNETES.value:
serializer = KubernetesProviderSecret(data=secret)
elif provider_type == Provider.ProviderChoices.M365.value:
@@ -1272,8 +1198,8 @@ class M365ProviderSecret(serializers.Serializer):
client_id = serializers.CharField()
client_secret = serializers.CharField()
tenant_id = serializers.CharField()
user = serializers.EmailField(required=False)
password = serializers.CharField(required=False)
user = serializers.EmailField()
password = serializers.CharField()
class Meta:
resource_name = "provider-secrets"
@@ -1302,16 +1228,6 @@ class KubernetesProviderSecret(serializers.Serializer):
resource_name = "provider-secrets"
class GithubProviderSecret(serializers.Serializer):
personal_access_token = serializers.CharField(required=False)
oauth_app_token = serializers.CharField(required=False)
github_app_id = serializers.IntegerField(required=False)
github_app_key_content = serializers.CharField(required=False)
class Meta:
resource_name = "provider-secrets"
class AWSRoleAssumptionProviderSecret(serializers.Serializer):
role_arn = serializers.CharField()
external_id = serializers.CharField()
@@ -1391,13 +1307,12 @@ class ProviderSecretUpdateSerializer(BaseWriteProviderSecretSerializer):
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
"provider": {"read_only": True},
"secret_type": {"required": False},
"secret_type": {"read_only": True},
}
def validate(self, attrs):
provider = self.instance.provider
# To allow updating a secret with the same type without making the `secret_type` mandatory
secret_type = attrs.get("secret_type") or self.instance.secret_type
secret_type = self.instance.secret_type
secret = attrs.get("secret")
validated_attrs = super().validate(attrs)
@@ -1764,63 +1679,130 @@ class RoleProviderGroupRelationshipSerializer(RLSSerializer, BaseWriteSerializer
# Compliance overview
class ComplianceOverviewSerializer(serializers.Serializer):
class ComplianceOverviewSerializer(RLSSerializer):
"""
Serializer for compliance requirement status aggregated by compliance framework.
This serializer is used to format aggregated compliance framework data,
providing counts of passed, failed, and manual requirements along with
an overall global status for each framework.
Serializer for the ComplianceOverview model.
"""
# Add ID field which will be used for resource identification
id = serializers.CharField()
framework = serializers.CharField()
version = serializers.CharField()
requirements_passed = serializers.IntegerField()
requirements_failed = serializers.IntegerField()
requirements_manual = serializers.IntegerField()
total_requirements = serializers.IntegerField()
requirements_status = serializers.SerializerMethodField(
read_only=True, method_name="get_requirements_status"
)
provider_type = serializers.SerializerMethodField(read_only=True)
class JSONAPIMeta:
resource_name = "compliance-overviews"
class Meta:
model = ComplianceOverview
fields = [
"id",
"inserted_at",
"compliance_id",
"framework",
"version",
"requirements_status",
"region",
"provider_type",
"scan",
"url",
]
@extend_schema_field(
{
"type": "object",
"properties": {
"passed": {"type": "integer"},
"failed": {"type": "integer"},
"manual": {"type": "integer"},
"total": {"type": "integer"},
},
}
)
def get_requirements_status(self, obj):
return {
"passed": obj.requirements_passed,
"failed": obj.requirements_failed,
"manual": obj.requirements_manual,
"total": obj.total_requirements,
}
@extend_schema_field(serializers.CharField(allow_null=True))
def get_provider_type(self, obj):
"""
Retrieves the provider_type from scan.provider.provider_type.
"""
try:
return obj.scan.provider.provider
except AttributeError:
return None
class ComplianceOverviewDetailSerializer(serializers.Serializer):
"""
Serializer for detailed compliance requirement information.
class ComplianceOverviewFullSerializer(ComplianceOverviewSerializer):
requirements = serializers.SerializerMethodField(read_only=True)
This serializer formats the aggregated requirement data, showing detailed status
and counts for each requirement across all regions.
"""
class Meta(ComplianceOverviewSerializer.Meta):
fields = ComplianceOverviewSerializer.Meta.fields + [
"description",
"requirements",
]
id = serializers.CharField()
framework = serializers.CharField()
version = serializers.CharField()
description = serializers.CharField()
status = serializers.ChoiceField(choices=StatusChoices.choices)
class JSONAPIMeta:
resource_name = "compliance-requirements-details"
class ComplianceOverviewAttributesSerializer(serializers.Serializer):
id = serializers.CharField()
framework_description = serializers.CharField()
name = serializers.CharField()
framework = serializers.CharField()
version = serializers.CharField()
description = serializers.CharField()
attributes = serializers.JSONField()
class JSONAPIMeta:
resource_name = "compliance-requirements-attributes"
@extend_schema_field(
{
"type": "object",
"properties": {
"requirement_id": {
"type": "object",
"properties": {
"name": {"type": "string"},
"checks": {
"type": "object",
"properties": {
"check_name": {
"type": "object",
"properties": {
"status": {
"type": "string",
"enum": ["PASS", "FAIL", None],
},
},
}
},
"description": "Each key in the 'checks' object is a check name, with values as "
"'PASS', 'FAIL', or null.",
},
"status": {
"type": "string",
"enum": ["PASS", "FAIL", "MANUAL"],
},
"attributes": {
"type": "array",
"items": {
"type": "object",
},
},
"description": {"type": "string"},
"checks_status": {
"type": "object",
"properties": {
"total": {"type": "integer"},
"pass": {"type": "integer"},
"fail": {"type": "integer"},
"manual": {"type": "integer"},
},
},
},
}
},
}
)
def get_requirements(self, obj):
"""
Returns the detailed structure of requirements.
"""
return obj.requirements
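An illustrative value (not part of the diff) shaped like the requirements schema documented above; the requirement ID, check names, and attribute content are placeholders.

example_requirements = {
    "requirement_1": {
        "name": "Example requirement name",
        "description": "Example requirement description",
        "status": "FAIL",
        "attributes": [{"Section": "Example section"}],
        "checks": {"example_check_pass": "PASS", "example_check_fail": "FAIL"},
        "checks_status": {"total": 2, "pass": 1, "fail": 1, "manual": 0},
    }
}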
class ComplianceOverviewMetadataSerializer(serializers.Serializer):
regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
class JSONAPIMeta:
class Meta:
resource_name = "compliance-overviews-metadata"
@@ -1954,62 +1936,6 @@ class ScheduleDailyCreateSerializer(serializers.Serializer):
class BaseWriteIntegrationSerializer(BaseWriteSerializer):
def validate(self, attrs):
integration_type = attrs.get("integration_type")
if (
integration_type == Integration.IntegrationChoices.AMAZON_S3
and Integration.objects.filter(
configuration=attrs.get("configuration")
).exists()
):
raise ConflictException(
detail="This integration already exists.",
pointer="/data/attributes/configuration",
)
if (
integration_type == Integration.IntegrationChoices.JIRA
and Integration.objects.filter(
configuration__contains={
"domain": attrs.get("configuration").get("domain")
}
).exists()
):
raise ConflictException(
detail="This integration already exists.",
pointer="/data/attributes/configuration",
)
# Check if any provider already has a SecurityHub integration
if hasattr(self, "instance") and self.instance and not integration_type:
integration_type = self.instance.integration_type
if (
integration_type == Integration.IntegrationChoices.AWS_SECURITY_HUB
and "providers" in attrs
):
providers = attrs.get("providers", [])
tenant_id = self.context.get("tenant_id")
for provider in providers:
# For updates, exclude the current instance from the check
query = IntegrationProviderRelationship.objects.filter(
provider=provider,
integration__integration_type=Integration.IntegrationChoices.AWS_SECURITY_HUB,
tenant_id=tenant_id,
)
if hasattr(self, "instance") and self.instance:
query = query.exclude(integration=self.instance)
if query.exists():
raise ConflictException(
detail=f"Provider {provider.id} already has a Security Hub integration. Only one "
"Security Hub integration is allowed per provider.",
pointer="/data/relationships/providers",
)
return super().validate(attrs)
@staticmethod
def validate_integration_data(
integration_type: str,
@@ -2017,49 +1943,17 @@ class BaseWriteIntegrationSerializer(BaseWriteSerializer):
configuration: dict,
credentials: dict,
):
if integration_type == Integration.IntegrationChoices.AMAZON_S3:
if integration_type == Integration.IntegrationChoices.S3:
config_serializer = S3ConfigSerializer
credentials_serializers = [AWSCredentialSerializer]
elif integration_type == Integration.IntegrationChoices.AWS_SECURITY_HUB:
if providers:
if len(providers) > 1:
raise serializers.ValidationError(
{
"providers": "Only one provider is supported for the Security Hub integration."
}
)
if providers[0].provider != Provider.ProviderChoices.AWS:
raise serializers.ValidationError(
{
"providers": "The provider must be AWS type for the Security Hub integration."
}
)
config_serializer = SecurityHubConfigSerializer
credentials_serializers = [AWSCredentialSerializer]
elif integration_type == Integration.IntegrationChoices.JIRA:
if providers:
raise serializers.ValidationError(
{
"providers": "Relationship field is not accepted. This integration applies to all providers."
}
)
if configuration:
raise serializers.ValidationError(
{
"configuration": "This integration does not support custom configuration."
}
)
config_serializer = JiraConfigSerializer
# Create non-editable configuration for JIRA integration
default_jira_issue_types = ["Task"]
configuration.update(
{
"projects": {},
"issue_types": default_jira_issue_types,
"domain": credentials.get("domain"),
}
)
credentials_serializers = [JiraCredentialSerializer]
# TODO: This will be required for AWS Security Hub
# if providers and not all(
# provider.provider == Provider.ProviderChoices.AWS
# for provider in providers
# ):
# raise serializers.ValidationError(
# {"providers": "All providers must be AWS for the S3 integration."}
# )
else:
raise serializers.ValidationError(
{
@@ -2067,11 +1961,7 @@ class BaseWriteIntegrationSerializer(BaseWriteSerializer):
}
)
serializer_instance = config_serializer(data=configuration)
serializer_instance.is_valid(raise_exception=True)
# Apply the validated (and potentially transformed) data back to configuration
configuration.update(serializer_instance.validated_data)
config_serializer(data=configuration).is_valid(raise_exception=True)
for cred_serializer in credentials_serializers:
try:
@@ -2123,10 +2013,6 @@ class IntegrationSerializer(RLSSerializer):
for provider in representation["providers"]
if provider["id"] in allowed_provider_ids
]
if instance.integration_type == Integration.IntegrationChoices.JIRA:
representation["configuration"].update(
{"domain": instance.credentials.get("domain")}
)
return representation
@@ -2154,6 +2040,7 @@ class IntegrationCreateSerializer(BaseWriteIntegrationSerializer):
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
"connected": {"read_only": True},
"enabled": {"read_only": True},
"connection_last_checked_at": {"read_only": True},
}
@@ -2163,18 +2050,10 @@ class IntegrationCreateSerializer(BaseWriteIntegrationSerializer):
configuration = attrs.get("configuration")
credentials = attrs.get("credentials")
if (
not providers
and integration_type == Integration.IntegrationChoices.AWS_SECURITY_HUB
):
raise serializers.ValidationError(
{"providers": "At least one provider is required for this integration."}
)
validated_attrs = super().validate(attrs)
self.validate_integration_data(
integration_type, providers, configuration, credentials
)
validated_attrs = super().validate(attrs)
return validated_attrs
def create(self, validated_data):
@@ -2227,16 +2106,13 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
def validate(self, attrs):
integration_type = self.instance.integration_type
providers = attrs.get("providers")
if integration_type != Integration.IntegrationChoices.JIRA:
configuration = attrs.get("configuration") or self.instance.configuration
else:
configuration = attrs.get("configuration", {})
configuration = attrs.get("configuration") or self.instance.configuration
credentials = attrs.get("credentials") or self.instance.credentials
validated_attrs = super().validate(attrs)
self.validate_integration_data(
integration_type, providers, configuration, credentials
)
validated_attrs = super().validate(attrs)
return validated_attrs
def update(self, instance, validated_data):
@@ -2251,333 +2127,4 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
]
IntegrationProviderRelationship.objects.bulk_create(new_relationships)
# Preserve regions field for Security Hub integrations
if instance.integration_type == Integration.IntegrationChoices.AWS_SECURITY_HUB:
if "configuration" in validated_data:
# Preserve the existing regions field if it exists
existing_regions = instance.configuration.get("regions", {})
validated_data["configuration"]["regions"] = existing_regions
return super().update(instance, validated_data)
def to_representation(self, instance):
representation = super().to_representation(instance)
# Ensure JIRA integrations show updated domain in configuration from credentials
if instance.integration_type == Integration.IntegrationChoices.JIRA:
representation["configuration"].update(
{"domain": instance.credentials.get("domain")}
)
return representation
class IntegrationJiraDispatchSerializer(serializers.Serializer):
"""
Serializer for dispatching findings to JIRA integration.
"""
project_key = serializers.CharField(required=True)
issue_type = serializers.ChoiceField(required=True, choices=["Task"])
class JSONAPIMeta:
resource_name = "integrations-jira-dispatches"
def validate(self, attrs):
validated_attrs = super().validate(attrs)
integration_instance = Integration.objects.get(
id=self.context.get("integration_id")
)
if integration_instance.integration_type != Integration.IntegrationChoices.JIRA:
raise ValidationError(
{"integration_type": "The given integration is not a JIRA integration"}
)
if not integration_instance.enabled:
raise ValidationError(
{"integration": "The given integration is not enabled"}
)
project_key = attrs.get("project_key")
if project_key not in integration_instance.configuration.get("projects", {}):
raise ValidationError(
{
"project_key": "The given project key is not available for this JIRA integration. Refresh the "
"connection if this is an error."
}
)
return validated_attrs
# Processors
class ProcessorSerializer(RLSSerializer):
"""
Serializer for the Processor model.
"""
configuration = ProcessorConfigField()
class Meta:
model = Processor
fields = [
"id",
"inserted_at",
"updated_at",
"processor_type",
"configuration",
"url",
]
class ProcessorCreateSerializer(RLSSerializer, BaseWriteSerializer):
configuration = ProcessorConfigField(required=True)
class Meta:
model = Processor
fields = [
"inserted_at",
"updated_at",
"processor_type",
"configuration",
]
extra_kwargs = {
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
}
validators = [
UniqueTogetherValidator(
queryset=Processor.objects.all(),
fields=["processor_type"],
message="A processor with the same type already exists.",
)
]
def validate(self, attrs):
validated_attrs = super().validate(attrs)
self.validate_processor_data(attrs)
return validated_attrs
def validate_processor_data(self, attrs):
processor_type = attrs.get("processor_type")
configuration = attrs.get("configuration")
if processor_type == "mutelist":
self.validate_mutelist_configuration(configuration)
def validate_mutelist_configuration(self, configuration):
if not isinstance(configuration, dict):
raise serializers.ValidationError("Invalid Mutelist configuration.")
mutelist_configuration = configuration.get("Mutelist", {})
if not mutelist_configuration:
raise serializers.ValidationError(
"Invalid Mutelist configuration: 'Mutelist' is a required property."
)
try:
Mutelist.validate_mutelist(mutelist_configuration, raise_on_exception=True)
return
except Exception as error:
raise serializers.ValidationError(
f"Invalid Mutelist configuration: {error}"
)
class ProcessorUpdateSerializer(BaseWriteSerializer):
configuration = ProcessorConfigField(required=True)
class Meta:
model = Processor
fields = [
"inserted_at",
"updated_at",
"configuration",
]
extra_kwargs = {
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
}
def validate(self, attrs):
validated_attrs = super().validate(attrs)
self.validate_processor_data(attrs)
return validated_attrs
def validate_processor_data(self, attrs):
processor_type = self.instance.processor_type
configuration = attrs.get("configuration")
if processor_type == "mutelist":
self.validate_mutelist_configuration(configuration)
def validate_mutelist_configuration(self, configuration):
if not isinstance(configuration, dict):
raise serializers.ValidationError("Invalid Mutelist configuration.")
mutelist_configuration = configuration.get("Mutelist", {})
if not mutelist_configuration:
raise serializers.ValidationError(
"Invalid Mutelist configuration: 'Mutelist' is a required property."
)
try:
Mutelist.validate_mutelist(mutelist_configuration, raise_on_exception=True)
return
except Exception as error:
raise serializers.ValidationError(
f"Invalid Mutelist configuration: {error}"
)
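An illustrative configuration (not part of the diff) with the required top-level "Mutelist" key, mirroring the shape used in the processor test fixture later in this diff; account and check names are placeholders.

example_mutelist_configuration = {
    "Mutelist": {
        "Accounts": {
            "*": {
                "Checks": {
                    "iam_user_hardware_mfa_enabled": {
                        "Regions": ["*"],
                        "Resources": ["*"],
                    }
                }
            }
        }
    }
}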
# SSO
class SamlInitiateSerializer(serializers.Serializer):
email_domain = serializers.CharField()
class JSONAPIMeta:
resource_name = "saml-initiate"
class SamlMetadataSerializer(serializers.Serializer):
class JSONAPIMeta:
resource_name = "saml-meta"
class SAMLConfigurationSerializer(RLSSerializer):
class Meta:
model = SAMLConfiguration
fields = ["id", "email_domain", "metadata_xml", "created_at", "updated_at"]
read_only_fields = ["id", "created_at", "updated_at"]
class LighthouseConfigSerializer(RLSSerializer):
"""
Serializer for the LighthouseConfig model.
"""
api_key = serializers.CharField(required=False)
class Meta:
model = LighthouseConfiguration
fields = [
"id",
"name",
"api_key",
"model",
"temperature",
"max_tokens",
"business_context",
"is_active",
"inserted_at",
"updated_at",
"url",
]
extra_kwargs = {
"id": {"read_only": True},
"is_active": {"read_only": True},
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
}
def to_representation(self, instance):
data = super().to_representation(instance)
# Check if api_key is specifically requested in fields param
fields_param = self.context.get("request", None) and self.context[
"request"
].query_params.get("fields[lighthouse-config]", "")
if fields_param == "api_key":
# Return decrypted key if specifically requested
data["api_key"] = instance.api_key_decoded if instance.api_key else None
else:
# Return masked key for general requests
data["api_key"] = "*" * len(instance.api_key) if instance.api_key else None
return data
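A standalone sketch (not part of the diff) of the masking rule applied in to_representation above: the decrypted key is returned only when fields[lighthouse-config]=api_key is requested, otherwise the stored key is masked; the helper name and key values are illustrative.

def _represent_api_key(stored_key, decoded_key, return_decoded: bool):
    # No key stored: nothing to show either way.
    if not stored_key:
        return None
    # Caller explicitly asked for the key via the sparse-fields parameter.
    if return_decoded:
        return decoded_key
    # Default: mask the stored value character for character.
    return "*" * len(stored_key)

# _represent_api_key("sk-abc123", "sk-abc123", return_decoded=False)  -> "*********"
# _represent_api_key(None, None, return_decoded=True)                 -> None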
class LighthouseConfigCreateSerializer(RLSSerializer, BaseWriteSerializer):
"""Serializer for creating new Lighthouse configurations."""
api_key = serializers.CharField(write_only=True, required=True)
class Meta:
model = LighthouseConfiguration
fields = [
"id",
"name",
"api_key",
"model",
"temperature",
"max_tokens",
"business_context",
"is_active",
"inserted_at",
"updated_at",
]
extra_kwargs = {
"id": {"read_only": True},
"is_active": {"read_only": True},
"inserted_at": {"read_only": True},
"updated_at": {"read_only": True},
}
def validate(self, attrs):
tenant_id = self.context.get("request").tenant_id
if LighthouseConfiguration.objects.filter(tenant_id=tenant_id).exists():
raise serializers.ValidationError(
{
"tenant_id": "Lighthouse configuration already exists for this tenant."
}
)
return super().validate(attrs)
def create(self, validated_data):
api_key = validated_data.pop("api_key")
instance = super().create(validated_data)
instance.api_key_decoded = api_key
instance.save()
return instance
def to_representation(self, instance):
data = super().to_representation(instance)
# Always mask the API key in the response
data["api_key"] = "*" * len(instance.api_key) if instance.api_key else None
return data
class LighthouseConfigUpdateSerializer(BaseWriteSerializer):
"""
Serializer for updating LighthouseConfig instances.
"""
api_key = serializers.CharField(write_only=True, required=False)
class Meta:
model = LighthouseConfiguration
fields = [
"id",
"name",
"api_key",
"model",
"temperature",
"max_tokens",
"business_context",
"is_active",
]
extra_kwargs = {
"id": {"read_only": True},
"is_active": {"read_only": True},
"name": {"required": False},
"model": {"required": False},
"temperature": {"required": False},
"max_tokens": {"required": False},
}
def update(self, instance, validated_data):
api_key = validated_data.pop("api_key", None)
instance = super().update(instance, validated_data)
if api_key:
instance.api_key_decoded = api_key
instance.save()
return instance
-54
@@ -1,25 +1,20 @@
from allauth.socialaccount.providers.saml.views import ACSView, MetadataView, SLSView
from django.urls import include, path
from drf_spectacular.views import SpectacularRedocView
from rest_framework_nested import routers
from api.v1.views import (
ComplianceOverviewViewSet,
CustomSAMLLoginView,
CustomTokenObtainView,
CustomTokenRefreshView,
CustomTokenSwitchTenantView,
FindingViewSet,
GithubSocialLoginView,
GoogleSocialLoginView,
IntegrationJiraViewSet,
IntegrationViewSet,
InvitationAcceptViewSet,
InvitationViewSet,
LighthouseConfigViewSet,
MembershipViewSet,
OverviewViewSet,
ProcessorViewSet,
ProviderGroupProvidersRelationshipView,
ProviderGroupViewSet,
ProviderSecretViewSet,
@@ -27,14 +22,10 @@ from api.v1.views import (
ResourceViewSet,
RoleProviderGroupRelationshipView,
RoleViewSet,
SAMLConfigurationViewSet,
SAMLInitiateAPIView,
SAMLTokenValidateView,
ScanViewSet,
ScheduleViewSet,
SchemaView,
TaskViewSet,
TenantFinishACSView,
TenantMembersViewSet,
TenantViewSet,
UserRoleRelationshipView,
@@ -58,13 +49,6 @@ router.register(
router.register(r"overviews", OverviewViewSet, basename="overview")
router.register(r"schedules", ScheduleViewSet, basename="schedule")
router.register(r"integrations", IntegrationViewSet, basename="integration")
router.register(r"processors", ProcessorViewSet, basename="processor")
router.register(r"saml-config", SAMLConfigurationViewSet, basename="saml-config")
router.register(
r"lighthouse-configurations",
LighthouseConfigViewSet,
basename="lighthouseconfiguration",
)
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
tenants_router.register(
@@ -74,13 +58,6 @@ tenants_router.register(
users_router = routers.NestedSimpleRouter(router, r"users", lookup="user")
users_router.register(r"memberships", MembershipViewSet, basename="user-membership")
integrations_router = routers.NestedSimpleRouter(
router, r"integrations", lookup="integration"
)
integrations_router.register(
r"jira", IntegrationJiraViewSet, basename="integration-jira"
)
urlpatterns = [
path("tokens", CustomTokenObtainView.as_view(), name="token-obtain"),
path("tokens/refresh", CustomTokenRefreshView.as_view(), name="token-refresh"),
@@ -135,42 +112,11 @@ urlpatterns = [
),
name="provider_group-providers-relationship",
),
# API endpoint to start SAML SSO flow
path(
"auth/saml/initiate/", SAMLInitiateAPIView.as_view(), name="api_saml_initiate"
),
path(
"accounts/saml/<organization_slug>/login/",
CustomSAMLLoginView.as_view(),
name="saml_login",
),
path(
"accounts/saml/<organization_slug>/acs/",
ACSView.as_view(),
name="saml_acs",
),
path(
"accounts/saml/<organization_slug>/acs/finish/",
TenantFinishACSView.as_view(),
name="saml_finish_acs",
),
path(
"accounts/saml/<organization_slug>/sls/",
SLSView.as_view(),
name="saml_sls",
),
path(
"accounts/saml/<organization_slug>/metadata/",
MetadataView.as_view(),
name="saml_metadata",
),
path("tokens/saml", SAMLTokenValidateView.as_view(), name="token-saml"),
path("tokens/google", GoogleSocialLoginView.as_view(), name="token-google"),
path("tokens/github", GithubSocialLoginView.as_view(), name="token-github"),
path("", include(router.urls)),
path("", include(tenants_router.urls)),
path("", include(users_router.urls)),
path("", include(integrations_router.urls)),
path("schema", SchemaView.as_view(), name="schema"),
path("docs", SpectacularRedocView.as_view(url_name="schema"), name="docs"),
]
File diff suppressed because it is too large
-88
@@ -1,5 +1,3 @@
import string
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _
@@ -22,89 +20,3 @@ class MaximumLengthValidator:
return _(
f"Your password must contain no more than {self.max_length} characters."
)
class SpecialCharactersValidator:
def __init__(self, special_characters=None, min_special_characters=1):
# Use string.punctuation if no custom characters provided
self.special_characters = special_characters or string.punctuation
self.min_special_characters = min_special_characters
def validate(self, password, user=None):
if (
sum(1 for char in password if char in self.special_characters)
< self.min_special_characters
):
raise ValidationError(
_("This password must contain at least one special character."),
code="password_no_special_characters",
params={
"special_characters": self.special_characters,
"min_special_characters": self.min_special_characters,
},
)
def get_help_text(self):
return _(
f"Your password must contain at least one special character from: {self.special_characters}"
)
class UppercaseValidator:
def __init__(self, min_uppercase=1):
self.min_uppercase = min_uppercase
def validate(self, password, user=None):
if sum(1 for char in password if char.isupper()) < self.min_uppercase:
raise ValidationError(
_(
"This password must contain at least %(min_uppercase)d uppercase letter."
),
code="password_no_uppercase_letters",
params={"min_uppercase": self.min_uppercase},
)
def get_help_text(self):
return _(
f"Your password must contain at least {self.min_uppercase} uppercase letter."
)
class LowercaseValidator:
def __init__(self, min_lowercase=1):
self.min_lowercase = min_lowercase
def validate(self, password, user=None):
if sum(1 for char in password if char.islower()) < self.min_lowercase:
raise ValidationError(
_(
"This password must contain at least %(min_lowercase)d lowercase letter."
),
code="password_no_lowercase_letters",
params={"min_lowercase": self.min_lowercase},
)
def get_help_text(self):
return _(
f"Your password must contain at least {self.min_lowercase} lowercase letter."
)
class NumericValidator:
def __init__(self, min_numeric=1):
self.min_numeric = min_numeric
def validate(self, password, user=None):
if sum(1 for char in password if char.isdigit()) < self.min_numeric:
raise ValidationError(
_(
"This password must contain at least %(min_numeric)d numeric character."
),
code="password_no_numeric_characters",
params={"min_numeric": self.min_numeric},
)
def get_help_text(self):
return _(
f"Your password must contain at least {self.min_numeric} numeric character."
)
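A runnable sketch (not part of the diff) of the character-counting checks the validators above perform, written as plain functions; thresholds and sample passwords are illustrative.

import string

def meets_special_minimum(password: str, minimum: int = 1) -> bool:
    # Counts characters drawn from string.punctuation, as SpecialCharactersValidator does by default.
    return sum(1 for char in password if char in string.punctuation) >= minimum

def meets_uppercase_minimum(password: str, minimum: int = 1) -> bool:
    return sum(1 for char in password if char.isupper()) >= minimum

def meets_numeric_minimum(password: str, minimum: int = 1) -> bool:
    return sum(1 for char in password if char.isdigit()) >= minimum

# meets_special_minimum("Str0ng!pass")   -> True
# meets_uppercase_minimum("alllower1!")  -> False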
-7
@@ -1,13 +1,6 @@
import warnings
from celery import Celery, Task
from config.env import env
# Suppress specific warnings from django-rest-auth: https://github.com/iMerica/dj-rest-auth/issues/684
warnings.filterwarnings(
"ignore", category=UserWarning, module="dj_rest_auth.registration.serializers"
)
BROKER_VISIBILITY_TIMEOUT = env.int("DJANGO_BROKER_VISIBILITY_TIMEOUT", default=86400)
celery_app = Celery("tasks")
-43
@@ -10,8 +10,6 @@ from config.settings.social_login import * # noqa
SECRET_KEY = env("SECRET_KEY", default="secret")
DEBUG = env.bool("DJANGO_DEBUG", default=False)
ALLOWED_HOSTS = ["localhost", "127.0.0.1"]
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
USE_X_FORWARDED_HOST = True
# Application definition
@@ -28,19 +26,16 @@ INSTALLED_APPS = [
"rest_framework",
"corsheaders",
"drf_spectacular",
"drf_spectacular_jsonapi",
"django_guid",
"rest_framework_json_api",
"django_celery_results",
"django_celery_beat",
"rest_framework_simplejwt.token_blacklist",
"allauth",
"django.contrib.sites",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"allauth.socialaccount.providers.github",
"allauth.socialaccount.providers.saml",
"dj_rest_auth.registration",
"rest_framework.authtoken",
]
@@ -108,13 +103,6 @@ REST_FRAMEWORK = {
),
"TEST_REQUEST_DEFAULT_FORMAT": "vnd.api+json",
"JSON_API_UNIFORM_EXCEPTIONS": True,
"DEFAULT_THROTTLE_CLASSES": [
"rest_framework.throttling.ScopedRateThrottle",
],
"DEFAULT_THROTTLE_RATES": {
"token-obtain": env("DJANGO_THROTTLE_TOKEN_OBTAIN", default=None),
"dj_rest_auth": None,
},
}
SPECTACULAR_SETTINGS = {
@@ -123,9 +111,6 @@ SPECTACULAR_SETTINGS = {
"PREPROCESSING_HOOKS": [
"drf_spectacular_jsonapi.hooks.fix_nested_path_parameters",
],
"POSTPROCESSING_HOOKS": [
"api.schema_hooks.attach_task_202_examples",
],
"TITLE": "API Reference - Prowler",
}
@@ -169,30 +154,6 @@ AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
{
"NAME": "api.validators.SpecialCharactersValidator",
"OPTIONS": {
"min_special_characters": 1,
},
},
{
"NAME": "api.validators.UppercaseValidator",
"OPTIONS": {
"min_uppercase": 1,
},
},
{
"NAME": "api.validators.LowercaseValidator",
"OPTIONS": {
"min_lowercase": 1,
},
},
{
"NAME": "api.validators.NumericValidator",
"OPTIONS": {
"min_numeric": 1,
},
},
]
SIMPLE_JWT = {
@@ -283,7 +244,3 @@ X_FRAME_OPTIONS = "DENY"
SECURE_REFERRER_POLICY = "strict-origin-when-cross-origin"
DJANGO_DELETION_BATCH_SIZE = env.int("DJANGO_DELETION_BATCH_SIZE", 5000)
# SAML requirement
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
+6 -19
@@ -4,7 +4,6 @@ from config.env import env
IGNORED_EXCEPTIONS = [
# Provider is not connected due to credentials errors
"is not connected",
"ProviderConnectionError",
# Authentication Errors from AWS
"InvalidToken",
"AccessDeniedException",
@@ -17,7 +16,7 @@ IGNORED_EXCEPTIONS = [
"InternalServerErrorException",
"AccessDenied",
"No Shodan API Key", # Shodan Check
"RequestLimitExceeded", # For now, we don't want to log the RequestLimitExceeded errors
"RequestLimitExceeded", # For now we don't want to log the RequestLimitExceeded errors
"ThrottlingException",
"Rate exceeded",
"SubscriptionRequiredException",
@@ -43,9 +42,7 @@ IGNORED_EXCEPTIONS = [
"AWSAccessKeyIDInvalidError",
"AWSSessionTokenExpiredError",
"EndpointConnectionError", # AWS Service is not available in a region
# The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an
# unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
"Pool is closed",
"Pool is closed", # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
# Authentication Errors from GCP
"ClientAuthenticationError",
"AuthorizationFailed",
@@ -69,15 +66,12 @@ IGNORED_EXCEPTIONS = [
"AzureClientIdAndClientSecretNotBelongingToTenantIdError",
"AzureHTTPResponseError",
"Error with credentials provided",
# PowerShell Errors in User Authentication
"Microsoft Teams User Auth connection failed: Please check your permissions and try again.",
"Exchange Online User Auth connection failed: Please check your permissions and try again.",
]
def before_send(event, hint):
"""
before_send handles the Sentry events in order to send them or not
before_send handles the Sentry events in order to sent them or not
"""
# Ignore logs with the ignored_exceptions
# https://docs.python.org/3/library/logging.html#logrecord-objects
@@ -85,16 +79,9 @@ def before_send(event, hint):
log_msg = hint["log_record"].msg
log_lvl = hint["log_record"].levelno
# Handle Error and Critical events and discard the rest
if log_lvl <= 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
return None # Explicitly return None to drop the event
# Ignore exceptions with the ignored_exceptions
if "exc_info" in hint and hint["exc_info"]:
exc_value = str(hint["exc_info"][1])
if any(ignored in exc_value for ignored in IGNORED_EXCEPTIONS):
return None # Explicitly return None to drop the event
# Handle Error events and discard the rest
if log_lvl == 40 and any(ignored in log_msg for ignored in IGNORED_EXCEPTIONS):
return
return event
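An illustrative check (not part of the diff) of how a log message is screened against IGNORED_EXCEPTIONS before an event is sent to Sentry; the list and message below are truncated placeholders.

ignored_exceptions = ["AccessDenied", "Rate exceeded"]
log_msg = "An error occurred (AccessDenied) when calling the ListBuckets operation"
drop_event = any(ignored in log_msg for ignored in ignored_exceptions)
# drop_event -> True, so before_send would discard the event instead of forwarding it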
@@ -11,7 +11,8 @@ GITHUB_OAUTH_CALLBACK_URL = env("SOCIAL_GITHUB_OAUTH_CALLBACK_URL", default="")
# Allauth settings
ACCOUNT_LOGIN_METHODS = {"email"} # Use Email / Password authentication
ACCOUNT_SIGNUP_FIELDS = ["email*", "password1*", "password2*"]
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "none" # Do not require email confirmation
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
REST_AUTH = {
@@ -24,20 +25,6 @@ SOCIALACCOUNT_EMAIL_AUTHENTICATION = True
# Connect local account and social account if local account with that email address already exists
SOCIALACCOUNT_EMAIL_AUTHENTICATION_AUTO_CONNECT = True
SOCIALACCOUNT_ADAPTER = "api.adapters.ProwlerSocialAccountAdapter"
# def inline(pem: str) -> str:
# return "".join(
# line.strip()
# for line in pem.splitlines()
# if "CERTIFICATE" not in line and "KEY" not in line
# )
# # SAML keys (TODO: Validate certificates)
# SAML_PUBLIC_CERT = inline(env("SAML_PUBLIC_CERT", default=""))
# SAML_PRIVATE_KEY = inline(env("SAML_PRIVATE_KEY", default=""))
SOCIALACCOUNT_PROVIDERS = {
"google": {
"APP": {
@@ -63,20 +50,4 @@ SOCIALACCOUNT_PROVIDERS = {
"read:org",
],
},
"saml": {
"use_nameid_for_email": True,
"sp": {
"entity_id": "urn:prowler.com:sp",
},
"advanced": {
# TODO: Validate certificates
# "x509cert": SAML_PUBLIC_CERT,
# "private_key": SAML_PRIVATE_KEY,
# "authn_request_signed": True,
# "want_message_signed": True,
# "want_assertion_signed": True,
"reject_idp_initiated_sso": False,
"name_id_format": "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress",
},
},
}
+3 -299
@@ -1,9 +1,8 @@
import logging
from datetime import datetime, timedelta, timezone
from unittest.mock import MagicMock, patch
from unittest.mock import patch
import pytest
from allauth.socialaccount.models import SocialLogin
from django.conf import settings
from django.db import connection as django_connection
from django.db import connections as django_connections
@@ -16,27 +15,20 @@ from tasks.jobs.backfill import backfill_resource_scan_summaries
from api.db_utils import rls_transaction
from api.models import (
ComplianceOverview,
ComplianceRequirementOverview,
Finding,
Integration,
IntegrationProviderRelationship,
Invitation,
LighthouseConfiguration,
Membership,
Processor,
Provider,
ProviderGroup,
ProviderSecret,
Resource,
ResourceTag,
ResourceTagMapping,
Role,
SAMLConfiguration,
SAMLDomainIndex,
Scan,
ScanSummary,
StateChoices,
StatusChoices,
Task,
User,
UserRoleRelationship,
@@ -46,19 +38,12 @@ from api.v1.serializers import TokenSerializer
from prowler.lib.check.models import Severity
from prowler.lib.outputs.finding import Status
TODAY = str(datetime.today().date())
API_JSON_CONTENT_TYPE = "application/vnd.api+json"
NO_TENANT_HTTP_STATUS = status.HTTP_401_UNAUTHORIZED
TEST_USER = "dev@prowler.com"
TEST_PASSWORD = "testing_psswd"
def today_after_n_days(n_days: int) -> str:
return datetime.strftime(
datetime.today().date() + timedelta(days=n_days), "%Y-%m-%d"
)
@pytest.fixture(scope="module")
def enforce_test_user_db_connection(django_db_setup, django_db_blocker):
"""Ensure tests use the test user for database connections."""
@@ -390,27 +375,8 @@ def providers_fixture(tenants_fixture):
tenant_id=tenant.id,
scanner_args={"key1": "value1", "key2": {"key21": "value21"}},
)
provider6 = Provider.objects.create(
provider="m365",
uid="m365.test.com",
alias="m365_testing",
tenant_id=tenant.id,
)
return provider1, provider2, provider3, provider4, provider5, provider6
@pytest.fixture
def processor_fixture(tenants_fixture):
tenant, *_ = tenants_fixture
processor = Processor.objects.create(
tenant_id=tenant.id,
processor_type="mutelist",
configuration="Mutelist:\n Accounts:\n *:\n Checks:\n iam_user_hardware_mfa_enabled:\n "
" Regions:\n - *\n Resources:\n - *",
)
return processor
return provider1, provider2, provider3, provider4, provider5
@pytest.fixture
@@ -662,7 +628,6 @@ def findings_fixture(scans_fixture, resources_fixture):
check_metadata={
"CheckId": "test_check_id",
"Description": "test description apple sauce",
"servicename": "ec2",
},
first_seen_at="2024-01-02T00:00:00Z",
)
@@ -689,7 +654,6 @@ def findings_fixture(scans_fixture, resources_fixture):
check_metadata={
"CheckId": "test_check_id",
"Description": "test description orange juice",
"servicename": "s3",
},
first_seen_at="2024-01-02T00:00:00Z",
muted=True,
@@ -813,131 +777,6 @@ def compliance_overviews_fixture(scans_fixture, tenants_fixture):
return compliance_overview1, compliance_overview2
@pytest.fixture
def compliance_requirements_overviews_fixture(scans_fixture, tenants_fixture):
"""Fixture for ComplianceRequirementOverview objects used by the new ComplianceOverviewViewSet."""
tenant = tenants_fixture[0]
scan1, scan2, scan3 = scans_fixture
# Create ComplianceRequirementOverview objects for scan1
requirement_overview1 = ComplianceRequirementOverview.objects.create(
tenant=tenant,
scan=scan1,
compliance_id="aws_account_security_onboarding_aws",
framework="AWS-Account-Security-Onboarding",
version="1.0",
description="Description for AWS Account Security Onboarding",
region="eu-west-1",
requirement_id="requirement1",
requirement_status=StatusChoices.PASS,
passed_checks=2,
failed_checks=0,
total_checks=2,
)
requirement_overview2 = ComplianceRequirementOverview.objects.create(
tenant=tenant,
scan=scan1,
compliance_id="aws_account_security_onboarding_aws",
framework="AWS-Account-Security-Onboarding",
version="1.0",
description="Description for AWS Account Security Onboarding",
region="eu-west-1",
requirement_id="requirement2",
requirement_status=StatusChoices.PASS,
passed_checks=2,
failed_checks=0,
total_checks=2,
)
requirement_overview3 = ComplianceRequirementOverview.objects.create(
tenant=tenant,
scan=scan1,
compliance_id="aws_account_security_onboarding_aws",
framework="AWS-Account-Security-Onboarding",
version="1.0",
description="Description for AWS Account Security Onboarding",
region="eu-west-2",
requirement_id="requirement1",
requirement_status=StatusChoices.PASS,
passed_checks=2,
failed_checks=0,
total_checks=2,
)
requirement_overview4 = ComplianceRequirementOverview.objects.create(
tenant=tenant,
scan=scan1,
compliance_id="aws_account_security_onboarding_aws",
framework="AWS-Account-Security-Onboarding",
version="1.0",
description="Description for AWS Account Security Onboarding",
region="eu-west-2",
requirement_id="requirement2",
requirement_status=StatusChoices.FAIL,
passed_checks=1,
failed_checks=1,
total_checks=2,
)
requirement_overview5 = ComplianceRequirementOverview.objects.create(
tenant=tenant,
scan=scan1,
compliance_id="aws_account_security_onboarding_aws",
framework="AWS-Account-Security-Onboarding",
version="1.0",
description="Description for AWS Account Security Onboarding (MANUAL)",
region="eu-west-2",
requirement_id="requirement3",
requirement_status=StatusChoices.MANUAL,
passed_checks=0,
failed_checks=0,
total_checks=0,
)
# Create a different compliance framework for testing
requirement_overview6 = ComplianceRequirementOverview.objects.create(
tenant=tenant,
scan=scan1,
compliance_id="cis_1.4_aws",
framework="CIS-1.4-AWS",
version="1.4",
description="CIS AWS Foundations Benchmark v1.4.0",
region="eu-west-1",
requirement_id="cis_requirement1",
requirement_status=StatusChoices.FAIL,
passed_checks=0,
failed_checks=3,
total_checks=3,
)
# Create another compliance framework for testing MITRE ATT&CK
requirement_overview7 = ComplianceRequirementOverview.objects.create(
tenant=tenant,
scan=scan1,
compliance_id="mitre_attack_aws",
framework="MITRE-ATTACK",
version="1.0",
description="MITRE ATT&CK",
region="eu-west-1",
requirement_id="mitre_requirement1",
requirement_status=StatusChoices.FAIL,
passed_checks=0,
failed_checks=0,
total_checks=0,
)
return (
requirement_overview1,
requirement_overview2,
requirement_overview3,
requirement_overview4,
requirement_overview5,
requirement_overview6,
requirement_overview7,
)
def get_api_tokens(
api_client, user_email: str, user_password: str, tenant_id: str = None
) -> tuple[str, str]:
@@ -1065,7 +904,7 @@ def integrations_fixture(providers_fixture):
enabled=True,
connected=True,
integration_type="amazon_s3",
configuration={"key": "value1"},
configuration={"key": "value"},
credentials={"psswd": "1234"},
)
IntegrationProviderRelationship.objects.create(
@@ -1090,20 +929,6 @@ def backfill_scan_metadata_fixture(scans_fixture, findings_fixture):
backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)
@pytest.fixture
def lighthouse_config_fixture(authenticated_client, tenants_fixture):
return LighthouseConfiguration.objects.create(
tenant_id=tenants_fixture[0].id,
name="OpenAI",
api_key_decoded="sk-test1234567890T3BlbkFJtest1234567890",
model="gpt-4o",
temperature=0,
max_tokens=4000,
business_context="Test business context",
is_active=True,
)
@pytest.fixture(scope="function")
def latest_scan_finding(authenticated_client, providers_fixture, resources_fixture):
provider = providers_fixture[0]
@@ -1145,127 +970,6 @@ def latest_scan_finding(authenticated_client, providers_fixture, resources_fixtu
return finding
@pytest.fixture(scope="function")
def latest_scan_resource(authenticated_client, providers_fixture):
provider = providers_fixture[0]
tenant_id = str(providers_fixture[0].tenant_id)
scan = Scan.objects.create(
name="latest completed scan for resource",
provider=provider,
trigger=Scan.TriggerChoices.MANUAL,
state=StateChoices.COMPLETED,
tenant_id=tenant_id,
)
resource = Resource.objects.create(
tenant_id=tenant_id,
provider=provider,
uid="latest_resource_uid",
name="Latest Resource",
region="us-east-1",
service="ec2",
type="instance",
metadata='{"test": "metadata"}',
details='{"test": "details"}',
)
resource_tag = ResourceTag.objects.create(
tenant_id=tenant_id,
key="environment",
value="test",
)
ResourceTagMapping.objects.create(
tenant_id=tenant_id,
resource=resource,
tag=resource_tag,
)
finding = Finding.objects.create(
tenant_id=tenant_id,
uid="test_finding_uid_latest",
scan=scan,
delta="new",
status=Status.FAIL,
status_extended="test status extended ",
impact=Severity.critical,
impact_extended="test impact extended",
severity=Severity.critical,
raw_result={
"status": Status.FAIL,
"impact": Severity.critical,
"severity": Severity.critical,
},
tags={"test": "latest"},
check_id="test_check_id_latest",
check_metadata={
"CheckId": "test_check_id_latest",
"Description": "test description latest",
},
first_seen_at="2024-01-02T00:00:00Z",
)
finding.add_resources([resource])
backfill_resource_scan_summaries(tenant_id, str(scan.id))
return resource
@pytest.fixture
def saml_setup(tenants_fixture):
tenant_id = tenants_fixture[0].id
domain = "prowler.com"
SAMLDomainIndex.objects.create(email_domain=domain, tenant_id=tenant_id)
metadata_xml = """<?xml version='1.0' encoding='UTF-8'?>
<md:EntityDescriptor entityID='TEST' xmlns:md='urn:oasis:names:tc:SAML:2.0:metadata'>
<md:IDPSSODescriptor WantAuthnRequestsSigned='false' protocolSupportEnumeration='urn:oasis:names:tc:SAML:2.0:protocol'>
<md:KeyDescriptor use='signing'>
<ds:KeyInfo xmlns:ds='http://www.w3.org/2000/09/xmldsig#'>
<ds:X509Data>
<ds:X509Certificate>TEST</ds:X509Certificate>
</ds:X509Data>
</ds:KeyInfo>
</md:KeyDescriptor>
<md:NameIDFormat>urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress</md:NameIDFormat>
<md:SingleSignOnService Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST' Location='https://TEST/sso/saml'/>
<md:SingleSignOnService Binding='urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect' Location='https://TEST/sso/saml'/>
</md:IDPSSODescriptor>
</md:EntityDescriptor>
"""
SAMLConfiguration.objects.create(
tenant_id=str(tenant_id),
email_domain=domain,
metadata_xml=metadata_xml,
)
return {
"email": f"user@{domain}",
"domain": domain,
"tenant_id": tenant_id,
}
@pytest.fixture
def saml_sociallogin(users_fixture):
user = users_fixture[0]
user.email = "samlsso@acme.com"
extra_data = {
"firstName": ["Test"],
"lastName": ["User"],
"organization": ["Prowler"],
"userType": ["member"],
}
account = MagicMock()
account.provider = "saml"
account.extra_data = extra_data
sociallogin = MagicMock(spec=SocialLogin)
sociallogin.account = account
sociallogin.user = user
return sociallogin
def get_authorization_header(access_token: str) -> dict:
return {"Authorization": f"Bearer {access_token}"}

Some files were not shown because too many files have changed in this diff