mirror of
https://github.com/prowler-cloud/prowler.git
synced 2026-05-13 15:50:55 +00:00
Compare commits
161 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 481a2defc9 | |||
| 6716c4ae73 | |||
| 18f3bc098c | |||
| 67b1983d85 | |||
| 0585247890 | |||
| a3db23af7d | |||
| 3eaa21f06f | |||
| 5d5c109067 | |||
| c6cb4e4814 | |||
| ab06a09173 | |||
| 9c6c007f73 | |||
| 206f23b5a5 | |||
| 5c9e9bc86a | |||
| 34554d6123 | |||
| 000cb93157 | |||
| 524209bdf2 | |||
| c4a0da8204 | |||
| f0cba0321c | |||
| 79888c9312 | |||
| a79910a694 | |||
| 4cadee7bb1 | |||
| 756d436a2f | |||
| 5e85ef5835 | |||
| 0fa9e2da6c | |||
| ce7510db28 | |||
| 8e3d50c807 | |||
| d8908d2ccc | |||
| 0b9969a723 | |||
| 985d73f44f | |||
| 1d705e22da | |||
| ca55d4ce86 | |||
| 0201073fcb | |||
| 928c556721 | |||
| a653ad7852 | |||
| a3c811f801 | |||
| c85d3e9188 | |||
| 6f394cf9de | |||
| ba765fa07d | |||
| d928ee442f | |||
| 30ab5f52b9 | |||
| c424707e32 | |||
| 92efbe3926 | |||
| 4a61578dd8 | |||
| ec75b5d0a3 | |||
| db5bab51ae | |||
| be476b732a | |||
| 434b37f758 | |||
| c08c27e5c6 | |||
| 8773751779 | |||
| f70a959a49 | |||
| 20314cad8c | |||
| 564ad56d2f | |||
| b2d91c97d8 | |||
| c232195df4 | |||
| b4b9d800a8 | |||
| fc1d3d4a47 | |||
| d4be0f4d7a | |||
| 305339ffb4 | |||
| 272e4547b2 | |||
| 8c3e1b96f9 | |||
| d496f5a58e | |||
| 5789e87f4f | |||
| 1994750151 | |||
| 27304a8007 | |||
| 9761651f8d | |||
| 406aace585 | |||
| ebd5814112 | |||
| 42e816081e | |||
| 741217ce80 | |||
| 5f9ab68bd9 | |||
| fba2854f65 | |||
| 8794515318 | |||
| 335db928dc | |||
| 046baa8eb9 | |||
| ef60ea99c3 | |||
| 1483efa18e | |||
| b74744b135 | |||
| e80eed6baf | |||
| 1ba22f6f45 | |||
| da6b7b89cb | |||
| cc9aa7f7ee | |||
| ecf749fce8 | |||
| 1a7f52fc9c | |||
| b630234cdf | |||
| d6685eec1f | |||
| 86cff92d1f | |||
| 28e81783ef | |||
| 13266b8743 | |||
| 4e143cf013 | |||
| 5cfe140b7b | |||
| c7d7ec9a3b | |||
| 155a1813cc | |||
| 71e444d4ae | |||
| 42b7f0f1a9 | |||
| 5b3f0fbd7f | |||
| 06eb69e455 | |||
| 338a11eaaf | |||
| 8814a0710a | |||
| 76a55cdb54 | |||
| 736badb284 | |||
| 37f77bb778 | |||
| 7e5e48c588 | |||
| 5f0017046f | |||
| 612d867838 | |||
| 8c2668ebe4 | |||
| be4b1bd99b | |||
| 502525eff1 | |||
| 09b5afe9c3 | |||
| 9a4fc784db | |||
| 04177db648 | |||
| 2408dbf855 | |||
| 9c4a8782e4 | |||
| 0d549ea39e | |||
| 0060081cad | |||
| 0c2d06dd9a | |||
| 14b9be4c47 | |||
| 6bac5650e6 | |||
| 6170462a61 | |||
| 2ad5926b13 | |||
| a6ddc85e4c | |||
| aceff35f29 | |||
| 3ae96c3aa6 | |||
| 0dcaaa9083 | |||
| 323a7f0349 | |||
| 736cbea862 | |||
| d3e290978e | |||
| 9c91cfcb7d | |||
| e279f7fcfd | |||
| a555cffebe | |||
| 49f5435392 | |||
| a087dd9b85 | |||
| 6e89c301b2 | |||
| d5dac448a6 | |||
| 00e6eb35f1 | |||
| cdb455b2b1 | |||
| 837c65ba23 | |||
| 035293b612 | |||
| 250b5df836 | |||
| ec59dbc6ee | |||
| 4d5676f00e | |||
| 2a4b62527a | |||
| ec0341c696 | |||
| 2e5f3a5a66 | |||
| 231a5fab86 | |||
| 10319ea69d | |||
| 53bb5aff22 | |||
| 52a5fff61f | |||
| f28754b883 | |||
| 6fce797ca2 | |||
| a1fd315104 | |||
| a91f0ac8b5 | |||
| 2c96df05f4 | |||
| b57788c7b9 | |||
| 7431bab2a7 | |||
| a52697bfdf | |||
| 9dc2199381 | |||
| 89db760b89 | |||
| 4356c1e186 | |||
| e32cebc553 | |||
| 23e1cc281d | |||
| 48d3fb4fe3 |
@@ -29,6 +29,12 @@ POSTGRES_ADMIN_PASSWORD=postgres
|
||||
POSTGRES_USER=prowler
|
||||
POSTGRES_PASSWORD=postgres
|
||||
POSTGRES_DB=prowler_db
|
||||
# Read replica settings (optional)
|
||||
# POSTGRES_REPLICA_HOST=postgres-db
|
||||
# POSTGRES_REPLICA_PORT=5432
|
||||
# POSTGRES_REPLICA_USER=prowler
|
||||
# POSTGRES_REPLICA_PASSWORD=postgres
|
||||
# POSTGRES_REPLICA_DB=prowler_db
|
||||
|
||||
# Celery-Prowler task settings
|
||||
TASK_RETRY_DELAY_SECONDS=0.1
|
||||
@@ -99,7 +105,7 @@ SENTRY_ENVIRONMENT=local
|
||||
SENTRY_RELEASE=local
|
||||
|
||||
#### Prowler release version ####
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.10.0
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.12.2
|
||||
|
||||
# Social login credentials
|
||||
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
|
||||
|
||||
+27
-5
@@ -1,6 +1,28 @@
|
||||
# SDK
|
||||
/* @prowler-cloud/sdk
|
||||
/.github/ @prowler-cloud/sdk
|
||||
prowler @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
|
||||
tests @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
|
||||
api @prowler-cloud/api
|
||||
ui @prowler-cloud/ui
|
||||
/prowler/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
|
||||
/tests/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
|
||||
/dashboard/ @prowler-cloud/sdk
|
||||
/docs/ @prowler-cloud/sdk
|
||||
/examples/ @prowler-cloud/sdk
|
||||
/util/ @prowler-cloud/sdk
|
||||
/contrib/ @prowler-cloud/sdk
|
||||
/permissions/ @prowler-cloud/sdk
|
||||
/codecov.yml @prowler-cloud/sdk @prowler-cloud/api
|
||||
|
||||
# API
|
||||
/api/ @prowler-cloud/api
|
||||
|
||||
# UI
|
||||
/ui/ @prowler-cloud/ui
|
||||
|
||||
# AI
|
||||
/mcp_server/ @prowler-cloud/ai
|
||||
|
||||
# Platform
|
||||
/.github/ @prowler-cloud/platform
|
||||
/Makefile @prowler-cloud/platform
|
||||
/kubernetes/ @prowler-cloud/platform
|
||||
**/Dockerfile* @prowler-cloud/platform
|
||||
**/docker-compose*.yml @prowler-cloud/platform
|
||||
**/docker-compose*.yaml @prowler-cloud/platform
|
||||
|
||||
@@ -3,6 +3,41 @@ description: Create a report to help us improve
|
||||
labels: ["bug", "status/needs-triage"]
|
||||
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: search
|
||||
attributes:
|
||||
label: Issue search
|
||||
options:
|
||||
- label: I have searched the existing issues and this bug has not been reported yet
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: component
|
||||
attributes:
|
||||
label: Which component is affected?
|
||||
multiple: true
|
||||
options:
|
||||
- Prowler CLI/SDK
|
||||
- Prowler API
|
||||
- Prowler UI
|
||||
- Prowler Dashboard
|
||||
- Prowler MCP Server
|
||||
- Documentation
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: provider
|
||||
attributes:
|
||||
label: Cloud Provider (if applicable)
|
||||
multiple: true
|
||||
options:
|
||||
- AWS
|
||||
- Azure
|
||||
- GCP
|
||||
- Kubernetes
|
||||
- GitHub
|
||||
- Microsoft 365
|
||||
- Not applicable
|
||||
- type: textarea
|
||||
id: reproduce
|
||||
attributes:
|
||||
@@ -78,6 +113,15 @@ body:
|
||||
prowler --version
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: python-version
|
||||
attributes:
|
||||
label: Python version
|
||||
description: Which Python version are you using?
|
||||
placeholder: |-
|
||||
python --version
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: pip-version
|
||||
attributes:
|
||||
|
||||
@@ -1 +1,11 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: 📖 Documentation
|
||||
url: https://docs.prowler.com
|
||||
about: Check our comprehensive documentation for guides and tutorials
|
||||
- name: 💬 GitHub Discussions
|
||||
url: https://github.com/prowler-cloud/prowler/discussions
|
||||
about: Ask questions and discuss with the community
|
||||
- name: 🌟 Prowler Community
|
||||
url: https://goto.prowler.com/slack
|
||||
about: Join our community for support and updates
|
||||
|
||||
@@ -3,6 +3,42 @@ description: Suggest an idea for this project
|
||||
labels: ["feature-request", "status/needs-triage"]
|
||||
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: search
|
||||
attributes:
|
||||
label: Feature search
|
||||
options:
|
||||
- label: I have searched the existing issues and this feature has not been requested yet
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: component
|
||||
attributes:
|
||||
label: Which component would this feature affect?
|
||||
multiple: true
|
||||
options:
|
||||
- Prowler CLI/SDK
|
||||
- Prowler API
|
||||
- Prowler UI
|
||||
- Prowler Dashboard
|
||||
- Prowler MCP Server
|
||||
- Documentation
|
||||
- New component/Integration
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: provider
|
||||
attributes:
|
||||
label: Related to specific cloud provider?
|
||||
multiple: true
|
||||
options:
|
||||
- AWS
|
||||
- Azure
|
||||
- GCP
|
||||
- Kubernetes
|
||||
- GitHub
|
||||
- Microsoft 365
|
||||
- All providers
|
||||
- Not provider-specific
|
||||
- type: textarea
|
||||
id: Problem
|
||||
attributes:
|
||||
@@ -19,6 +55,14 @@ body:
|
||||
description: A clear and concise description of what you want to happen.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: use-case
|
||||
attributes:
|
||||
label: Use case and benefits
|
||||
description: Who would benefit from this feature and how?
|
||||
placeholder: This would help security teams by...
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: Alternatives
|
||||
attributes:
|
||||
|
||||
@@ -0,0 +1,71 @@
|
||||
name: 'Setup Python with Poetry'
|
||||
description: 'Setup Python environment with Poetry and install dependencies'
|
||||
author: 'Prowler'
|
||||
|
||||
inputs:
|
||||
python-version:
|
||||
description: 'Python version to use'
|
||||
required: true
|
||||
working-directory:
|
||||
description: 'Working directory for Poetry'
|
||||
required: false
|
||||
default: '.'
|
||||
poetry-version:
|
||||
description: 'Poetry version to install'
|
||||
required: false
|
||||
default: '2.1.1'
|
||||
install-dependencies:
|
||||
description: 'Install Python dependencies with Poetry'
|
||||
required: false
|
||||
default: 'true'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Replace @master with current branch in pyproject.toml
|
||||
if: github.event_name == 'pull_request' && github.base_ref == 'master'
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: |
|
||||
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
|
||||
echo "Using branch: $BRANCH_NAME"
|
||||
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml
|
||||
|
||||
- name: Install poetry
|
||||
shell: bash
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==${{ inputs.poetry-version }}
|
||||
|
||||
- name: Update SDK resolved_reference to latest commit
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: |
|
||||
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
|
||||
echo "Latest commit hash: $LATEST_COMMIT"
|
||||
sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
|
||||
s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
|
||||
}' poetry.lock
|
||||
echo "Updated resolved_reference:"
|
||||
grep -A2 -B2 "resolved_reference" poetry.lock
|
||||
|
||||
- name: Update poetry.lock
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: poetry lock
|
||||
|
||||
- name: Set up Python ${{ inputs.python-version }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ inputs.python-version }}
|
||||
cache: 'poetry'
|
||||
cache-dependency-path: ${{ inputs.working-directory }}/poetry.lock
|
||||
|
||||
- name: Install Python dependencies
|
||||
if: inputs.install-dependencies == 'true'
|
||||
shell: bash
|
||||
working-directory: ${{ inputs.working-directory }}
|
||||
run: |
|
||||
poetry install --no-root
|
||||
poetry run pip list
|
||||
@@ -0,0 +1,152 @@
|
||||
name: 'Container Security Scan with Trivy'
|
||||
description: 'Scans container images for vulnerabilities using Trivy and reports results'
|
||||
author: 'Prowler'
|
||||
|
||||
inputs:
|
||||
image-name:
|
||||
description: 'Container image name to scan'
|
||||
required: true
|
||||
image-tag:
|
||||
description: 'Container image tag to scan'
|
||||
required: true
|
||||
default: ${{ github.sha }}
|
||||
severity:
|
||||
description: 'Severities to scan for (comma-separated)'
|
||||
required: false
|
||||
default: 'CRITICAL,HIGH,MEDIUM,LOW'
|
||||
fail-on-critical:
|
||||
description: 'Fail the build if critical vulnerabilities are found'
|
||||
required: false
|
||||
default: 'false'
|
||||
upload-sarif:
|
||||
description: 'Upload results to GitHub Security tab'
|
||||
required: false
|
||||
default: 'true'
|
||||
create-pr-comment:
|
||||
description: 'Create a comment on the PR with scan results'
|
||||
required: false
|
||||
default: 'true'
|
||||
artifact-retention-days:
|
||||
description: 'Days to retain the Trivy report artifact'
|
||||
required: false
|
||||
default: '2'
|
||||
|
||||
outputs:
|
||||
critical-count:
|
||||
description: 'Number of critical vulnerabilities found'
|
||||
value: ${{ steps.security-check.outputs.critical }}
|
||||
high-count:
|
||||
description: 'Number of high vulnerabilities found'
|
||||
value: ${{ steps.security-check.outputs.high }}
|
||||
total-count:
|
||||
description: 'Total number of vulnerabilities found'
|
||||
value: ${{ steps.security-check.outputs.total }}
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Run Trivy vulnerability scan (SARIF)
|
||||
if: inputs.upload-sarif == 'true'
|
||||
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
|
||||
with:
|
||||
image-ref: ${{ inputs.image-name }}:${{ inputs.image-tag }}
|
||||
format: 'sarif'
|
||||
output: 'trivy-results.sarif'
|
||||
severity: 'CRITICAL,HIGH'
|
||||
exit-code: '0'
|
||||
|
||||
- name: Upload Trivy results to GitHub Security tab
|
||||
if: inputs.upload-sarif == 'true'
|
||||
uses: github/codeql-action/upload-sarif@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
sarif_file: 'trivy-results.sarif'
|
||||
category: 'trivy-container'
|
||||
|
||||
- name: Run Trivy vulnerability scan (JSON)
|
||||
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
|
||||
with:
|
||||
image-ref: ${{ inputs.image-name }}:${{ inputs.image-tag }}
|
||||
format: 'json'
|
||||
output: 'trivy-report.json'
|
||||
severity: ${{ inputs.severity }}
|
||||
exit-code: '0'
|
||||
|
||||
- name: Upload Trivy report artifact
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
if: always()
|
||||
with:
|
||||
name: trivy-scan-report-${{ inputs.image-name }}
|
||||
path: trivy-report.json
|
||||
retention-days: ${{ inputs.artifact-retention-days }}
|
||||
|
||||
- name: Generate security summary
|
||||
id: security-check
|
||||
shell: bash
|
||||
run: |
|
||||
CRITICAL=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="CRITICAL")] | length' trivy-report.json)
|
||||
HIGH=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="HIGH")] | length' trivy-report.json)
|
||||
TOTAL=$(jq '[.Results[]?.Vulnerabilities[]?] | length' trivy-report.json)
|
||||
|
||||
echo "critical=$CRITICAL" >> $GITHUB_OUTPUT
|
||||
echo "high=$HIGH" >> $GITHUB_OUTPUT
|
||||
echo "total=$TOTAL" >> $GITHUB_OUTPUT
|
||||
|
||||
echo "### 🔒 Container Security Scan" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "**Image:** \`${{ inputs.image-name }}:${{ inputs.image-tag }}\`" >> $GITHUB_STEP_SUMMARY
|
||||
echo "" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- 🔴 Critical: $CRITICAL" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- 🟠 High: $HIGH" >> $GITHUB_STEP_SUMMARY
|
||||
echo "- **Total**: $TOTAL" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
- name: Comment scan results on PR
|
||||
if: inputs.create-pr-comment == 'true' && github.event_name == 'pull_request'
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
IMAGE_NAME: ${{ inputs.image-name }}
|
||||
GITHUB_SHA: ${{ inputs.image-tag }}
|
||||
SEVERITY: ${{ inputs.severity }}
|
||||
with:
|
||||
script: |
|
||||
const comment = require('./.github/scripts/trivy-pr-comment.js');
|
||||
|
||||
// Unique identifier to find our comment
|
||||
const marker = '<!-- trivy-scan-comment:${{ inputs.image-name }} -->';
|
||||
const body = marker + '\n' + comment;
|
||||
|
||||
// Find existing comment
|
||||
const { data: comments } = await github.rest.issues.listComments({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
});
|
||||
|
||||
const existingComment = comments.find(c => c.body?.includes(marker));
|
||||
|
||||
if (existingComment) {
|
||||
// Update existing comment
|
||||
await github.rest.issues.updateComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: existingComment.id,
|
||||
body: body
|
||||
});
|
||||
console.log('✅ Updated existing Trivy scan comment');
|
||||
} else {
|
||||
// Create new comment
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: body
|
||||
});
|
||||
console.log('✅ Created new Trivy scan comment');
|
||||
}
|
||||
|
||||
- name: Check for critical vulnerabilities
|
||||
if: inputs.fail-on-critical == 'true' && steps.security-check.outputs.critical != '0'
|
||||
shell: bash
|
||||
run: |
|
||||
echo "::error::Found ${{ steps.security-check.outputs.critical }} critical vulnerabilities"
|
||||
echo "::warning::Please update packages or use a different base image"
|
||||
exit 1
|
||||
@@ -1,3 +1,12 @@
|
||||
name: "API - CodeQL Config"
|
||||
name: 'API: CodeQL Config'
|
||||
paths:
|
||||
- "api/"
|
||||
- 'api/'
|
||||
|
||||
paths-ignore:
|
||||
- 'api/tests/**'
|
||||
- 'api/**/__pycache__/**'
|
||||
- 'api/**/migrations/**'
|
||||
- 'api/**/*.md'
|
||||
|
||||
queries:
|
||||
- uses: security-and-quality
|
||||
|
||||
@@ -42,6 +42,11 @@ provider/mongodbatlas:
|
||||
- any-glob-to-any-file: "prowler/providers/mongodbatlas/**"
|
||||
- any-glob-to-any-file: "tests/providers/mongodbatlas/**"
|
||||
|
||||
provider/oci:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "prowler/providers/oraclecloud/**"
|
||||
- any-glob-to-any-file: "tests/providers/oraclecloud/**"
|
||||
|
||||
github_actions:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: ".github/workflows/*"
|
||||
|
||||
@@ -0,0 +1,102 @@
|
||||
const fs = require('fs');
|
||||
|
||||
// Configuration from environment variables
|
||||
const REPORT_FILE = process.env.TRIVY_REPORT_FILE || 'trivy-report.json';
|
||||
const IMAGE_NAME = process.env.IMAGE_NAME || 'container-image';
|
||||
const GITHUB_SHA = process.env.GITHUB_SHA || 'unknown';
|
||||
const GITHUB_REPOSITORY = process.env.GITHUB_REPOSITORY || '';
|
||||
const GITHUB_RUN_ID = process.env.GITHUB_RUN_ID || '';
|
||||
const SEVERITY = process.env.SEVERITY || 'CRITICAL,HIGH,MEDIUM,LOW';
|
||||
|
||||
// Parse severities to scan
|
||||
const scannedSeverities = SEVERITY.split(',').map(s => s.trim());
|
||||
|
||||
// Read and parse the Trivy report
|
||||
const report = JSON.parse(fs.readFileSync(REPORT_FILE, 'utf-8'));
|
||||
|
||||
let vulnCount = 0;
|
||||
let vulnsByType = { CRITICAL: 0, HIGH: 0, MEDIUM: 0, LOW: 0 };
|
||||
let affectedPackages = new Set();
|
||||
|
||||
if (report.Results && Array.isArray(report.Results)) {
|
||||
for (const result of report.Results) {
|
||||
if (result.Vulnerabilities && Array.isArray(result.Vulnerabilities)) {
|
||||
for (const vuln of result.Vulnerabilities) {
|
||||
vulnCount++;
|
||||
if (vulnsByType[vuln.Severity] !== undefined) {
|
||||
vulnsByType[vuln.Severity]++;
|
||||
}
|
||||
if (vuln.PkgName) {
|
||||
affectedPackages.add(vuln.PkgName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const shortSha = GITHUB_SHA.substring(0, 7);
|
||||
const timestamp = new Date().toISOString().replace('T', ' ').substring(0, 19) + ' UTC';
|
||||
|
||||
// Severity icons and labels
|
||||
const severityConfig = {
|
||||
CRITICAL: { icon: '🔴', label: 'Critical' },
|
||||
HIGH: { icon: '🟠', label: 'High' },
|
||||
MEDIUM: { icon: '🟡', label: 'Medium' },
|
||||
LOW: { icon: '🔵', label: 'Low' }
|
||||
};
|
||||
|
||||
let comment = '## 🔒 Container Security Scan\n\n';
|
||||
comment += `**Image:** \`${IMAGE_NAME}:${shortSha}\`\n`;
|
||||
comment += `**Last scan:** ${timestamp}\n\n`;
|
||||
|
||||
if (vulnCount === 0) {
|
||||
comment += '### ✅ No Vulnerabilities Detected\n\n';
|
||||
comment += 'The container image passed all security checks. No known CVEs were found.\n';
|
||||
} else {
|
||||
comment += '### 📊 Vulnerability Summary\n\n';
|
||||
comment += '| Severity | Count |\n';
|
||||
comment += '|----------|-------|\n';
|
||||
|
||||
// Only show severities that were scanned
|
||||
for (const severity of scannedSeverities) {
|
||||
const config = severityConfig[severity];
|
||||
const count = vulnsByType[severity] || 0;
|
||||
const isBold = (severity === 'CRITICAL' || severity === 'HIGH') && count > 0;
|
||||
const countDisplay = isBold ? `**${count}**` : count;
|
||||
comment += `| ${config.icon} ${config.label} | ${countDisplay} |\n`;
|
||||
}
|
||||
|
||||
comment += `| **Total** | **${vulnCount}** |\n\n`;
|
||||
|
||||
if (affectedPackages.size > 0) {
|
||||
comment += `**${affectedPackages.size}** package(s) affected\n\n`;
|
||||
}
|
||||
|
||||
if (vulnsByType.CRITICAL > 0) {
|
||||
comment += '### ⚠️ Action Required\n\n';
|
||||
comment += '**Critical severity vulnerabilities detected.** These should be addressed before merging:\n';
|
||||
comment += '- Review the detailed scan results\n';
|
||||
comment += '- Update affected packages to patched versions\n';
|
||||
comment += '- Consider using a different base image if updates are unavailable\n\n';
|
||||
} else if (vulnsByType.HIGH > 0) {
|
||||
comment += '### ⚠️ Attention Needed\n\n';
|
||||
comment += '**High severity vulnerabilities found.** Please review and plan remediation:\n';
|
||||
comment += '- Assess the risk and exploitability\n';
|
||||
comment += '- Prioritize updates in the next maintenance cycle\n\n';
|
||||
} else {
|
||||
comment += '### ℹ️ Review Recommended\n\n';
|
||||
comment += 'Medium/Low severity vulnerabilities found. Consider addressing during regular maintenance.\n\n';
|
||||
}
|
||||
}
|
||||
|
||||
comment += '---\n';
|
||||
comment += '📋 **Resources:**\n';
|
||||
|
||||
if (GITHUB_REPOSITORY && GITHUB_RUN_ID) {
|
||||
comment += `- [Download full report](https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}) (see artifacts)\n`;
|
||||
}
|
||||
|
||||
comment += '- [View in Security tab](https://github.com/' + (GITHUB_REPOSITORY || 'repository') + '/security/code-scanning)\n';
|
||||
comment += '- Scanned with [Trivy](https://github.com/aquasecurity/trivy)\n';
|
||||
|
||||
module.exports = comment;
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
@@ -71,7 +71,7 @@ jobs:
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
@@ -107,7 +107,7 @@ jobs:
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
@@ -1,36 +1,34 @@
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: API - CodeQL
|
||||
name: 'API: CodeQL'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
paths:
|
||||
- "api/**"
|
||||
- 'api/**'
|
||||
- '.github/workflows/api-codeql.yml'
|
||||
- '.github/codeql/api-codeql-config.yml'
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
paths:
|
||||
- "api/**"
|
||||
- 'api/**'
|
||||
- '.github/workflows/api-codeql.yml'
|
||||
- '.github/codeql/api-codeql-config.yml'
|
||||
schedule:
|
||||
- cron: '00 12 * * *'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
name: CodeQL Security Analysis
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
@@ -39,21 +37,20 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'python' ]
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
language:
|
||||
- 'python'
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/api-codeql-config.yml
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/api-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
category: '/language:${{ matrix.language }}'
|
||||
|
||||
@@ -1,20 +1,30 @@
|
||||
name: API - Pull Request
|
||||
name: 'API: Pull Request'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
paths:
|
||||
- ".github/workflows/api-pull-request.yml"
|
||||
- "api/**"
|
||||
- '.github/workflows/api-pull-request.yml'
|
||||
- 'api/**'
|
||||
- '!api/docs/**'
|
||||
- '!api/README.md'
|
||||
- '!api/CHANGELOG.md'
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
paths:
|
||||
- ".github/workflows/api-pull-request.yml"
|
||||
- "api/**"
|
||||
- '.github/workflows/api-pull-request.yml'
|
||||
- 'api/**'
|
||||
- '!api/docs/**'
|
||||
- '!api/README.md'
|
||||
- '!api/CHANGELOG.md'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
POSTGRES_HOST: localhost
|
||||
@@ -29,21 +39,94 @@ env:
|
||||
VALKEY_DB: 0
|
||||
API_WORKING_DIR: ./api
|
||||
IMAGE_NAME: prowler-api
|
||||
IGNORE_FILES: |
|
||||
api/docs/**
|
||||
api/README.md
|
||||
api/CHANGELOG.md
|
||||
|
||||
jobs:
|
||||
test:
|
||||
code-quality:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.12"]
|
||||
python-version:
|
||||
- '3.12'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./api
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
working-directory: ./api
|
||||
|
||||
- name: Poetry check
|
||||
run: poetry check --lock
|
||||
|
||||
- name: Ruff lint
|
||||
run: poetry run ruff check . --exclude contrib
|
||||
|
||||
- name: Ruff format
|
||||
run: poetry run ruff format --check . --exclude contrib
|
||||
|
||||
- name: Pylint
|
||||
run: poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
|
||||
|
||||
security-scans:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
strategy:
|
||||
matrix:
|
||||
python-version:
|
||||
- '3.12'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./api
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
working-directory: ./api
|
||||
|
||||
- name: Bandit
|
||||
run: poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
|
||||
|
||||
- name: Safety
|
||||
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
|
||||
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
|
||||
run: poetry run safety check --ignore 70612,66963,74429,76352,76353,77323,77744,77745
|
||||
|
||||
- name: Vulture
|
||||
run: poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
|
||||
|
||||
tests:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
strategy:
|
||||
matrix:
|
||||
python-version:
|
||||
- '3.12'
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ./api
|
||||
|
||||
# Service containers to run with `test`
|
||||
services:
|
||||
# Label used to access the service container
|
||||
postgres:
|
||||
image: postgres
|
||||
env:
|
||||
@@ -52,7 +135,6 @@ jobs:
|
||||
POSTGRES_USER: ${{ env.POSTGRES_USER }}
|
||||
POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
|
||||
POSTGRES_DB: ${{ env.POSTGRES_DB }}
|
||||
# Set health checks to wait until postgres has started
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
@@ -66,7 +148,6 @@ jobs:
|
||||
VALKEY_HOST: ${{ env.VALKEY_HOST }}
|
||||
VALKEY_PORT: ${{ env.VALKEY_PORT }}
|
||||
VALKEY_DB: ${{ env.VALKEY_DB }}
|
||||
# Set health checks to wait until postgres has started
|
||||
ports:
|
||||
- 6379:6379
|
||||
options: >-
|
||||
@@ -76,164 +157,72 @@ jobs:
|
||||
--health-retries 5
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
api/**
|
||||
.github/workflows/api-pull-request.yml
|
||||
files_ignore: ${{ env.IGNORE_FILES }}
|
||||
|
||||
- name: Replace @master with current branch in pyproject.toml
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
|
||||
echo "Using branch: $BRANCH_NAME"
|
||||
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml
|
||||
|
||||
- name: Install poetry
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Update SDK's poetry.lock resolved_reference to latest commit - Only for push events to `master`
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'push'
|
||||
run: |
|
||||
# Get the latest commit hash from the prowler-cloud/prowler repository
|
||||
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
|
||||
echo "Latest commit hash: $LATEST_COMMIT"
|
||||
|
||||
# Update the resolved_reference specifically for prowler-cloud/prowler repository
|
||||
sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
|
||||
s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
|
||||
}' poetry.lock
|
||||
|
||||
# Verify the change was made
|
||||
echo "Updated resolved_reference:"
|
||||
grep -A2 -B2 "resolved_reference" poetry.lock
|
||||
|
||||
- name: Update poetry.lock
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry lock
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
- name: Setup Python with Poetry
|
||||
uses: ./.github/actions/setup-python-poetry
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
working-directory: ./api
|
||||
|
||||
- name: Install system dependencies for xmlsec
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxml2-dev libxmlsec1-dev libxmlsec1-openssl pkg-config
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry install --no-root
|
||||
poetry run pip list
|
||||
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
|
||||
grep '"tag_name":' | \
|
||||
sed -E 's/.*"v([^"]+)".*/\1/' \
|
||||
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
|
||||
&& chmod +x /tmp/hadolint
|
||||
|
||||
- name: Poetry check
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry check --lock
|
||||
|
||||
- name: Lint with ruff
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run ruff check . --exclude contrib
|
||||
|
||||
- name: Check Format with ruff
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run ruff format --check . --exclude contrib
|
||||
|
||||
- name: Lint with pylint
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
|
||||
|
||||
- name: Bandit
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
|
||||
|
||||
- name: Safety
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
|
||||
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
|
||||
run: |
|
||||
poetry run safety check --ignore 70612,66963,74429,76352,76353,77323,77744,77745
|
||||
|
||||
- name: Vulture
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
|
||||
|
||||
- name: Hadolint
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
/tmp/hadolint Dockerfile --ignore=DL3013
|
||||
|
||||
- name: Test with pytest
|
||||
working-directory: ./api
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest --cov=./src/backend --cov-report=xml src/backend
|
||||
- name: Run tests with pytest
|
||||
run: poetry run pytest --cov=./src/backend --cov-report=xml src/backend
|
||||
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: api
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
dockerfile-lint:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Lint Dockerfile with Hadolint
|
||||
uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 # v3.3.0
|
||||
with:
|
||||
files: api/**
|
||||
files_ignore: ${{ env.IGNORE_FILES }}
|
||||
dockerfile: api/Dockerfile
|
||||
ignore: DL3013
|
||||
|
||||
container-build-and-scan:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
security-events: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
- name: Build Container
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
|
||||
- name: Build container
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.API_WORKING_DIR }}
|
||||
push: false
|
||||
tags: ${{ env.IMAGE_NAME }}:latest
|
||||
outputs: type=docker
|
||||
load: true
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Scan container with Trivy
|
||||
uses: ./.github/actions/trivy-scan
|
||||
with:
|
||||
image-name: ${{ env.IMAGE_NAME }}
|
||||
image-tag: ${{ github.sha }}
|
||||
fail-on-critical: 'false'
|
||||
severity: 'CRITICAL'
|
||||
|
||||
@@ -1,28 +1,35 @@
|
||||
name: Prowler - Automatic Backport
|
||||
name: 'Tools: Backport'
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
branches: ['master']
|
||||
types: ['labeled', 'closed']
|
||||
branches:
|
||||
- 'master'
|
||||
types:
|
||||
- 'labeled'
|
||||
- 'closed'
|
||||
paths:
|
||||
- '.github/workflows/backport.yml'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: false
|
||||
|
||||
env:
|
||||
# The prefix of the label that triggers the backport must not contain the branch name
|
||||
# so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
|
||||
BACKPORT_LABEL_PREFIX: backport-to-
|
||||
BACKPORT_LABEL_IGNORE: was-backported
|
||||
|
||||
jobs:
|
||||
backport:
|
||||
name: Backport PR
|
||||
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
id-token: write
|
||||
pull-requests: write
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Check labels
|
||||
id: preview_label_check
|
||||
id: label_check
|
||||
uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
|
||||
with:
|
||||
allow_failure: true
|
||||
@@ -31,17 +38,17 @@ jobs:
|
||||
none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Backport Action
|
||||
if: steps.preview_label_check.outputs.label_check == 'success'
|
||||
- name: Backport PR
|
||||
if: steps.label_check.outputs.label_check == 'success'
|
||||
uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
|
||||
with:
|
||||
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
|
||||
|
||||
- name: Info log
|
||||
if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
|
||||
- name: Display backport info log
|
||||
if: success() && steps.label_check.outputs.label_check == 'success'
|
||||
run: cat ~/.backport/backport.info.log
|
||||
|
||||
- name: Debug log
|
||||
if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
|
||||
- name: Display backport debug log
|
||||
if: failure() && steps.label_check.outputs.label_check == 'success'
|
||||
run: cat ~/.backport/backport.debug.log
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
name: Prowler - Pull Request Documentation Link
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v3'
|
||||
paths:
|
||||
- 'docs/**'
|
||||
- '.github/workflows/build-documentation-on-pr.yml'
|
||||
|
||||
env:
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
|
||||
jobs:
|
||||
documentation-link:
|
||||
name: Documentation Link
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Find existing documentation comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ env.PR_NUMBER }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: '<!-- prowler-docs-link -->'
|
||||
|
||||
- name: Create or update PR comment with the Prowler Documentation URI
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ env.PR_NUMBER }}
|
||||
body: |
|
||||
<!-- prowler-docs-link -->
|
||||
You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
|
||||
edit-mode: replace
|
||||
@@ -1,23 +1,31 @@
|
||||
name: Prowler - Conventional Commit
|
||||
name: 'Tools: Conventional Commit'
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- "opened"
|
||||
- "edited"
|
||||
- "synchronize"
|
||||
branches:
|
||||
- "master"
|
||||
- "v3"
|
||||
- "v4.*"
|
||||
- "v5.*"
|
||||
- 'master'
|
||||
- 'v3'
|
||||
- 'v4.*'
|
||||
- 'v5.*'
|
||||
types:
|
||||
- 'opened'
|
||||
- 'edited'
|
||||
- 'synchronize'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
conventional-commit-check:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
|
||||
steps:
|
||||
- name: conventional-commit-check
|
||||
id: conventional-commit-check
|
||||
- name: Check PR title format
|
||||
uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
|
||||
with:
|
||||
pr-title-regex: '^([^\s(]+)(?:\(([^)]+)\))?: (.+)'
|
||||
pr-title-regex: '^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert)(\([^)]+\))?!?: .+'
|
||||
|
||||
@@ -1,67 +1,70 @@
|
||||
name: Prowler - Create Backport Label
|
||||
name: 'Tools: Backport Label'
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
types:
|
||||
- 'published'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
|
||||
cancel-in-progress: false
|
||||
|
||||
env:
|
||||
BACKPORT_LABEL_PREFIX: backport-to-
|
||||
BACKPORT_LABEL_COLOR: B60205
|
||||
|
||||
jobs:
|
||||
create_label:
|
||||
create-label:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: write
|
||||
contents: read
|
||||
issues: write
|
||||
|
||||
steps:
|
||||
- name: Create backport label
|
||||
- name: Create backport label for minor releases
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
OWNER_REPO: ${{ github.repository }}
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
VERSION_ONLY=${RELEASE_TAG#v} # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
|
||||
RELEASE_TAG="${{ github.event.release.tag_name }}"
|
||||
|
||||
if [ -z "$RELEASE_TAG" ]; then
|
||||
echo "Error: No release tag provided"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Processing release tag: $RELEASE_TAG"
|
||||
|
||||
# Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
|
||||
VERSION_ONLY="${RELEASE_TAG#v}"
|
||||
|
||||
# Check if it's a minor version (X.Y.0)
|
||||
if [[ "$VERSION_ONLY" =~ ^[0-9]+\.[0-9]+\.0$ ]]; then
|
||||
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is a minor version. Proceeding to create backport label."
|
||||
if [[ "$VERSION_ONLY" =~ ^([0-9]+)\.([0-9]+)\.0$ ]]; then
|
||||
echo "Release $RELEASE_TAG (version $VERSION_ONLY) is a minor version. Proceeding to create backport label."
|
||||
|
||||
TWO_DIGIT_VERSION=${VERSION_ONLY%.0} # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
|
||||
# Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
|
||||
MAJOR="${BASH_REMATCH[1]}"
|
||||
MINOR="${BASH_REMATCH[2]}"
|
||||
TWO_DIGIT_VERSION="${MAJOR}.${MINOR}"
|
||||
|
||||
FINAL_LABEL_NAME="backport-to-v${TWO_DIGIT_VERSION}"
|
||||
FINAL_DESCRIPTION="Backport PR to the v${TWO_DIGIT_VERSION} branch"
|
||||
LABEL_NAME="${BACKPORT_LABEL_PREFIX}v${TWO_DIGIT_VERSION}"
|
||||
LABEL_DESC="Backport PR to the v${TWO_DIGIT_VERSION} branch"
|
||||
LABEL_COLOR="$BACKPORT_LABEL_COLOR"
|
||||
|
||||
echo "Effective label name will be: ${FINAL_LABEL_NAME}"
|
||||
echo "Effective description will be: ${FINAL_DESCRIPTION}"
|
||||
echo "Label name: $LABEL_NAME"
|
||||
echo "Label description: $LABEL_DESC"
|
||||
|
||||
# Check if the label already exists
|
||||
STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" "https://api.github.com/repos/${OWNER_REPO}/labels/${FINAL_LABEL_NAME}")
|
||||
|
||||
if [ "${STATUS_CODE}" -eq 200 ]; then
|
||||
echo "Label '${FINAL_LABEL_NAME}' already exists."
|
||||
elif [ "${STATUS_CODE}" -eq 404 ]; then
|
||||
echo "Label '${FINAL_LABEL_NAME}' does not exist. Creating it..."
|
||||
# Prepare JSON data payload
|
||||
JSON_DATA=$(printf '{"name":"%s","description":"%s","color":"B60205"}' "${FINAL_LABEL_NAME}" "${FINAL_DESCRIPTION}")
|
||||
|
||||
CREATE_STATUS_CODE=$(curl -s -o /tmp/curl_create_response.json -w "%{http_code}" -X POST \
|
||||
-H "Accept: application/vnd.github.v3+json" \
|
||||
-H "Authorization: token ${GITHUB_TOKEN}" \
|
||||
--data "${JSON_DATA}" \
|
||||
"https://api.github.com/repos/${OWNER_REPO}/labels")
|
||||
|
||||
CREATE_RESPONSE_BODY=$(cat /tmp/curl_create_response.json)
|
||||
rm -f /tmp/curl_create_response.json
|
||||
|
||||
if [ "$CREATE_STATUS_CODE" -eq 201 ]; then
|
||||
echo "Label '${FINAL_LABEL_NAME}' created successfully."
|
||||
else
|
||||
echo "Error creating label '${FINAL_LABEL_NAME}'. Status: $CREATE_STATUS_CODE"
|
||||
echo "Response: $CREATE_RESPONSE_BODY"
|
||||
exit 1
|
||||
fi
|
||||
# Check if label already exists
|
||||
if gh label list --repo ${{ github.repository }} --limit 1000 | grep -q "^${LABEL_NAME}[[:space:]]"; then
|
||||
echo "Label '$LABEL_NAME' already exists."
|
||||
else
|
||||
echo "Error checking for label '${FINAL_LABEL_NAME}'. HTTP Status: ${STATUS_CODE}"
|
||||
exit 1
|
||||
echo "Label '$LABEL_NAME' does not exist. Creating it..."
|
||||
gh label create "$LABEL_NAME" \
|
||||
--description "$LABEL_DESC" \
|
||||
--color "$LABEL_COLOR" \
|
||||
--repo ${{ github.repository }}
|
||||
echo "Label '$LABEL_NAME' created successfully."
|
||||
fi
|
||||
else
|
||||
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is not a minor version. Skipping backport label creation."
|
||||
exit 0
|
||||
echo "Release $RELEASE_TAG (version $VERSION_ONLY) is not a minor version. Skipping backport label creation."
|
||||
fi
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
name: Prowler - Find secrets
|
||||
name: 'Tools: TruffleHog'
|
||||
|
||||
on: pull_request
|
||||
|
||||
@@ -7,11 +7,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: TruffleHog OSS
|
||||
uses: trufflesecurity/trufflehog@a05cf0859455b5b16317ee22d809887a4043cdf0 # v3.90.2
|
||||
uses: trufflesecurity/trufflehog@466da5b0bb161144f6afca9afe5d57975828c410 # v3.90.8
|
||||
with:
|
||||
path: ./
|
||||
base: ${{ github.event.repository.default_branch }}
|
||||
|
||||
@@ -14,4 +14,4 @@ jobs:
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
|
||||
|
||||
@@ -0,0 +1,113 @@
|
||||
name: 'MCP: Container Build and Push'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
paths:
|
||||
- "mcp_server/**"
|
||||
- ".github/workflows/mcp-container-build-push.yml"
|
||||
|
||||
# Uncomment to test this workflow on PRs
|
||||
# pull_request:
|
||||
# branches:
|
||||
# - "master"
|
||||
# paths:
|
||||
# - "mcp_server/**"
|
||||
# - ".github/workflows/mcp-container-build-push.yml"
|
||||
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
# Tags
|
||||
LATEST_TAG: latest
|
||||
RELEASE_TAG: ${{ github.event.release.tag_name }}
|
||||
STABLE_TAG: stable
|
||||
WORKING_DIRECTORY: ./mcp_server
|
||||
|
||||
# Container registries
|
||||
PROWLERCLOUD_DOCKERHUB_REPOSITORY: prowlercloud
|
||||
PROWLERCLOUD_DOCKERHUB_IMAGE: prowler-mcp
|
||||
|
||||
jobs:
|
||||
setup:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
short-sha: ${{ steps.set-short-sha.outputs.short-sha }}
|
||||
steps:
|
||||
- name: Calculate short SHA
|
||||
id: set-short-sha
|
||||
run: echo "short-sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
|
||||
|
||||
container-build-push:
|
||||
needs: setup
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build and push container (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}
|
||||
labels: |
|
||||
org.opencontainers.image.title=Prowler MCP Server
|
||||
org.opencontainers.image.description=Model Context Protocol server for Prowler
|
||||
org.opencontainers.image.vendor=ProwlerPro, Inc.
|
||||
org.opencontainers.image.source=https://github.com/${{ github.repository }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
||||
org.opencontainers.image.created=${{ github.event.head_commit.timestamp }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Build and push container (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.WORKING_DIRECTORY }}
|
||||
push: true
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }}
|
||||
labels: |
|
||||
org.opencontainers.image.title=Prowler MCP Server
|
||||
org.opencontainers.image.description=Model Context Protocol server for Prowler
|
||||
org.opencontainers.image.vendor=ProwlerPro, Inc.
|
||||
org.opencontainers.image.version=${{ env.RELEASE_TAG }}
|
||||
org.opencontainers.image.source=https://github.com/${{ github.repository }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
||||
org.opencontainers.image.created=${{ github.event.release.published_at }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
event-type: mcp-prowler-deployment
|
||||
client-payload: '{"sha": "${{ github.sha }}", "short_sha": "${{ needs.setup.outputs.short-sha }}"}'
|
||||
@@ -9,14 +9,15 @@ on:
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
pull_request_target:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
- reopened
|
||||
branches:
|
||||
- "master"
|
||||
- "v5.*"
|
||||
# Leaving this commented until we find a way to run it for forks but in Prowler's context
|
||||
# pull_request_target:
|
||||
# types:
|
||||
# - opened
|
||||
# - synchronize
|
||||
# - reopened
|
||||
# branches:
|
||||
# - "master"
|
||||
# - "v5.*"
|
||||
|
||||
jobs:
|
||||
conflict-checker:
|
||||
@@ -28,11 +29,11 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Get changed files
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
**
|
||||
@@ -70,7 +71,7 @@ jobs:
|
||||
|
||||
- name: Add conflict label
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'true'
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
script: |
|
||||
@@ -96,7 +97,7 @@ jobs:
|
||||
|
||||
- name: Remove conflict label
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'false'
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
script: |
|
||||
@@ -118,7 +119,7 @@ jobs:
|
||||
|
||||
- name: Find existing conflict comment
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'true'
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
|
||||
id: find-comment
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
@@ -148,7 +149,7 @@ jobs:
|
||||
|
||||
- name: Find existing conflict comment when resolved
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'false'
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
|
||||
id: find-resolved-comment
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
@@ -166,3 +167,9 @@ jobs:
|
||||
✅ **Conflict Markers Resolved**
|
||||
|
||||
All conflict markers have been successfully resolved in this pull request.
|
||||
|
||||
- name: Fail workflow if conflicts detected
|
||||
if: steps.conflict-check.outputs.has_conflicts == 'true'
|
||||
run: |
|
||||
echo "::error::Workflow failed due to conflict markers in files: ${{ steps.conflict-check.outputs.conflict_files }}"
|
||||
exit 1
|
||||
|
||||
@@ -22,13 +22,13 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
|
||||
@@ -59,25 +59,192 @@ jobs:
|
||||
BRANCH_NAME="v${MAJOR_VERSION}.${MINOR_VERSION}"
|
||||
echo "BRANCH_NAME=${BRANCH_NAME}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Calculate UI version (1.X.X format - matches Prowler minor version)
|
||||
UI_VERSION="1.${MINOR_VERSION}.${PATCH_VERSION}"
|
||||
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
|
||||
# Function to extract the latest version from changelog
|
||||
extract_latest_version() {
|
||||
local changelog_file="$1"
|
||||
if [ -f "$changelog_file" ]; then
|
||||
# Extract the first version entry (most recent) from changelog
|
||||
# Format: ## [version] (1.2.3) or ## [vversion] (v1.2.3)
|
||||
local version=$(grep -m 1 '^## \[' "$changelog_file" | sed 's/^## \[\(.*\)\].*/\1/' | sed 's/^v//' | tr -d '[:space:]')
|
||||
echo "$version"
|
||||
else
|
||||
echo ""
|
||||
fi
|
||||
}
|
||||
|
||||
# Calculate API version (1.X.X format - one minor version ahead)
|
||||
API_MINOR_VERSION=$((MINOR_VERSION + 1))
|
||||
API_VERSION="1.${API_MINOR_VERSION}.${PATCH_VERSION}"
|
||||
# Read actual versions from changelogs (source of truth)
|
||||
UI_VERSION=$(extract_latest_version "ui/CHANGELOG.md")
|
||||
API_VERSION=$(extract_latest_version "api/CHANGELOG.md")
|
||||
SDK_VERSION=$(extract_latest_version "prowler/CHANGELOG.md")
|
||||
MCP_VERSION=$(extract_latest_version "mcp_server/CHANGELOG.md")
|
||||
|
||||
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "SDK_VERSION=${SDK_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "MCP_VERSION=${MCP_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
if [ -n "$UI_VERSION" ]; then
|
||||
echo "Read UI version from changelog: $UI_VERSION"
|
||||
else
|
||||
echo "Warning: No UI version found in ui/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
if [ -n "$API_VERSION" ]; then
|
||||
echo "Read API version from changelog: $API_VERSION"
|
||||
else
|
||||
echo "Warning: No API version found in api/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
if [ -n "$SDK_VERSION" ]; then
|
||||
echo "Read SDK version from changelog: $SDK_VERSION"
|
||||
else
|
||||
echo "Warning: No SDK version found in prowler/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
if [ -n "$MCP_VERSION" ]; then
|
||||
echo "Read MCP version from changelog: $MCP_VERSION"
|
||||
else
|
||||
echo "Warning: No MCP version found in mcp_server/CHANGELOG.md"
|
||||
fi
|
||||
|
||||
echo "Prowler version: $PROWLER_VERSION"
|
||||
echo "Branch name: $BRANCH_NAME"
|
||||
echo "UI version: $UI_VERSION"
|
||||
echo "API version: $API_VERSION"
|
||||
echo "SDK version: $SDK_VERSION"
|
||||
echo "MCP version: $MCP_VERSION"
|
||||
echo "Is minor release: $([ $PATCH_VERSION -eq 0 ] && echo 'true' || echo 'false')"
|
||||
else
|
||||
echo "Invalid version syntax: '$PROWLER_VERSION' (must be N.N.N)" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Extract changelog entries
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Function to extract changelog for a specific version
|
||||
extract_changelog() {
|
||||
local file="$1"
|
||||
local version="$2"
|
||||
local output_file="$3"
|
||||
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "Warning: $file not found, skipping..."
|
||||
touch "$output_file"
|
||||
return
|
||||
fi
|
||||
|
||||
# Extract changelog section for this version
|
||||
awk -v version="$version" '
|
||||
/^## \[v?'"$version"'\]/ { found=1; next }
|
||||
found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
|
||||
found && !/^## \[v?'"$version"'\]/ { print }
|
||||
' "$file" > "$output_file"
|
||||
|
||||
# Remove --- separators
|
||||
sed -i '/^---$/d' "$output_file"
|
||||
|
||||
# Remove trailing empty lines
|
||||
sed -i '/^$/d' "$output_file"
|
||||
}
|
||||
|
||||
# Calculate expected versions for this release
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
EXPECTED_UI_VERSION="1.${BASH_REMATCH[2]}.${BASH_REMATCH[3]}"
|
||||
EXPECTED_API_VERSION="1.$((${BASH_REMATCH[2]} + 1)).${BASH_REMATCH[3]}"
|
||||
|
||||
echo "Expected UI version for this release: $EXPECTED_UI_VERSION"
|
||||
echo "Expected API version for this release: $EXPECTED_API_VERSION"
|
||||
fi
|
||||
|
||||
# Determine if components have changes for this specific release
|
||||
# UI has changes if its current version matches what we expect for this release
|
||||
if [ -n "$UI_VERSION" ] && [ "$UI_VERSION" = "$EXPECTED_UI_VERSION" ]; then
|
||||
echo "HAS_UI_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ UI changes detected - version matches expected: $UI_VERSION"
|
||||
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
|
||||
else
|
||||
echo "HAS_UI_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No UI changes for this release (current: $UI_VERSION, expected: $EXPECTED_UI_VERSION)"
|
||||
touch "ui_changelog.md"
|
||||
fi
|
||||
|
||||
# API has changes if its current version matches what we expect for this release
|
||||
if [ -n "$API_VERSION" ] && [ "$API_VERSION" = "$EXPECTED_API_VERSION" ]; then
|
||||
echo "HAS_API_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ API changes detected - version matches expected: $API_VERSION"
|
||||
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
|
||||
else
|
||||
echo "HAS_API_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No API changes for this release (current: $API_VERSION, expected: $EXPECTED_API_VERSION)"
|
||||
touch "api_changelog.md"
|
||||
fi
|
||||
|
||||
# SDK has changes if its current version matches the input version
|
||||
if [ -n "$SDK_VERSION" ] && [ "$SDK_VERSION" = "$PROWLER_VERSION" ]; then
|
||||
echo "HAS_SDK_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ SDK changes detected - version matches input: $SDK_VERSION"
|
||||
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
|
||||
else
|
||||
echo "HAS_SDK_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No SDK changes for this release (current: $SDK_VERSION, input: $PROWLER_VERSION)"
|
||||
touch "prowler_changelog.md"
|
||||
fi
|
||||
|
||||
# MCP has changes if the changelog references this Prowler version
|
||||
# Check if the changelog contains "(Prowler X.Y.Z)" or "(Prowler UNRELEASED)"
|
||||
if [ -f "mcp_server/CHANGELOG.md" ]; then
|
||||
MCP_PROWLER_REF=$(grep -m 1 "^## \[.*\] (Prowler" mcp_server/CHANGELOG.md | sed -E 's/.*\(Prowler ([^)]+)\).*/\1/' | tr -d '[:space:]')
|
||||
if [ "$MCP_PROWLER_REF" = "$PROWLER_VERSION" ] || [ "$MCP_PROWLER_REF" = "UNRELEASED" ]; then
|
||||
echo "HAS_MCP_CHANGES=true" >> $GITHUB_ENV
|
||||
echo "✓ MCP changes detected - Prowler reference: $MCP_PROWLER_REF (version: $MCP_VERSION)"
|
||||
extract_changelog "mcp_server/CHANGELOG.md" "$MCP_VERSION" "mcp_changelog.md"
|
||||
else
|
||||
echo "HAS_MCP_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No MCP changes for this release (Prowler reference: $MCP_PROWLER_REF, input: $PROWLER_VERSION)"
|
||||
touch "mcp_changelog.md"
|
||||
fi
|
||||
else
|
||||
echo "HAS_MCP_CHANGES=false" >> $GITHUB_ENV
|
||||
echo "ℹ No MCP changelog found"
|
||||
touch "mcp_changelog.md"
|
||||
fi
|
||||
|
||||
# Combine changelogs in order: UI, API, SDK, MCP
|
||||
> combined_changelog.md
|
||||
|
||||
if [ "$HAS_UI_CHANGES" = "true" ] && [ -s "ui_changelog.md" ]; then
|
||||
echo "## UI" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat ui_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ "$HAS_API_CHANGES" = "true" ] && [ -s "api_changelog.md" ]; then
|
||||
echo "## API" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat api_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ "$HAS_SDK_CHANGES" = "true" ] && [ -s "prowler_changelog.md" ]; then
|
||||
echo "## SDK" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat prowler_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ "$HAS_MCP_CHANGES" = "true" ] && [ -s "mcp_changelog.md" ]; then
|
||||
echo "## MCP" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat mcp_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
echo "Combined changelog preview:"
|
||||
cat combined_changelog.md
|
||||
|
||||
- name: Checkout existing branch for patch release
|
||||
if: ${{ env.PATCH_VERSION != '0' }}
|
||||
run: |
|
||||
@@ -114,6 +281,7 @@ jobs:
|
||||
echo "✓ prowler/config/config.py version: $CURRENT_VERSION"
|
||||
|
||||
- name: Verify version in api/pyproject.toml
|
||||
if: ${{ env.HAS_API_CHANGES == 'true' }}
|
||||
run: |
|
||||
CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
|
||||
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
|
||||
@@ -124,7 +292,7 @@ jobs:
|
||||
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
|
||||
|
||||
- name: Verify prowler dependency in api/pyproject.toml
|
||||
if: ${{ env.PATCH_VERSION != '0' }}
|
||||
if: ${{ env.PATCH_VERSION != '0' && env.HAS_API_CHANGES == 'true' }}
|
||||
run: |
|
||||
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
|
||||
@@ -135,6 +303,7 @@ jobs:
|
||||
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
|
||||
|
||||
- name: Verify version in api/src/backend/api/v1/views.py
|
||||
if: ${{ env.HAS_API_CHANGES == 'true' }}
|
||||
run: |
|
||||
CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
|
||||
@@ -162,12 +331,11 @@ jobs:
|
||||
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
|
||||
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
|
||||
|
||||
# Create a temporary branch for the PR
|
||||
# Create a temporary branch for the PR from the minor version branch
|
||||
TEMP_BRANCH="update-api-dependency-$BRANCH_NAME_TRIMMED-$(date +%s)"
|
||||
echo "TEMP_BRANCH=$TEMP_BRANCH" >> $GITHUB_ENV
|
||||
|
||||
# Switch back to master and create temp branch
|
||||
git checkout master
|
||||
# Create temp branch from the current minor version branch
|
||||
git checkout -b "$TEMP_BRANCH"
|
||||
|
||||
# Minor release: update the dependency to use the release branch
|
||||
@@ -221,80 +389,17 @@ jobs:
|
||||
component/api
|
||||
no-changelog
|
||||
|
||||
- name: Extract changelog entries
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Function to extract changelog for a specific version
|
||||
extract_changelog() {
|
||||
local file="$1"
|
||||
local version="$2"
|
||||
local output_file="$3"
|
||||
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "Warning: $file not found, skipping..."
|
||||
touch "$output_file"
|
||||
return
|
||||
fi
|
||||
|
||||
# Extract changelog section for this version
|
||||
awk -v version="$version" '
|
||||
/^## \[v?'"$version"'\]/ { found=1; next }
|
||||
found && /^## \[v?[0-9]+\.[0-9]+\.[0-9]+\]/ { found=0 }
|
||||
found && !/^## \[v?'"$version"'\]/ { print }
|
||||
' "$file" > "$output_file"
|
||||
|
||||
# Remove --- separators
|
||||
sed -i '/^---$/d' "$output_file"
|
||||
|
||||
# Remove trailing empty lines
|
||||
sed -i '/^$/d' "$output_file"
|
||||
}
|
||||
|
||||
# Extract changelogs
|
||||
echo "Extracting changelog entries..."
|
||||
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
|
||||
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
|
||||
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
|
||||
|
||||
# Combine changelogs in order: UI, API, SDK
|
||||
> combined_changelog.md
|
||||
|
||||
if [ -s "ui_changelog.md" ]; then
|
||||
echo "## UI" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat ui_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ -s "api_changelog.md" ]; then
|
||||
echo "## API" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat api_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
if [ -s "prowler_changelog.md" ]; then
|
||||
echo "## SDK" >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
cat prowler_changelog.md >> combined_changelog.md
|
||||
echo "" >> combined_changelog.md
|
||||
fi
|
||||
|
||||
echo "Combined changelog preview:"
|
||||
cat combined_changelog.md
|
||||
|
||||
- name: Create draft release
|
||||
uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2
|
||||
uses: softprops/action-gh-release@6cbd405e2c4e67a21c47fa9e383d020e4e28b836 # v2.3.3
|
||||
with:
|
||||
tag_name: ${{ env.PROWLER_VERSION }}
|
||||
name: Prowler ${{ env.PROWLER_VERSION }}
|
||||
body_path: combined_changelog.md
|
||||
draft: true
|
||||
target_commitish: ${{ env.PATCH_VERSION == '0' && 'master' || env.BRANCH_NAME }}
|
||||
target_commitish: ${{ env.BRANCH_NAME }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Clean up temporary files
|
||||
run: |
|
||||
rm -f prowler_changelog.md api_changelog.md ui_changelog.md combined_changelog.md
|
||||
rm -f prowler_changelog.md api_changelog.md ui_changelog.md mcp_changelog.md combined_changelog.md
|
||||
|
||||
@@ -13,10 +13,10 @@ jobs:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
env:
|
||||
MONITORED_FOLDERS: "api ui prowler dashboard"
|
||||
MONITORED_FOLDERS: "api ui prowler mcp_server"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
- name: Find existing changelog comment
|
||||
if: github.event.pull_request.head.repo.full_name == github.repository
|
||||
id: find_comment
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
|
||||
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad #v4.0.0
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
|
||||
@@ -11,7 +11,7 @@ jobs:
|
||||
if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.merge_commit_sha }}
|
||||
|
||||
@@ -22,7 +22,7 @@ jobs:
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Trigger pull request
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
@@ -59,10 +59,10 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
@@ -108,13 +108,13 @@ jobs:
|
||||
esac
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to Public ECR
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: public.ecr.aws
|
||||
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
|
||||
|
||||
@@ -14,7 +14,7 @@ jobs:
|
||||
name: Bump Version
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Get Prowler version
|
||||
shell: bash
|
||||
|
||||
@@ -52,16 +52,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/sdk-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
@@ -21,11 +21,11 @@ jobs:
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Test if changes are in not ignored paths
|
||||
id: are-non-ignored-files-changed
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: ./**
|
||||
files_ignore: |
|
||||
@@ -51,7 +51,7 @@ jobs:
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: "poetry"
|
||||
@@ -104,7 +104,7 @@ jobs:
|
||||
|
||||
- name: Dockerfile - Check if Dockerfile has changed
|
||||
id: dockerfile-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
Dockerfile
|
||||
@@ -117,7 +117,7 @@ jobs:
|
||||
# Test AWS
|
||||
- name: AWS - Check if any file has changed
|
||||
id: aws-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/aws/**
|
||||
@@ -132,7 +132,7 @@ jobs:
|
||||
# Test Azure
|
||||
- name: Azure - Check if any file has changed
|
||||
id: azure-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/azure/**
|
||||
@@ -147,7 +147,7 @@ jobs:
|
||||
# Test GCP
|
||||
- name: GCP - Check if any file has changed
|
||||
id: gcp-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/gcp/**
|
||||
@@ -162,7 +162,7 @@ jobs:
|
||||
# Test Kubernetes
|
||||
- name: Kubernetes - Check if any file has changed
|
||||
id: kubernetes-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/kubernetes/**
|
||||
@@ -177,7 +177,7 @@ jobs:
|
||||
# Test GitHub
|
||||
- name: GitHub - Check if any file has changed
|
||||
id: github-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/github/**
|
||||
@@ -192,7 +192,7 @@ jobs:
|
||||
# Test NHN
|
||||
- name: NHN - Check if any file has changed
|
||||
id: nhn-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/nhn/**
|
||||
@@ -207,7 +207,7 @@ jobs:
|
||||
# Test M365
|
||||
- name: M365 - Check if any file has changed
|
||||
id: m365-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/m365/**
|
||||
@@ -222,7 +222,7 @@ jobs:
|
||||
# Test IaC
|
||||
- name: IaC - Check if any file has changed
|
||||
id: iac-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/iac/**
|
||||
@@ -237,7 +237,7 @@ jobs:
|
||||
# Test MongoDB Atlas
|
||||
- name: MongoDB Atlas - Check if any file has changed
|
||||
id: mongodb-atlas-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/mongodbatlas/**
|
||||
@@ -249,6 +249,21 @@ jobs:
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/mongodbatlas --cov-report=xml:mongodb_atlas_coverage.xml tests/providers/mongodbatlas
|
||||
|
||||
# Test OCI
|
||||
- name: OCI - Check if any file has changed
|
||||
id: oci-changed-files
|
||||
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
|
||||
with:
|
||||
files: |
|
||||
./prowler/providers/oraclecloud/**
|
||||
./tests/providers/oraclecloud/**
|
||||
./poetry.lock
|
||||
|
||||
- name: OCI - Test
|
||||
if: steps.oci-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
poetry run pytest -n auto --cov=./prowler/providers/oraclecloud --cov-report=xml:oci_coverage.xml tests/providers/oraclecloud
|
||||
|
||||
# Common Tests
|
||||
- name: Lib - Test
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
@@ -263,9 +278,9 @@ jobs:
|
||||
# Codecov
|
||||
- name: Upload coverage reports to Codecov
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5.4.3
|
||||
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
with:
|
||||
flags: prowler
|
||||
files: ./aws_coverage.xml,./azure_coverage.xml,./gcp_coverage.xml,./kubernetes_coverage.xml,./github_coverage.xml,./nhn_coverage.xml,./m365_coverage.xml,./lib_coverage.xml,./config_coverage.xml
|
||||
files: ./aws_coverage.xml,./azure_coverage.xml,./gcp_coverage.xml,./kubernetes_coverage.xml,./github_coverage.xml,./nhn_coverage.xml,./m365_coverage.xml,./oci_coverage.xml,./lib_coverage.xml,./config_coverage.xml
|
||||
|
||||
@@ -64,14 +64,14 @@ jobs:
|
||||
;;
|
||||
esac
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pipx install poetry==2.1.1
|
||||
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
# cache: ${{ env.CACHE }}
|
||||
|
||||
@@ -23,12 +23,12 @@ jobs:
|
||||
# Steps represent a sequence of tasks that will be executed as part of the job
|
||||
steps:
|
||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ env.GITHUB_BRANCH }}
|
||||
|
||||
- name: setup python
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: 3.9 #install the python needed
|
||||
|
||||
@@ -38,7 +38,7 @@ jobs:
|
||||
pip install boto3
|
||||
|
||||
- name: Configure AWS Credentials -- DEV
|
||||
uses: aws-actions/configure-aws-credentials@b47578312673ae6fa5b5096b330d9fbac3d116df # v4.2.1
|
||||
uses: aws-actions/configure-aws-credentials@a03048d87541d1d9fcf2ecf528a4a65ba9bd7838 # v5.0.0
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION_DEV }}
|
||||
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
|
||||
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set short git commit SHA
|
||||
id: vars
|
||||
@@ -71,7 +71,7 @@ jobs:
|
||||
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
|
||||
- name: Trigger deployment
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
@@ -44,16 +44,16 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/ui-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@51f77329afa6477de8c49fc9c7046c15b9a4e79d # v3.29.5
|
||||
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
@@ -20,7 +20,7 @@ jobs:
|
||||
NEXT_PUBLIC_API_BASE_URL: 'http://localhost:8080/api/v1'
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Fix API data directory permissions
|
||||
run: docker run --rm -v $(pwd)/_data/api:/data alpine chown -R 1000:1000 /data
|
||||
- name: Start API services
|
||||
@@ -59,7 +59,7 @@ jobs:
|
||||
echo "All database fixtures loaded successfully!"
|
||||
'
|
||||
- name: Setup Node.js environment
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
with:
|
||||
node-version: '20.x'
|
||||
cache: 'npm'
|
||||
@@ -71,7 +71,7 @@ jobs:
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@5a3ec84eff668545956fd18022155c47e93e2684 # v4.2.3
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
id: playwright-cache
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
|
||||
@@ -27,11 +27,11 @@ jobs:
|
||||
node-version: [20.x]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Setup Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
|
||||
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
cache: 'npm'
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
test-container-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
- name: Build Container
|
||||
|
||||
@@ -80,9 +80,6 @@ _data/
|
||||
# Claude
|
||||
CLAUDE.md
|
||||
|
||||
# LLM's (Until we have a standard one)
|
||||
AGENTS.md
|
||||
|
||||
# MCP Server
|
||||
mcp_server/prowler_mcp_server/prowler_app/server.py
|
||||
mcp_server/prowler_mcp_server/prowler_app/utils/schema.yaml
|
||||
|
||||
@@ -6,6 +6,7 @@ repos:
|
||||
- id: check-merge-conflict
|
||||
- id: check-yaml
|
||||
args: ["--unsafe"]
|
||||
exclude: prowler/config/llm_config.yaml
|
||||
- id: check-json
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
|
||||
@@ -0,0 +1,110 @@
|
||||
# Repository Guidelines
|
||||
|
||||
## How to Use This Guide
|
||||
|
||||
- Start here for cross-project norms, Prowler is a monorepo with several components. Every component should have an `AGENTS.md` file that contains the guidelines for the agents in that component. The file is located beside the code you are touching (e.g. `api/AGENTS.md`, `ui/AGENTS.md`, `prowler/AGENTS.md`).
|
||||
- Follow the stricter rule when guidance conflicts; component docs override this file for their scope.
|
||||
- Keep instructions synchronized. When you add new workflows or scripts, update both the relevant component `AGENTS.md` and this file if they apply broadly.
|
||||
|
||||
## Project Overview
|
||||
|
||||
Prowler is an open-source cloud security assessment tool that supports multiple cloud providers (AWS, Azure, GCP, Kubernetes, GitHub, M365, etc.). The project is a monorepo with the following main components:
|
||||
|
||||
- **Prowler SDK**: Python SDK, includes the Prowler CLI, providers, services, checks, compliances, config, etc. (`prowler/`)
|
||||
- **Prowler API**: Django-based REST API backend (`api/`)
|
||||
- **Prowler UI**: Next.js frontend application (`ui/`)
|
||||
- **Prowler MCP Server**: Model Context Protocol server that gives access to the entire Prowler ecosystem for LLMs (`mcp_server/`)
|
||||
- **Prowler Dashboard**: Prowler CLI feature that allows to visualize the results of the scans in a simple dashboard (`dashboard/`)
|
||||
|
||||
### Project Structure (Key Folders & Files)
|
||||
|
||||
- `prowler/`: Main source code for Prowler SDK (CLI, providers, services, checks, compliances, config, etc.)
|
||||
- `api/`: Django-based REST API backend components
|
||||
- `ui/`: Next.js frontend application
|
||||
- `mcp_server/`: Model Context Protocol server that gives access to the entire Prowler ecosystem for LLMs
|
||||
- `dashboard/`: Prowler CLI feature that allows to visualize the results of the scans in a simple dashboard
|
||||
- `docs/`: Documentation
|
||||
- `examples/`: Example output formats for providers and scripts
|
||||
- `permissions/`: Permission-related files and policies
|
||||
- `contrib/`: Community-contributed scripts or modules
|
||||
- `tests/`: Prowler SDK test suite
|
||||
- `docker-compose.yml`: Docker compose file to run the Prowler App (API + UI) production environment
|
||||
- `docker-compose-dev.yml`: Docker compose file to run the Prowler App (API + UI) development environment
|
||||
- `pyproject.toml`: Poetry Prowler SDK project file
|
||||
- `.pre-commit-config.yaml`: Pre-commit hooks configuration
|
||||
- `Makefile`: Makefile to run the project
|
||||
- `LICENSE`: License file
|
||||
- `README.md`: README file
|
||||
- `CONTRIBUTING.md`: Contributing guide
|
||||
|
||||
## Python Development
|
||||
|
||||
Most of the code is written in Python, so the main files in the root are focused on Python code.
|
||||
|
||||
### Poetry Dev Environment
|
||||
|
||||
For developing in Python we recommend using `poetry` to manage the dependencies. The minimal version is `2.1.1`. So it is recommended to run all commands using `poetry run ...`.
|
||||
|
||||
To install the core dependencies to develop it is needed to run `poetry install --with dev`.
|
||||
|
||||
### Pre-commit hooks
|
||||
|
||||
The project has pre-commit hooks to lint and format the code. They are installed by running `poetry run pre-commit install`.
|
||||
|
||||
When committing a change, the hooks will be run automatically. Some of them are:
|
||||
|
||||
- Code formatting (black, isort)
|
||||
- Linting (flake8, pylint)
|
||||
- Security checks (bandit, safety, trufflehog)
|
||||
- YAML/JSON validation
|
||||
- Poetry lock file validation
|
||||
|
||||
|
||||
### Linting and Formatting
|
||||
|
||||
We use the following tools to lint and format the code:
|
||||
|
||||
- `flake8`: for linting the code
|
||||
- `black`: for formatting the code
|
||||
- `pylint`: for linting the code
|
||||
|
||||
You can run all using the `make` command:
|
||||
```bash
|
||||
poetry run make lint
|
||||
poetry run make format
|
||||
```
|
||||
|
||||
Or they will be run automatically when you commit your changes using pre-commit hooks.
|
||||
|
||||
## Commit & Pull Request Guidelines
|
||||
|
||||
For the commit messages and pull requests name follow the conventional-commit style.
|
||||
|
||||
Before creating a pull request, complete the checklist in `.github/pull_request_template.md`. Summaries should explain deployment impact, highlight review steps, and note changelog or permission updates. Run all relevant tests and linters before requesting review and link screenshots for UI or dashboard changes.
|
||||
|
||||
### Conventional Commit Style
|
||||
|
||||
The Conventional Commits specification is a lightweight convention on top of commit messages. It provides an easy set of rules for creating an explicit commit history; which makes it easier to write automated tools on top of.
|
||||
|
||||
The commit message should be structured as follows:
|
||||
|
||||
```
|
||||
<type>[optional scope]: <description>
|
||||
<BLANK LINE>
|
||||
[optional body]
|
||||
<BLANK LINE>
|
||||
[optional footer(s)]
|
||||
```
|
||||
|
||||
No line of the commit message may be longer than 100 characters! This keeps the message easy to read on GitHub as well as in various git tools.
|
||||
|
||||
#### Commit Types
|
||||
|
||||
- **feat**: code change that introduces new functionality to the application
|
||||
- **fix**: code change that solves a bug in the codebase
|
||||
- **docs**: documentation only changes
|
||||
- **chore**: changes related to the build process or auxiliary tools and libraries, that do not affect the application's functionality
|
||||
- **perf**: code change that improves performance
|
||||
- **refactor**: code change that neither fixes a bug nor adds a feature
|
||||
- **style**: changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons, etc)
|
||||
- **test**: adding missing tests or correcting existing tests
|
||||
@@ -45,3 +45,7 @@ pypi-upload: ## Upload package
|
||||
help: ## Show this help.
|
||||
@echo "Prowler Makefile"
|
||||
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
|
||||
|
||||
##@ Development Environment
|
||||
run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, and workers
|
||||
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat --build
|
||||
|
||||
@@ -82,14 +82,16 @@ prowler dashboard
|
||||
|
||||
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) | Support | Stage | Interface |
|
||||
|---|---|---|---|---|---|---|---|
|
||||
| AWS | 576 | 82 | 36 | 10 | Official | Stable | UI, API, CLI |
|
||||
| GCP | 79 | 13 | 10 | 3 | Official | Stable | UI, API, CLI |
|
||||
| Azure | 162 | 19 | 11 | 4 | Official | Stable | UI, API, CLI |
|
||||
| AWS | 576 | 82 | 38 | 10 | Official | Stable | UI, API, CLI |
|
||||
| GCP | 79 | 13 | 11 | 3 | Official | Stable | UI, API, CLI |
|
||||
| Azure | 162 | 19 | 12 | 4 | Official | Stable | UI, API, CLI |
|
||||
| Kubernetes | 83 | 7 | 5 | 7 | Official | Stable | UI, API, CLI |
|
||||
| GitHub | 17 | 2 | 1 | 0 | Official | Stable | UI, API, CLI |
|
||||
| M365 | 70 | 7 | 3 | 2 | Official | Stable | UI, API, CLI |
|
||||
| OCI | 51 | 13 | 1 | 10 | Official | Stable | CLI |
|
||||
| IaC | [See `trivy` docs.](https://trivy.dev/latest/docs/coverage/iac/) | N/A | N/A | N/A | Official | Beta | CLI |
|
||||
| MongoDB Atlas | 10 | 3 | 0 | 0 | Official | Beta | CLI |
|
||||
| LLM | [See `promptfoo` docs.](https://www.promptfoo.dev/docs/red-team/plugins/) | N/A | N/A | N/A | Official | Beta | CLI |
|
||||
| NHN | 6 | 2 | 1 | 0 | Unofficial | Beta | CLI |
|
||||
|
||||
> [!Note]
|
||||
|
||||
@@ -7,11 +7,29 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
### Added
|
||||
- Default JWT keys are generated and stored if they are missing from configuration [(#8655)](https://github.com/prowler-cloud/prowler/pull/8655)
|
||||
- `compliance_name` for each compliance [(#7920)](https://github.com/prowler-cloud/prowler/pull/7920)
|
||||
- Support C5 compliance framework for the AWS provider [(#8830)](https://github.com/prowler-cloud/prowler/pull/8830)
|
||||
- Support for M365 Certificate authentication [(#8538)](https://github.com/prowler-cloud/prowler/pull/8538)
|
||||
- API Key support [(#8805)](https://github.com/prowler-cloud/prowler/pull/8805)
|
||||
- SAML role mapping protection for single-admin tenants to prevent accidental lockout [(#8882)](https://github.com/prowler-cloud/prowler/pull/8882)
|
||||
- Support for `passed_findings` and `total_findings` fields in compliance requirement overview for accurate Prowler ThreatScore calculation [(#8582)](https://github.com/prowler-cloud/prowler/pull/8582)
|
||||
- Database read replica support [(#8869)](https://github.com/prowler-cloud/prowler/pull/8869)
|
||||
- Support Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
|
||||
- Add `provider_id__in` filter support to findings and findings severity overview endpoints [(#8951)](https://github.com/prowler-cloud/prowler/pull/8951)
|
||||
|
||||
### Changed
|
||||
- Now the MANAGE_ACCOUNT permission is required to modify or read user permissions instead of MANAGE_USERS [(#8281)](https://github.com/prowler-cloud/prowler/pull/8281)
|
||||
- Now at least one user with MANAGE_ACCOUNT permission is required in the tenant [(#8729)](https://github.com/prowler-cloud/prowler/pull/8729)
|
||||
|
||||
### Security
|
||||
- Django updated to the latest 5.1 security release, 5.1.13, due to problems with potential [SQL injection](https://github.com/prowler-cloud/prowler/security/dependabot/104) and [directory traversals](https://github.com/prowler-cloud/prowler/security/dependabot/103) [(#8842)](https://github.com/prowler-cloud/prowler/pull/8842)
|
||||
|
||||
---
|
||||
|
||||
## [1.13.2] (Prowler 5.12.3)
|
||||
|
||||
### Fixed
|
||||
- 500 error when deleting user [(#8731)](https://github.com/prowler-cloud/prowler/pull/8731)
|
||||
|
||||
---
|
||||
|
||||
## [1.13.1] (Prowler 5.12.2)
|
||||
|
||||
+3
-1
@@ -18,10 +18,12 @@ Valkey exposes a Redis 7.2 compliant API. Any service that exposes the Redis API
|
||||
|
||||
# Modify environment variables
|
||||
|
||||
Under the root path of the project, you can find a file called `.env.example`. This file shows all the environment variables that the project uses. You *must* create a new file called `.env` and set the values for the variables.
|
||||
Under the root path of the project, you can find a file called `.env`. This file shows all the environment variables that the project uses. You should review it and set the values for the variables you want to change.
|
||||
|
||||
If you don’t set `DJANGO_TOKEN_SIGNING_KEY` or `DJANGO_TOKEN_VERIFYING_KEY`, the API will generate them at `~/.config/prowler-api/` with `0600` and `0644` permissions; back up these files to persist identity across redeploys.
|
||||
|
||||
**Important note**: Every Prowler version (or repository branches and tags) could have different variables set in its `.env` file. Please use the `.env` file that corresponds with each version.
|
||||
|
||||
## Local deployment
|
||||
Keep in mind if you export the `.env` file to use it with local deployment that you will have to do it within the context of the Poetry interpreter, not before. Otherwise, variables will not be loaded properly.
|
||||
|
||||
|
||||
Generated
+88
-10
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 2.2.0 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "about-time"
|
||||
@@ -273,14 +273,14 @@ tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" a
|
||||
|
||||
[[package]]
|
||||
name = "authlib"
|
||||
version = "1.6.1"
|
||||
version = "1.6.5"
|
||||
description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "authlib-1.6.1-py2.py3-none-any.whl", hash = "sha256:e9d2031c34c6309373ab845afc24168fe9e93dc52d252631f52642f21f5ed06e"},
|
||||
{file = "authlib-1.6.1.tar.gz", hash = "sha256:4dffdbb1460ba6ec8c17981a4c67af7d8af131231b5a36a88a1e8c80c111cdfd"},
|
||||
{file = "authlib-1.6.5-py2.py3-none-any.whl", hash = "sha256:3e0e0507807f842b02175507bdee8957a1d5707fd4afb17c32fb43fee90b6e3a"},
|
||||
{file = "authlib-1.6.5.tar.gz", hash = "sha256:6aaf9c79b7cc96c900f0b284061691c5d4e61221640a948fe690b556a6d6d10b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -383,6 +383,24 @@ cryptography = ">=2.1.4"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.0.1"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-apimanagement"
|
||||
version = "5.0.0"
|
||||
description = "Microsoft Azure API Management Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "azure_mgmt_apimanagement-5.0.0-py3-none-any.whl", hash = "sha256:b88c42a392333b60722fb86f15d092dfc19a8d67510dccd15c217381dff4e6ec"},
|
||||
{file = "azure_mgmt_apimanagement-5.0.0.tar.gz", hash = "sha256:0ab7fe17e70fe3154cd840ff47d19d7a4610217003eaa7c21acf3511a6e57999"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
azure-common = ">=1.1"
|
||||
azure-mgmt-core = ">=1.3.2"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.6.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-applicationinsights"
|
||||
version = "4.1.0"
|
||||
@@ -540,6 +558,23 @@ azure-mgmt-core = ">=1.3.2"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.6.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-loganalytics"
|
||||
version = "12.0.0"
|
||||
description = "Microsoft Azure Log Analytics Management Client Library for Python"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "azure-mgmt-loganalytics-12.0.0.zip", hash = "sha256:da128a7e0291be7fa2063848df92a9180cf5c16d42adc09d2bc2efd711536bfb"},
|
||||
{file = "azure_mgmt_loganalytics-12.0.0-py2.py3-none-any.whl", hash = "sha256:75ac1d47dd81179905c40765be8834643d8994acff31056ddc1863017f3faa02"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
azure-common = ">=1.1,<2.0"
|
||||
azure-mgmt-core = ">=1.2.0,<2.0.0"
|
||||
msrest = ">=0.6.21"
|
||||
|
||||
[[package]]
|
||||
name = "azure-mgmt-monitor"
|
||||
version = "6.0.2"
|
||||
@@ -750,6 +785,23 @@ azure-mgmt-core = ">=1.3.2"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.6.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-monitor-query"
|
||||
version = "2.0.0"
|
||||
description = "Microsoft Corporation Azure Monitor Query Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "azure_monitor_query-2.0.0-py3-none-any.whl", hash = "sha256:8f52d581271d785e12f49cd5aaa144b8910fb843db2373855a7ef94c7fc462ea"},
|
||||
{file = "azure_monitor_query-2.0.0.tar.gz", hash = "sha256:7b05f2fcac4fb67fc9f77a7d4c5d98a0f3099fb73b57c69ec1b080773994671b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
azure-core = ">=1.30.0"
|
||||
isodate = ">=0.6.1"
|
||||
typing-extensions = ">=4.6.0"
|
||||
|
||||
[[package]]
|
||||
name = "azure-storage-blob"
|
||||
version = "12.24.1"
|
||||
@@ -1511,14 +1563,14 @@ with-social = ["django-allauth[socialaccount] (>=64.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "django"
|
||||
version = "5.1.12"
|
||||
version = "5.1.13"
|
||||
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "django-5.1.12-py3-none-any.whl", hash = "sha256:9eb695636cea3601b65690f1596993c042206729afb320ca0960b55f8ed4477b"},
|
||||
{file = "django-5.1.12.tar.gz", hash = "sha256:8a8991b1ec052ef6a44fefd1ef336ab8daa221287bcb91a4a17d5e1abec5bbcc"},
|
||||
{file = "django-5.1.13-py3-none-any.whl", hash = "sha256:06f257f79dc4c17f3f9e23b106a4c5ed1335abecbe731e83c598c941d14fbeed"},
|
||||
{file = "django-5.1.13.tar.gz", hash = "sha256:543ff21679f15e80edfc01fe7ea35f8291b6d4ea589433882913626a7c1cf929"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1872,6 +1924,27 @@ files = [
|
||||
Django = ">=4.2"
|
||||
djangorestframework = ">=3.15.0"
|
||||
|
||||
[[package]]
|
||||
name = "drf-simple-apikey"
|
||||
version = "2.2.1"
|
||||
description = "API Key authentication and permissions for Django REST."
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "drf_simple_apikey-2.2.1-py2.py3-none-any.whl", hash = "sha256:2a60b35676d14f907c47dee179dd0fa7425a84c34d6ff5b48d08d3b87ff32809"},
|
||||
{file = "drf_simple_apikey-2.2.1.tar.gz", hash = "sha256:e5a52804bbac12c8db80c10a3d51a8514fc59fc8385b5e751099a2bc944ad25d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = ">=38.0.4"
|
||||
django = ">=4.2"
|
||||
djangorestframework = ">=3.14.0"
|
||||
|
||||
[package.extras]
|
||||
test = ["coverage", "pytest", "pytest-django"]
|
||||
tooling = ["black (==22.3.0)", "bump2version", "pylint"]
|
||||
|
||||
[[package]]
|
||||
name = "drf-spectacular"
|
||||
version = "0.27.2"
|
||||
@@ -4003,7 +4076,7 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "prowler"
|
||||
version = "5.11.0"
|
||||
version = "5.13.0"
|
||||
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
|
||||
optional = false
|
||||
python-versions = ">3.9.1,<3.13"
|
||||
@@ -4016,6 +4089,7 @@ alive-progress = "3.3.0"
|
||||
awsipranges = "0.3.3"
|
||||
azure-identity = "1.21.0"
|
||||
azure-keyvault-keys = "4.10.0"
|
||||
azure-mgmt-apimanagement = "5.0.0"
|
||||
azure-mgmt-applicationinsights = "4.1.0"
|
||||
azure-mgmt-authorization = "4.0.0"
|
||||
azure-mgmt-compute = "34.0.0"
|
||||
@@ -4024,6 +4098,7 @@ azure-mgmt-containerservice = "34.1.0"
|
||||
azure-mgmt-cosmosdb = "9.7.0"
|
||||
azure-mgmt-databricks = "2.0.0"
|
||||
azure-mgmt-keyvault = "10.3.1"
|
||||
azure-mgmt-loganalytics = "12.0.0"
|
||||
azure-mgmt-monitor = "6.0.2"
|
||||
azure-mgmt-network = "28.1.0"
|
||||
azure-mgmt-rdbms = "10.1.0"
|
||||
@@ -4036,6 +4111,7 @@ azure-mgmt-sql = "3.0.1"
|
||||
azure-mgmt-storage = "22.1.1"
|
||||
azure-mgmt-subscription = "3.1.1"
|
||||
azure-mgmt-web = "8.0.0"
|
||||
azure-monitor-query = "2.0.0"
|
||||
azure-storage-blob = "12.24.1"
|
||||
boto3 = "1.39.15"
|
||||
botocore = "1.39.15"
|
||||
@@ -4047,8 +4123,10 @@ detect-secrets = "1.5.0"
|
||||
dulwich = "0.23.0"
|
||||
google-api-python-client = "2.163.0"
|
||||
google-auth-httplib2 = ">=0.1,<0.3"
|
||||
h2 = "4.3.0"
|
||||
jsonschema = "4.23.0"
|
||||
kubernetes = "32.0.1"
|
||||
markdown = "3.9.0"
|
||||
microsoft-kiota-abstractions = "1.9.2"
|
||||
msgraph-sdk = "1.23.0"
|
||||
numpy = "2.0.2"
|
||||
@@ -4069,7 +4147,7 @@ tzlocal = "5.3.1"
|
||||
type = "git"
|
||||
url = "https://github.com/prowler-cloud/prowler.git"
|
||||
reference = "master"
|
||||
resolved_reference = "525f152e51f82de2110ed158c8dc489e42c289cf"
|
||||
resolved_reference = "a52697bfdfee83d14a49c11dcbe96888b5cd767e"
|
||||
|
||||
[[package]]
|
||||
name = "psutil"
|
||||
@@ -6181,4 +6259,4 @@ type = ["pytest-mypy"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.11,<3.13"
|
||||
content-hash = "91058a14382b76136a82f45624a30aece7a6d77c8b36c290bb4c40ea60c8850b"
|
||||
content-hash = "03442fd4673006c5a74374f90f53621fd1c9d117279fe6cc0355ef833eb7f9bb"
|
||||
|
||||
+3
-2
@@ -7,7 +7,7 @@ authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
|
||||
dependencies = [
|
||||
"celery[pytest] (>=5.4.0,<6.0.0)",
|
||||
"dj-rest-auth[with_social,jwt] (==7.0.1)",
|
||||
"django (==5.1.12)",
|
||||
"django (==5.1.13)",
|
||||
"django-allauth[saml] (>=65.8.0,<66.0.0)",
|
||||
"django-celery-beat (>=2.7.0,<3.0.0)",
|
||||
"django-celery-results (>=2.5.1,<3.0.0)",
|
||||
@@ -32,7 +32,8 @@ dependencies = [
|
||||
"openai (>=1.82.0,<2.0.0)",
|
||||
"xmlsec==1.3.14",
|
||||
"h2 (==4.3.0)",
|
||||
"markdown (>=3.9,<4.0)"
|
||||
"markdown (>=3.9,<4.0)",
|
||||
"drf-simple-apikey (==2.2.1)"
|
||||
]
|
||||
description = "Prowler's API (Django/DRF)"
|
||||
license = "Apache-2.0"
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
import logging
|
||||
import os
|
||||
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
from django.apps import AppConfig
|
||||
from django.conf import settings
|
||||
from pathlib import Path
|
||||
|
||||
from config.custom_logging import BackendLogger
|
||||
from config.env import env
|
||||
from django.apps import AppConfig
|
||||
from django.conf import settings
|
||||
|
||||
logger = logging.getLogger(BackendLogger.API)
|
||||
|
||||
@@ -30,6 +28,7 @@ class ApiConfig(AppConfig):
|
||||
name = "api"
|
||||
|
||||
def ready(self):
|
||||
from api import schema_extensions # noqa: F401
|
||||
from api import signals # noqa: F401
|
||||
from api.compliance import load_prowler_compliance
|
||||
|
||||
|
||||
@@ -0,0 +1,95 @@
|
||||
from typing import Optional, Tuple
|
||||
from uuid import UUID
|
||||
|
||||
from cryptography.fernet import InvalidToken
|
||||
from django.utils import timezone
|
||||
from drf_simple_apikey.backends import APIKeyAuthentication as BaseAPIKeyAuth
|
||||
from drf_simple_apikey.crypto import get_crypto
|
||||
from rest_framework.authentication import BaseAuthentication
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
from rest_framework.request import Request
|
||||
from rest_framework_simplejwt.authentication import JWTAuthentication
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.models import TenantAPIKey, TenantAPIKeyManager
|
||||
|
||||
|
||||
class TenantAPIKeyAuthentication(BaseAPIKeyAuth):
|
||||
model = TenantAPIKey
|
||||
|
||||
def __init__(self):
|
||||
self.key_crypto = get_crypto()
|
||||
|
||||
def _authenticate_credentials(self, request, key):
|
||||
"""
|
||||
Override to use admin connection, bypassing RLS during authentication.
|
||||
Delegates to parent after temporarily routing model queries to admin DB.
|
||||
"""
|
||||
# Temporarily point the model's manager to admin database
|
||||
original_objects = self.model.objects
|
||||
self.model.objects = self.model.objects.using(MainRouter.admin_db)
|
||||
|
||||
try:
|
||||
# Call parent method which will now use admin database
|
||||
return super()._authenticate_credentials(request, key)
|
||||
finally:
|
||||
# Restore original manager
|
||||
self.model.objects = original_objects
|
||||
|
||||
def authenticate(self, request: Request):
|
||||
prefixed_key = self.get_key(request)
|
||||
|
||||
# Split prefix from key (format: pk_xxxxxxxx.encrypted_key)
|
||||
try:
|
||||
prefix, key = prefixed_key.split(TenantAPIKeyManager.separator, 1)
|
||||
except ValueError:
|
||||
raise AuthenticationFailed("Invalid API Key.")
|
||||
|
||||
try:
|
||||
entity, _ = self._authenticate_credentials(request, key)
|
||||
except InvalidToken:
|
||||
raise AuthenticationFailed("Invalid API Key.")
|
||||
|
||||
# Get the API key instance to update last_used_at and retrieve tenant info
|
||||
# We need to decrypt again to get the pk (already validated by _authenticate_credentials)
|
||||
payload = self.key_crypto.decrypt(key)
|
||||
api_key_pk = payload["_pk"]
|
||||
|
||||
# Convert string UUID back to UUID object for lookup
|
||||
if isinstance(api_key_pk, str):
|
||||
api_key_pk = UUID(api_key_pk)
|
||||
|
||||
try:
|
||||
api_key_instance = TenantAPIKey.objects.using(MainRouter.admin_db).get(
|
||||
id=api_key_pk, prefix=prefix
|
||||
)
|
||||
except TenantAPIKey.DoesNotExist:
|
||||
raise AuthenticationFailed("Invalid API Key.")
|
||||
|
||||
# Update last_used_at
|
||||
api_key_instance.last_used_at = timezone.now()
|
||||
api_key_instance.save(update_fields=["last_used_at"], using=MainRouter.admin_db)
|
||||
|
||||
return entity, {
|
||||
"tenant_id": str(api_key_instance.tenant_id),
|
||||
"sub": str(api_key_instance.entity.id),
|
||||
"api_key_prefix": prefix,
|
||||
}
|
||||
|
||||
|
||||
class CombinedJWTOrAPIKeyAuthentication(BaseAuthentication):
|
||||
jwt_auth = JWTAuthentication()
|
||||
api_key_auth = TenantAPIKeyAuthentication()
|
||||
|
||||
def authenticate(self, request: Request) -> Optional[Tuple[object, dict]]:
|
||||
auth_header = request.headers.get("Authorization", "")
|
||||
|
||||
# Prioritize JWT authentication if both are present
|
||||
if auth_header.startswith("Bearer "):
|
||||
return self.jwt_auth.authenticate(request)
|
||||
|
||||
if auth_header.startswith("Api-Key "):
|
||||
return self.api_key_auth.authenticate(request)
|
||||
|
||||
# Default fallback
|
||||
return self.jwt_auth.authenticate(request)
|
||||
@@ -1,13 +1,15 @@
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db import transaction
|
||||
from rest_framework import permissions
|
||||
from rest_framework.exceptions import NotAuthenticated
|
||||
from rest_framework.filters import SearchFilter
|
||||
from rest_framework.permissions import SAFE_METHODS
|
||||
from rest_framework_json_api import filters
|
||||
from rest_framework_json_api.views import ModelViewSet
|
||||
from rest_framework_simplejwt.authentication import JWTAuthentication
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.authentication import CombinedJWTOrAPIKeyAuthentication
|
||||
from api.db_router import MainRouter, reset_read_db_alias, set_read_db_alias
|
||||
from api.db_utils import POSTGRES_USER_VAR, rls_transaction
|
||||
from api.filters import CustomDjangoFilterBackend
|
||||
from api.models import Role, Tenant
|
||||
@@ -15,7 +17,7 @@ from api.rbac.permissions import HasPermissions
|
||||
|
||||
|
||||
class BaseViewSet(ModelViewSet):
|
||||
authentication_classes = [JWTAuthentication]
|
||||
authentication_classes = [CombinedJWTOrAPIKeyAuthentication]
|
||||
required_permissions = []
|
||||
permission_classes = [permissions.IsAuthenticated, HasPermissions]
|
||||
filter_backends = [
|
||||
@@ -31,6 +33,20 @@ class BaseViewSet(ModelViewSet):
|
||||
ordering_fields = "__all__"
|
||||
ordering = ["id"]
|
||||
|
||||
def _get_request_db_alias(self, request):
|
||||
if request is None:
|
||||
return MainRouter.default_db
|
||||
|
||||
read_alias = (
|
||||
MainRouter.replica_db
|
||||
if request.method in SAFE_METHODS
|
||||
and MainRouter.replica_db in settings.DATABASES
|
||||
else None
|
||||
)
|
||||
if read_alias:
|
||||
return read_alias
|
||||
return MainRouter.default_db
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
"""
|
||||
Sets required_permissions before permissions are checked.
|
||||
@@ -48,8 +64,21 @@ class BaseViewSet(ModelViewSet):
|
||||
|
||||
class BaseRLSViewSet(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with transaction.atomic():
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
self.db_alias = self._get_request_db_alias(request)
|
||||
alias_token = None
|
||||
try:
|
||||
if self.db_alias != MainRouter.default_db:
|
||||
alias_token = set_read_db_alias(self.db_alias)
|
||||
|
||||
if request is not None:
|
||||
request.db_alias = self.db_alias
|
||||
|
||||
with transaction.atomic(using=self.db_alias):
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
finally:
|
||||
if alias_token is not None:
|
||||
reset_read_db_alias(alias_token)
|
||||
self.db_alias = MainRouter.default_db
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
# Ideally, this logic would be in the `.setup()` method but DRF view sets don't call it
|
||||
@@ -61,7 +90,9 @@ class BaseRLSViewSet(BaseViewSet):
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
with rls_transaction(
|
||||
tenant_id, using=getattr(self, "db_alias", MainRouter.default_db)
|
||||
):
|
||||
self.request.tenant_id = tenant_id
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
@@ -73,18 +104,33 @@ class BaseRLSViewSet(BaseViewSet):
|
||||
|
||||
class BaseTenantViewset(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with transaction.atomic():
|
||||
tenant = super().dispatch(request, *args, **kwargs)
|
||||
|
||||
self.db_alias = self._get_request_db_alias(request)
|
||||
alias_token = None
|
||||
try:
|
||||
# If the request is a POST, create the admin role
|
||||
if request.method == "POST":
|
||||
isinstance(tenant, dict) and self._create_admin_role(tenant.data["id"])
|
||||
except Exception as e:
|
||||
self._handle_creation_error(e, tenant)
|
||||
raise
|
||||
if self.db_alias != MainRouter.default_db:
|
||||
alias_token = set_read_db_alias(self.db_alias)
|
||||
|
||||
return tenant
|
||||
if request is not None:
|
||||
request.db_alias = self.db_alias
|
||||
|
||||
with transaction.atomic(using=self.db_alias):
|
||||
tenant = super().dispatch(request, *args, **kwargs)
|
||||
|
||||
try:
|
||||
# If the request is a POST, create the admin role
|
||||
if request.method == "POST":
|
||||
isinstance(tenant, dict) and self._create_admin_role(
|
||||
tenant.data["id"]
|
||||
)
|
||||
except Exception as e:
|
||||
self._handle_creation_error(e, tenant)
|
||||
raise
|
||||
|
||||
return tenant
|
||||
finally:
|
||||
if alias_token is not None:
|
||||
reset_read_db_alias(alias_token)
|
||||
self.db_alias = MainRouter.default_db
|
||||
|
||||
def _create_admin_role(self, tenant_id):
|
||||
Role.objects.using(MainRouter.admin_db).create(
|
||||
@@ -117,14 +163,31 @@ class BaseTenantViewset(BaseViewSet):
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
user_id = str(request.user.id)
|
||||
with rls_transaction(value=user_id, parameter=POSTGRES_USER_VAR):
|
||||
with rls_transaction(
|
||||
value=user_id,
|
||||
parameter=POSTGRES_USER_VAR,
|
||||
using=getattr(self, "db_alias", MainRouter.default_db),
|
||||
):
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
|
||||
class BaseUserViewset(BaseViewSet):
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with transaction.atomic():
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
self.db_alias = self._get_request_db_alias(request)
|
||||
alias_token = None
|
||||
try:
|
||||
if self.db_alias != MainRouter.default_db:
|
||||
alias_token = set_read_db_alias(self.db_alias)
|
||||
|
||||
if request is not None:
|
||||
request.db_alias = self.db_alias
|
||||
|
||||
with transaction.atomic(using=self.db_alias):
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
finally:
|
||||
if alias_token is not None:
|
||||
reset_read_db_alias(alias_token)
|
||||
self.db_alias = MainRouter.default_db
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
# TODO refactor after improving RLS on users
|
||||
@@ -137,6 +200,8 @@ class BaseUserViewset(BaseViewSet):
|
||||
if tenant_id is None:
|
||||
raise NotAuthenticated("Tenant ID is not present in token")
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
with rls_transaction(
|
||||
tenant_id, using=getattr(self, "db_alias", MainRouter.default_db)
|
||||
):
|
||||
self.request.tenant_id = tenant_id
|
||||
return super().initial(request, *args, **kwargs)
|
||||
|
||||
@@ -150,12 +150,16 @@ def generate_scan_compliance(
|
||||
requirement["checks"][check_id] = status
|
||||
requirement["checks_status"][status.lower()] += 1
|
||||
|
||||
if requirement["status"] != "FAIL" and any(
|
||||
value == "FAIL" for value in requirement["checks"].values()
|
||||
):
|
||||
requirement["status"] = "FAIL"
|
||||
compliance_overview[compliance_id]["requirements_status"]["passed"] -= 1
|
||||
compliance_overview[compliance_id]["requirements_status"]["failed"] += 1
|
||||
if requirement["status"] != "FAIL" and any(
|
||||
value == "FAIL" for value in requirement["checks"].values()
|
||||
):
|
||||
requirement["status"] = "FAIL"
|
||||
compliance_overview[compliance_id]["requirements_status"][
|
||||
"passed"
|
||||
] -= 1
|
||||
compliance_overview[compliance_id]["requirements_status"][
|
||||
"failed"
|
||||
] += 1
|
||||
|
||||
|
||||
def generate_compliance_overview_template(prowler_compliance: dict):
|
||||
|
||||
@@ -1,9 +1,31 @@
|
||||
from contextvars import ContextVar
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
ALLOWED_APPS = ("django", "socialaccount", "account", "authtoken", "silk")
|
||||
|
||||
_read_db_alias = ContextVar("read_db_alias", default=None)
|
||||
|
||||
|
||||
def set_read_db_alias(alias: str | None):
|
||||
if not alias:
|
||||
return None
|
||||
return _read_db_alias.set(alias)
|
||||
|
||||
|
||||
def get_read_db_alias() -> str | None:
|
||||
return _read_db_alias.get()
|
||||
|
||||
|
||||
def reset_read_db_alias(token) -> None:
|
||||
if token is not None:
|
||||
_read_db_alias.reset(token)
|
||||
|
||||
|
||||
class MainRouter:
|
||||
default_db = "default"
|
||||
admin_db = "admin"
|
||||
replica_db = "replica"
|
||||
|
||||
def db_for_read(self, model, **hints): # noqa: F841
|
||||
model_table_name = model._meta.db_table
|
||||
@@ -11,6 +33,9 @@ class MainRouter:
|
||||
model_table_name.startswith(f"{app}_") for app in ALLOWED_APPS
|
||||
):
|
||||
return self.admin_db
|
||||
read_alias = get_read_db_alias()
|
||||
if read_alias:
|
||||
return read_alias
|
||||
return None
|
||||
|
||||
def db_for_write(self, model, **hints): # noqa: F841
|
||||
@@ -27,3 +52,8 @@ class MainRouter:
|
||||
if {obj1._state.db, obj2._state.db} <= {self.default_db, self.admin_db}:
|
||||
return True
|
||||
return None
|
||||
|
||||
|
||||
READ_REPLICA_ALIAS = (
|
||||
MainRouter.replica_db if MainRouter.replica_db in settings.DATABASES else None
|
||||
)
|
||||
|
||||
@@ -6,12 +6,14 @@ from datetime import datetime, timedelta, timezone
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import BaseUserManager
|
||||
from django.db import connection, models, transaction
|
||||
from django.db import DEFAULT_DB_ALIAS, connection, connections, models, transaction
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from psycopg2 import connect as psycopg2_connect
|
||||
from psycopg2.extensions import AsIs, new_type, register_adapter, register_type
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
|
||||
from api.db_router import get_read_db_alias, reset_read_db_alias, set_read_db_alias
|
||||
|
||||
DB_USER = settings.DATABASES["default"]["USER"] if not settings.TESTING else "test"
|
||||
DB_PASSWORD = (
|
||||
settings.DATABASES["default"]["PASSWORD"] if not settings.TESTING else "test"
|
||||
@@ -49,7 +51,11 @@ def psycopg_connection(database_alias: str):
|
||||
|
||||
|
||||
@contextmanager
|
||||
def rls_transaction(value: str, parameter: str = POSTGRES_TENANT_VAR):
|
||||
def rls_transaction(
|
||||
value: str,
|
||||
parameter: str = POSTGRES_TENANT_VAR,
|
||||
using: str | None = None,
|
||||
):
|
||||
"""
|
||||
Creates a new database transaction setting the given configuration value for Postgres RLS. It validates the
|
||||
if the value is a valid UUID.
|
||||
@@ -57,16 +63,32 @@ def rls_transaction(value: str, parameter: str = POSTGRES_TENANT_VAR):
|
||||
Args:
|
||||
value (str): Database configuration parameter value.
|
||||
parameter (str): Database configuration parameter name, by default is 'api.tenant_id'.
|
||||
using (str | None): Optional database alias to run the transaction against. Defaults to the
|
||||
active read alias (if any) or Django's default connection.
|
||||
"""
|
||||
with transaction.atomic():
|
||||
with connection.cursor() as cursor:
|
||||
try:
|
||||
# just in case the value is an UUID object
|
||||
uuid.UUID(str(value))
|
||||
except ValueError:
|
||||
raise ValidationError("Must be a valid UUID")
|
||||
cursor.execute(SET_CONFIG_QUERY, [parameter, value])
|
||||
yield cursor
|
||||
requested_alias = using or get_read_db_alias()
|
||||
db_alias = requested_alias or DEFAULT_DB_ALIAS
|
||||
if db_alias not in connections:
|
||||
db_alias = DEFAULT_DB_ALIAS
|
||||
|
||||
router_token = None
|
||||
try:
|
||||
if db_alias != DEFAULT_DB_ALIAS:
|
||||
router_token = set_read_db_alias(db_alias)
|
||||
|
||||
with transaction.atomic(using=db_alias):
|
||||
conn = connections[db_alias]
|
||||
with conn.cursor() as cursor:
|
||||
try:
|
||||
# just in case the value is a UUID object
|
||||
uuid.UUID(str(value))
|
||||
except ValueError:
|
||||
raise ValidationError("Must be a valid UUID")
|
||||
cursor.execute(SET_CONFIG_QUERY, [parameter, value])
|
||||
yield cursor
|
||||
finally:
|
||||
if router_token is not None:
|
||||
reset_read_db_alias(router_token)
|
||||
|
||||
|
||||
class CustomUserManager(BaseUserManager):
|
||||
@@ -434,6 +456,12 @@ def drop_index_on_partitions(
|
||||
schema_editor.execute(sql)
|
||||
|
||||
|
||||
def generate_api_key_prefix():
|
||||
"""Generate a random 8-character prefix for API keys (e.g., 'pk_abc123de')."""
|
||||
random_chars = generate_random_token(length=8)
|
||||
return f"pk_{random_chars}"
|
||||
|
||||
|
||||
# Postgres enum definition for member role
|
||||
|
||||
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
from django.core.exceptions import ValidationError as django_validation_error
|
||||
from rest_framework import status
|
||||
from rest_framework.exceptions import APIException
|
||||
from rest_framework.exceptions import (
|
||||
APIException,
|
||||
AuthenticationFailed,
|
||||
NotAuthenticated,
|
||||
)
|
||||
from rest_framework_json_api.exceptions import exception_handler
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
|
||||
@@ -68,15 +72,18 @@ def custom_exception_handler(exc, context):
|
||||
exc = ValidationError(exc.message_dict)
|
||||
else:
|
||||
exc = ValidationError(detail=exc.messages[0], code=exc.code)
|
||||
elif isinstance(exc, (TokenError, InvalidToken)):
|
||||
if (
|
||||
hasattr(exc, "detail")
|
||||
and isinstance(exc.detail, dict)
|
||||
and "messages" in exc.detail
|
||||
):
|
||||
exc.detail["messages"] = [
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
# Force 401 status for AuthenticationFailed exceptions regardless of the authentication backend
|
||||
elif isinstance(exc, (AuthenticationFailed, NotAuthenticated, TokenError)):
|
||||
exc.status_code = status.HTTP_401_UNAUTHORIZED
|
||||
if isinstance(exc, (TokenError, InvalidToken)):
|
||||
if (
|
||||
hasattr(exc, "detail")
|
||||
and isinstance(exc.detail, dict)
|
||||
and "messages" in exc.detail
|
||||
):
|
||||
exc.detail["messages"] = [
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
return exception_handler(exc, context)
|
||||
|
||||
|
||||
|
||||
@@ -43,6 +43,7 @@ from api.models import (
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
Task,
|
||||
TenantAPIKey,
|
||||
User,
|
||||
)
|
||||
from api.rls import Tenant
|
||||
@@ -219,10 +220,31 @@ class MembershipFilter(FilterSet):
|
||||
|
||||
|
||||
class ProviderFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
connected = BooleanFilter()
|
||||
inserted_at = DateFilter(
|
||||
field_name="inserted_at",
|
||||
lookup_expr="date",
|
||||
help_text="""Filter by date when the provider was added
|
||||
(format: YYYY-MM-DD)""",
|
||||
)
|
||||
updated_at = DateFilter(
|
||||
field_name="updated_at",
|
||||
lookup_expr="date",
|
||||
help_text="""Filter by date when the provider was updated
|
||||
(format: YYYY-MM-DD)""",
|
||||
)
|
||||
connected = BooleanFilter(
|
||||
help_text="""Filter by connection status. Set to True to return only
|
||||
connected providers, or False to return only providers with failed
|
||||
connections. If not specified, both connected and failed providers are
|
||||
included. Providers with no connection attempt (status is null) are
|
||||
excluded from this filter."""
|
||||
)
|
||||
provider = ChoiceFilter(choices=Provider.ProviderChoices.choices)
|
||||
provider__in = ChoiceInFilter(
|
||||
field_name="provider",
|
||||
choices=Provider.ProviderChoices.choices,
|
||||
lookup_expr="in",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Provider
|
||||
@@ -648,8 +670,16 @@ class LatestFindingFilter(CommonFindingFilters):
|
||||
|
||||
|
||||
class ProviderSecretFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
updated_at = DateFilter(field_name="updated_at", lookup_expr="date")
|
||||
inserted_at = DateFilter(
|
||||
field_name="inserted_at",
|
||||
lookup_expr="date",
|
||||
help_text="Filter by date when the secret was added (format: YYYY-MM-DD)",
|
||||
)
|
||||
updated_at = DateFilter(
|
||||
field_name="updated_at",
|
||||
lookup_expr="date",
|
||||
help_text="Filter by date when the secret was updated (format: YYYY-MM-DD)",
|
||||
)
|
||||
provider = UUIDFilter(field_name="provider__id", lookup_expr="exact")
|
||||
|
||||
class Meta:
|
||||
@@ -735,6 +765,7 @@ class ComplianceOverviewFilter(FilterSet):
|
||||
class ScanSummaryFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
|
||||
provider_id__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
|
||||
)
|
||||
@@ -880,3 +911,20 @@ class IntegrationJiraFindingsFilter(FilterSet):
|
||||
}
|
||||
)
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
|
||||
class TenantApiKeyFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="created", lookup_expr="date")
|
||||
inserted_at__gte = DateFilter(field_name="created", lookup_expr="gte")
|
||||
inserted_at__lte = DateFilter(field_name="created", lookup_expr="lte")
|
||||
expires_at = DateFilter(field_name="expiry_date", lookup_expr="date")
|
||||
expires_at__gte = DateFilter(field_name="expiry_date", lookup_expr="gte")
|
||||
expires_at__lte = DateFilter(field_name="expiry_date", lookup_expr="lte")
|
||||
|
||||
class Meta:
|
||||
model = TenantAPIKey
|
||||
fields = {
|
||||
"prefix": ["exact", "icontains"],
|
||||
"revoked": ["exact"],
|
||||
"name": ["exact", "icontains"],
|
||||
}
|
||||
|
||||
@@ -8,9 +8,14 @@ def extract_auth_info(request) -> dict:
|
||||
if getattr(request, "auth", None) is not None:
|
||||
tenant_id = request.auth.get("tenant_id", "N/A")
|
||||
user_id = request.auth.get("sub", "N/A")
|
||||
api_key_prefix = request.auth.get("api_key_prefix", "N/A")
|
||||
else:
|
||||
tenant_id, user_id = "N/A", "N/A"
|
||||
return {"tenant_id": tenant_id, "user_id": user_id}
|
||||
tenant_id, user_id, api_key_prefix = "N/A", "N/A", "N/A"
|
||||
return {
|
||||
"tenant_id": tenant_id,
|
||||
"user_id": user_id,
|
||||
"api_key_prefix": api_key_prefix,
|
||||
}
|
||||
|
||||
|
||||
class APILoggingMiddleware:
|
||||
@@ -38,6 +43,7 @@ class APILoggingMiddleware:
|
||||
extra={
|
||||
"user_id": auth_info["user_id"],
|
||||
"tenant_id": auth_info["tenant_id"],
|
||||
"api_key_prefix": auth_info["api_key_prefix"],
|
||||
"method": request.method,
|
||||
"path": request.path,
|
||||
"query_params": request.GET.dict(),
|
||||
|
||||
@@ -0,0 +1,136 @@
|
||||
# Generated by Django 5.1.12 on 2025-09-30 13:10
|
||||
|
||||
import uuid
|
||||
|
||||
import django.core.validators
|
||||
import django.db.models.deletion
|
||||
import drf_simple_apikey.models
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0047_remove_integration_unique_configuration_per_tenant"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="TenantAPIKey",
|
||||
fields=[
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
max_length=255,
|
||||
validators=[django.core.validators.MinLengthValidator(3)],
|
||||
),
|
||||
),
|
||||
(
|
||||
"expiry_date",
|
||||
models.DateTimeField(
|
||||
default=drf_simple_apikey.models._expiry_date,
|
||||
help_text="Once API key expires, entities cannot use it anymore.",
|
||||
verbose_name="Expires",
|
||||
),
|
||||
),
|
||||
(
|
||||
"revoked",
|
||||
models.BooleanField(
|
||||
blank=True,
|
||||
default=False,
|
||||
help_text="If the API key is revoked, entities cannot use it anymore. (This cannot be undone.)",
|
||||
),
|
||||
),
|
||||
("created", models.DateTimeField(auto_now_add=True, editable=False)),
|
||||
(
|
||||
"whitelisted_ips",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
help_text="List of allowed IP addresses for this API key.",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"blacklisted_ips",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
help_text="List of denied IP addresses for this API key.",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"prefix",
|
||||
models.CharField(
|
||||
default=api.db_utils.generate_api_key_prefix,
|
||||
editable=False,
|
||||
help_text="Unique prefix to identify the API key",
|
||||
max_length=11,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"last_used_at",
|
||||
models.DateTimeField(
|
||||
blank=True,
|
||||
help_text="Last time this API key was used for authentication",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"entity",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="user_api_keys",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "api_keys",
|
||||
"abstract": False,
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["tenant_id", "prefix"],
|
||||
name="api_keys_tenant_prefix_idx",
|
||||
)
|
||||
],
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "prefix"), name="unique_api_key_prefixes"
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "name"),
|
||||
name="unique_api_key_name_per_tenant",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="tenantapikey",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_tenantapikey",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
+21
@@ -0,0 +1,21 @@
|
||||
# Generated by Django 5.1.12 on 2025-10-07 10:41
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0048_api_key"),
|
||||
]
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="compliancerequirementoverview",
|
||||
name="passed_findings",
|
||||
field=models.IntegerField(default=0),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="compliancerequirementoverview",
|
||||
name="total_findings",
|
||||
field=models.IntegerField(default=0),
|
||||
),
|
||||
]
|
||||
@@ -22,6 +22,8 @@ from django.db.models import Q
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from django_celery_results.models import TaskResult
|
||||
from drf_simple_apikey.crypto import get_crypto
|
||||
from drf_simple_apikey.models import AbstractAPIKey, AbstractAPIKeyManager
|
||||
from psqlextra.manager import PostgresManager
|
||||
from psqlextra.models import PostgresPartitionedModel
|
||||
from psqlextra.types import PostgresPartitioningMethod
|
||||
@@ -42,6 +44,7 @@ from api.db_utils import (
|
||||
StateEnumField,
|
||||
StatusEnumField,
|
||||
enum_to_choices,
|
||||
generate_api_key_prefix,
|
||||
generate_random_token,
|
||||
one_week_from_now,
|
||||
)
|
||||
@@ -125,6 +128,17 @@ class ActiveProviderPartitionedManager(PostgresManager, ActiveProviderManager):
|
||||
return super().get_queryset().filter(self.active_provider_filter())
|
||||
|
||||
|
||||
class TenantAPIKeyManager(AbstractAPIKeyManager):
|
||||
separator = "."
|
||||
|
||||
def assign_api_key(self, obj) -> str:
|
||||
payload = {"_pk": str(obj.pk), "_exp": obj.expiry_date.timestamp()}
|
||||
key = get_crypto().generate(payload)
|
||||
|
||||
prefixed_key = f"{obj.prefix}{self.separator}{key}"
|
||||
return prefixed_key
|
||||
|
||||
|
||||
class User(AbstractBaseUser):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
name = models.CharField(max_length=150, validators=[MinLengthValidator(3)])
|
||||
@@ -204,6 +218,61 @@ class Membership(models.Model):
|
||||
resource_name = "memberships"
|
||||
|
||||
|
||||
class TenantAPIKey(AbstractAPIKey, RowLevelSecurityProtectedModel):
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
name = models.CharField(max_length=100, validators=[MinLengthValidator(3)])
|
||||
created = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
prefix = models.CharField(
|
||||
max_length=11,
|
||||
unique=True,
|
||||
default=generate_api_key_prefix,
|
||||
editable=False,
|
||||
help_text="Unique prefix to identify the API key",
|
||||
)
|
||||
last_used_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Last time this API key was used for authentication",
|
||||
)
|
||||
entity = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="user_api_keys",
|
||||
)
|
||||
|
||||
objects = TenantAPIKeyManager()
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "api_keys"
|
||||
|
||||
constraints = [
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "prefix"),
|
||||
name="unique_api_key_prefixes",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "name"),
|
||||
name="unique_api_key_name_per_tenant",
|
||||
),
|
||||
]
|
||||
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["tenant_id", "prefix"], name="api_keys_tenant_prefix_idx"
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "api-keys"
|
||||
|
||||
|
||||
class Provider(RowLevelSecurityProtectedModel):
|
||||
objects = ActiveProviderManager()
|
||||
all_objects = models.Manager()
|
||||
@@ -1230,6 +1299,8 @@ class ComplianceRequirementOverview(RowLevelSecurityProtectedModel):
|
||||
passed_checks = models.IntegerField(default=0)
|
||||
failed_checks = models.IntegerField(default=0)
|
||||
total_checks = models.IntegerField(default=0)
|
||||
passed_findings = models.IntegerField(default=0)
|
||||
total_findings = models.IntegerField(default=0)
|
||||
|
||||
scan = models.ForeignKey(
|
||||
Scan,
|
||||
|
||||
@@ -11,11 +11,12 @@ class APIJSONRenderer(JSONRenderer):
|
||||
def render(self, data, accepted_media_type=None, renderer_context=None):
|
||||
request = renderer_context.get("request")
|
||||
tenant_id = getattr(request, "tenant_id", None) if request else None
|
||||
db_alias = getattr(request, "db_alias", None) if request else None
|
||||
include_param_present = "include" in request.query_params if request else False
|
||||
|
||||
# Use rls_transaction if needed for included resources, otherwise do nothing
|
||||
context_manager = (
|
||||
rls_transaction(tenant_id)
|
||||
rls_transaction(tenant_id, using=db_alias)
|
||||
if tenant_id and include_param_present
|
||||
else nullcontext()
|
||||
)
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
from drf_spectacular.extensions import OpenApiAuthenticationExtension
|
||||
from drf_spectacular.openapi import AutoSchema
|
||||
|
||||
|
||||
class CombinedJWTOrAPIKeyAuthenticationScheme(OpenApiAuthenticationExtension):
|
||||
target_class = "api.authentication.CombinedJWTOrAPIKeyAuthentication"
|
||||
name = "JWT or API Key"
|
||||
|
||||
def get_security_definition(self, auto_schema: AutoSchema): # noqa: F841
|
||||
return {
|
||||
"type": "http",
|
||||
"scheme": "bearer",
|
||||
"bearerFormat": "JWT",
|
||||
"description": "Supports both JWT Bearer tokens and API Key authentication. "
|
||||
"Use `Bearer <token>` for JWT or `Api-Key <key>` for API keys.",
|
||||
}
|
||||
@@ -1,12 +1,12 @@
|
||||
from celery import states
|
||||
from celery.signals import before_task_publish
|
||||
from config.celery import celery_app
|
||||
from django.db.models.signals import post_delete
|
||||
from django.db.models.signals import post_delete, pre_delete
|
||||
from django.dispatch import receiver
|
||||
from django_celery_results.backends.database import DatabaseBackend
|
||||
|
||||
from api.db_utils import delete_related_daily_task
|
||||
from api.models import Provider
|
||||
from api.models import Membership, Provider, TenantAPIKey, User
|
||||
|
||||
|
||||
def create_task_result_on_publish(sender=None, headers=None, **kwargs): # noqa: F841
|
||||
@@ -32,3 +32,27 @@ before_task_publish.connect(
|
||||
def delete_provider_scan_task(sender, instance, **kwargs): # noqa: F841
|
||||
# Delete the associated periodic task when the provider is deleted
|
||||
delete_related_daily_task(instance.id)
|
||||
|
||||
|
||||
@receiver(pre_delete, sender=User)
|
||||
def revoke_user_api_keys(sender, instance, **kwargs): # noqa: F841
|
||||
"""
|
||||
Revoke all API keys associated with a user before deletion.
|
||||
|
||||
The entity field will be set to NULL by on_delete=SET_NULL,
|
||||
but we explicitly revoke the keys to prevent further use.
|
||||
"""
|
||||
TenantAPIKey.objects.filter(entity=instance).update(revoked=True)
|
||||
|
||||
|
||||
@receiver(post_delete, sender=Membership)
|
||||
def revoke_membership_api_keys(sender, instance, **kwargs): # noqa: F841
|
||||
"""
|
||||
Revoke all API keys when a user is removed from a tenant.
|
||||
|
||||
When a membership is deleted, all API keys created by that user
|
||||
in that tenant should be revoked to prevent further access.
|
||||
"""
|
||||
TenantAPIKey.objects.filter(
|
||||
entity=instance.user, tenant_id=instance.tenant.id
|
||||
).update(revoked=True)
|
||||
|
||||
+977
-126
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,384 @@
|
||||
import time
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from unittest.mock import patch
|
||||
from uuid import uuid4
|
||||
|
||||
import pytest
|
||||
from django.test import RequestFactory
|
||||
from rest_framework.exceptions import AuthenticationFailed
|
||||
|
||||
from api.authentication import TenantAPIKeyAuthentication
|
||||
from api.db_router import MainRouter
|
||||
from api.models import TenantAPIKey
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestTenantAPIKeyAuthentication:
|
||||
@pytest.fixture
|
||||
def auth_backend(self):
|
||||
"""Create an instance of TenantAPIKeyAuthentication."""
|
||||
return TenantAPIKeyAuthentication()
|
||||
|
||||
@pytest.fixture
|
||||
def request_factory(self):
|
||||
"""Create a Django request factory."""
|
||||
return RequestFactory()
|
||||
|
||||
def test_authenticate_credentials_uses_admin_database(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test that _authenticate_credentials routes queries to admin database."""
|
||||
api_key = api_keys_fixture[0]
|
||||
raw_key = api_key._raw_key
|
||||
|
||||
# Extract the encrypted key part (after the prefix and separator)
|
||||
_, encrypted_key = raw_key.split(TenantAPIKey.objects.separator, 1)
|
||||
|
||||
# Create a mock request
|
||||
request = request_factory.get("/")
|
||||
|
||||
# Call the method
|
||||
entity, auth_dict = auth_backend._authenticate_credentials(
|
||||
request, encrypted_key
|
||||
)
|
||||
|
||||
# Verify that the entity is the user associated with the API key
|
||||
assert entity == api_key.entity
|
||||
assert entity.id == api_key.entity.id
|
||||
|
||||
def test_authenticate_credentials_restores_manager_on_success(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test that the manager is restored after successful authentication."""
|
||||
api_key = api_keys_fixture[0]
|
||||
raw_key = api_key._raw_key
|
||||
_, encrypted_key = raw_key.split(TenantAPIKey.objects.separator, 1)
|
||||
|
||||
# Store the original manager
|
||||
original_manager = TenantAPIKey.objects
|
||||
|
||||
request = request_factory.get("/")
|
||||
|
||||
# Call the method
|
||||
auth_backend._authenticate_credentials(request, encrypted_key)
|
||||
|
||||
# Verify the manager was restored
|
||||
assert TenantAPIKey.objects == original_manager
|
||||
|
||||
def test_authenticate_credentials_restores_manager_on_exception(
|
||||
self, auth_backend, request_factory
|
||||
):
|
||||
"""Test that the manager is restored even when an exception occurs."""
|
||||
# Store the original manager
|
||||
original_manager = TenantAPIKey.objects
|
||||
|
||||
request = request_factory.get("/")
|
||||
|
||||
# Try to authenticate with an invalid key that will raise an exception
|
||||
with pytest.raises(Exception):
|
||||
auth_backend._authenticate_credentials(request, "invalid_encrypted_key")
|
||||
|
||||
# Verify the manager was restored despite the exception
|
||||
assert TenantAPIKey.objects == original_manager
|
||||
|
||||
def test_authenticate_valid_api_key(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test successful authentication with a valid API key."""
|
||||
api_key = api_keys_fixture[0]
|
||||
raw_key = api_key._raw_key
|
||||
|
||||
# Create a request with the API key in the Authorization header
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key}"
|
||||
|
||||
# Authenticate
|
||||
entity, auth_dict = auth_backend.authenticate(request)
|
||||
|
||||
# Verify the entity and auth dict
|
||||
assert entity == api_key.entity
|
||||
assert auth_dict["tenant_id"] == str(api_key.tenant_id)
|
||||
assert auth_dict["sub"] == str(api_key.entity.id)
|
||||
assert auth_dict["api_key_prefix"] == api_key.prefix
|
||||
|
||||
# Verify that last_used_at was updated
|
||||
api_key.refresh_from_db()
|
||||
assert api_key.last_used_at is not None
|
||||
assert (datetime.now(timezone.utc) - api_key.last_used_at).seconds < 5
|
||||
|
||||
def test_authenticate_valid_api_key_uses_admin_database(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test that authenticate uses admin database for API key lookup."""
|
||||
api_key = api_keys_fixture[0]
|
||||
raw_key = api_key._raw_key
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key}"
|
||||
|
||||
# Mock the manager's using method to verify it's called with admin_db
|
||||
with patch.object(
|
||||
TenantAPIKey.objects, "using", wraps=TenantAPIKey.objects.using
|
||||
) as mock_using:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
# Verify that .using('admin') was called
|
||||
mock_using.assert_called_with(MainRouter.admin_db)
|
||||
|
||||
def test_authenticate_invalid_key_format_missing_separator(
|
||||
self, auth_backend, request_factory
|
||||
):
|
||||
"""Test authentication fails with invalid API key format (no separator)."""
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = "Api-Key invalid_key_no_separator"
|
||||
|
||||
with pytest.raises(AuthenticationFailed) as exc_info:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
assert str(exc_info.value.detail) == "Invalid API Key."
|
||||
|
||||
def test_authenticate_invalid_key_format_empty_prefix(
|
||||
self, auth_backend, request_factory
|
||||
):
|
||||
"""Test authentication fails with empty prefix."""
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = "Api-Key .encrypted_part"
|
||||
|
||||
with pytest.raises(AuthenticationFailed) as exc_info:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
assert str(exc_info.value.detail) == "Invalid API Key."
|
||||
|
||||
def test_authenticate_invalid_encrypted_key(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test authentication fails with invalid encrypted key."""
|
||||
api_key = api_keys_fixture[0]
|
||||
|
||||
# Create an invalid key with valid prefix but invalid encryption
|
||||
invalid_key = f"{api_key.prefix}.invalid_encrypted_data"
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {invalid_key}"
|
||||
|
||||
with pytest.raises(AuthenticationFailed) as exc_info:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
assert str(exc_info.value.detail) == "Invalid API Key."
|
||||
|
||||
def test_authenticate_revoked_api_key(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test authentication fails with a revoked API key."""
|
||||
# Use the revoked API key (index 2 from fixture)
|
||||
api_key = api_keys_fixture[2]
|
||||
raw_key = api_key._raw_key
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key}"
|
||||
|
||||
# The revoked key should fail during credential validation
|
||||
with pytest.raises(AuthenticationFailed) as exc_info:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
assert str(exc_info.value.detail) == "This API Key has been revoked."
|
||||
|
||||
def test_authenticate_expired_api_key(
|
||||
self, auth_backend, create_test_user, tenants_fixture, request_factory
|
||||
):
|
||||
"""Test authentication fails with an expired API key."""
|
||||
tenant = tenants_fixture[0]
|
||||
user = create_test_user
|
||||
|
||||
# Create an expired API key
|
||||
api_key, raw_key = TenantAPIKey.objects.create_api_key(
|
||||
name="Expired API Key",
|
||||
tenant_id=tenant.id,
|
||||
entity=user,
|
||||
expiry_date=datetime.now(timezone.utc) - timedelta(days=1),
|
||||
)
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key}"
|
||||
|
||||
with pytest.raises(AuthenticationFailed) as exc_info:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
assert str(exc_info.value.detail) == "API Key has already expired."
|
||||
|
||||
def test_authenticate_nonexistent_api_key(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test authentication fails when API key doesn't exist in database."""
|
||||
# Create a valid-looking encrypted key with a non-existent UUID
|
||||
api_key = api_keys_fixture[0]
|
||||
non_existent_uuid = str(uuid4())
|
||||
|
||||
# Manually create an encrypted key with a non-existent ID
|
||||
payload = {
|
||||
"_pk": non_existent_uuid,
|
||||
"_exp": (datetime.now(timezone.utc) + timedelta(days=30)).timestamp(),
|
||||
}
|
||||
encrypted_key = auth_backend.key_crypto.generate(payload)
|
||||
fake_key = f"{api_key.prefix}.{encrypted_key}"
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {fake_key}"
|
||||
|
||||
with pytest.raises(AuthenticationFailed) as exc_info:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
assert str(exc_info.value.detail) == "No entity matching this api key."
|
||||
|
||||
def test_authenticate_updates_last_used_at(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test that last_used_at is updated on successful authentication."""
|
||||
api_key = api_keys_fixture[0]
|
||||
raw_key = api_key._raw_key
|
||||
|
||||
# Store the original last_used_at
|
||||
original_last_used = api_key.last_used_at
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key}"
|
||||
|
||||
# Authenticate
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
# Refresh from database
|
||||
api_key.refresh_from_db()
|
||||
|
||||
# Verify last_used_at was updated
|
||||
assert api_key.last_used_at is not None
|
||||
if original_last_used:
|
||||
assert api_key.last_used_at > original_last_used
|
||||
|
||||
def test_authenticate_saves_to_admin_database(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test that the API key save operation uses admin database."""
|
||||
api_key = api_keys_fixture[0]
|
||||
raw_key = api_key._raw_key
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key}"
|
||||
|
||||
# Mock the save method to verify it's called with using='admin'
|
||||
with patch.object(TenantAPIKey, "save") as mock_save:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
# Verify save was called with using=admin_db
|
||||
mock_save.assert_called_once_with(
|
||||
update_fields=["last_used_at"], using=MainRouter.admin_db
|
||||
)
|
||||
|
||||
def test_authenticate_returns_correct_auth_dict(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test that the auth dict contains all required fields."""
|
||||
api_key = api_keys_fixture[0]
|
||||
raw_key = api_key._raw_key
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key}"
|
||||
|
||||
entity, auth_dict = auth_backend.authenticate(request)
|
||||
|
||||
# Verify all required fields are present
|
||||
assert "tenant_id" in auth_dict
|
||||
assert "sub" in auth_dict
|
||||
assert "api_key_prefix" in auth_dict
|
||||
|
||||
# Verify values are correct
|
||||
assert auth_dict["tenant_id"] == str(api_key.tenant_id)
|
||||
assert auth_dict["sub"] == str(api_key.entity.id)
|
||||
assert auth_dict["api_key_prefix"] == api_key.prefix
|
||||
|
||||
def test_authenticate_with_multiple_api_keys_same_tenant(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test that authentication works correctly with multiple API keys for the same tenant."""
|
||||
# Test with first API key
|
||||
api_key1 = api_keys_fixture[0]
|
||||
raw_key1 = api_key1._raw_key
|
||||
|
||||
request1 = request_factory.get("/")
|
||||
request1.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key1}"
|
||||
|
||||
entity1, auth_dict1 = auth_backend.authenticate(request1)
|
||||
|
||||
assert entity1 == api_key1.entity
|
||||
assert auth_dict1["api_key_prefix"] == api_key1.prefix
|
||||
|
||||
# Test with second API key
|
||||
api_key2 = api_keys_fixture[1]
|
||||
raw_key2 = api_key2._raw_key
|
||||
|
||||
request2 = request_factory.get("/")
|
||||
request2.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key2}"
|
||||
|
||||
entity2, auth_dict2 = auth_backend.authenticate(request2)
|
||||
|
||||
assert entity2 == api_key2.entity
|
||||
assert auth_dict2["api_key_prefix"] == api_key2.prefix
|
||||
|
||||
# Verify they're different keys but same tenant
|
||||
assert auth_dict1["api_key_prefix"] != auth_dict2["api_key_prefix"]
|
||||
assert auth_dict1["tenant_id"] == auth_dict2["tenant_id"]
|
||||
|
||||
def test_authenticate_with_wrong_prefix_in_db(
|
||||
self, auth_backend, api_keys_fixture, request_factory
|
||||
):
|
||||
"""Test authentication fails when prefix doesn't match database."""
|
||||
api_key = api_keys_fixture[0]
|
||||
raw_key = api_key._raw_key
|
||||
|
||||
# Extract the encrypted part and combine with wrong prefix
|
||||
_, encrypted_part = raw_key.split(TenantAPIKey.objects.separator, 1)
|
||||
wrong_key = f"pk_wrong123.{encrypted_part}"
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {wrong_key}"
|
||||
|
||||
with pytest.raises(AuthenticationFailed) as exc_info:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
assert str(exc_info.value.detail) == "Invalid API Key."
|
||||
|
||||
def test_authenticate_credentials_exception_handling(
|
||||
self, auth_backend, request_factory
|
||||
):
|
||||
"""Test that exceptions in _authenticate_credentials are properly handled."""
|
||||
request = request_factory.get("/")
|
||||
|
||||
# Test with completely invalid data that will cause InvalidToken
|
||||
with pytest.raises(Exception):
|
||||
auth_backend._authenticate_credentials(request, "completely_invalid")
|
||||
|
||||
def test_authenticate_with_expired_timestamp(
|
||||
self, auth_backend, create_test_user, tenants_fixture, request_factory
|
||||
):
|
||||
"""Test that expired timestamp in encrypted key causes authentication failure."""
|
||||
tenant = tenants_fixture[0]
|
||||
user = create_test_user
|
||||
|
||||
# Create an API key with a very short expiry
|
||||
api_key, raw_key = TenantAPIKey.objects.create_api_key(
|
||||
name="Short-lived API Key",
|
||||
tenant_id=tenant.id,
|
||||
entity=user,
|
||||
expiry_date=datetime.now(timezone.utc) + timedelta(seconds=1),
|
||||
)
|
||||
|
||||
# Wait for the key to expire
|
||||
time.sleep(2)
|
||||
|
||||
request = request_factory.get("/")
|
||||
request.META["HTTP_AUTHORIZATION"] = f"Api-Key {raw_key}"
|
||||
|
||||
# Should fail with expired key
|
||||
with pytest.raises(AuthenticationFailed) as exc_info:
|
||||
auth_backend.authenticate(request)
|
||||
|
||||
assert str(exc_info.value.detail) == "API Key has already expired."
|
||||
@@ -11,6 +11,7 @@ from api.db_utils import (
|
||||
batch_delete,
|
||||
create_objects_in_batches,
|
||||
enum_to_choices,
|
||||
generate_api_key_prefix,
|
||||
generate_random_token,
|
||||
one_week_from_now,
|
||||
update_objects_in_batches,
|
||||
@@ -313,3 +314,28 @@ class TestUpdateObjectsInBatches:
|
||||
|
||||
qs = Provider.objects.filter(tenant=tenant, uid__endswith="_upd")
|
||||
assert qs.count() == total
|
||||
|
||||
|
||||
class TestGenerateApiKeyPrefix:
|
||||
def test_prefix_format(self):
|
||||
"""Test that generated prefix starts with 'pk_'."""
|
||||
prefix = generate_api_key_prefix()
|
||||
assert prefix.startswith("pk_")
|
||||
|
||||
def test_prefix_length(self):
|
||||
"""Test that prefix has correct length (pk_ + 8 random chars = 11)."""
|
||||
prefix = generate_api_key_prefix()
|
||||
assert len(prefix) == 11
|
||||
|
||||
def test_prefix_uniqueness(self):
|
||||
"""Test that multiple generations produce unique prefixes."""
|
||||
prefixes = {generate_api_key_prefix() for _ in range(100)}
|
||||
assert len(prefixes) == 100
|
||||
|
||||
def test_prefix_character_set(self):
|
||||
"""Test that random part uses only allowed characters."""
|
||||
allowed_chars = "23456789ABCDEFGHJKMNPQRSTVWXYZ"
|
||||
for _ in range(50):
|
||||
prefix = generate_api_key_prefix()
|
||||
random_part = prefix[3:] # Strip 'pk_'
|
||||
assert all(char in allowed_chars for char in random_part)
|
||||
|
||||
@@ -24,6 +24,7 @@ def test_api_logging_middleware_logging(mock_logger):
|
||||
mock_extract_auth_info.return_value = {
|
||||
"user_id": "user123",
|
||||
"tenant_id": "tenant456",
|
||||
"api_key_prefix": "pk_test",
|
||||
}
|
||||
|
||||
with patch("api.middleware.logging.getLogger") as mock_get_logger:
|
||||
@@ -44,6 +45,7 @@ def test_api_logging_middleware_logging(mock_logger):
|
||||
expected_extra = {
|
||||
"user_id": "user123",
|
||||
"tenant_id": "tenant456",
|
||||
"api_key_prefix": "pk_test",
|
||||
"method": "GET",
|
||||
"path": "/test-path",
|
||||
"query_params": {"param1": "value1", "param2": "value2"},
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -105,23 +105,25 @@ from rest_framework_json_api import serializers
|
||||
"type": "string",
|
||||
"description": "The Azure application (client) ID for authentication in Azure AD.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the application (client) ID, providing "
|
||||
"secure access.",
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure tenant ID, representing the directory where the application is "
|
||||
"registered.",
|
||||
},
|
||||
"client_secret": {
|
||||
"type": "string",
|
||||
"description": "The client secret associated with the application (client) ID, providing "
|
||||
"secure access.",
|
||||
},
|
||||
"user": {
|
||||
"type": "email",
|
||||
"description": "User microsoft email address.",
|
||||
"deprecated": True,
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"description": "User password.",
|
||||
"deprecated": True,
|
||||
},
|
||||
},
|
||||
"required": [
|
||||
@@ -132,6 +134,30 @@ from rest_framework_json_api import serializers
|
||||
"password",
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "M365 Certificate Credentials",
|
||||
"properties": {
|
||||
"client_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure application (client) ID for authentication in Azure AD.",
|
||||
},
|
||||
"tenant_id": {
|
||||
"type": "string",
|
||||
"description": "The Azure tenant ID, representing the directory where the application is "
|
||||
"registered.",
|
||||
},
|
||||
"certificate_content": {
|
||||
"type": "string",
|
||||
"description": "The certificate content in base64 format for certificate-based authentication.",
|
||||
},
|
||||
},
|
||||
"required": [
|
||||
"client_id",
|
||||
"tenant_id",
|
||||
"certificate_content",
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "object",
|
||||
"title": "GCP Static Credentials",
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import base64
|
||||
import json
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
@@ -39,6 +40,7 @@ from api.models import (
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
Task,
|
||||
TenantAPIKey,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
@@ -316,6 +318,23 @@ class UserSerializer(BaseSerializerV1):
|
||||
)
|
||||
|
||||
|
||||
class UserIncludeSerializer(UserSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"email",
|
||||
"company_name",
|
||||
"date_joined",
|
||||
"roles",
|
||||
]
|
||||
|
||||
included_serializers = {
|
||||
"roles": "api.v1.serializers.RoleIncludeSerializer",
|
||||
}
|
||||
|
||||
|
||||
class UserCreateSerializer(BaseWriteSerializer):
|
||||
password = serializers.CharField(write_only=True)
|
||||
company_name = serializers.CharField(required=False)
|
||||
@@ -908,6 +927,17 @@ class ProviderCreateSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"uid",
|
||||
# "scanner_args"
|
||||
]
|
||||
extra_kwargs = {
|
||||
"alias": {
|
||||
"help_text": "Human readable name to identify the provider, e.g. 'Production AWS Account', 'Dev Environment'",
|
||||
},
|
||||
"provider": {
|
||||
"help_text": "Type of provider to create.",
|
||||
},
|
||||
"uid": {
|
||||
"help_text": "Unique identifier for the provider, set by the provider, e.g. AWS account ID, Azure subscription ID, GCP project ID, etc.",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class ProviderUpdateSerializer(BaseWriteSerializer):
|
||||
@@ -922,6 +952,11 @@ class ProviderUpdateSerializer(BaseWriteSerializer):
|
||||
"alias",
|
||||
# "scanner_args"
|
||||
]
|
||||
extra_kwargs = {
|
||||
"alias": {
|
||||
"help_text": "Human readable name to identify the provider, e.g. 'Production AWS Account', 'Dev Environment'",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# Scans
|
||||
@@ -1361,10 +1396,38 @@ class AzureProviderSecret(serializers.Serializer):
|
||||
|
||||
class M365ProviderSecret(serializers.Serializer):
|
||||
client_id = serializers.CharField()
|
||||
client_secret = serializers.CharField()
|
||||
client_secret = serializers.CharField(required=False)
|
||||
tenant_id = serializers.CharField()
|
||||
user = serializers.EmailField(required=False)
|
||||
password = serializers.CharField(required=False)
|
||||
certificate_content = serializers.CharField(required=False)
|
||||
|
||||
def validate(self, attrs):
|
||||
if attrs.get("client_secret") and attrs.get("certificate_content"):
|
||||
raise serializers.ValidationError(
|
||||
"You cannot provide both client_secret and certificate_content."
|
||||
)
|
||||
if not attrs.get("client_secret") and not attrs.get("certificate_content"):
|
||||
raise serializers.ValidationError(
|
||||
"You must provide either client_secret or certificate_content."
|
||||
)
|
||||
return super().validate(attrs)
|
||||
|
||||
def validate_certificate_content(self, certificate_content):
|
||||
"""Validate that M365 certificate content is valid base64 encoded data."""
|
||||
if certificate_content:
|
||||
try:
|
||||
base64.b64decode(certificate_content, validate=True)
|
||||
except Exception as e:
|
||||
raise ValidationError(
|
||||
{
|
||||
"certificate_content": [
|
||||
f"The provided certificate content is not valid base64 encoded data: {str(e)}"
|
||||
]
|
||||
},
|
||||
code="m365-certificate-content",
|
||||
)
|
||||
return certificate_content
|
||||
|
||||
class Meta:
|
||||
resource_name = "provider-secrets"
|
||||
@@ -1957,6 +2020,17 @@ class ComplianceOverviewDetailSerializer(serializers.Serializer):
|
||||
resource_name = "compliance-requirements-details"
|
||||
|
||||
|
||||
class ComplianceOverviewDetailThreatscoreSerializer(ComplianceOverviewDetailSerializer):
|
||||
"""
|
||||
Serializer for detailed compliance requirement information for Threatscore.
|
||||
|
||||
Includes additional fields specific to the Threatscore framework.
|
||||
"""
|
||||
|
||||
passed_findings = serializers.IntegerField()
|
||||
total_findings = serializers.IntegerField()
|
||||
|
||||
|
||||
class ComplianceOverviewAttributesSerializer(serializers.Serializer):
|
||||
id = serializers.CharField()
|
||||
compliance_name = serializers.CharField()
|
||||
@@ -2735,3 +2809,125 @@ class LighthouseConfigUpdateSerializer(BaseWriteSerializer):
|
||||
instance.api_key_decoded = api_key
|
||||
instance.save()
|
||||
return instance
|
||||
|
||||
|
||||
# API Keys
|
||||
|
||||
|
||||
class TenantApiKeySerializer(RLSSerializer):
|
||||
"""
|
||||
Serializer for the TenantApiKey model.
|
||||
"""
|
||||
|
||||
# Map database field names to API field names for consistency
|
||||
expires_at = serializers.DateTimeField(source="expiry_date", read_only=True)
|
||||
inserted_at = serializers.DateTimeField(source="created", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = TenantAPIKey
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"prefix",
|
||||
"expires_at",
|
||||
"revoked",
|
||||
"inserted_at",
|
||||
"last_used_at",
|
||||
"entity",
|
||||
]
|
||||
|
||||
included_serializers = {
|
||||
"entity": "api.v1.serializers.UserIncludeSerializer",
|
||||
}
|
||||
|
||||
|
||||
class TenantApiKeyCreateSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"""Serializer for creating new API keys."""
|
||||
|
||||
# Map database field names to API field names for consistency
|
||||
expires_at = serializers.DateTimeField(source="expiry_date", required=False)
|
||||
inserted_at = serializers.DateTimeField(source="created", read_only=True)
|
||||
api_key = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = TenantAPIKey
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"prefix",
|
||||
"expires_at",
|
||||
"revoked",
|
||||
"entity",
|
||||
"inserted_at",
|
||||
"last_used_at",
|
||||
"api_key",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"prefix": {"read_only": True},
|
||||
"revoked": {"read_only": True},
|
||||
"entity": {"read_only": True},
|
||||
"inserted_at": {"read_only": True},
|
||||
"last_used_at": {"read_only": True},
|
||||
"api_key": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate_name(self, value):
|
||||
"""Validate that the name is unique within the tenant."""
|
||||
tenant_id = self.context.get("tenant_id")
|
||||
if TenantAPIKey.objects.filter(tenant_id=tenant_id, name=value).exists():
|
||||
raise ValidationError("An API key with this name already exists.")
|
||||
return value
|
||||
|
||||
def get_api_key(self, obj):
|
||||
"""Return the raw API key if it was stored during creation."""
|
||||
return getattr(obj, "_raw_api_key", None)
|
||||
|
||||
def create(self, validated_data):
|
||||
instance, raw_api_key = TenantAPIKey.objects.create_api_key(
|
||||
**validated_data,
|
||||
tenant_id=self.context.get("tenant_id"),
|
||||
entity=self.context.get("request").user,
|
||||
)
|
||||
# Store the raw API key temporarily on the instance for the serializer
|
||||
instance._raw_api_key = raw_api_key
|
||||
return instance
|
||||
|
||||
|
||||
class TenantApiKeyUpdateSerializer(RLSSerializer, BaseWriteSerializer):
|
||||
"""Serializer for updating API keys - only allows changing the name."""
|
||||
|
||||
# Map database field names to API field names for consistency
|
||||
expires_at = serializers.DateTimeField(source="expiry_date", read_only=True)
|
||||
inserted_at = serializers.DateTimeField(source="created", read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = TenantAPIKey
|
||||
fields = [
|
||||
"id",
|
||||
"name",
|
||||
"prefix",
|
||||
"expires_at",
|
||||
"entity",
|
||||
"inserted_at",
|
||||
"last_used_at",
|
||||
]
|
||||
extra_kwargs = {
|
||||
"id": {"read_only": True},
|
||||
"prefix": {"read_only": True},
|
||||
"entity": {"read_only": True},
|
||||
"expires_at": {"read_only": True},
|
||||
"inserted_at": {"read_only": True},
|
||||
"last_used_at": {"read_only": True},
|
||||
}
|
||||
|
||||
def validate_name(self, value):
|
||||
"""Validate that the name is unique within the tenant, excluding current instance."""
|
||||
tenant_id = self.context.get("tenant_id")
|
||||
if (
|
||||
TenantAPIKey.objects.filter(tenant_id=tenant_id, name=value)
|
||||
.exclude(id=self.instance.id)
|
||||
.exists()
|
||||
):
|
||||
raise ValidationError("An API key with this name already exists.")
|
||||
return value
|
||||
|
||||
@@ -39,6 +39,7 @@ from api.v1.views import (
|
||||
TenantViewSet,
|
||||
UserRoleRelationshipView,
|
||||
UserViewSet,
|
||||
TenantApiKeyViewSet,
|
||||
)
|
||||
|
||||
router = routers.DefaultRouter(trailing_slash=False)
|
||||
@@ -65,6 +66,7 @@ router.register(
|
||||
LighthouseConfigViewSet,
|
||||
basename="lighthouseconfiguration",
|
||||
)
|
||||
router.register(r"api-keys", TenantApiKeyViewSet, basename="api-key")
|
||||
|
||||
tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
|
||||
tenants_router.register(
|
||||
|
||||
+156
-38
@@ -95,6 +95,7 @@ from api.filters import (
|
||||
ScanSummarySeverityFilter,
|
||||
ServiceOverviewFilter,
|
||||
TaskFilter,
|
||||
TenantApiKeyFilter,
|
||||
TenantFilter,
|
||||
UserFilter,
|
||||
)
|
||||
@@ -124,6 +125,7 @@ from api.models import (
|
||||
SeverityChoices,
|
||||
StateChoices,
|
||||
Task,
|
||||
TenantAPIKey,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
@@ -140,6 +142,7 @@ from api.v1.mixins import PaginateByPkMixin, TaskManagementMixin
|
||||
from api.v1.serializers import (
|
||||
ComplianceOverviewAttributesSerializer,
|
||||
ComplianceOverviewDetailSerializer,
|
||||
ComplianceOverviewDetailThreatscoreSerializer,
|
||||
ComplianceOverviewMetadataSerializer,
|
||||
ComplianceOverviewSerializer,
|
||||
FindingDynamicFilterSerializer,
|
||||
@@ -189,6 +192,9 @@ from api.v1.serializers import (
|
||||
ScanUpdateSerializer,
|
||||
ScheduleDailyCreateSerializer,
|
||||
TaskSerializer,
|
||||
TenantApiKeyCreateSerializer,
|
||||
TenantApiKeySerializer,
|
||||
TenantApiKeyUpdateSerializer,
|
||||
TenantSerializer,
|
||||
TokenRefreshSerializer,
|
||||
TokenSerializer,
|
||||
@@ -387,6 +393,11 @@ class SchemaView(SpectacularAPIView):
|
||||
"description": "Endpoints for Single Sign-On authentication management via SAML for seamless user "
|
||||
"authentication.",
|
||||
},
|
||||
{
|
||||
"name": "API Keys",
|
||||
"description": "Endpoints for API keys management. These can be used as an alternative to JWT "
|
||||
"authorization.",
|
||||
},
|
||||
]
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
@@ -655,36 +666,48 @@ class TenantFinishACSView(FinishACSView):
|
||||
.get(email_domain=email_domain)
|
||||
.tenant
|
||||
)
|
||||
role_name = (
|
||||
extra.get("userType", ["no_permissions"])[0].strip()
|
||||
if extra.get("userType")
|
||||
else "no_permissions"
|
||||
|
||||
# Check if tenant has only one user with MANAGE_ACCOUNT role
|
||||
users_with_manage_account = (
|
||||
UserRoleRelationship.objects.using(MainRouter.admin_db)
|
||||
.filter(role__manage_account=True, tenant_id=tenant.id)
|
||||
.values("user")
|
||||
.distinct()
|
||||
.count()
|
||||
)
|
||||
try:
|
||||
role = Role.objects.using(MainRouter.admin_db).get(
|
||||
name=role_name, tenant=tenant
|
||||
|
||||
# Only apply role mapping from userType if tenant does NOT have exactly one user with MANAGE_ACCOUNT
|
||||
if users_with_manage_account != 1:
|
||||
role_name = (
|
||||
extra.get("userType", ["no_permissions"])[0].strip()
|
||||
if extra.get("userType")
|
||||
else "no_permissions"
|
||||
)
|
||||
except Role.DoesNotExist:
|
||||
role = Role.objects.using(MainRouter.admin_db).create(
|
||||
name=role_name,
|
||||
tenant=tenant,
|
||||
manage_users=False,
|
||||
manage_account=False,
|
||||
manage_billing=False,
|
||||
manage_providers=False,
|
||||
manage_integrations=False,
|
||||
manage_scans=False,
|
||||
unlimited_visibility=False,
|
||||
try:
|
||||
role = Role.objects.using(MainRouter.admin_db).get(
|
||||
name=role_name, tenant=tenant
|
||||
)
|
||||
except Role.DoesNotExist:
|
||||
role = Role.objects.using(MainRouter.admin_db).create(
|
||||
name=role_name,
|
||||
tenant=tenant,
|
||||
manage_users=False,
|
||||
manage_account=False,
|
||||
manage_billing=False,
|
||||
manage_providers=False,
|
||||
manage_integrations=False,
|
||||
manage_scans=False,
|
||||
unlimited_visibility=False,
|
||||
)
|
||||
UserRoleRelationship.objects.using(MainRouter.admin_db).filter(
|
||||
user=user,
|
||||
tenant_id=tenant.id,
|
||||
).delete()
|
||||
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
UserRoleRelationship.objects.using(MainRouter.admin_db).filter(
|
||||
user=user,
|
||||
tenant_id=tenant.id,
|
||||
).delete()
|
||||
UserRoleRelationship.objects.using(MainRouter.admin_db).create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
membership, _ = Membership.objects.using(MainRouter.admin_db).get_or_create(
|
||||
user=user,
|
||||
tenant=tenant,
|
||||
@@ -811,7 +834,9 @@ class UserViewSet(BaseUserViewset):
|
||||
if kwargs["pk"] != str(self.request.user.id):
|
||||
raise ValidationError("Only the current user can be deleted.")
|
||||
|
||||
return super().destroy(request, *args, **kwargs)
|
||||
user = self.get_object()
|
||||
user.delete(using=MainRouter.admin_db)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@extend_schema(
|
||||
parameters=[
|
||||
@@ -3424,15 +3449,16 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
|
||||
)
|
||||
filtered_queryset = self.filter_queryset(self.get_queryset())
|
||||
|
||||
all_requirements = (
|
||||
filtered_queryset.values(
|
||||
"requirement_id", "framework", "version", "description"
|
||||
)
|
||||
.distinct()
|
||||
.annotate(
|
||||
total_instances=Count("id"),
|
||||
manual_count=Count("id", filter=Q(requirement_status="MANUAL")),
|
||||
)
|
||||
all_requirements = filtered_queryset.values(
|
||||
"requirement_id",
|
||||
"framework",
|
||||
"version",
|
||||
"description",
|
||||
).annotate(
|
||||
total_instances=Count("id"),
|
||||
manual_count=Count("id", filter=Q(requirement_status="MANUAL")),
|
||||
passed_findings_sum=Sum("passed_findings"),
|
||||
total_findings_sum=Sum("total_findings"),
|
||||
)
|
||||
|
||||
passed_instances = (
|
||||
@@ -3451,6 +3477,8 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
|
||||
total_instances = requirement["total_instances"]
|
||||
passed_count = passed_counts.get(requirement_id, 0)
|
||||
is_manual = requirement["manual_count"] == total_instances
|
||||
passed_findings = requirement["passed_findings_sum"] or 0
|
||||
total_findings = requirement["total_findings_sum"] or 0
|
||||
if is_manual:
|
||||
requirement_status = "MANUAL"
|
||||
elif passed_count == total_instances:
|
||||
@@ -3465,10 +3493,19 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
|
||||
"version": requirement["version"],
|
||||
"description": requirement["description"],
|
||||
"status": requirement_status,
|
||||
"passed_findings": passed_findings,
|
||||
"total_findings": total_findings,
|
||||
}
|
||||
)
|
||||
|
||||
serializer = self.get_serializer(requirements_summary, many=True)
|
||||
# Use different serializer for threatscore framework
|
||||
if "threatscore" not in compliance_id:
|
||||
serializer = self.get_serializer(requirements_summary, many=True)
|
||||
else:
|
||||
serializer = ComplianceOverviewDetailThreatscoreSerializer(
|
||||
requirements_summary, many=True
|
||||
)
|
||||
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
@action(detail=False, methods=["get"], url_name="attributes")
|
||||
@@ -4188,3 +4225,84 @@ class ProcessorViewSet(BaseRLSViewSet):
|
||||
elif self.action == "partial_update":
|
||||
return ProcessorUpdateSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="List API keys",
|
||||
description="Retrieve a list of API keys for the tenant, with filtering support.",
|
||||
),
|
||||
retrieve=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="Retrieve API key details",
|
||||
description="Fetch detailed information about a specific API key by its ID.",
|
||||
),
|
||||
create=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="Create a new API key",
|
||||
description="Create a new API key for the tenant.",
|
||||
),
|
||||
partial_update=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="Partially update an API key",
|
||||
description="Modify certain fields of an existing API key without affecting other settings.",
|
||||
),
|
||||
revoke=extend_schema(
|
||||
tags=["API Keys"],
|
||||
summary="Revoke an API key",
|
||||
description="Revoke an API key by its ID. This action is irreversible and will prevent the key from being "
|
||||
"used.",
|
||||
request=None,
|
||||
responses={
|
||||
200: OpenApiResponse(
|
||||
response=TenantApiKeySerializer,
|
||||
description="API key was successfully revoked",
|
||||
)
|
||||
},
|
||||
),
|
||||
)
|
||||
class TenantApiKeyViewSet(BaseRLSViewSet):
|
||||
queryset = TenantAPIKey.objects.all()
|
||||
serializer_class = TenantApiKeySerializer
|
||||
filterset_class = TenantApiKeyFilter
|
||||
http_method_names = ["get", "post", "patch", "delete"]
|
||||
ordering = ["revoked", "-created"]
|
||||
ordering_fields = ["name", "prefix", "revoked", "inserted_at", "expires_at"]
|
||||
# RBAC required permissions
|
||||
required_permissions = [Permissions.MANAGE_ACCOUNT]
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = TenantAPIKey.objects.filter(
|
||||
tenant_id=self.request.tenant_id
|
||||
).annotate(inserted_at=F("created"), expires_at=F("expiry_date"))
|
||||
return queryset
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.action == "create":
|
||||
return TenantApiKeyCreateSerializer
|
||||
elif self.action == "partial_update":
|
||||
return TenantApiKeyUpdateSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
@extend_schema(exclude=True)
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
raise MethodNotAllowed(method="DESTROY")
|
||||
|
||||
@action(detail=True, methods=["delete"])
|
||||
def revoke(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
|
||||
# Check if already revoked
|
||||
if instance.revoked:
|
||||
raise ValidationError(
|
||||
{
|
||||
"detail": "API key is already revoked",
|
||||
}
|
||||
)
|
||||
|
||||
TenantAPIKey.objects.revoke_api_key(instance.pk)
|
||||
instance.refresh_from_db()
|
||||
|
||||
serializer = self.get_serializer(instance)
|
||||
return Response(data=serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
@@ -48,6 +48,10 @@ class NDJSONFormatter(logging.Formatter):
|
||||
log_record["user_id"] = record.user_id
|
||||
if hasattr(record, "tenant_id"):
|
||||
log_record["tenant_id"] = record.tenant_id
|
||||
if hasattr(record, "api_key_prefix"):
|
||||
log_record["api_key_prefix"] = (
|
||||
record.api_key_prefix if record.api_key_prefix != "N/A" else None
|
||||
)
|
||||
if hasattr(record, "method"):
|
||||
log_record["method"] = record.method
|
||||
if hasattr(record, "path"):
|
||||
@@ -90,6 +94,9 @@ class HumanReadableFormatter(logging.Formatter):
|
||||
# Add REST API extra fields
|
||||
if hasattr(record, "user_id"):
|
||||
log_components.append(f"({record.user_id})")
|
||||
if hasattr(record, "api_key_prefix"):
|
||||
if record.api_key_prefix != "N/A":
|
||||
log_components.append(f"(API-Key {record.api_key_prefix})")
|
||||
if hasattr(record, "tenant_id"):
|
||||
log_components.append(f"[{record.tenant_id}]")
|
||||
if hasattr(record, "method"):
|
||||
|
||||
@@ -43,6 +43,7 @@ INSTALLED_APPS = [
|
||||
"allauth.socialaccount.providers.saml",
|
||||
"dj_rest_auth.registration",
|
||||
"rest_framework.authtoken",
|
||||
"drf_simple_apikey",
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
@@ -84,7 +85,7 @@ TEMPLATES = [
|
||||
REST_FRAMEWORK = {
|
||||
"DEFAULT_SCHEMA_CLASS": "drf_spectacular_jsonapi.schemas.openapi.JsonApiAutoSchema",
|
||||
"DEFAULT_AUTHENTICATION_CLASSES": (
|
||||
"rest_framework_simplejwt.authentication.JWTAuthentication",
|
||||
"api.authentication.CombinedJWTOrAPIKeyAuthentication",
|
||||
),
|
||||
"PAGE_SIZE": 10,
|
||||
"EXCEPTION_HANDLER": "api.exceptions.custom_exception_handler",
|
||||
@@ -220,7 +221,8 @@ SIMPLE_JWT = {
|
||||
"JTI_CLAIM": "jti",
|
||||
"USER_ID_FIELD": "id",
|
||||
"USER_ID_CLAIM": "sub",
|
||||
# Issuer and Audience claims, for the moment we will keep these values as default values, they may change in the future.
|
||||
# Issuer and Audience claims, for the moment we will keep these values as default values, they may change in the
|
||||
# future.
|
||||
"AUDIENCE": env.str("DJANGO_JWT_AUDIENCE", "https://api.prowler.com"),
|
||||
"ISSUER": env.str("DJANGO_JWT_ISSUER", "https://api.prowler.com"),
|
||||
# Additional security settings
|
||||
@@ -229,6 +231,13 @@ SIMPLE_JWT = {
|
||||
|
||||
SECRETS_ENCRYPTION_KEY = env.str("DJANGO_SECRETS_ENCRYPTION_KEY", "")
|
||||
|
||||
# DRF Simple API Key settings
|
||||
DRF_API_KEY = {
|
||||
"FERNET_SECRET": SECRETS_ENCRYPTION_KEY,
|
||||
"API_KEY_LIFETIME": 365,
|
||||
"AUTHENTICATION_KEYWORD_HEADER": "Api-Key",
|
||||
}
|
||||
|
||||
# Internationalization
|
||||
# https://docs.djangoproject.com/en/5.0/topics/i18n/
|
||||
|
||||
|
||||
@@ -5,24 +5,39 @@ DEBUG = env.bool("DJANGO_DEBUG", default=True)
|
||||
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["*"])
|
||||
|
||||
# Database
|
||||
default_db_name = env("POSTGRES_DB", default="prowler_db")
|
||||
default_db_user = env("POSTGRES_USER", default="prowler_user")
|
||||
default_db_password = env("POSTGRES_PASSWORD", default="prowler")
|
||||
default_db_host = env("POSTGRES_HOST", default="postgres-db")
|
||||
default_db_port = env("POSTGRES_PORT", default="5432")
|
||||
|
||||
DATABASES = {
|
||||
"prowler_user": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_DB", default="prowler_db"),
|
||||
"USER": env("POSTGRES_USER", default="prowler_user"),
|
||||
"PASSWORD": env("POSTGRES_PASSWORD", default="prowler"),
|
||||
"HOST": env("POSTGRES_HOST", default="postgres-db"),
|
||||
"PORT": env("POSTGRES_PORT", default="5432"),
|
||||
"NAME": default_db_name,
|
||||
"USER": default_db_user,
|
||||
"PASSWORD": default_db_password,
|
||||
"HOST": default_db_host,
|
||||
"PORT": default_db_port,
|
||||
},
|
||||
"admin": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_DB", default="prowler_db"),
|
||||
"NAME": default_db_name,
|
||||
"USER": env("POSTGRES_ADMIN_USER", default="prowler"),
|
||||
"PASSWORD": env("POSTGRES_ADMIN_PASSWORD", default="S3cret"),
|
||||
"HOST": env("POSTGRES_HOST", default="postgres-db"),
|
||||
"PORT": env("POSTGRES_PORT", default="5432"),
|
||||
"HOST": default_db_host,
|
||||
"PORT": default_db_port,
|
||||
},
|
||||
"replica": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_REPLICA_DB", default=default_db_name),
|
||||
"USER": env("POSTGRES_REPLICA_USER", default=default_db_user),
|
||||
"PASSWORD": env("POSTGRES_REPLICA_PASSWORD", default=default_db_password),
|
||||
"HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
|
||||
"PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
|
||||
},
|
||||
}
|
||||
|
||||
DATABASES["default"] = DATABASES["prowler_user"]
|
||||
|
||||
REST_FRAMEWORK["DEFAULT_RENDERER_CLASSES"] = tuple( # noqa: F405
|
||||
|
||||
@@ -6,22 +6,37 @@ ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["localhost", "127.0.0.
|
||||
|
||||
# Database
|
||||
# TODO Use Django database routers https://docs.djangoproject.com/en/5.0/topics/db/multi-db/#automatic-database-routing
|
||||
default_db_name = env("POSTGRES_DB")
|
||||
default_db_user = env("POSTGRES_USER")
|
||||
default_db_password = env("POSTGRES_PASSWORD")
|
||||
default_db_host = env("POSTGRES_HOST")
|
||||
default_db_port = env("POSTGRES_PORT")
|
||||
|
||||
DATABASES = {
|
||||
"prowler_user": {
|
||||
"ENGINE": "django.db.backends.postgresql",
|
||||
"NAME": env("POSTGRES_DB"),
|
||||
"USER": env("POSTGRES_USER"),
|
||||
"PASSWORD": env("POSTGRES_PASSWORD"),
|
||||
"HOST": env("POSTGRES_HOST"),
|
||||
"PORT": env("POSTGRES_PORT"),
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": default_db_name,
|
||||
"USER": default_db_user,
|
||||
"PASSWORD": default_db_password,
|
||||
"HOST": default_db_host,
|
||||
"PORT": default_db_port,
|
||||
},
|
||||
"admin": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_DB"),
|
||||
"NAME": default_db_name,
|
||||
"USER": env("POSTGRES_ADMIN_USER"),
|
||||
"PASSWORD": env("POSTGRES_ADMIN_PASSWORD"),
|
||||
"HOST": env("POSTGRES_HOST"),
|
||||
"PORT": env("POSTGRES_PORT"),
|
||||
"HOST": default_db_host,
|
||||
"PORT": default_db_port,
|
||||
},
|
||||
"replica": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_REPLICA_DB", default=default_db_name),
|
||||
"USER": env("POSTGRES_REPLICA_USER", default=default_db_user),
|
||||
"PASSWORD": env("POSTGRES_REPLICA_PASSWORD", default=default_db_password),
|
||||
"HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
|
||||
"PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
|
||||
},
|
||||
}
|
||||
|
||||
DATABASES["default"] = DATABASES["prowler_user"]
|
||||
|
||||
@@ -20,6 +20,13 @@ DATABASE_ROUTERS = []
|
||||
TESTING = True
|
||||
SECRETS_ENCRYPTION_KEY = "ZMiYVo7m4Fbe2eXXPyrwxdJss2WSalXSv3xHBcJkPl0="
|
||||
|
||||
# DRF Simple API Key settings
|
||||
DRF_API_KEY = {
|
||||
"FERNET_SECRET": SECRETS_ENCRYPTION_KEY,
|
||||
"API_KEY_LIFETIME": 365,
|
||||
"AUTHENTICATION_KEYWORD_HEADER": "Api-Key",
|
||||
}
|
||||
|
||||
# JWT
|
||||
|
||||
SIMPLE_JWT["ALGORITHM"] = "HS256" # noqa: F405
|
||||
|
||||
@@ -38,6 +38,7 @@ from api.models import (
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
Task,
|
||||
TenantAPIKey,
|
||||
User,
|
||||
UserRoleRelationship,
|
||||
)
|
||||
@@ -1368,6 +1369,56 @@ def saml_sociallogin(users_fixture):
|
||||
return sociallogin
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def api_keys_fixture(tenants_fixture, create_test_user):
|
||||
"""Create test API keys for testing."""
|
||||
tenant = tenants_fixture[0]
|
||||
user = create_test_user
|
||||
|
||||
# Create and assign role to user for API key authentication
|
||||
role = Role.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
name="Test API Key Role",
|
||||
unlimited_visibility=True,
|
||||
manage_account=True,
|
||||
)
|
||||
UserRoleRelationship.objects.create(
|
||||
user=user,
|
||||
role=role,
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
# Create API keys with different states
|
||||
api_key1, raw_key1 = TenantAPIKey.objects.create_api_key(
|
||||
name="Test API Key 1",
|
||||
tenant_id=tenant.id,
|
||||
entity=user,
|
||||
)
|
||||
|
||||
api_key2, raw_key2 = TenantAPIKey.objects.create_api_key(
|
||||
name="Test API Key 2",
|
||||
tenant_id=tenant.id,
|
||||
entity=user,
|
||||
expiry_date=datetime.now(timezone.utc) + timedelta(days=60),
|
||||
)
|
||||
|
||||
# Revoked API key
|
||||
api_key3, raw_key3 = TenantAPIKey.objects.create_api_key(
|
||||
name="Revoked API Key",
|
||||
tenant_id=tenant.id,
|
||||
entity=user,
|
||||
)
|
||||
api_key3.revoked = True
|
||||
api_key3.save()
|
||||
|
||||
# Store raw keys on instances for testing
|
||||
api_key1._raw_key = raw_key1
|
||||
api_key2._raw_key = raw_key2
|
||||
api_key3._raw_key = raw_key3
|
||||
|
||||
return [api_key1, api_key2, api_key3]
|
||||
|
||||
|
||||
def get_authorization_header(access_token: str) -> dict:
|
||||
return {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
|
||||
@@ -20,6 +20,10 @@ from prowler.lib.outputs.asff.asff import ASFF
|
||||
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
|
||||
AWSWellArchitected,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.ccc.ccc_aws import CCC_AWS
|
||||
from prowler.lib.outputs.compliance.ccc.ccc_azure import CCC_Azure
|
||||
from prowler.lib.outputs.compliance.ccc.ccc_gcp import CCC_GCP
|
||||
from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
|
||||
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
|
||||
@@ -73,12 +77,15 @@ COMPLIANCE_CLASS_MAP = {
|
||||
(lambda name: name.startswith("iso27001_"), AWSISO27001),
|
||||
(lambda name: name.startswith("kisa"), AWSKISAISMSP),
|
||||
(lambda name: name == "prowler_threatscore_aws", ProwlerThreatScoreAWS),
|
||||
(lambda name: name == "ccc_aws", CCC_AWS),
|
||||
(lambda name: name.startswith("c5_"), AWSC5),
|
||||
],
|
||||
"azure": [
|
||||
(lambda name: name.startswith("cis_"), AzureCIS),
|
||||
(lambda name: name == "mitre_attack_azure", AzureMitreAttack),
|
||||
(lambda name: name.startswith("ens_"), AzureENS),
|
||||
(lambda name: name.startswith("iso27001_"), AzureISO27001),
|
||||
(lambda name: name == "ccc_azure", CCC_Azure),
|
||||
(lambda name: name == "prowler_threatscore_azure", ProwlerThreatScoreAzure),
|
||||
],
|
||||
"gcp": [
|
||||
@@ -87,6 +94,7 @@ COMPLIANCE_CLASS_MAP = {
|
||||
(lambda name: name.startswith("ens_"), GCPENS),
|
||||
(lambda name: name.startswith("iso27001_"), GCPISO27001),
|
||||
(lambda name: name == "prowler_threatscore_gcp", ProwlerThreatScoreGCP),
|
||||
(lambda name: name == "ccc_gcp", CCC_GCP),
|
||||
],
|
||||
"kubernetes": [
|
||||
(lambda name: name.startswith("cis_"), KubernetesCIS),
|
||||
|
||||
@@ -5,6 +5,7 @@ from celery.utils.log import get_task_logger
|
||||
from config.django.base import DJANGO_FINDINGS_BATCH_SIZE
|
||||
from tasks.utils import batched
|
||||
|
||||
from api.db_router import READ_REPLICA_ALIAS
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import Finding, Integration, Provider
|
||||
from api.utils import initialize_prowler_integration, initialize_prowler_provider
|
||||
@@ -289,7 +290,7 @@ def upload_security_hub_integration(
|
||||
has_findings = False
|
||||
batch_number = 0
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
qs = (
|
||||
Finding.all_objects.filter(tenant_id=tenant_id, scan_id=scan_id)
|
||||
.order_by("uid")
|
||||
|
||||
@@ -1,8 +1,12 @@
|
||||
import csv
|
||||
import io
|
||||
import json
|
||||
import time
|
||||
import uuid
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any
|
||||
|
||||
from celery.utils.log import get_task_logger
|
||||
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
|
||||
@@ -14,8 +18,11 @@ from api.compliance import (
|
||||
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
|
||||
generate_scan_compliance,
|
||||
)
|
||||
from api.db_router import READ_REPLICA_ALIAS, MainRouter
|
||||
from api.db_utils import (
|
||||
create_objects_in_batches,
|
||||
POSTGRES_TENANT_VAR,
|
||||
SET_CONFIG_QUERY,
|
||||
psycopg_connection,
|
||||
rls_transaction,
|
||||
update_objects_in_batches,
|
||||
)
|
||||
@@ -40,6 +47,28 @@ from prowler.lib.scan.scan import Scan as ProwlerScan
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
# Column order must match `ComplianceRequirementOverview` schema in
|
||||
# `api/models.py`. Keep this list minimal but sufficient to populate all
|
||||
# non-nullable fields plus the counters we care about.
|
||||
COMPLIANCE_REQUIREMENT_COPY_COLUMNS = (
|
||||
"id",
|
||||
"tenant_id",
|
||||
"inserted_at",
|
||||
"compliance_id",
|
||||
"framework",
|
||||
"version",
|
||||
"description",
|
||||
"region",
|
||||
"requirement_id",
|
||||
"requirement_status",
|
||||
"passed_checks",
|
||||
"failed_checks",
|
||||
"total_checks",
|
||||
"passed_findings",
|
||||
"total_findings",
|
||||
"scan_id",
|
||||
)
|
||||
|
||||
|
||||
def _create_finding_delta(
|
||||
last_status: FindingStatus | None | str, new_status: FindingStatus | None
|
||||
@@ -107,6 +136,124 @@ def _store_resources(
|
||||
return resource_instance, (resource_instance.uid, resource_instance.region)
|
||||
|
||||
|
||||
def _copy_compliance_requirement_rows(
|
||||
tenant_id: str, rows: list[dict[str, Any]]
|
||||
) -> None:
|
||||
"""Stream compliance requirement rows into Postgres using COPY.
|
||||
|
||||
We leverage the admin connection (when available) to bypass the COPY + RLS
|
||||
restriction, writing only the fields required by
|
||||
``ComplianceRequirementOverview``.
|
||||
|
||||
Args:
|
||||
tenant_id: Target tenant UUID.
|
||||
rows: List of row dictionaries prepared by
|
||||
:func:`create_compliance_requirements`.
|
||||
"""
|
||||
|
||||
csv_buffer = io.StringIO()
|
||||
writer = csv.writer(csv_buffer)
|
||||
|
||||
datetime_now = datetime.now(tz=timezone.utc)
|
||||
for row in rows:
|
||||
writer.writerow(
|
||||
[
|
||||
str(row.get("id")),
|
||||
str(row.get("tenant_id")),
|
||||
(row.get("inserted_at") or datetime_now).isoformat(),
|
||||
row.get("compliance_id") or "",
|
||||
row.get("framework") or "",
|
||||
row.get("version") or "",
|
||||
row.get("description") or "",
|
||||
row.get("region") or "",
|
||||
row.get("requirement_id") or "",
|
||||
row.get("requirement_status") or "",
|
||||
row.get("passed_checks", 0),
|
||||
row.get("failed_checks", 0),
|
||||
row.get("total_checks", 0),
|
||||
row.get("passed_findings", 0),
|
||||
row.get("total_findings", 0),
|
||||
str(row.get("scan_id")),
|
||||
]
|
||||
)
|
||||
|
||||
csv_buffer.seek(0)
|
||||
copy_sql = (
|
||||
"COPY compliance_requirements_overviews ("
|
||||
+ ", ".join(COMPLIANCE_REQUIREMENT_COPY_COLUMNS)
|
||||
+ ") FROM STDIN WITH (FORMAT CSV, DELIMITER ',', QUOTE '\"', ESCAPE '\"', NULL '\\N')"
|
||||
)
|
||||
|
||||
try:
|
||||
with psycopg_connection(MainRouter.admin_db) as connection:
|
||||
connection.autocommit = False
|
||||
try:
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(SET_CONFIG_QUERY, [POSTGRES_TENANT_VAR, tenant_id])
|
||||
cursor.copy_expert(copy_sql, csv_buffer)
|
||||
connection.commit()
|
||||
except Exception:
|
||||
connection.rollback()
|
||||
raise
|
||||
finally:
|
||||
csv_buffer.close()
|
||||
|
||||
|
||||
def _persist_compliance_requirement_rows(
|
||||
tenant_id: str, rows: list[dict[str, Any]]
|
||||
) -> None:
|
||||
"""Persist compliance requirement rows using COPY with ORM fallback.
|
||||
|
||||
Args:
|
||||
tenant_id: Target tenant UUID.
|
||||
rows: Precomputed row dictionaries that reflect the compliance
|
||||
overview state for a scan.
|
||||
"""
|
||||
if not rows:
|
||||
return
|
||||
|
||||
try:
|
||||
_copy_compliance_requirement_rows(tenant_id, rows)
|
||||
except Exception as error:
|
||||
logger.exception(
|
||||
"COPY bulk insert for compliance requirements failed; falling back to ORM bulk_create",
|
||||
exc_info=error,
|
||||
)
|
||||
fallback_objects = [
|
||||
ComplianceRequirementOverview(
|
||||
id=row["id"],
|
||||
tenant_id=row["tenant_id"],
|
||||
inserted_at=row["inserted_at"],
|
||||
compliance_id=row["compliance_id"],
|
||||
framework=row["framework"],
|
||||
version=row["version"],
|
||||
description=row["description"],
|
||||
region=row["region"],
|
||||
requirement_id=row["requirement_id"],
|
||||
requirement_status=row["requirement_status"],
|
||||
passed_checks=row["passed_checks"],
|
||||
failed_checks=row["failed_checks"],
|
||||
total_checks=row["total_checks"],
|
||||
passed_findings=row.get("passed_findings", 0),
|
||||
total_findings=row.get("total_findings", 0),
|
||||
scan_id=row["scan_id"],
|
||||
)
|
||||
for row in rows
|
||||
]
|
||||
with rls_transaction(tenant_id):
|
||||
ComplianceRequirementOverview.objects.bulk_create(
|
||||
fallback_objects, batch_size=500
|
||||
)
|
||||
|
||||
|
||||
def _normalized_compliance_key(framework: str | None, version: str | None) -> str:
|
||||
"""Return normalized identifier used to group compliance totals."""
|
||||
|
||||
normalized_framework = (framework or "").lower().replace("-", "").replace("_", "")
|
||||
normalized_version = (version or "").lower().replace("-", "").replace("_", "")
|
||||
return f"{normalized_framework}{normalized_version}"
|
||||
|
||||
|
||||
def perform_prowler_scan(
|
||||
tenant_id: str,
|
||||
scan_id: str,
|
||||
@@ -143,7 +290,7 @@ def perform_prowler_scan(
|
||||
scan_instance.save()
|
||||
|
||||
# Find the mutelist processor if it exists
|
||||
with rls_transaction(tenant_id):
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
try:
|
||||
mutelist_processor = Processor.objects.get(
|
||||
tenant_id=tenant_id, processor_type=Processor.ProcessorChoices.MUTELIST
|
||||
@@ -272,7 +419,7 @@ def perform_prowler_scan(
|
||||
unique_resources.add((resource_instance.uid, resource_instance.region))
|
||||
|
||||
# Process finding
|
||||
with rls_transaction(tenant_id):
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
finding_uid = finding.uid
|
||||
last_first_seen_at = None
|
||||
if finding_uid not in last_status_cache:
|
||||
@@ -305,6 +452,12 @@ def perform_prowler_scan(
|
||||
# If the finding is muted at this time the reason must be the configured Mutelist
|
||||
muted_reason = "Muted by mutelist" if finding.muted else None
|
||||
|
||||
# Increment failed_findings_count cache if the finding status is FAIL and not muted
|
||||
if status == FindingStatus.FAIL and not finding.muted:
|
||||
resource_uid = finding.resource_uid
|
||||
resource_failed_findings_cache[resource_uid] += 1
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
# Create the finding
|
||||
finding_instance = Finding.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
@@ -325,11 +478,6 @@ def perform_prowler_scan(
|
||||
)
|
||||
finding_instance.add_resources([resource_instance])
|
||||
|
||||
# Increment failed_findings_count cache if the finding status is FAIL and not muted
|
||||
if status == FindingStatus.FAIL and not finding.muted:
|
||||
resource_uid = finding.resource_uid
|
||||
resource_failed_findings_cache[resource_uid] += 1
|
||||
|
||||
# Update scan resource summaries
|
||||
scan_resource_cache.add(
|
||||
(
|
||||
@@ -439,7 +587,7 @@ def aggregate_findings(tenant_id: str, scan_id: str):
|
||||
- muted_new: Muted findings with a delta of 'new'.
|
||||
- muted_changed: Muted findings with a delta of 'changed'.
|
||||
"""
|
||||
with rls_transaction(tenant_id):
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
findings = Finding.objects.filter(tenant_id=tenant_id, scan_id=scan_id)
|
||||
|
||||
aggregation = findings.values(
|
||||
@@ -582,15 +730,32 @@ def create_compliance_requirements(tenant_id: str, scan_id: str):
|
||||
ValidationError: If tenant_id is not a valid UUID.
|
||||
"""
|
||||
try:
|
||||
with rls_transaction(tenant_id):
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
scan_instance = Scan.objects.get(pk=scan_id)
|
||||
provider_instance = scan_instance.provider
|
||||
prowler_provider = return_prowler_provider(provider_instance)
|
||||
|
||||
compliance_template = PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE[
|
||||
provider_instance.provider
|
||||
]
|
||||
modeled_threatscore_compliance_id = "ProwlerThreatScore-1.0"
|
||||
threatscore_requirements_by_check: dict[str, set[str]] = {}
|
||||
threatscore_framework = compliance_template.get(
|
||||
modeled_threatscore_compliance_id
|
||||
)
|
||||
if threatscore_framework:
|
||||
for requirement_id, requirement in threatscore_framework[
|
||||
"requirements"
|
||||
].items():
|
||||
for check_id in requirement["checks"]:
|
||||
threatscore_requirements_by_check.setdefault(check_id, set()).add(
|
||||
requirement_id
|
||||
)
|
||||
|
||||
# Get check status data by region from findings
|
||||
findings = (
|
||||
Finding.all_objects.filter(scan_id=scan_id, muted=False)
|
||||
.only("id", "check_id", "status")
|
||||
.only("id", "check_id", "status", "compliance")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"resources",
|
||||
@@ -601,14 +766,36 @@ def create_compliance_requirements(tenant_id: str, scan_id: str):
|
||||
.iterator(chunk_size=1000)
|
||||
)
|
||||
|
||||
findings_count_by_compliance = {}
|
||||
check_status_by_region = {}
|
||||
with rls_transaction(tenant_id):
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
for finding in findings:
|
||||
for resource in finding.small_resources:
|
||||
region = resource.region
|
||||
current_status = check_status_by_region.setdefault(region, {})
|
||||
if current_status.get(finding.check_id) != "FAIL":
|
||||
current_status[finding.check_id] = finding.status
|
||||
if modeled_threatscore_compliance_id in finding.compliance:
|
||||
for requirement_id in finding.compliance[
|
||||
modeled_threatscore_compliance_id
|
||||
]:
|
||||
compliance_key = findings_count_by_compliance.setdefault(
|
||||
region, {}
|
||||
).setdefault(
|
||||
modeled_threatscore_compliance_id.lower().replace(
|
||||
"-", ""
|
||||
),
|
||||
{},
|
||||
)
|
||||
if requirement_id not in compliance_key:
|
||||
compliance_key[requirement_id] = {
|
||||
"total": 0,
|
||||
"pass": 0,
|
||||
}
|
||||
|
||||
compliance_key[requirement_id]["total"] += 1
|
||||
if finding.status == "PASS":
|
||||
compliance_key[requirement_id]["pass"] += 1
|
||||
|
||||
try:
|
||||
# Try to get regions from provider
|
||||
@@ -617,11 +804,6 @@ def create_compliance_requirements(tenant_id: str, scan_id: str):
|
||||
# If not available, use regions from findings
|
||||
regions = set(check_status_by_region.keys())
|
||||
|
||||
# Get compliance template for the provider
|
||||
compliance_template = PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE[
|
||||
provider_instance.provider
|
||||
]
|
||||
|
||||
# Create compliance data by region
|
||||
compliance_overview_by_region = {
|
||||
region: deepcopy(compliance_template) for region in regions
|
||||
@@ -640,36 +822,53 @@ def create_compliance_requirements(tenant_id: str, scan_id: str):
|
||||
status,
|
||||
)
|
||||
|
||||
# Prepare compliance requirement objects
|
||||
compliance_requirement_objects = []
|
||||
# Prepare compliance requirement rows
|
||||
compliance_requirement_rows: list[dict[str, Any]] = []
|
||||
utc_datetime_now = datetime.now(tz=timezone.utc)
|
||||
for region, compliance_data in compliance_overview_by_region.items():
|
||||
for compliance_id, compliance in compliance_data.items():
|
||||
modeled_compliance_id = _normalized_compliance_key(
|
||||
compliance["framework"], compliance["version"]
|
||||
)
|
||||
# Create an overview record for each requirement within each compliance framework
|
||||
for requirement_id, requirement in compliance["requirements"].items():
|
||||
compliance_requirement_objects.append(
|
||||
ComplianceRequirementOverview(
|
||||
tenant_id=tenant_id,
|
||||
scan=scan_instance,
|
||||
region=region,
|
||||
compliance_id=compliance_id,
|
||||
framework=compliance["framework"],
|
||||
version=compliance["version"],
|
||||
requirement_id=requirement_id,
|
||||
description=requirement["description"],
|
||||
passed_checks=requirement["checks_status"]["pass"],
|
||||
failed_checks=requirement["checks_status"]["fail"],
|
||||
total_checks=requirement["checks_status"]["total"],
|
||||
requirement_status=requirement["status"],
|
||||
)
|
||||
checks_status = requirement["checks_status"]
|
||||
compliance_requirement_rows.append(
|
||||
{
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": tenant_id,
|
||||
"inserted_at": utc_datetime_now,
|
||||
"compliance_id": compliance_id,
|
||||
"framework": compliance["framework"],
|
||||
"version": compliance["version"] or "",
|
||||
"description": requirement.get("description") or "",
|
||||
"region": region,
|
||||
"requirement_id": requirement_id,
|
||||
"requirement_status": requirement["status"],
|
||||
"passed_checks": checks_status["pass"],
|
||||
"failed_checks": checks_status["fail"],
|
||||
"total_checks": checks_status["total"],
|
||||
"scan_id": scan_instance.id,
|
||||
"passed_findings": findings_count_by_compliance.get(
|
||||
region, {}
|
||||
)
|
||||
.get(modeled_compliance_id, {})
|
||||
.get(requirement_id, {})
|
||||
.get("pass", 0),
|
||||
"total_findings": findings_count_by_compliance.get(
|
||||
region, {}
|
||||
)
|
||||
.get(modeled_compliance_id, {})
|
||||
.get(requirement_id, {})
|
||||
.get("total", 0),
|
||||
}
|
||||
)
|
||||
|
||||
# Bulk create requirement records
|
||||
create_objects_in_batches(
|
||||
tenant_id, ComplianceRequirementOverview, compliance_requirement_objects
|
||||
)
|
||||
# Bulk create requirement records using PostgreSQL COPY
|
||||
_persist_compliance_requirement_rows(tenant_id, compliance_requirement_rows)
|
||||
|
||||
return {
|
||||
"requirements_created": len(compliance_requirement_objects),
|
||||
"requirements_created": len(compliance_requirement_rows),
|
||||
"regions_processed": list(regions),
|
||||
"compliance_frameworks": (
|
||||
list(compliance_overview_by_region.get(list(regions)[0], {}).keys())
|
||||
|
||||
@@ -34,6 +34,7 @@ from tasks.jobs.scan import (
|
||||
from tasks.utils import batched, get_next_execution_datetime
|
||||
|
||||
from api.compliance import get_compliance_frameworks
|
||||
from api.db_router import READ_REPLICA_ALIAS
|
||||
from api.db_utils import rls_transaction
|
||||
from api.decorators import set_tenant
|
||||
from api.models import Finding, Integration, Provider, Scan, ScanSummary, StateChoices
|
||||
@@ -343,70 +344,73 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
|
||||
.order_by("uid")
|
||||
.iterator()
|
||||
)
|
||||
for batch, is_last in batched(qs, DJANGO_FINDINGS_BATCH_SIZE):
|
||||
fos = [FindingOutput.transform_api_finding(f, prowler_provider) for f in batch]
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
for batch, is_last in batched(qs, DJANGO_FINDINGS_BATCH_SIZE):
|
||||
fos = [
|
||||
FindingOutput.transform_api_finding(f, prowler_provider) for f in batch
|
||||
]
|
||||
|
||||
# Outputs
|
||||
for mode, cfg in OUTPUT_FORMATS_MAPPING.items():
|
||||
# Skip ASFF generation if not needed
|
||||
if mode == "json-asff" and not generate_asff:
|
||||
continue
|
||||
# Outputs
|
||||
for mode, cfg in OUTPUT_FORMATS_MAPPING.items():
|
||||
# Skip ASFF generation if not needed
|
||||
if mode == "json-asff" and not generate_asff:
|
||||
continue
|
||||
|
||||
cls = cfg["class"]
|
||||
suffix = cfg["suffix"]
|
||||
extra = cfg.get("kwargs", {}).copy()
|
||||
if mode == "html":
|
||||
extra.update(provider=prowler_provider, stats=scan_summary)
|
||||
cls = cfg["class"]
|
||||
suffix = cfg["suffix"]
|
||||
extra = cfg.get("kwargs", {}).copy()
|
||||
if mode == "html":
|
||||
extra.update(provider=prowler_provider, stats=scan_summary)
|
||||
|
||||
writer, initialization = get_writer(
|
||||
output_writers,
|
||||
cls,
|
||||
lambda cls=cls, fos=fos, suffix=suffix: cls(
|
||||
findings=fos,
|
||||
file_path=out_dir,
|
||||
file_extension=suffix,
|
||||
from_cli=False,
|
||||
),
|
||||
is_last,
|
||||
)
|
||||
if not initialization:
|
||||
writer.transform(fos)
|
||||
writer.batch_write_data_to_file(**extra)
|
||||
writer._data.clear()
|
||||
writer, initialization = get_writer(
|
||||
output_writers,
|
||||
cls,
|
||||
lambda cls=cls, fos=fos, suffix=suffix: cls(
|
||||
findings=fos,
|
||||
file_path=out_dir,
|
||||
file_extension=suffix,
|
||||
from_cli=False,
|
||||
),
|
||||
is_last,
|
||||
)
|
||||
if not initialization:
|
||||
writer.transform(fos)
|
||||
writer.batch_write_data_to_file(**extra)
|
||||
writer._data.clear()
|
||||
|
||||
# Compliance CSVs
|
||||
for name in frameworks_avail:
|
||||
compliance_obj = frameworks_bulk[name]
|
||||
# Compliance CSVs
|
||||
for name in frameworks_avail:
|
||||
compliance_obj = frameworks_bulk[name]
|
||||
|
||||
klass = GenericCompliance
|
||||
for condition, cls in COMPLIANCE_CLASS_MAP.get(provider_type, []):
|
||||
if condition(name):
|
||||
klass = cls
|
||||
break
|
||||
klass = GenericCompliance
|
||||
for condition, cls in COMPLIANCE_CLASS_MAP.get(provider_type, []):
|
||||
if condition(name):
|
||||
klass = cls
|
||||
break
|
||||
|
||||
filename = f"{comp_dir}_{name}.csv"
|
||||
filename = f"{comp_dir}_{name}.csv"
|
||||
|
||||
writer, initialization = get_writer(
|
||||
compliance_writers,
|
||||
name,
|
||||
lambda klass=klass, fos=fos: klass(
|
||||
findings=fos,
|
||||
compliance=compliance_obj,
|
||||
file_path=filename,
|
||||
from_cli=False,
|
||||
),
|
||||
is_last,
|
||||
)
|
||||
if not initialization:
|
||||
writer.transform(fos, compliance_obj, name)
|
||||
writer.batch_write_data_to_file()
|
||||
writer._data.clear()
|
||||
writer, initialization = get_writer(
|
||||
compliance_writers,
|
||||
name,
|
||||
lambda klass=klass, fos=fos: klass(
|
||||
findings=fos,
|
||||
compliance=compliance_obj,
|
||||
file_path=filename,
|
||||
from_cli=False,
|
||||
),
|
||||
is_last,
|
||||
)
|
||||
if not initialization:
|
||||
writer.transform(fos, compliance_obj, name)
|
||||
writer.batch_write_data_to_file()
|
||||
writer._data.clear()
|
||||
|
||||
compressed = _compress_output_files(out_dir)
|
||||
upload_uri = _upload_to_s3(tenant_id, compressed, scan_id)
|
||||
|
||||
# S3 integrations (need output_directory)
|
||||
with rls_transaction(tenant_id):
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
s3_integrations = Integration.objects.filter(
|
||||
integrationproviderrelationship__provider_id=provider_id,
|
||||
integration_type=Integration.IntegrationChoices.AMAZON_S3,
|
||||
|
||||
@@ -1,17 +1,22 @@
|
||||
import csv
|
||||
import json
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
from io import StringIO
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from tasks.jobs.scan import (
|
||||
_copy_compliance_requirement_rows,
|
||||
_create_finding_delta,
|
||||
_persist_compliance_requirement_rows,
|
||||
_store_resources,
|
||||
create_compliance_requirements,
|
||||
perform_prowler_scan,
|
||||
)
|
||||
from tasks.utils import CustomEncoder
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.exceptions import ProviderConnectionError
|
||||
from api.models import Finding, Provider, Resource, Scan, StateChoices, StatusChoices
|
||||
from prowler.lib.check.models import Severity
|
||||
@@ -1045,3 +1050,773 @@ class TestCreateComplianceRequirements:
|
||||
|
||||
assert "requirements_created" in result
|
||||
assert result["requirements_created"] >= 0
|
||||
|
||||
|
||||
class TestComplianceRequirementCopy:
|
||||
@patch("tasks.jobs.scan.psycopg_connection")
|
||||
def test_copy_compliance_requirement_rows_streams_csv(
|
||||
self, mock_psycopg_connection, settings
|
||||
):
|
||||
settings.DATABASES.setdefault("admin", settings.DATABASES["default"])
|
||||
|
||||
connection = MagicMock()
|
||||
cursor = MagicMock()
|
||||
cursor_context = MagicMock()
|
||||
cursor_context.__enter__.return_value = cursor
|
||||
cursor_context.__exit__.return_value = False
|
||||
connection.cursor.return_value = cursor_context
|
||||
connection.__enter__.return_value = connection
|
||||
connection.__exit__.return_value = False
|
||||
|
||||
context_manager = MagicMock()
|
||||
context_manager.__enter__.return_value = connection
|
||||
context_manager.__exit__.return_value = False
|
||||
mock_psycopg_connection.return_value = context_manager
|
||||
|
||||
captured = {}
|
||||
|
||||
def copy_side_effect(sql, file_obj):
|
||||
captured["sql"] = sql
|
||||
captured["data"] = file_obj.read()
|
||||
|
||||
cursor.copy_expert.side_effect = copy_side_effect
|
||||
|
||||
row = {
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": str(uuid.uuid4()),
|
||||
"compliance_id": "cisa_aws",
|
||||
"framework": "CISA",
|
||||
"version": None,
|
||||
"description": "desc",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 1,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 1,
|
||||
"scan_id": uuid.uuid4(),
|
||||
}
|
||||
|
||||
with patch.object(MainRouter, "admin_db", "admin"):
|
||||
_copy_compliance_requirement_rows(str(row["tenant_id"]), [row])
|
||||
|
||||
mock_psycopg_connection.assert_called_once_with("admin")
|
||||
connection.cursor.assert_called_once()
|
||||
cursor.execute.assert_called_once()
|
||||
cursor.copy_expert.assert_called_once()
|
||||
|
||||
csv_rows = list(csv.reader(StringIO(captured["data"])))
|
||||
assert csv_rows[0][0] == str(row["id"])
|
||||
assert csv_rows[0][5] == ""
|
||||
assert csv_rows[0][-1] == str(row["scan_id"])
|
||||
|
||||
@patch("tasks.jobs.scan.ComplianceRequirementOverview.objects.bulk_create")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
@patch(
|
||||
"tasks.jobs.scan._copy_compliance_requirement_rows",
|
||||
side_effect=Exception("copy failed"),
|
||||
)
|
||||
def test_persist_compliance_requirement_rows_fallback(
|
||||
self, mock_copy, mock_rls_transaction, mock_bulk_create
|
||||
):
|
||||
inserted_at = datetime.now(timezone.utc)
|
||||
row = {
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": str(uuid.uuid4()),
|
||||
"inserted_at": inserted_at,
|
||||
"compliance_id": "cisa_aws",
|
||||
"framework": "CISA",
|
||||
"version": "1.0",
|
||||
"description": "desc",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 1,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 1,
|
||||
"scan_id": uuid.uuid4(),
|
||||
}
|
||||
|
||||
tenant_id = row["tenant_id"]
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__.return_value = None
|
||||
ctx.__exit__.return_value = False
|
||||
mock_rls_transaction.return_value = ctx
|
||||
|
||||
_persist_compliance_requirement_rows(tenant_id, [row])
|
||||
|
||||
mock_copy.assert_called_once_with(tenant_id, [row])
|
||||
mock_rls_transaction.assert_called_once_with(tenant_id)
|
||||
mock_bulk_create.assert_called_once()
|
||||
|
||||
args, kwargs = mock_bulk_create.call_args
|
||||
objects = args[0]
|
||||
assert len(objects) == 1
|
||||
fallback = objects[0]
|
||||
assert fallback.version == row["version"]
|
||||
assert fallback.compliance_id == row["compliance_id"]
|
||||
|
||||
@patch("tasks.jobs.scan.ComplianceRequirementOverview.objects.bulk_create")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
@patch("tasks.jobs.scan._copy_compliance_requirement_rows")
|
||||
def test_persist_compliance_requirement_rows_no_rows(
|
||||
self, mock_copy, mock_rls_transaction, mock_bulk_create
|
||||
):
|
||||
_persist_compliance_requirement_rows(str(uuid.uuid4()), [])
|
||||
|
||||
mock_copy.assert_not_called()
|
||||
mock_rls_transaction.assert_not_called()
|
||||
mock_bulk_create.assert_not_called()
|
||||
|
||||
@patch("tasks.jobs.scan.psycopg_connection")
|
||||
def test_copy_compliance_requirement_rows_multiple_rows(
|
||||
self, mock_psycopg_connection, settings
|
||||
):
|
||||
"""Test COPY with multiple rows to ensure batch processing works correctly."""
|
||||
settings.DATABASES.setdefault("admin", settings.DATABASES["default"])
|
||||
|
||||
connection = MagicMock()
|
||||
cursor = MagicMock()
|
||||
cursor_context = MagicMock()
|
||||
cursor_context.__enter__.return_value = cursor
|
||||
cursor_context.__exit__.return_value = False
|
||||
connection.cursor.return_value = cursor_context
|
||||
connection.__enter__.return_value = connection
|
||||
connection.__exit__.return_value = False
|
||||
|
||||
context_manager = MagicMock()
|
||||
context_manager.__enter__.return_value = connection
|
||||
context_manager.__exit__.return_value = False
|
||||
mock_psycopg_connection.return_value = context_manager
|
||||
|
||||
captured = {}
|
||||
|
||||
def copy_side_effect(sql, file_obj):
|
||||
captured["sql"] = sql
|
||||
captured["data"] = file_obj.read()
|
||||
|
||||
cursor.copy_expert.side_effect = copy_side_effect
|
||||
|
||||
tenant_id = str(uuid.uuid4())
|
||||
scan_id = uuid.uuid4()
|
||||
inserted_at = datetime.now(timezone.utc)
|
||||
|
||||
rows = [
|
||||
{
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": tenant_id,
|
||||
"inserted_at": inserted_at,
|
||||
"compliance_id": "cisa_aws",
|
||||
"framework": "CISA",
|
||||
"version": "1.0",
|
||||
"description": "First requirement",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 5,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 5,
|
||||
"scan_id": scan_id,
|
||||
},
|
||||
{
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": tenant_id,
|
||||
"inserted_at": inserted_at,
|
||||
"compliance_id": "cisa_aws",
|
||||
"framework": "CISA",
|
||||
"version": "1.0",
|
||||
"description": "Second requirement",
|
||||
"region": "us-west-2",
|
||||
"requirement_id": "req-2",
|
||||
"requirement_status": "FAIL",
|
||||
"passed_checks": 3,
|
||||
"failed_checks": 2,
|
||||
"total_checks": 5,
|
||||
"scan_id": scan_id,
|
||||
},
|
||||
{
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": tenant_id,
|
||||
"inserted_at": inserted_at,
|
||||
"compliance_id": "aws_foundational_security_aws",
|
||||
"framework": "AWS-Foundational-Security-Best-Practices",
|
||||
"version": "2.0",
|
||||
"description": "Third requirement",
|
||||
"region": "eu-west-1",
|
||||
"requirement_id": "req-3",
|
||||
"requirement_status": "MANUAL",
|
||||
"passed_checks": 0,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 3,
|
||||
"scan_id": scan_id,
|
||||
},
|
||||
]
|
||||
|
||||
with patch.object(MainRouter, "admin_db", "admin"):
|
||||
_copy_compliance_requirement_rows(tenant_id, rows)
|
||||
|
||||
mock_psycopg_connection.assert_called_once_with("admin")
|
||||
connection.cursor.assert_called_once()
|
||||
cursor.execute.assert_called_once()
|
||||
cursor.copy_expert.assert_called_once()
|
||||
|
||||
csv_rows = list(csv.reader(StringIO(captured["data"])))
|
||||
assert len(csv_rows) == 3
|
||||
|
||||
# Validate first row
|
||||
assert csv_rows[0][0] == str(rows[0]["id"])
|
||||
assert csv_rows[0][1] == tenant_id
|
||||
assert csv_rows[0][3] == "cisa_aws"
|
||||
assert csv_rows[0][4] == "CISA"
|
||||
assert csv_rows[0][6] == "First requirement"
|
||||
assert csv_rows[0][7] == "us-east-1"
|
||||
assert csv_rows[0][10] == "5"
|
||||
assert csv_rows[0][11] == "0"
|
||||
assert csv_rows[0][12] == "5"
|
||||
|
||||
# Validate second row
|
||||
assert csv_rows[1][0] == str(rows[1]["id"])
|
||||
assert csv_rows[1][7] == "us-west-2"
|
||||
assert csv_rows[1][9] == "FAIL"
|
||||
assert csv_rows[1][10] == "3"
|
||||
assert csv_rows[1][11] == "2"
|
||||
|
||||
# Validate third row
|
||||
assert csv_rows[2][0] == str(rows[2]["id"])
|
||||
assert csv_rows[2][3] == "aws_foundational_security_aws"
|
||||
assert csv_rows[2][5] == "2.0"
|
||||
assert csv_rows[2][9] == "MANUAL"
|
||||
|
||||
@patch("tasks.jobs.scan.psycopg_connection")
|
||||
def test_copy_compliance_requirement_rows_null_values(
|
||||
self, mock_psycopg_connection, settings
|
||||
):
|
||||
"""Test COPY handles NULL/None values correctly in nullable fields."""
|
||||
settings.DATABASES.setdefault("admin", settings.DATABASES["default"])
|
||||
|
||||
connection = MagicMock()
|
||||
cursor = MagicMock()
|
||||
cursor_context = MagicMock()
|
||||
cursor_context.__enter__.return_value = cursor
|
||||
cursor_context.__exit__.return_value = False
|
||||
connection.cursor.return_value = cursor_context
|
||||
connection.__enter__.return_value = connection
|
||||
connection.__exit__.return_value = False
|
||||
|
||||
context_manager = MagicMock()
|
||||
context_manager.__enter__.return_value = connection
|
||||
context_manager.__exit__.return_value = False
|
||||
mock_psycopg_connection.return_value = context_manager
|
||||
|
||||
captured = {}
|
||||
|
||||
def copy_side_effect(sql, file_obj):
|
||||
captured["sql"] = sql
|
||||
captured["data"] = file_obj.read()
|
||||
|
||||
cursor.copy_expert.side_effect = copy_side_effect
|
||||
|
||||
# Row with all nullable fields set to None/empty
|
||||
row = {
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": str(uuid.uuid4()),
|
||||
"compliance_id": "test_framework",
|
||||
"framework": "Test",
|
||||
"version": None, # nullable
|
||||
"description": None, # nullable
|
||||
"region": "",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 0,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 0,
|
||||
"scan_id": uuid.uuid4(),
|
||||
}
|
||||
|
||||
with patch.object(MainRouter, "admin_db", "admin"):
|
||||
_copy_compliance_requirement_rows(str(row["tenant_id"]), [row])
|
||||
|
||||
csv_rows = list(csv.reader(StringIO(captured["data"])))
|
||||
assert len(csv_rows) == 1
|
||||
|
||||
# Validate that None values are converted to empty strings in CSV
|
||||
assert csv_rows[0][5] == "" # version
|
||||
assert csv_rows[0][6] == "" # description
|
||||
|
||||
@patch("tasks.jobs.scan.psycopg_connection")
|
||||
def test_copy_compliance_requirement_rows_special_characters(
|
||||
self, mock_psycopg_connection, settings
|
||||
):
|
||||
"""Test COPY correctly escapes special characters in CSV."""
|
||||
settings.DATABASES.setdefault("admin", settings.DATABASES["default"])
|
||||
|
||||
connection = MagicMock()
|
||||
cursor = MagicMock()
|
||||
cursor_context = MagicMock()
|
||||
cursor_context.__enter__.return_value = cursor
|
||||
cursor_context.__exit__.return_value = False
|
||||
connection.cursor.return_value = cursor_context
|
||||
connection.__enter__.return_value = connection
|
||||
connection.__exit__.return_value = False
|
||||
|
||||
context_manager = MagicMock()
|
||||
context_manager.__enter__.return_value = connection
|
||||
context_manager.__exit__.return_value = False
|
||||
mock_psycopg_connection.return_value = context_manager
|
||||
|
||||
captured = {}
|
||||
|
||||
def copy_side_effect(sql, file_obj):
|
||||
captured["sql"] = sql
|
||||
captured["data"] = file_obj.read()
|
||||
|
||||
cursor.copy_expert.side_effect = copy_side_effect
|
||||
|
||||
# Row with special characters that need escaping
|
||||
row = {
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": str(uuid.uuid4()),
|
||||
"compliance_id": 'framework"with"quotes',
|
||||
"framework": "Framework,with,commas",
|
||||
"version": "1.0",
|
||||
"description": 'Description with "quotes", commas, and\nnewlines',
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 1,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 1,
|
||||
"scan_id": uuid.uuid4(),
|
||||
}
|
||||
|
||||
with patch.object(MainRouter, "admin_db", "admin"):
|
||||
_copy_compliance_requirement_rows(str(row["tenant_id"]), [row])
|
||||
|
||||
# Verify CSV was generated (csv module handles escaping automatically)
|
||||
csv_rows = list(csv.reader(StringIO(captured["data"])))
|
||||
assert len(csv_rows) == 1
|
||||
|
||||
# Verify special characters are preserved after CSV parsing
|
||||
assert csv_rows[0][3] == 'framework"with"quotes'
|
||||
assert csv_rows[0][4] == "Framework,with,commas"
|
||||
assert "quotes" in csv_rows[0][6]
|
||||
assert "commas" in csv_rows[0][6]
|
||||
|
||||
@patch("tasks.jobs.scan.psycopg_connection")
|
||||
def test_copy_compliance_requirement_rows_missing_inserted_at(
|
||||
self, mock_psycopg_connection, settings
|
||||
):
|
||||
"""Test COPY uses current datetime when inserted_at is missing."""
|
||||
settings.DATABASES.setdefault("admin", settings.DATABASES["default"])
|
||||
|
||||
connection = MagicMock()
|
||||
cursor = MagicMock()
|
||||
cursor_context = MagicMock()
|
||||
cursor_context.__enter__.return_value = cursor
|
||||
cursor_context.__exit__.return_value = False
|
||||
connection.cursor.return_value = cursor_context
|
||||
connection.__enter__.return_value = connection
|
||||
connection.__exit__.return_value = False
|
||||
|
||||
context_manager = MagicMock()
|
||||
context_manager.__enter__.return_value = connection
|
||||
context_manager.__exit__.return_value = False
|
||||
mock_psycopg_connection.return_value = context_manager
|
||||
|
||||
captured = {}
|
||||
|
||||
def copy_side_effect(sql, file_obj):
|
||||
captured["sql"] = sql
|
||||
captured["data"] = file_obj.read()
|
||||
|
||||
cursor.copy_expert.side_effect = copy_side_effect
|
||||
|
||||
# Row without inserted_at field
|
||||
row = {
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": str(uuid.uuid4()),
|
||||
"compliance_id": "test_framework",
|
||||
"framework": "Test",
|
||||
"version": "1.0",
|
||||
"description": "desc",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 1,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 1,
|
||||
"scan_id": uuid.uuid4(),
|
||||
# Note: inserted_at is intentionally missing
|
||||
}
|
||||
|
||||
before_call = datetime.now(timezone.utc)
|
||||
with patch.object(MainRouter, "admin_db", "admin"):
|
||||
_copy_compliance_requirement_rows(str(row["tenant_id"]), [row])
|
||||
after_call = datetime.now(timezone.utc)
|
||||
|
||||
csv_rows = list(csv.reader(StringIO(captured["data"])))
|
||||
assert len(csv_rows) == 1
|
||||
|
||||
# Verify inserted_at was auto-generated and is a valid ISO datetime
|
||||
inserted_at_str = csv_rows[0][2]
|
||||
inserted_at = datetime.fromisoformat(inserted_at_str)
|
||||
assert before_call <= inserted_at <= after_call
|
||||
|
||||
@patch("tasks.jobs.scan.psycopg_connection")
|
||||
def test_copy_compliance_requirement_rows_transaction_rollback_on_copy_error(
|
||||
self, mock_psycopg_connection, settings
|
||||
):
|
||||
"""Test transaction is rolled back when copy_expert fails."""
|
||||
settings.DATABASES.setdefault("admin", settings.DATABASES["default"])
|
||||
|
||||
connection = MagicMock()
|
||||
cursor = MagicMock()
|
||||
cursor_context = MagicMock()
|
||||
cursor_context.__enter__.return_value = cursor
|
||||
cursor_context.__exit__.return_value = False
|
||||
connection.cursor.return_value = cursor_context
|
||||
connection.__enter__.return_value = connection
|
||||
connection.__exit__.return_value = False
|
||||
|
||||
context_manager = MagicMock()
|
||||
context_manager.__enter__.return_value = connection
|
||||
context_manager.__exit__.return_value = False
|
||||
mock_psycopg_connection.return_value = context_manager
|
||||
|
||||
# Simulate copy_expert failure
|
||||
cursor.copy_expert.side_effect = Exception("COPY command failed")
|
||||
|
||||
row = {
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": str(uuid.uuid4()),
|
||||
"compliance_id": "test",
|
||||
"framework": "Test",
|
||||
"version": "1.0",
|
||||
"description": "desc",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 1,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 1,
|
||||
"scan_id": uuid.uuid4(),
|
||||
}
|
||||
|
||||
with patch.object(MainRouter, "admin_db", "admin"):
|
||||
with pytest.raises(Exception, match="COPY command failed"):
|
||||
_copy_compliance_requirement_rows(str(row["tenant_id"]), [row])
|
||||
|
||||
# Verify rollback was called
|
||||
connection.rollback.assert_called_once()
|
||||
connection.commit.assert_not_called()
|
||||
|
||||
@patch("tasks.jobs.scan.psycopg_connection")
|
||||
def test_copy_compliance_requirement_rows_transaction_rollback_on_set_config_error(
|
||||
self, mock_psycopg_connection, settings
|
||||
):
|
||||
"""Test transaction is rolled back when SET_CONFIG fails."""
|
||||
settings.DATABASES.setdefault("admin", settings.DATABASES["default"])
|
||||
|
||||
connection = MagicMock()
|
||||
cursor = MagicMock()
|
||||
cursor_context = MagicMock()
|
||||
cursor_context.__enter__.return_value = cursor
|
||||
cursor_context.__exit__.return_value = False
|
||||
connection.cursor.return_value = cursor_context
|
||||
connection.__enter__.return_value = connection
|
||||
connection.__exit__.return_value = False
|
||||
|
||||
context_manager = MagicMock()
|
||||
context_manager.__enter__.return_value = connection
|
||||
context_manager.__exit__.return_value = False
|
||||
mock_psycopg_connection.return_value = context_manager
|
||||
|
||||
# Simulate cursor.execute failure
|
||||
cursor.execute.side_effect = Exception("SET prowler.tenant_id failed")
|
||||
|
||||
row = {
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": str(uuid.uuid4()),
|
||||
"compliance_id": "test",
|
||||
"framework": "Test",
|
||||
"version": "1.0",
|
||||
"description": "desc",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 1,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 1,
|
||||
"scan_id": uuid.uuid4(),
|
||||
}
|
||||
|
||||
with patch.object(MainRouter, "admin_db", "admin"):
|
||||
with pytest.raises(Exception, match="SET prowler.tenant_id failed"):
|
||||
_copy_compliance_requirement_rows(str(row["tenant_id"]), [row])
|
||||
|
||||
# Verify rollback was called
|
||||
connection.rollback.assert_called_once()
|
||||
connection.commit.assert_not_called()
|
||||
|
||||
@patch("tasks.jobs.scan.psycopg_connection")
|
||||
def test_copy_compliance_requirement_rows_commit_on_success(
|
||||
self, mock_psycopg_connection, settings
|
||||
):
|
||||
"""Test transaction is committed on successful COPY."""
|
||||
settings.DATABASES.setdefault("admin", settings.DATABASES["default"])
|
||||
|
||||
connection = MagicMock()
|
||||
cursor = MagicMock()
|
||||
cursor_context = MagicMock()
|
||||
cursor_context.__enter__.return_value = cursor
|
||||
cursor_context.__exit__.return_value = False
|
||||
connection.cursor.return_value = cursor_context
|
||||
connection.__enter__.return_value = connection
|
||||
connection.__exit__.return_value = False
|
||||
|
||||
context_manager = MagicMock()
|
||||
context_manager.__enter__.return_value = connection
|
||||
context_manager.__exit__.return_value = False
|
||||
mock_psycopg_connection.return_value = context_manager
|
||||
|
||||
cursor.copy_expert.return_value = None # Success
|
||||
|
||||
row = {
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": str(uuid.uuid4()),
|
||||
"compliance_id": "test",
|
||||
"framework": "Test",
|
||||
"version": "1.0",
|
||||
"description": "desc",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 1,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 1,
|
||||
"scan_id": uuid.uuid4(),
|
||||
}
|
||||
|
||||
with patch.object(MainRouter, "admin_db", "admin"):
|
||||
_copy_compliance_requirement_rows(str(row["tenant_id"]), [row])
|
||||
|
||||
# Verify commit was called and rollback was not
|
||||
connection.commit.assert_called_once()
|
||||
connection.rollback.assert_not_called()
|
||||
# Verify autocommit was disabled
|
||||
assert connection.autocommit is False
|
||||
|
||||
@patch("tasks.jobs.scan._copy_compliance_requirement_rows")
|
||||
def test_persist_compliance_requirement_rows_success(self, mock_copy):
|
||||
"""Test successful COPY path without fallback to ORM."""
|
||||
mock_copy.return_value = None # Success, no exception
|
||||
|
||||
tenant_id = str(uuid.uuid4())
|
||||
rows = [
|
||||
{
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": tenant_id,
|
||||
"inserted_at": datetime.now(timezone.utc),
|
||||
"compliance_id": "test",
|
||||
"framework": "Test",
|
||||
"version": "1.0",
|
||||
"description": "desc",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 1,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 1,
|
||||
"scan_id": uuid.uuid4(),
|
||||
}
|
||||
]
|
||||
|
||||
_persist_compliance_requirement_rows(tenant_id, rows)
|
||||
|
||||
# Verify COPY was called
|
||||
mock_copy.assert_called_once_with(tenant_id, rows)
|
||||
|
||||
@patch("tasks.jobs.scan.logger")
|
||||
@patch("tasks.jobs.scan.ComplianceRequirementOverview.objects.bulk_create")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
@patch(
|
||||
"tasks.jobs.scan._copy_compliance_requirement_rows",
|
||||
side_effect=Exception("COPY failed"),
|
||||
)
|
||||
def test_persist_compliance_requirement_rows_fallback_logging(
|
||||
self, mock_copy, mock_rls_transaction, mock_bulk_create, mock_logger
|
||||
):
|
||||
"""Test logger.exception is called when COPY fails and fallback occurs."""
|
||||
tenant_id = str(uuid.uuid4())
|
||||
row = {
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": tenant_id,
|
||||
"inserted_at": datetime.now(timezone.utc),
|
||||
"compliance_id": "test",
|
||||
"framework": "Test",
|
||||
"version": "1.0",
|
||||
"description": "desc",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 1,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 1,
|
||||
"scan_id": uuid.uuid4(),
|
||||
}
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__.return_value = None
|
||||
ctx.__exit__.return_value = False
|
||||
mock_rls_transaction.return_value = ctx
|
||||
|
||||
_persist_compliance_requirement_rows(tenant_id, [row])
|
||||
|
||||
# Verify logger.exception was called
|
||||
mock_logger.exception.assert_called_once()
|
||||
args, kwargs = mock_logger.exception.call_args
|
||||
assert "COPY bulk insert" in args[0]
|
||||
assert "falling back to ORM" in args[0]
|
||||
assert kwargs.get("exc_info") is not None
|
||||
|
||||
@patch("tasks.jobs.scan.ComplianceRequirementOverview.objects.bulk_create")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
@patch(
|
||||
"tasks.jobs.scan._copy_compliance_requirement_rows",
|
||||
side_effect=Exception("copy failed"),
|
||||
)
|
||||
def test_persist_compliance_requirement_rows_fallback_multiple_rows(
|
||||
self, mock_copy, mock_rls_transaction, mock_bulk_create
|
||||
):
|
||||
"""Test ORM fallback with multiple rows."""
|
||||
tenant_id = str(uuid.uuid4())
|
||||
scan_id = uuid.uuid4()
|
||||
inserted_at = datetime.now(timezone.utc)
|
||||
|
||||
rows = [
|
||||
{
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": tenant_id,
|
||||
"inserted_at": inserted_at,
|
||||
"compliance_id": "cisa_aws",
|
||||
"framework": "CISA",
|
||||
"version": "1.0",
|
||||
"description": "First requirement",
|
||||
"region": "us-east-1",
|
||||
"requirement_id": "req-1",
|
||||
"requirement_status": "PASS",
|
||||
"passed_checks": 5,
|
||||
"failed_checks": 0,
|
||||
"total_checks": 5,
|
||||
"scan_id": scan_id,
|
||||
},
|
||||
{
|
||||
"id": uuid.uuid4(),
|
||||
"tenant_id": tenant_id,
|
||||
"inserted_at": inserted_at,
|
||||
"compliance_id": "cisa_aws",
|
||||
"framework": "CISA",
|
||||
"version": "1.0",
|
||||
"description": "Second requirement",
|
||||
"region": "us-west-2",
|
||||
"requirement_id": "req-2",
|
||||
"requirement_status": "FAIL",
|
||||
"passed_checks": 2,
|
||||
"failed_checks": 3,
|
||||
"total_checks": 5,
|
||||
"scan_id": scan_id,
|
||||
},
|
||||
]
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__.return_value = None
|
||||
ctx.__exit__.return_value = False
|
||||
mock_rls_transaction.return_value = ctx
|
||||
|
||||
_persist_compliance_requirement_rows(tenant_id, rows)
|
||||
|
||||
mock_copy.assert_called_once_with(tenant_id, rows)
|
||||
mock_rls_transaction.assert_called_once_with(tenant_id)
|
||||
mock_bulk_create.assert_called_once()
|
||||
|
||||
args, kwargs = mock_bulk_create.call_args
|
||||
objects = args[0]
|
||||
assert len(objects) == 2
|
||||
assert kwargs["batch_size"] == 500
|
||||
|
||||
# Validate first object
|
||||
assert objects[0].id == rows[0]["id"]
|
||||
assert objects[0].tenant_id == rows[0]["tenant_id"]
|
||||
assert objects[0].compliance_id == rows[0]["compliance_id"]
|
||||
assert objects[0].framework == rows[0]["framework"]
|
||||
assert objects[0].region == rows[0]["region"]
|
||||
assert objects[0].passed_checks == 5
|
||||
assert objects[0].failed_checks == 0
|
||||
|
||||
# Validate second object
|
||||
assert objects[1].id == rows[1]["id"]
|
||||
assert objects[1].requirement_id == rows[1]["requirement_id"]
|
||||
assert objects[1].requirement_status == rows[1]["requirement_status"]
|
||||
assert objects[1].passed_checks == 2
|
||||
assert objects[1].failed_checks == 3
|
||||
|
||||
@patch("tasks.jobs.scan.ComplianceRequirementOverview.objects.bulk_create")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
@patch(
|
||||
"tasks.jobs.scan._copy_compliance_requirement_rows",
|
||||
side_effect=Exception("copy failed"),
|
||||
)
|
||||
def test_persist_compliance_requirement_rows_fallback_all_fields(
|
||||
self, mock_copy, mock_rls_transaction, mock_bulk_create
|
||||
):
|
||||
"""Test ORM fallback correctly maps all fields from row dict to model."""
|
||||
tenant_id = str(uuid.uuid4())
|
||||
row_id = uuid.uuid4()
|
||||
scan_id = uuid.uuid4()
|
||||
inserted_at = datetime.now(timezone.utc)
|
||||
|
||||
row = {
|
||||
"id": row_id,
|
||||
"tenant_id": tenant_id,
|
||||
"inserted_at": inserted_at,
|
||||
"compliance_id": "aws_foundational_security_aws",
|
||||
"framework": "AWS-Foundational-Security-Best-Practices",
|
||||
"version": "2.0",
|
||||
"description": "Ensure MFA is enabled",
|
||||
"region": "eu-west-1",
|
||||
"requirement_id": "iam.1",
|
||||
"requirement_status": "FAIL",
|
||||
"passed_checks": 10,
|
||||
"failed_checks": 5,
|
||||
"total_checks": 15,
|
||||
"scan_id": scan_id,
|
||||
}
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__.return_value = None
|
||||
ctx.__exit__.return_value = False
|
||||
mock_rls_transaction.return_value = ctx
|
||||
|
||||
_persist_compliance_requirement_rows(tenant_id, [row])
|
||||
|
||||
args, kwargs = mock_bulk_create.call_args
|
||||
objects = args[0]
|
||||
assert len(objects) == 1
|
||||
|
||||
obj = objects[0]
|
||||
# Validate ALL fields are correctly mapped
|
||||
assert obj.id == row_id
|
||||
assert obj.tenant_id == tenant_id
|
||||
assert obj.inserted_at == inserted_at
|
||||
assert obj.compliance_id == "aws_foundational_security_aws"
|
||||
assert obj.framework == "AWS-Foundational-Security-Best-Practices"
|
||||
assert obj.version == "2.0"
|
||||
assert obj.description == "Ensure MFA is enabled"
|
||||
assert obj.region == "eu-west-1"
|
||||
assert obj.requirement_id == "iam.1"
|
||||
assert obj.requirement_status == "FAIL"
|
||||
assert obj.passed_checks == 10
|
||||
assert obj.failed_checks == 5
|
||||
assert obj.total_checks == 15
|
||||
assert obj.scan_id == scan_id
|
||||
|
||||
@@ -0,0 +1,43 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_3_levels
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
data["REQUIREMENTS_DESCRIPTION"] = (
|
||||
data["REQUIREMENTS_ID"] + " - " + data["REQUIREMENTS_DESCRIPTION"]
|
||||
)
|
||||
|
||||
data["REQUIREMENTS_DESCRIPTION"] = data["REQUIREMENTS_DESCRIPTION"].apply(
|
||||
lambda x: x[:150] + "..." if len(str(x)) > 150 else x
|
||||
)
|
||||
|
||||
data["REQUIREMENTS_ATTRIBUTES_SECTION"] = data[
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
].apply(lambda x: x[:80] + "..." if len(str(x)) > 80 else x)
|
||||
|
||||
data["REQUIREMENTS_ATTRIBUTES_SUBSECTION"] = data[
|
||||
"REQUIREMENTS_ATTRIBUTES_SUBSECTION"
|
||||
].apply(lambda x: x[:150] + "..." if len(str(x)) > 150 else x)
|
||||
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_3_levels(
|
||||
aux,
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
)
|
||||
@@ -0,0 +1,36 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_3_levels
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
|
||||
data["REQUIREMENTS_ID"] = (
|
||||
data["REQUIREMENTS_ID"] + " - " + data["REQUIREMENTS_DESCRIPTION"]
|
||||
)
|
||||
|
||||
data["REQUIREMENTS_ID"] = data["REQUIREMENTS_ID"].apply(
|
||||
lambda x: x[:150] + "..." if len(str(x)) > 150 else x
|
||||
)
|
||||
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_3_levels(
|
||||
aux,
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
|
||||
"REQUIREMENTS_ID",
|
||||
)
|
||||
@@ -0,0 +1,36 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_3_levels
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
|
||||
data["REQUIREMENTS_ID"] = (
|
||||
data["REQUIREMENTS_ID"] + " - " + data["REQUIREMENTS_DESCRIPTION"]
|
||||
)
|
||||
|
||||
data["REQUIREMENTS_ID"] = data["REQUIREMENTS_ID"].apply(
|
||||
lambda x: x[:150] + "..." if len(str(x)) > 150 else x
|
||||
)
|
||||
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_3_levels(
|
||||
aux,
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
|
||||
"REQUIREMENTS_ID",
|
||||
)
|
||||
@@ -0,0 +1,36 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_3_levels
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
|
||||
data["REQUIREMENTS_ID"] = (
|
||||
data["REQUIREMENTS_ID"] + " - " + data["REQUIREMENTS_DESCRIPTION"]
|
||||
)
|
||||
|
||||
data["REQUIREMENTS_ID"] = data["REQUIREMENTS_ID"].apply(
|
||||
lambda x: x[:150] + "..." if len(str(x)) > 150 else x
|
||||
)
|
||||
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_3_levels(
|
||||
aux,
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
|
||||
"REQUIREMENTS_ID",
|
||||
)
|
||||
@@ -0,0 +1,41 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
"""
|
||||
Generate CIS OCI Foundations Benchmark v3.0 compliance table.
|
||||
|
||||
Args:
|
||||
data: DataFrame containing compliance check results with columns:
|
||||
- REQUIREMENTS_ID: CIS requirement ID (e.g., "1.1", "2.1")
|
||||
- REQUIREMENTS_DESCRIPTION: Description of the requirement
|
||||
- REQUIREMENTS_ATTRIBUTES_SECTION: CIS section name
|
||||
- CHECKID: Prowler check identifier
|
||||
- STATUS: Check status (PASS/FAIL)
|
||||
- REGION: OCI region
|
||||
- TENANCYID: OCI tenancy OCID
|
||||
- RESOURCEID: Resource OCID or identifier
|
||||
|
||||
Returns:
|
||||
Section containers organized by CIS sections for dashboard display
|
||||
"""
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"TENANCYID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
@@ -0,0 +1,4 @@
|
||||
.idea/
|
||||
.git/
|
||||
.claude/
|
||||
AGENTS.md
|
||||
+533
@@ -0,0 +1,533 @@
|
||||
Always that you are writting documentation try to follow this text/communication style guide:
|
||||
|
||||
# Prowler's Brand Voice
|
||||
|
||||
Prowler is the open cloud security platform trusted by thousands of organizations automating security monitoring and compliance with hundreds of built-in security checks, remediation solutions, and compliance frameworks. With over 10 million downloads, thousands of contributors, and a vibrant global community, Prowler is driving the open-source cloud security movement by providing transparent, customizable, and user-friendly solutions that help teams secure AWS, Azure, GCP, Kubernetes, and Microsoft 365 environments. Leveraging open-source innovation and cost savings, the Prowler platform makes cloud security 10 times more cost-effective and accessible than alternatives.
|
||||
|
||||
These values must be demonstrated in all our conversations and communications.
|
||||
|
||||
---
|
||||
|
||||
## Unbiased Communication
|
||||
|
||||
Prowler aims to reach every person in the globe. Our communications must be as inclusive and diverse as possible every time. We are guided by the following principles:
|
||||
|
||||
### Avoid Gendered Pronouns
|
||||
|
||||
Reference to gendered pronouns (she/her/hers, he/his/his, they/them/theirs) must be avoided whenever possible.
|
||||
|
||||
* Use second person for communications (you/your/yours).
|
||||
* Use a third-person reference instead of a gendered pronoun (the customer, the user).
|
||||
* In case a gendered pronoun must be forcibly used, use they/them/theirs.
|
||||
* Avoid double references like she/he, s/he, etc.
|
||||
|
||||
### Use Alternatives for Gendered Nouns
|
||||
|
||||
Avoid nouns that include gendered components. Examples:
|
||||
|
||||
* Businessman 🡪 Entrepreneur, businessperson, executive
|
||||
* Salesman 🡪 Sales executive, sales representative
|
||||
* Mankind 🡪 Humanity, people
|
||||
* Penmanship 🡪 Calligraphy, handwriting
|
||||
* Middleman 🡪 Intermediary, negotiator
|
||||
|
||||
### Diversity, Equity, and Inclusion
|
||||
|
||||
All communications must prioritize diversity and inclusivity. When incorporating examples, ensure representation across sex, gender, age, identity, race, culture, background, ability, and socioeconomic status. Strive for balanced and respectful depictions.
|
||||
|
||||
### Cultural and Geographical Awareness
|
||||
|
||||
Before referencing a region, country, culture, national status, political status, or socioeconomic realities, conduct thorough research. Maintain a respectful, informed approach and avoid unnecessary conflicts.
|
||||
|
||||
### Avoiding Generalizations
|
||||
|
||||
Avoid broad assumptions about gender, sex, race, sexual orientation, nationality, or culture. Generalizations can introduce bias and misrepresentation. Example to avoid: "Cybersecurity is of the utmost importance in the country, where corruption runs amok."
|
||||
|
||||
### Respectful Language
|
||||
|
||||
Derogatory terms must not be used. If uncertain about terminology, consult individuals from the relevant region or community to ensure accuracy and appropriateness.
|
||||
|
||||
### Clear and Accessible Language
|
||||
|
||||
* **Jargon:** Use technical terminology only when the audience is expected to understand it. If uncertain, opt for clear and universally accessible language.
|
||||
* **Slang:** Minimize slang usage. Even when confident about the audience, prefer formal and neutral language to enhance clarity.
|
||||
|
||||
### Militaristic Language
|
||||
|
||||
Current tendencies in a topic as sensitive as cybersecurity avoid violent and militaristic references save for explicit reference to combat. These are some alternatives:
|
||||
|
||||
* Combat, fight, eliminate 🡪 Address, protect, safeguard, ward
|
||||
* Kill chain 🡪 Cyberattack chain
|
||||
* Attacker 🡪 Cyberattacker, bad actor, threat actor
|
||||
* Defense-in-depth approach 🡪 Multilayered approach
|
||||
* First line of defense, frontline 🡪 Security, protection, defense
|
||||
* External attack surface 🡪 Vulnerabilities, point of access, external exposure
|
||||
|
||||
### Note on Safety and Security
|
||||
|
||||
“Safety” and “security” are terms often misunderstood. “Safety” is the microscopic, personal and individual term, while “security” is the macroscopic, broader, national term. Examples:
|
||||
|
||||
a. Seat belts are great for personal safety.
|
||||
b. National security is of the utmost concern nowadays.
|
||||
|
||||
---
|
||||
|
||||
## Naming Conventions
|
||||
|
||||
### Prowler Features
|
||||
|
||||
Prowler Features are considered proper nouns. They are to be referenced without articles in all pieces of writing.
|
||||
|
||||
This is a list of Prowler Features:
|
||||
|
||||
* **Prowler App**
|
||||
* **Prowler CLI**
|
||||
* **Prowler SDK**
|
||||
* **Built-in Compliance Checks**
|
||||
* **Multi-cloud Security Scanning**
|
||||
* **Autonomous Cloud Security Analyst (AI)**
|
||||
* **Threat & Misconfiguration Detection**
|
||||
* **Role-Based Access Control (RBAC)**
|
||||
* **Identity & Access Risk Detection**
|
||||
* **Tag-Based Scanning & Filtering**
|
||||
* **Audit Logs & Security Reports**
|
||||
* **Agentless & Works Anywhere**
|
||||
* **Automated Scans & Continuous Monitoring**
|
||||
* **Chat-based Security Querying (AI)**
|
||||
* **AI-Generated Detections & Remediations**
|
||||
* **Prowler Studio**
|
||||
* **Custom Security Policies**
|
||||
* **Prowler Cloud**
|
||||
* **Prowler Registry**
|
||||
* **Open Source & Full APIs**
|
||||
|
||||
---
|
||||
|
||||
## Verbal Constructions in Technical Writing
|
||||
|
||||
Choosing verbal constructions (using verbs) over nominal (using nouns) constructions can significantly impact the clarity, conciseness and especially readability of the content.
|
||||
|
||||
Nominal constructions often introduce unnecessary complexity or vagueness. For example:
|
||||
|
||||
* Nominal: "The creation of the report was successful."
|
||||
* Verbal: "The report was successfully created."
|
||||
|
||||
Verbal constructions also tend to use fewer words, resulting in a more polished and concise style:
|
||||
|
||||
* Nominal: "The implementation of the solution reduced system downtime."
|
||||
* Verbal: "The solution reduced system downtime."
|
||||
|
||||
Verbal constructions are to be chosen over nominal constructions whenever possible.
|
||||
|
||||
---
|
||||
|
||||
### Addendum: Verbal Structures Actually State your Purpose
|
||||
|
||||
* **Example 1:** Recommendation for multiple subscriptions
|
||||
* **Example 2:** Recommendation for Managing Multiple Subscriptions
|
||||
|
||||
Example 1 is vague and even potentially ambiguous. Verbs state your purpose and they must be used whenever possible.
|
||||
|
||||
---
|
||||
|
||||
## Avoiding The Second Person Except for Imperative Instructions
|
||||
|
||||
Explicit use of second-person pronouns (you) and possessives (your) should be minimized whenever possible. Those constructions are best reserved for cases when instructions are directly given in an imperative form:
|
||||
|
||||
**Example of Improvement Through Avoiding Second Person Pronouns**
|
||||
|
||||
**Original:**
|
||||
Prowler App can be installed in different ways, depending on your environment:
|
||||
|
||||
**Improved Version:**
|
||||
Prowler App offers flexible installation methods tailored to various environments:
|
||||
|
||||
---
|
||||
|
||||
## Title-Case Capitalization
|
||||
|
||||
We use title case.
|
||||
|
||||
**Example:** This Is an Example on Title Case
|
||||
|
||||
Title case tends to be better for SEO because it improves readability and makes headlines more visually distinct, which can lead to higher click-through rates (CTR).
|
||||
|
||||
---
|
||||
|
||||
### Other Considerations on Capitalization
|
||||
|
||||
Follow these additional guidelines for capitalization:
|
||||
|
||||
### Inner Capitalization
|
||||
|
||||
Avoid internal capitalization of words in body text unless it is part of a proper name or brand denomination. Example: instead of E-mail and e-Book use email/e-mail and e-book.
|
||||
|
||||
### Acronym Capitalization
|
||||
|
||||
Do not capitalize the individual words of the spelled-out form of acronyms. Example: instead of CTI (Cyber Threat Intelligence) use CTI (cyber threat intelligence), but AWS (Amazon Web Services) is to be kept as is.
|
||||
|
||||
### Avoid Capitalization for Emphasis
|
||||
|
||||
Do not capitalize words in order to emphasize them.
|
||||
|
||||
### Capitalization of Languages and Standards
|
||||
|
||||
Check for the proper capitalization of language and standard names.
|
||||
|
||||
Language examples: HTML, JSON, YAML, XML, etc., must be capitalized.
|
||||
|
||||
Standard examples: standards follow title-case capitalization: Industrial Automation and Control Systems (IACS).
|
||||
|
||||
### Capitalization of Laws and Regulations
|
||||
|
||||
Laws and Regulations follow title-case capitalization. If referring to a non-domestic law or regulation, add the nationality and the original name.
|
||||
|
||||
**Example:** Code for the Cybersecurity Law published in the Spanish Official State Bulletin (BOE, Boletín Oficial del Estado).
|
||||
|
||||
Most UE Regulations have an official translation for all UE languages; please check it on EUR-Lex portal and choose the proper language: https://eur-lex.europa.eu/.
|
||||
|
||||
The different languages can be chosen on the portal under Languages, formats and link to OJ.
|
||||
|
||||
---
|
||||
|
||||
## Hyphenation
|
||||
|
||||
Hyphenation is to be used for noun modifiers in prenominal position, i.e., placed before nouns.
|
||||
|
||||
**Example:** Prowler is a world-leading company in open-source software.
|
||||
|
||||
It is not to be used for predicate adjectives in postnominal position.
|
||||
|
||||
**Example:** Prowler has many features built in.
|
||||
|
||||
### Note on Hyphenation and SEO
|
||||
|
||||
Google treats hyphens as word separators, as if they were blank spaces, i.e., the term `high quality checks` is treated as if it was the same term as `high-quality checks`. Hyphenation does not affect SEO on body text, thus the grammatically correct approach is recommended as sign of good writing. However, underscores (`_`) are treated as different words. This has implications particularly for URLs.
|
||||
|
||||
Hyphens are preferred for URLs as they improve readability and indexing.
|
||||
|
||||
**Example:**
|
||||
* Better approach: `example.com/this-is-an-URL`
|
||||
* Less ideal approach: `example.com/this_is_an_URL`
|
||||
|
||||
---
|
||||
|
||||
## Bullet Points
|
||||
|
||||
Bullet points offer several advantages:
|
||||
|
||||
* **Improved readability:** Bullet points make information scannable and enable vertical reading, allowing users to easily spot relevant details and breaking content into more digestible pieces.
|
||||
* **Highlighting of relevant information:** They emphasize the most salient points, improving focus and enabling quick summarization at a glance.
|
||||
* **Improved retention:** Bullet points enhance memory retention and contribute to a clearer, more polished final product.
|
||||
* **Structured presentation:** They improve user experience through the logical organization of content.
|
||||
* **SEO relevance (Search Engine Optimization):** Bullet points make content easier to consume and offer the following SEO benefits:
|
||||
* Reduced bounce rates
|
||||
* Increased time spent on page
|
||||
* Strategic keyword optimization
|
||||
* Improved chances of being featured in search engine snippets
|
||||
* Enhanced crawlability for search engines.
|
||||
|
||||
---
|
||||
|
||||
### When to Use Bullet Points
|
||||
|
||||
The use of bullet points is highly recommended when:
|
||||
|
||||
* Information can be logically divided into multiple categories, each sharing characteristics, features, or other relevant classifications.
|
||||
* Items are significant enough as standalone concepts to deserve their own bullet point.
|
||||
|
||||
**Example of Improvement Through Bullet Points**
|
||||
|
||||
**Original:**
|
||||
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMS, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme), and your custom security frameworks.
|
||||
|
||||
**Improved with Bullet Points:**
|
||||
|
||||
**Prowler CLI Features:**
|
||||
Prowler CLI includes hundreds of built-in controls to ensure compliance with standards and frameworks, including:
|
||||
|
||||
* **Industry standards:** CIS, NIST 800, NIST CSF, and CISA
|
||||
* **Regulatory compliance and governance:** RBI, FedRAMP, and PCI-DSS
|
||||
* **Frameworks for sensitive data and privacy:** GDPR, HIPAA, and FFIEC
|
||||
* **Frameworks for organizational governance and quality control:** SOC2 and GXP
|
||||
* **AWS-specific guidance:** AWS Foundational Technical Review (FTR) and AWS Well-Architected Framework (Security Pillar)
|
||||
* **Regional compliance:** ENS (Spanish National Security Scheme)
|
||||
* **Custom security frameworks:** Tailored to meet your organization’s specific needs
|
||||
|
||||
---
|
||||
|
||||
### Punctuation of Bullet Points
|
||||
|
||||
There are several options for punctuating bullet points. Regardless of the style chosen, it is imperative to maintain consistency throughout the text.
|
||||
|
||||
* **No punctuation (minimalistic):** This strategy is suitable when no verbs are involved and is best used to highlight products or features in isolation. For example:
|
||||
|
||||
Prowler App is composed of three key components:
|
||||
* Prowler UI
|
||||
* Prowler API
|
||||
* Prowler SDK
|
||||
|
||||
This example highlights each element individually and fosters retention with a noise-free approach.
|
||||
|
||||
* **Periods for full sentences:** This approach works best when each bullet point forms a full sentence or includes verbs. For example:
|
||||
|
||||
Prowler App is composed of three key components:
|
||||
* Prowler UI, a web-based interface, built with Next.js, providing a user-friendly experience for executing Prowler scans and visualizing results.
|
||||
* Prowler API, a backend service, developed with Django REST Framework, responsible for running Prowler scans and storing the generated results.
|
||||
* Prowler SDK, a Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
|
||||
|
||||
This example demonstrates a polished list using proper punctuation.
|
||||
|
||||
* **Semi-colons with final period:** This approach was traditionally used for those cases when bullet points were part of a continuous sentence or logical succession. However, it is being deprecated, consistent with the declining use of semi-colons in modern writing. It is to be avoided whenever possible. For example:
|
||||
|
||||
Prowler UI:
|
||||
* is a web-based interface;
|
||||
* is built with Next.js;
|
||||
* provides a user-friendly experience for executing Prowler scans and visualizing results.
|
||||
|
||||
---
|
||||
|
||||
### Advantages of Adding Headers to Bullet Points
|
||||
|
||||
Adding headers to bullet points in technical writing is a powerful technique that enhances both the clarity and usability of the content. It has also advantages for SEO:
|
||||
|
||||
* Increased crawlability of search engines
|
||||
* Enhanced keyword integration
|
||||
* Improved user engagement
|
||||
* Enhanced snippetting by search engines
|
||||
* Reduced bounce rates
|
||||
|
||||
It is recommended to add headers to bullet points whenever possible.
|
||||
|
||||
---
|
||||
|
||||
## Quotation Marks
|
||||
|
||||
### Quotation Marks Usage in Technical Documentation
|
||||
|
||||
Proper use of quotation marks enhances clarity and consistency in technical writing. Below are key guidelines for using double and single quotation marks, following American English conventions.
|
||||
|
||||
### Double Quotation Marks
|
||||
|
||||
* Use for titles of books, movies, songs, and articles.
|
||||
* Enclose direct quotes:
|
||||
* **Example:** The developer said, “We will try to fix this issue.”
|
||||
* Capitalize the first word if quoting a full sentence.
|
||||
* When quoting a phrase within a sentence, do not capitalize:
|
||||
* **Example:** The news portal called our product “one of the most efficient online help authoring tools.”
|
||||
* Use scare quotes when words acquire a different or ironic meaning:
|
||||
* **Example:** The update is “scheduled” to release next week.
|
||||
* To refer to a term without applying its meaning, use double quotes (or italics):
|
||||
* **Example:** Avoid terms like “don’t worry” in pop-ups to prevent user anxiety.
|
||||
|
||||
### Single Quotation Marks
|
||||
|
||||
* Used inside double quotes:
|
||||
* **Example:** He said, “I am not sure what ‘single sourcing’ means.”
|
||||
* In British English, the order is often reversed (single quotation marks on the outside).
|
||||
|
||||
---
|
||||
|
||||
### Double Quoting in Software Documentation
|
||||
|
||||
Double quoting is to be used through software documentation where their use does not interfere with formatting restrictions.
|
||||
|
||||
1. **Menu Items & UI Options**
|
||||
* Use double quotation marks when referring to selectable items in software interfaces.
|
||||
* **Example:** Click “File” and select “Save As” to export your document.
|
||||
2. **Buttons & Commands**
|
||||
* Use double quotes for labeled interface elements that users interact with.
|
||||
* **Example:** Select “Submit” to finalize the form.
|
||||
3. **Exact Input & User Actions**
|
||||
* If users need to enter exact text, enclose it in double quotes.
|
||||
* **Example:** Type “admin” in the username field.
|
||||
4. **Avoid Quoting Software Names**
|
||||
* Do not use quotation marks for software product names unless required for clarity.
|
||||
* **Correct:** Open Microsoft Excel.
|
||||
* **Incorrect:** Open “Microsoft Excel.”
|
||||
|
||||
---
|
||||
|
||||
## Interaction Verbs
|
||||
|
||||
The following are the correct verbs that must be used when referring to user interactions with the software.
|
||||
|
||||
### Mouse & Trackpad Actions (Desktop/Laptop)
|
||||
|
||||
* **Click:** Press and release the left mouse button or trackpad without moving the pointer.
|
||||
* **Example:** Click the “OK” button to confirm. 🡪 Transitive
|
||||
* **Click on:** Often interchangeable with "Click," but less commonly used in technical writing for UI interactions.
|
||||
* **Example:** Click on the "Settings" icon to open preferences. (Less recommended—“Click” is preferred.)
|
||||
* **Double-click:** Press and release twice in quick succession, usually to open files or applications.
|
||||
* **Example:** Double-click the document to open it. 🡪 Transitive
|
||||
* **Right-click:** Press and release the right mouse button to open a context menu.
|
||||
* **Example:** Right-click the folder and select “Properties.” 🡪 Transitive
|
||||
|
||||
### Touchscreen Actions (Mobile & Touch)
|
||||
|
||||
* **Tap:** Touch the screen lightly with a finger or stylus, equivalent to "Click" on a mouse.
|
||||
* **Example:** Tap the “Sign in” button.
|
||||
* **Double-tap:** Quickly touch the screen twice, often used for zooming or selecting text.
|
||||
* **Example:** Double-tap an image to zoom in.
|
||||
* **Press and hold:** Touch and hold the screen for a moment to access additional options.
|
||||
* **Example:** Press and hold an app icon to see more actions. (Similar to “Right-click” in desktop environments.)
|
||||
|
||||
### Additional Actions
|
||||
|
||||
* **Drag:** Click or tap an item and move it while holding down the button or finger.
|
||||
* **Example:** Drag the file into the folder.
|
||||
* **Swipe:** Move a finger across the touchscreen horizontally or vertically.
|
||||
* **Example:** Swipe left to dismiss the notification.
|
||||
* **Pinch to zoom:** Use two fingers to zoom in or out.
|
||||
* **Example:** Pinch the screen to zoom in on the image.
|
||||
* **Scroll:** Move the mouse wheel, swipe, or use the arrow keys to navigate up/down.
|
||||
* **Example:** Scroll down to see more results.
|
||||
|
||||
The widely-accepted terminology for gestures is Windows’: https://support.microsoft.com/en-us/windows/touch-gestures-for-windows-a9d28305-4818-a5df-4e2b-e5590f850741
|
||||
|
||||
---
|
||||
|
||||
## Sentence Structure for Technical Writing and SEO
|
||||
|
||||
When writing technical documentation, clarity, conciseness, and searchability (SEO) are key factors. Let’s compare the following two sentence structures, extracted from Prowler’s documentation:
|
||||
|
||||
**Option 1:**
|
||||
"Open a terminal and execute the following command to create a new custom role."
|
||||
|
||||
**Option 2:**
|
||||
"To create a new custom role, open a terminal and execute the following command."
|
||||
|
||||
### SEO Optimization
|
||||
|
||||
* Search engines prioritize clear intent at the beginning of a sentence.
|
||||
* Option 2 starts with the action users are likely to search for (e.g., "Create a custom role"), which improves SEO rankings and makes the content more likely to match search queries.
|
||||
* Option 1 places the primary search term toward the end, making it less effective for keyword optimization.
|
||||
|
||||
### Technical Writing Best Practices
|
||||
|
||||
* Technical writing emphasizes clear objectives first, followed by actions.
|
||||
* Option 2 follows this best practice by stating the goal first ("To create a new custom role") and then providing instructions.
|
||||
* Option 1 is still acceptable for step-by-step guides, but Option 2 is more effective for tutorials, manuals, and documentation.
|
||||
|
||||
### Key Takeaways
|
||||
|
||||
* Draft trying to mimic the most likely way users are to find the information (“Ctrl + F approach”).
|
||||
* Place keywords and key terms at the beginning of sentences so that they rank better SEO-wise.
|
||||
* Rule of thumb: “In order to what” precedes the “what”. “What” must mirror the user’s most likely way of drafting or searching.
|
||||
|
||||
---
|
||||
|
||||
## Section Titles and Headers in Technical Writing
|
||||
|
||||
Effective headers and section titles enhance document readability and structure, making content more accessible to the reader. This chapter outlines best practices for crafting clear, consistent, and meaningful headings.
|
||||
|
||||
1. **Purpose of Headers**
|
||||
Headers serve several key functions:
|
||||
* **Improve Navigation:** Allow users to quickly locate relevant information.
|
||||
* **Enhance Readability:** Break down complex topics into manageable sections.
|
||||
* **Establish Hierarchy:** Define the logical flow of content.
|
||||
* **SEO:** Headers impact SEO both directly and indirectly:
|
||||
* Search engines use headings to determine the hierarchy and relevance of content.
|
||||
* **H1:** The primary heading (should be unique and descriptive).
|
||||
* **H2-H6:** Subheadings that break down content logically.
|
||||
* Best practices for SEO-friendly headers:
|
||||
* Include keywords naturally in headings.
|
||||
* Avoid keyword stuffing—keep it clear and readable.
|
||||
* Use structured hierarchy (H1 → H2 → H3, etc.).
|
||||
2. **Header Levels and Formatting**
|
||||
Use a structured approach for organizing section titles. Common conventions include:
|
||||
* **Title:** The primary heading of the document (e.g., H1).
|
||||
* **Main Sections:** First-level headers (H2), introducing key content areas.
|
||||
* **Subsections:** Second-level headers (H3) to detail specific topics within sections.
|
||||
* **Subtopics:** Third-level headers (H4+) used sparingly for finer details.
|
||||
|
||||
**Example:**
|
||||
|
||||
```markdown
|
||||
# Document Title (H1)
|
||||
## Main Section (H2)
|
||||
### Subsection (H3)
|
||||
#### Subtopic (H4)
|
||||
```
|
||||
|
||||
3. **Writing Effective Headers**
|
||||
When crafting headers and section titles, follow these guidelines:
|
||||
* **Be Descriptive:** Clearly indicate what the section covers.
|
||||
* **Poor:** Introduction (too vague)
|
||||
* **Good:** Introduction to AWS CloudShell Installation (informative)
|
||||
* **Keep It Concise:** Use precise language without unnecessary words.
|
||||
* **Maintain Consistency:** Apply uniform formatting and style conventions throughout.
|
||||
* **Avoid Special Characters:** Limit punctuation for clarity—avoid excessive symbols, dashes, or underscores.
|
||||
4. **Capitalization Rules**
|
||||
Use Title Case for headers to ensure a professional look:
|
||||
* **Good:** How to Clone and Install Prowler from GitHub
|
||||
* **Poor:** How to clone and install Prowler from GitHub
|
||||
|
||||
For technical documentation, sentence case may be used for readability in subheadings. Please note this differs from headers and it is only a recommendation, but consistency is to be kept throughout the documentation:
|
||||
|
||||
* **Example:**
|
||||
* How to Clone and Install Prowler from GitHub (header: Title case)
|
||||
* How to install poetry dependencies (subheading: Sentence case)
|
||||
5. **Using Keywords in Headers**
|
||||
Headers should include relevant keywords to improve document searchability:
|
||||
* **Good:** Scanning AWS Accounts in Parallel
|
||||
* **Poor:** Ways to scan on AWS (vague and imprecise)
|
||||
6. **Consistency Across Documents**
|
||||
Ensure uniformity in section titles across related documentation:
|
||||
* **Standardized Header Naming:** Use consistent wording for common sections (e.g., "Installation," "Setup," "Configuration").
|
||||
* **Numbering Sections (If Necessary):** For structured guides, include numbering where appropriate (e.g., "Step 1: Install Prowler").
|
||||
|
||||
---
|
||||
|
||||
## Avoid Assumptions Regarding Audience’s Expertise
|
||||
|
||||
### Understand Your Audience’s Expertise
|
||||
|
||||
Despite knowing your target audience, assumptions on target audience’s expertise or knowledge are to be avoided.
|
||||
|
||||
Adjust the level of detail based on expected reader proficiency, but make sure to be as explanatory as humanly possible.
|
||||
|
||||
### Define Key Terms and Acronyms on First Use
|
||||
|
||||
Even if your audience is technical, some domain-specific terms may vary.
|
||||
* Introduce jargon only after defining it clearly.
|
||||
* If using acronyms (e.g., IAM, MFA), spell them out on first mention:
|
||||
* AWS Identity and Access Management (IAM)
|
||||
* Multifactor Authentication (MFA)
|
||||
|
||||
### Don’t Assume Unwritten Knowledge
|
||||
|
||||
Even experienced readers may not know every prerequisite. If a process relies on prior steps, briefly reference them:
|
||||
|
||||
* Before configuring security groups, ensure VPC networking is set up.
|
||||
|
||||
### Use Consistent Formatting
|
||||
|
||||
### Provide as Many Examples as Deemed Right… and Then Some
|
||||
|
||||
### Anticipate Common Knowledge Gaps
|
||||
|
||||
### Avoid Excessive Notes
|
||||
|
||||
Notes are often omitted by readers and they clutter text, so use them sparingly and only for additional information that is not essential or prompts any error or mistake.
|
||||
|
||||
---
|
||||
|
||||
## Using Warnings and Danger Calls for High-Severity Information
|
||||
|
||||
In technical documentation, warnings and danger calls highlight critical risks, guiding users in preventing security breaches or system failures. Proper usage ensures clarity and actionable guidance.
|
||||
|
||||
1. **Define Severity Levels**
|
||||
Before applying Note, Warning, or Danger, clearly define their significance:
|
||||
* **Note:** Provides general information or best practices (low severity).
|
||||
* **Warning:** Indicates potential issues if instructions are not followed (moderate severity).
|
||||
* **Danger:** Highlights actions that could result in severe consequences, such as system corruption or data loss (high severity).
|
||||
2. **Explain Consequences**
|
||||
Each warning or danger call should explicitly describe the impact of ignoring the caution:
|
||||
* **Good:** Disabling encryption may expose sensitive data to unauthorized access.
|
||||
* **Poor:** Avoid disabling encryption.
|
||||
3. **Provide Remediation and Troubleshooting**
|
||||
Whenever possible, direct users to troubleshooting guides or mitigation steps to resolve the issue.
|
||||
|
||||
**Example:**
|
||||
**Danger:** Running this command will **permanently delete all data**. Refer to @Data Recovery Guide for restoration steps.
|
||||
@@ -0,0 +1,45 @@
|
||||
# Prowler Documentation
|
||||
|
||||
This repository contains the Prowler Open Source documentation powered by [Mintlify](https://mintlify.com).
|
||||
|
||||
## Documentation Structure
|
||||
|
||||
- **Getting Started**: Overview, installation, and basic usage guides
|
||||
- **User Guide**: Comprehensive guides for Prowler App, CLI, providers, and compliance
|
||||
- **Developer Guide**: Technical documentation for developers contributing to Prowler
|
||||
|
||||
## Local Development
|
||||
|
||||
Install the [Mintlify CLI](https://www.npmjs.com/package/mint) to preview documentation changes locally:
|
||||
|
||||
```bash
|
||||
npm i -g mint
|
||||
```
|
||||
|
||||
Run the following command at the root of your documentation (where `mint.json` is located):
|
||||
|
||||
```bash
|
||||
mint dev
|
||||
```
|
||||
|
||||
View your local preview at `http://localhost:3000`.
|
||||
|
||||
## Publishing Changes
|
||||
|
||||
Changes pushed to the main branch are automatically deployed to production through Mintlify's GitHub integration.
|
||||
|
||||
## Documentation Guidelines
|
||||
|
||||
When contributing to the documentation, please follow the Prowler documentation style guide located in the `.claude` directory.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
- If your dev environment isn't running: Run `mint update` to ensure you have the most recent version of the CLI.
|
||||
- If a page loads as a 404: Make sure you are running in a folder with a valid `mint.json` file and that the page path is correctly listed in the navigation.
|
||||
|
||||
## Resources
|
||||
|
||||
- [Prowler GitHub Repository](https://github.com/prowler-cloud/prowler)
|
||||
- [Prowler Documentation](https://docs.prowler.com/)
|
||||
- [Mintlify Documentation](https://mintlify.com/docs)
|
||||
- [Mintlify Community](https://mintlify.com/community)
|
||||
@@ -1,61 +0,0 @@
|
||||
## Access Prowler App
|
||||
|
||||
After [installation](../installation/prowler-app.md), navigate to [http://localhost:3000](http://localhost:3000) and sign up with email and password.
|
||||
|
||||
<img src="../../img/sign-up-button.png" alt="Sign Up Button" width="320"/>
|
||||
<img src="../../img/sign-up.png" alt="Sign Up" width="285"/>
|
||||
|
||||
???+ note "User creation and default tenant behavior"
|
||||
|
||||
When creating a new user, the behavior depends on whether an invitation is provided:
|
||||
|
||||
- **Without an invitation**:
|
||||
|
||||
- A new tenant is automatically created.
|
||||
- The new user is assigned to this tenant.
|
||||
- A set of **RBAC admin permissions** is generated and assigned to the user for the newly-created tenant.
|
||||
|
||||
- **With an invitation**: The user is added to the specified tenant with the permissions defined in the invitation.
|
||||
|
||||
This mechanism ensures that the first user in a newly created tenant has administrative permissions within that tenant.
|
||||
|
||||
## Log In
|
||||
|
||||
Access Prowler App by logging in with **email and password**.
|
||||
|
||||
<img src="../../img/log-in.png" alt="Log In" width="285"/>
|
||||
|
||||
## Add Cloud Provider
|
||||
|
||||
Configure a cloud provider for scanning:
|
||||
|
||||
1. Navigate to `Settings > Cloud Providers` and click `Add Account`.
|
||||
2. Select the cloud provider.
|
||||
3. Enter the provider's identifier (Optional: Add an alias):
|
||||
- **AWS**: Account ID
|
||||
- **GCP**: Project ID
|
||||
- **Azure**: Subscription ID
|
||||
- **Kubernetes**: Cluster ID
|
||||
- **M365**: Domain ID
|
||||
4. Follow the guided instructions to add and authenticate your credentials.
|
||||
|
||||
## Start a Scan
|
||||
|
||||
Once credentials are successfully added and validated, Prowler initiates a scan of your cloud environment.
|
||||
|
||||
Click `Go to Scans` to monitor progress.
|
||||
|
||||
## View Results
|
||||
|
||||
Review findings during scan execution in the following sections:
|
||||
|
||||
- **Overview** – Provides a high-level summary of your scans.
|
||||
<img src="../../products/img/overview.png" alt="Overview" width="700"/>
|
||||
|
||||
- **Compliance** – Displays compliance insights based on security frameworks.
|
||||
<img src="../../img/compliance.png" alt="Compliance" width="700"/>
|
||||
|
||||
> For detailed usage instructions, refer to the [Prowler App Guide](../tutorials/prowler-app.md).
|
||||
|
||||
???+ note
|
||||
Prowler will automatically scan all configured providers every **24 hours**, ensuring your cloud environment stays continuously monitored.
|
||||
@@ -1,4 +1,6 @@
|
||||
# Contact Us
|
||||
---
|
||||
title: 'Contact Us'
|
||||
---
|
||||
|
||||
For technical support or any type of inquiries, you are very welcome to:
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
# AWS Provider
|
||||
---
|
||||
title: 'AWS Provider'
|
||||
---
|
||||
|
||||
In this page you can find all the details about [Amazon Web Services (AWS)](https://aws.amazon.com/) provider implementation in Prowler.
|
||||
|
||||
By default, Prowler will audit just one account and organization settings per scan. To configure it, follow the [AWS getting started guide](../tutorials/aws/getting-started-aws.md).
|
||||
By default, Prowler will audit just one account and organization settings per scan. To configure it, follow the [AWS getting started guide](/user-guide/providers/aws/getting-started-aws).
|
||||
|
||||
## AWS Provider Classes Architecture
|
||||
|
||||
The AWS provider implementation follows the general [Provider structure](./provider.md). This section focuses on the AWS-specific implementation, highlighting how the generic provider concepts are realized for AWS in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see [Provider documentation](./provider.md). In next subsection you can find a list of the main classes of the AWS provider.
|
||||
The AWS provider implementation follows the general [Provider structure](/developer-guide/provider). This section focuses on the AWS-specific implementation, highlighting how the generic provider concepts are realized for AWS in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see [Provider documentation](/developer-guide/provider). In next subsection you can find a list of the main classes of the AWS provider.
|
||||
|
||||
### `AwsProvider` (Main Class)
|
||||
|
||||
@@ -33,7 +35,7 @@ The AWS provider implementation follows the general [Provider structure](./provi
|
||||
### `AWSService` (Service Base Class)
|
||||
|
||||
- **Location:** [`prowler/providers/aws/lib/service/service.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/service/service.py)
|
||||
- **Purpose:** Abstract base class that all AWS service-specific classes inherit from. This implements the generic service pattern (described in [service page](./services.md#service-base-class)) specifically for AWS.
|
||||
- **Purpose:** Abstract base class that all AWS service-specific classes inherit from. This implements the generic service pattern (described in [service page](/developer-guide/services#service-base-class)) specifically for AWS.
|
||||
- **Key AWS Responsibilities:**
|
||||
- Receives an `AwsProvider` instance to access session, identity, and configuration.
|
||||
- Manages clients for all services by regions.
|
||||
@@ -52,12 +54,12 @@ The AWS provider implementation follows the general [Provider structure](./provi
|
||||
|
||||
## Specific Patterns in AWS Services
|
||||
|
||||
The generic service pattern is described in [service page](./services.md#service-structure-and-initialisation). You can find all the right now implemented services in the following locations:
|
||||
The generic service pattern is described in [service page](/developer-guide/services#service-structure-and-initialisation). You can find all the right now implemented services in the following locations:
|
||||
|
||||
- Directly in the code, in location [`prowler/providers/aws/services/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/aws/services)
|
||||
- In the [Prowler Hub](https://hub.prowler.com/). For a more human-readable view.
|
||||
|
||||
The best reference to understand how to implement a new service is following the [service implementation documentation](./services.md#adding-a-new-service) and taking other services already implemented as reference. In next subsection you can find a list of common patterns that are used accross all AWS services.
|
||||
The best reference to understand how to implement a new service is following the [service implementation documentation](/developer-guide/services#adding-a-new-service) and taking other services already implemented as reference. In next subsection you can find a list of common patterns that are used accross all AWS services.
|
||||
|
||||
### AWS Service Common Patterns
|
||||
|
||||
@@ -74,12 +76,12 @@ The best reference to understand how to implement a new service is following the
|
||||
|
||||
## Specific Patterns in AWS Checks
|
||||
|
||||
The AWS checks pattern is described in [checks page](./checks.md). You can find all the right now implemented checks:
|
||||
The AWS checks pattern is described in [checks page](/developer-guide/checks). You can find all the right now implemented checks:
|
||||
|
||||
- Directly in the code, within each service folder, each check has its own folder named after the name of the check. (e.g. [`prowler/providers/aws/services/s3/s3_bucket_acl_prohibited/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/aws/services/s3/s3_bucket_acl_prohibited))
|
||||
- In the [Prowler Hub](https://hub.prowler.com/). For a more human-readable view.
|
||||
|
||||
The best reference to understand how to implement a new check is following the [check creation documentation](./checks.md#creating-a-check) and taking other similar checks as reference.
|
||||
The best reference to understand how to implement a new check is following the [check creation documentation](/developer-guide/checks#creating-a-check) and taking other similar checks as reference.
|
||||
|
||||
### Check Report Class
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
# Azure Provider
|
||||
---
|
||||
title: 'Azure Provider'
|
||||
---
|
||||
|
||||
In this page you can find all the details about [Microsoft Azure](https://azure.microsoft.com/) provider implementation in Prowler.
|
||||
|
||||
By default, Prowler will audit all the subscriptions that it is able to list in the Microsoft Entra tenant, and tenant Entra ID service. To configure it, follow the [Azure getting started guide](../tutorials/azure/getting-started-azure.md).
|
||||
By default, Prowler will audit all the subscriptions that it is able to list in the Microsoft Entra tenant, and tenant Entra ID service. To configure it, follow the [Azure getting started guide](/user-guide/providers/azure/getting-started-azure).
|
||||
|
||||
## Azure Provider Classes Architecture
|
||||
|
||||
The Azure provider implementation follows the general [Provider structure](./provider.md). This section focuses on the Azure-specific implementation, highlighting how the generic provider concepts are realized for Azure in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see [Provider documentation](./provider.md). In next subsection you can find a list of the main classes of the Azure provider.
|
||||
The Azure provider implementation follows the general [Provider structure](/developer-guide/provider). This section focuses on the Azure-specific implementation, highlighting how the generic provider concepts are realized for Azure in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see [Provider documentation](/developer-guide/provider). In next subsection you can find a list of the main classes of the Azure provider.
|
||||
|
||||
### `AzureProvider` (Main Class)
|
||||
|
||||
@@ -32,7 +34,7 @@ The Azure provider implementation follows the general [Provider structure](./pro
|
||||
### `AzureService` (Service Base Class)
|
||||
|
||||
- **Location:** [`prowler/providers/azure/lib/service/service.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
|
||||
- **Purpose:** Abstract base class that all Azure service-specific classes inherit from. This implements the generic service pattern (described in [service page](./services.md#service-base-class)) specifically for Azure.
|
||||
- **Purpose:** Abstract base class that all Azure service-specific classes inherit from. This implements the generic service pattern (described in [service page](/developer-guide/services#service-base-class)) specifically for Azure.
|
||||
- **Key Azure Responsibilities:**
|
||||
- Receives an `AzureProvider` instance to access session, identity, and configuration.
|
||||
- Manages clients for all services by subscription.
|
||||
@@ -50,12 +52,12 @@ The Azure provider implementation follows the general [Provider structure](./pro
|
||||
|
||||
## Specific Patterns in Azure Services
|
||||
|
||||
The generic service pattern is described in [service page](./services.md#service-structure-and-initialisation). You can find all the currently implemented services in the following locations:
|
||||
The generic service pattern is described in [service page](/developer-guide/services#service-structure-and-initialisation). You can find all the currently implemented services in the following locations:
|
||||
|
||||
- Directly in the code, in location [`prowler/providers/azure/services/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/azure/services)
|
||||
- In the [Prowler Hub](https://hub.prowler.com/) for a more human-readable view.
|
||||
|
||||
The best reference to understand how to implement a new service is following the [service implementation documentation](./services.md#adding-a-new-service) and taking other services already implemented as reference. In next subsection you can find a list of common patterns that are used accross all Azure services.
|
||||
The best reference to understand how to implement a new service is following the [service implementation documentation](/developer-guide/services#adding-a-new-service) and taking other services already implemented as reference. In next subsection you can find a list of common patterns that are used accross all Azure services.
|
||||
|
||||
### Azure Service Common Patterns
|
||||
|
||||
@@ -68,12 +70,12 @@ The best reference to understand how to implement a new service is following the
|
||||
|
||||
## Specific Patterns in Azure Checks
|
||||
|
||||
The Azure checks pattern is described in [checks page](./checks.md). You can find all the currently implemented checks:
|
||||
The Azure checks pattern is described in [checks page](/developer-guide/checks). You can find all the currently implemented checks:
|
||||
|
||||
- Directly in the code, within each service folder, each check has its own folder named after the name of the check. (e.g. [`prowler/providers/azure/services/storage/storage_blob_public_access_level_is_disabled/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/azure/services/storage/storage_blob_public_access_level_is_disabled))
|
||||
- In the [Prowler Hub](https://hub.prowler.com/) for a more human-readable view.
|
||||
|
||||
The best reference to understand how to implement a new check is the [Azure check implementation documentation](./checks.md#creating-a-check) and taking other similar checks as reference.
|
||||
The best reference to understand how to implement a new check is the [Azure check implementation documentation](/developer-guide/checks#creating-a-check) and taking other similar checks as reference.
|
||||
|
||||
### Check Report Class
|
||||
|
||||
+4
-2
@@ -1,8 +1,10 @@
|
||||
# Check Metadata Guidelines
|
||||
---
|
||||
title: 'Check Metadata Guidelines'
|
||||
---
|
||||
|
||||
## Introduction
|
||||
|
||||
This guide provides comprehensive guidelines for creating check metadata in Prowler. For basic information on check metadata structure, refer to the [check metadata](./checks.md#metadata-structure-for-prowler-checks) section.
|
||||
This guide provides comprehensive guidelines for creating check metadata in Prowler. For basic information on check metadata structure, refer to the [check metadata](/developer-guide/checks#metadata-structure-for-prowler-checks) section.
|
||||
|
||||
## Check Title Guidelines
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
# Prowler Checks
|
||||
---
|
||||
title: 'Prowler Checks'
|
||||
---
|
||||
|
||||
This guide explains how to create new checks in Prowler.
|
||||
|
||||
@@ -12,7 +14,7 @@ The most common high level steps to create a new check are:
|
||||
|
||||
1. Prerequisites:
|
||||
- Verify the check does not already exist by searching [Prowler Hub](https://hub.prowler.com) or checking `prowler/providers/<provider>/services/<service>/<check_name_want_to_implement>/`.
|
||||
- Ensure required provider and service exist. If not, follow the [Provider](./provider.md) and [Service](./services.md) documentation to create them.
|
||||
- Ensure required provider and service exist. If not, follow the [Provider](/developer-guide/provider) and [Service](/developer-guide/services) documentation to create them.
|
||||
- Confirm the service has implemented all required methods and attributes for the check (in most cases, you will need to add or modify some methods in the service to get the data you need for the check).
|
||||
2. Navigate to the service directory. The path should be as follows: `prowler/providers/<provider>/services/<service>`.
|
||||
3. Create a check-specific folder. The path should follow this pattern: `prowler/providers/<provider>/services/<service>/<check_name_want_to_implement>`. Adhere to the [Naming Format for Checks](#naming-format-for-checks).
|
||||
@@ -20,7 +22,7 @@ The most common high level steps to create a new check are:
|
||||
5. Run the check locally to ensure it works as expected. For checking you can use the CLI in the next way:
|
||||
- To ensure the check has been detected by Prowler: `poetry run python prowler-cli.py <provider> --list-checks | grep <check_name>`.
|
||||
- To run the check, to find possible issues: `poetry run python prowler-cli.py <provider> --log-level ERROR --verbose --check <check_name>`.
|
||||
6. Create comprehensive tests for the check that cover multiple scenarios including both PASS (compliant) and FAIL (non-compliant) cases. For detailed information about test structure and implementation guidelines, refer to the [Testing](./unit-testing.md) documentation.
|
||||
6. Create comprehensive tests for the check that cover multiple scenarios including both PASS (compliant) and FAIL (non-compliant) cases. For detailed information about test structure and implementation guidelines, refer to the [Testing](/developer-guide/unit-testing) documentation.
|
||||
7. If the check and its corresponding tests are working as expected, you can submit a PR to Prowler.
|
||||
|
||||
### Naming Format for Checks
|
||||
@@ -39,8 +41,8 @@ The name components are:
|
||||
Each check in Prowler follows a straightforward structure. Within the newly created folder, three files must be added to implement the check logic:
|
||||
|
||||
- `__init__.py` (empty file) – Ensures Python treats the check folder as a package.
|
||||
- `<check_name>.py` (code file) – Contains the check logic, following the prescribed format. Please refer to the [prowler's check code structure](./checks.md#prowlers-check-code-structure) for more information.
|
||||
- `<check_name>.metadata.json` (metadata file) – Defines the check's metadata for contextual information. Please refer to the [check metadata](./checks.md#metadata-structure-for-prowler-checks) for more information.
|
||||
- `<check_name>.py` (code file) – Contains the check logic, following the prescribed format. Please refer to the [prowler's check code structure](/developer-guide/checks#prowlers-check-code-structure) for more information.
|
||||
- `<check_name>.metadata.json` (metadata file) – Defines the check's metadata for contextual information. Please refer to the [check metadata](/developer-guide/checks#metadata-structure-for-prowler-checks) for more information.
|
||||
|
||||
## Prowler's Check Code Structure
|
||||
|
||||
@@ -50,9 +52,10 @@ Below the code for a generic check is presented. It is strongly recommended to c
|
||||
|
||||
Report fields are the most dependent on the provider, consult the `CheckReport<Provider>` class for more information on what can be included in the report [here](https://github.com/prowler-cloud/prowler/blob/master/prowler/lib/check/models.py).
|
||||
|
||||
???+ note
|
||||
Legacy providers (AWS, Azure, GCP, Kubernetes) follow the `Check_Report_<Provider>` naming convention. This is not recommended for current instances. Newer providers adopt the `CheckReport<Provider>` naming convention. Learn more at [Prowler Code](https://github.com/prowler-cloud/prowler/tree/master/prowler/lib/check/models.py).
|
||||
<Note>
|
||||
Legacy providers (AWS, Azure, GCP, Kubernetes) follow the `Check_Report_<Provider>` naming convention. This is not recommended for current instances. Newer providers adopt the `CheckReport<Provider>` naming convention. Learn more at [Prowler Code](https://github.com/prowler-cloud/prowler/tree/master/prowler/lib/check/models.py).
|
||||
|
||||
</Note>
|
||||
```python title="Generic Check Class"
|
||||
# Required Imports
|
||||
# Import the base Check class and the provider-specific CheckReport class
|
||||
@@ -213,7 +216,7 @@ Each check **must** populate the report with an unique identifier for the audite
|
||||
|
||||
### Configurable Checks in Prowler
|
||||
|
||||
See [Configurable Checks](./configurable-checks.md) for detailed information on making checks configurable using the `audit_config` object and configuration file.
|
||||
See [Configurable Checks](/developer-guide/configurable-checks) for detailed information on making checks configurable using the `audit_config` object and configuration file.
|
||||
|
||||
## Metadata Structure for Prowler Checks
|
||||
|
||||
@@ -273,16 +276,17 @@ The `CheckTitle` field must be plain text, clearly and succinctly define **the b
|
||||
|
||||
**Always write the `CheckTitle` to describe the *PASS* case**, the desired secure or compliant state of the resource(s). This helps ensure that findings are easy to interpret and that the title always reflects the best practice being met.
|
||||
|
||||
For detailed guidelines on writing effective check titles, including how to determine singular vs. plural scope and common mistakes to avoid, see [CheckTitle Guidelines](./check-metadata-guidelines.md#check-title-guidelines).
|
||||
For detailed guidelines on writing effective check titles, including how to determine singular vs. plural scope and common mistakes to avoid, see [Check Title Guidelines](/developer-guide/check-metadata-guidelines#check-title-guidelines).
|
||||
|
||||
#### CheckType
|
||||
|
||||
???+ warning
|
||||
This field is only applicable to the AWS provider.
|
||||
<Warning>
|
||||
This field is only applicable to the AWS provider.
|
||||
|
||||
</Warning>
|
||||
It follows the [AWS Security Hub Types](https://docs.aws.amazon.com/securityhub/latest/userguide/asff-required-attributes.html#Types) format using the pattern `namespace/category/classifier`.
|
||||
|
||||
For the complete AWS Security Hub selection guidelines, see [CheckType Guidelines](./check-metadata-guidelines.md#check-type-guidelines-aws-only).
|
||||
For the complete AWS Security Hub selection guidelines, see [Check Type Guidelines](/developer-guide/check-metadata-guidelines#check-type-guidelines-aws-only).
|
||||
|
||||
#### ServiceName
|
||||
|
||||
@@ -314,13 +318,13 @@ The type of resource being audited. This field helps categorize and organize fin
|
||||
|
||||
A concise, natural language explanation that **clearly describes what the finding means**, focusing on clarity and context rather than technical implementation details. Use simple paragraphs with line breaks if needed, but avoid sections, code blocks, or complex formatting. This field is limited to maximum 400 characters.
|
||||
|
||||
For detailed writing guidelines and common mistakes to avoid, see [Description Guidelines](./check-metadata-guidelines.md#description-guidelines).
|
||||
For detailed writing guidelines and common mistakes to avoid, see [Description Guidelines](/developer-guide/check-metadata-guidelines#description-guidelines).
|
||||
|
||||
#### Risk
|
||||
|
||||
A clear, natural language explanation of **why this finding poses a cybersecurity risk**. Focus on how it may impact confidentiality, integrity, or availability. If those do not apply, describe any relevant operational or financial risks. Use simple paragraphs with line breaks if needed, but avoid sections, code blocks, or complex formatting. Limit your explanation to 400 characters.
|
||||
|
||||
For detailed writing guidelines and common mistakes to avoid, see [Risk Guidelines](./check-metadata-guidelines.md#risk-guidelines).
|
||||
For detailed writing guidelines and common mistakes to avoid, see [Risk Guidelines](/developer-guide/check-metadata-guidelines#risk-guidelines).
|
||||
|
||||
#### RelatedUrl
|
||||
|
||||
@@ -328,9 +332,10 @@ For detailed writing guidelines and common mistakes to avoid, see [Risk Guidelin
|
||||
|
||||
#### AdditionalURLs
|
||||
|
||||
???+ warning
|
||||
URLs must be valid and not repeated.
|
||||
<Warning>
|
||||
URLs must be valid and not repeated.
|
||||
|
||||
</Warning>
|
||||
A list of official documentation URLs for further reading. These should be authoritative sources that provide additional context, best practices, or detailed information about the security control being checked. Prefer official provider documentation, security standards, or well-established security resources. Avoid third-party blogs or unofficial sources unless they are highly reputable and directly relevant.
|
||||
|
||||
#### Remediation
|
||||
@@ -345,17 +350,17 @@ Provides both code examples and best practice recommendations for addressing the
|
||||
- **Terraform**: HashiCorp Configuration Language (HCL) code with an example of a compliant configuration.
|
||||
- **Other**: Manual steps through web interfaces or other tools to make the finding compliant.
|
||||
|
||||
For detailed guidelines on writing remediation code, see [Remediation Code Guidelines](./check-metadata-guidelines.md#remediation-code-guidelines).
|
||||
For detailed guidelines on writing remediation code, see [Remediation Code Guidelines](/developer-guide/check-metadata-guidelines#remediation-code-guidelines).
|
||||
|
||||
- **Recommendation**
|
||||
- **Text**: Generic best practice guidance in natural language using Markdown format (maximum 400 characters). For writing guidelines, see [Recommendation Guidelines](./check-metadata-guidelines.md#recommendation-guidelines).
|
||||
- **Text**: Generic best practice guidance in natural language using Markdown format (maximum 400 characters). For writing guidelines, see [Recommendation Guidelines](/developer-guide/check-metadata-guidelines#recommendation-guidelines).
|
||||
- **Url**: [Prowler Hub URL](https://hub.prowler.com/) of the check. This URL is always composed by `https://hub.prowler.com/check/<check_id>`.
|
||||
|
||||
#### Categories
|
||||
|
||||
One or more functional groupings used for execution filtering (e.g., `internet-exposed`). You can define new categories just by adding to this field.
|
||||
|
||||
For the complete list of available categories, see [Categories Guidelines](./check-metadata-guidelines.md#categories-guidelines).
|
||||
For the complete list of available categories, see [Categories Guidelines](/developer-guide/check-metadata-guidelines#categories-guidelines).
|
||||
|
||||
#### DependsOn
|
||||
|
||||
+4
-2
@@ -1,4 +1,6 @@
|
||||
# Configurable Checks in Prowler
|
||||
---
|
||||
title: 'Configurable Checks in Prowler'
|
||||
---
|
||||
|
||||
Prowler empowers users to extend and adapt cloud security coverage by making checks configurable through the use of the `audit_config` object. This approach enables customization of checks to meet specific requirements through a configuration file.
|
||||
|
||||
@@ -41,6 +43,6 @@ When adding a new configurable check to Prowler, update the following files:
|
||||
- **Test Fixtures:** If tests depend on this configuration, add the variable to `tests/config/fixtures/config.yaml`.
|
||||
- **Documentation:** Document the new variable in the list of configurable checks in `docs/tutorials/configuration_file.md`.
|
||||
|
||||
For a complete list of checks that already support configuration, see the [Configuration File Tutorial](../tutorials/configuration_file.md).
|
||||
For a complete list of checks that already support configuration, see the [Configuration File Tutorial](/user-guide/cli/tutorials/configuration_file).
|
||||
|
||||
This approach ensures that checks are easily configurable, making Prowler highly adaptable to different environments and requirements.
|
||||
@@ -1,4 +1,6 @@
|
||||
# Debugging in Prowler
|
||||
---
|
||||
title: 'Debugging in Prowler'
|
||||
---
|
||||
|
||||
Debugging in Prowler simplifies the development process, allowing developers to efficiently inspect and resolve unexpected issues during execution.
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
## Contributing to Documentation
|
||||
|
||||
Prowler documentation is built using `mkdocs`, allowing contributors to easily add or enhance documentation.
|
||||
|
||||
### Installation and Setup
|
||||
|
||||
Install all necessary dependencies using: `poetry install --with docs`.
|
||||
|
||||
1. Install `mkdocs` using your preferred package manager.
|
||||
|
||||
2. Running the Documentation Locally
|
||||
Navigate to the `prowler` repository folder.
|
||||
Start the local documentation server by running: `mkdocs serve`.
|
||||
Open `http://localhost:8000` in your browser to view live updates.
|
||||
|
||||
3. Making Documentation Changes
|
||||
Make all needed changes to docs or add new documents. Edit existing Markdown (.md) files inside `prowler/docs`.
|
||||
To add new sections or files, update the `mkdocs.yaml` file located in the root directory of Prowler’s repository.
|
||||
|
||||
4. Submitting Changes
|
||||
|
||||
Once documentation updates are complete:
|
||||
|
||||
Submit a pull request for review.
|
||||
|
||||
The Prowler team will assess and merge contributions.
|
||||
|
||||
Your efforts help improve Prowler documentation—thank you for contributing!
|
||||
@@ -0,0 +1,42 @@
|
||||
---
|
||||
title: 'Contributing to Documentation'
|
||||
---
|
||||
|
||||
Prowler documentation is built using [Mintlify](https://www.mintlify.com/docs), allowing contributors to easily add or enhance documentation.
|
||||
|
||||
## Installation and Setup
|
||||
|
||||
<Steps>
|
||||
<Step title="Install Mintlify CLI">
|
||||
```bash
|
||||
npm i -g mint
|
||||
```
|
||||
For detailed instructions, check the [Mintlify documentation](https://www.mintlify.com/docs/installation).
|
||||
</Step>
|
||||
|
||||
<Step title="Preview Documentation Locally">
|
||||
Start the local development server to preview changes in real-time.
|
||||
|
||||
```bash
|
||||
mint dev
|
||||
```
|
||||
|
||||
A local preview of your documentation will be available at http://localhost:3000
|
||||
</Step>
|
||||
|
||||
<Step title="Make Documentation Changes">
|
||||
Edit existing Markdown (.mdx) files inside the `docs` directory or add new documents.
|
||||
|
||||
For reference about formatting, check the [Mintlify documentation](https://www.mintlify.com/docs/create/text).
|
||||
|
||||
To add new sections or files, update the [`docs/docs.json`](https://github.com/prowler-cloud/prowler/blob/master/docs/docs.json) file to include them in the navigation.
|
||||
</Step>
|
||||
|
||||
<Step title="Submit Changes">
|
||||
Once documentation updates are complete, submit a pull request for review.
|
||||
|
||||
The Prowler team will assess and merge contributions.
|
||||
</Step>
|
||||
</Steps>
|
||||
|
||||
Your efforts help improve Prowler documentation—thank you for contributing!
|
||||
@@ -1,12 +1,14 @@
|
||||
# Google Cloud Provider
|
||||
---
|
||||
title: 'Google Cloud Provider'
|
||||
---
|
||||
|
||||
This page details the [Google Cloud Platform (GCP)](https://cloud.google.com/) provider implementation in Prowler.
|
||||
|
||||
By default, Prowler will audit all the GCP projects that the authenticated identity can access. To configure it, follow the [GCP getting started guide](../tutorials/gcp/getting-started-gcp.md).
|
||||
By default, Prowler will audit all the GCP projects that the authenticated identity can access. To configure it, follow the [GCP getting started guide](/user-guide/providers/gcp/getting-started-gcp).
|
||||
|
||||
## GCP Provider Classes Architecture
|
||||
|
||||
The GCP provider implementation follows the general [Provider structure](./provider.md). This section focuses on the GCP-specific implementation, highlighting how the generic provider concepts are realized for GCP in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see [Provider documentation](./provider.md).
|
||||
The GCP provider implementation follows the general [Provider structure](/developer-guide/provider). This section focuses on the GCP-specific implementation, highlighting how the generic provider concepts are realized for GCP in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see [Provider documentation](/developer-guide/provider).
|
||||
|
||||
### Main Class
|
||||
|
||||
@@ -32,7 +34,7 @@ The GCP provider implementation follows the general [Provider structure](./provi
|
||||
### `GCPService` (Service Base Class)
|
||||
|
||||
- **Location:** [`prowler/providers/gcp/lib/service/service.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
|
||||
- **Purpose:** Abstract base class that all GCP service-specific classes inherit from. This implements the generic service pattern (described in [service page](./services.md#service-base-class)) specifically for GCP.
|
||||
- **Purpose:** Abstract base class that all GCP service-specific classes inherit from. This implements the generic service pattern (described in [service page](/developer-guide/services#service-base-class)) specifically for GCP.
|
||||
- **Key GCP Responsibilities:**
|
||||
- Receives a `GcpProvider` instance to access session, identity, and configuration.
|
||||
- Manages clients for all services by project.
|
||||
@@ -95,12 +97,12 @@ def _get_instances(self):
|
||||
|
||||
## Specific Patterns in GCP Services
|
||||
|
||||
The generic service pattern is described in [service page](./services.md#service-structure-and-initialisation). You can find all the currently implemented services in the following locations:
|
||||
The generic service pattern is described in [service page](/developer-guide/services#service-structure-and-initialisation). You can find all the currently implemented services in the following locations:
|
||||
|
||||
- Directly in the code, in location [`prowler/providers/gcp/services/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/gcp/services)
|
||||
- In the [Prowler Hub](https://hub.prowler.com/) for a more human-readable view.
|
||||
|
||||
The best reference to understand how to implement a new service is following the [service implementation documentation](./services.md#adding-a-new-service) and taking other services already implemented as reference. In next subsection you can find a list of common patterns that are used accross all GCP services.
|
||||
The best reference to understand how to implement a new service is following the [service implementation documentation](/developer-guide/services#adding-a-new-service) and taking other services already implemented as reference. In next subsection you can find a list of common patterns that are used accross all GCP services.
|
||||
|
||||
### GCP Service Common Patterns
|
||||
|
||||
@@ -117,12 +119,12 @@ The best reference to understand how to implement a new service is following the
|
||||
|
||||
## Specific Patterns in GCP Checks
|
||||
|
||||
The GCP checks pattern is described in [checks page](./checks.md). You can find all the currently implemented checks:
|
||||
The GCP checks pattern is described in [checks page](/developer-guide/checks). You can find all the currently implemented checks:
|
||||
|
||||
- Directly in the code, within each service folder, each check has its own folder named after the name of the check. (e.g. [`prowler/providers/gcp/services/iam/iam_sa_user_managed_key_unused/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/gcp/services/iam/iam_sa_user_managed_key_unused))
|
||||
- In the [Prowler Hub](https://hub.prowler.com/) for a more human-readable view.
|
||||
|
||||
The best reference to understand how to implement a new check is following the [GCP check implementation documentation](./checks.md#creating-a-check) and taking other similar checks as reference.
|
||||
The best reference to understand how to implement a new check is following the [GCP check implementation documentation](/developer-guide/checks#creating-a-check) and taking other similar checks as reference.
|
||||
|
||||
### Check Report Class
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
# GitHub Provider
|
||||
---
|
||||
title: 'GitHub Provider'
|
||||
---
|
||||
|
||||
This page details the [GitHub](https://github.com/) provider implementation in Prowler.
|
||||
|
||||
By default, Prowler will audit the GitHub account - scanning all repositories, organizations, and applications that your configured credentials can access. To configure it, follow the [GitHub getting started guide](../tutorials/github/getting-started-github.md).
|
||||
By default, Prowler will audit the GitHub account - scanning all repositories, organizations, and applications that your configured credentials can access. To configure it, follow the [GitHub getting started guide](/user-guide/providers/github/getting-started-github).
|
||||
|
||||
## GitHub Provider Classes Architecture
|
||||
|
||||
The GitHub provider implementation follows the general [Provider structure](./provider.md). This section focuses on the GitHub-specific implementation, highlighting how the generic provider concepts are realized for GitHub in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see [Provider documentation](./provider.md).
|
||||
The GitHub provider implementation follows the general [Provider structure](/developer-guide/provider). This section focuses on the GitHub-specific implementation, highlighting how the generic provider concepts are realized for GitHub in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see [Provider documentation](/developer-guide/provider).
|
||||
|
||||
### `GithubProvider` (Main Class)
|
||||
|
||||
@@ -48,12 +50,12 @@ The GitHub provider implementation follows the general [Provider structure](./pr
|
||||
|
||||
## Specific Patterns in GitHub Services
|
||||
|
||||
The generic service pattern is described in [service page](./services.md#service-structure-and-initialisation). You can find all the currently implemented services in the following locations:
|
||||
The generic service pattern is described in [service page](/developer-guide/services#service-structure-and-initialisation). You can find all the currently implemented services in the following locations:
|
||||
|
||||
- Directly in the code, in location [`prowler/providers/github/services/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/github/services)
|
||||
- In the [Prowler Hub](https://hub.prowler.com/) for a more human-readable view.
|
||||
|
||||
The best reference to understand how to implement a new service is following the [service implementation documentation](./services.md#adding-a-new-service) and by taking other already implemented services as reference.
|
||||
The best reference to understand how to implement a new service is following the [service implementation documentation](/developer-guide/services#adding-a-new-service) and by taking other already implemented services as reference.
|
||||
|
||||
### GitHub Service Common Patterns
|
||||
|
||||
@@ -66,12 +68,12 @@ The best reference to understand how to implement a new service is following the
|
||||
|
||||
## Specific Patterns in GitHub Checks
|
||||
|
||||
The GitHub checks pattern is described in [checks page](./checks.md). You can find all the currently implemented checks in:
|
||||
The GitHub checks pattern is described in [checks page](/developer-guide/checks). You can find all the currently implemented checks in:
|
||||
|
||||
- Directly in the code, within each service folder, each check has its own folder named after the name of the check. (e.g. [`prowler/providers/github/services/repository/repository_secret_scanning_enabled/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/github/services/repository/repository_secret_scanning_enabled))
|
||||
- In the [Prowler Hub](https://hub.prowler.com/) for a more human-readable view.
|
||||
|
||||
The best reference to understand how to implement a new check is the [GitHub check implementation documentation](./checks.md#creating-a-check) and by taking other checks as reference.
|
||||
The best reference to understand how to implement a new check is the [GitHub check implementation documentation](/developer-guide/checks#creating-a-check) and by taking other checks as reference.
|
||||
|
||||
### Check Report Class
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
# Integration Tests
|
||||
|
||||
Coming soon ...
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user