Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-05-17 17:53:27 +00:00)
Compare commits
63 Commits
| SHA1 |
|---|
| 3066d82863 |
| 969ca8863a |
| 03c6f98db4 |
| 8ebefb8aa1 |
| c3694fdc5b |
| df10bc0c4c |
| e694b0f634 |
| 81e3f87003 |
| 7ffe2aeec9 |
| 672aa6eb2f |
| 2e999f55f9 |
| 18998b8867 |
| ff4a186df6 |
| b8dab5e0ed |
| 0b3142f7a8 |
| f5dc0c9ee0 |
| a230809095 |
| e6d1b5639b |
| b1856e42f0 |
| ba8dbb0d28 |
| b436cc1cac |
| 51baa88644 |
| 5098b12e97 |
| 3d1e7015a6 |
| 0b7f02f7e4 |
| c0396e97bf |
| 8d4fa46038 |
| 4b160257b9 |
| 6184de52d9 |
| fdf45ea777 |
| b7ce9ae5f3 |
| 2039a5005c |
| 52ed92ac6a |
| f5cccecac6 |
| a47f6444f8 |
| f8c8dee2b3 |
| 6656629391 |
| 9f372902ad |
| b4ff1dcc75 |
| f596907223 |
| fe768c0a3e |
| 18f3bc098c |
| 67b1983d85 |
| a3db23af7d |
| 3eaa21f06f |
| 5d5c109067 |
| c6cb4e4814 |
| ab06a09173 |
| 9c6c007f73 |
| 206f23b5a5 |
| 5c9e9bc86a |
| 34554d6123 |
| 000cb93157 |
| 524209bdf2 |
| c4a0da8204 |
| f0cba0321c |
| 79888c9312 |
| a79910a694 |
| 4cadee7bb1 |
| 756d436a2f |
| 5e85ef5835 |
| 0fa9e2da6c |
| ce7510db28 |
+27 -5
@@ -1,6 +1,28 @@
# SDK
/* @prowler-cloud/sdk
/.github/ @prowler-cloud/sdk
prowler @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
tests @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
api @prowler-cloud/api
ui @prowler-cloud/ui
/prowler/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/tests/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/dashboard/ @prowler-cloud/sdk
/docs/ @prowler-cloud/sdk
/examples/ @prowler-cloud/sdk
/util/ @prowler-cloud/sdk
/contrib/ @prowler-cloud/sdk
/permissions/ @prowler-cloud/sdk
/codecov.yml @prowler-cloud/sdk @prowler-cloud/api

# API
/api/ @prowler-cloud/api

# UI
/ui/ @prowler-cloud/ui

# AI
/mcp_server/ @prowler-cloud/ai

# Platform
/.github/ @prowler-cloud/platform
/Makefile @prowler-cloud/platform
/kubernetes/ @prowler-cloud/platform
**/Dockerfile* @prowler-cloud/platform
**/docker-compose*.yml @prowler-cloud/platform
**/docker-compose*.yaml @prowler-cloud/platform

@@ -3,6 +3,41 @@ description: Create a report to help us improve
labels: ["bug", "status/needs-triage"]

body:
- type: checkboxes
id: search
attributes:
label: Issue search
options:
- label: I have searched the existing issues and this bug has not been reported yet
required: true
- type: dropdown
id: component
attributes:
label: Which component is affected?
multiple: true
options:
- Prowler CLI/SDK
- Prowler API
- Prowler UI
- Prowler Dashboard
- Prowler MCP Server
- Documentation
- Other
validations:
required: true
- type: dropdown
id: provider
attributes:
label: Cloud Provider (if applicable)
multiple: true
options:
- AWS
- Azure
- GCP
- Kubernetes
- GitHub
- Microsoft 365
- Not applicable
- type: textarea
id: reproduce
attributes:
@@ -78,6 +113,15 @@ body:
prowler --version
validations:
required: true
- type: input
id: python-version
attributes:
label: Python version
description: Which Python version are you using?
placeholder: |-
python --version
validations:
required: true
- type: input
id: pip-version
attributes:

@@ -1 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: 📖 Documentation
url: https://docs.prowler.com
about: Check our comprehensive documentation for guides and tutorials
- name: 💬 GitHub Discussions
url: https://github.com/prowler-cloud/prowler/discussions
about: Ask questions and discuss with the community
- name: 🌟 Prowler Community
url: https://goto.prowler.com/slack
about: Join our community for support and updates

@@ -3,6 +3,42 @@ description: Suggest an idea for this project
labels: ["feature-request", "status/needs-triage"]

body:
- type: checkboxes
id: search
attributes:
label: Feature search
options:
- label: I have searched the existing issues and this feature has not been requested yet or is already in our [Public Roadmap](https://roadmap.prowler.com/roadmap)
required: true
- type: dropdown
id: component
attributes:
label: Which component would this feature affect?
multiple: true
options:
- Prowler CLI/SDK
- Prowler API
- Prowler UI
- Prowler Dashboard
- Prowler MCP Server
- Documentation
- New component/Integration
validations:
required: true
- type: dropdown
id: provider
attributes:
label: Related to specific cloud provider?
multiple: true
options:
- AWS
- Azure
- GCP
- Kubernetes
- GitHub
- Microsoft 365
- All providers
- Not provider-specific
- type: textarea
id: Problem
attributes:
@@ -19,6 +55,14 @@ body:
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
id: use-case
attributes:
label: Use case and benefits
description: Who would benefit from this feature and how?
placeholder: This would help security teams by...
validations:
required: true
- type: textarea
id: Alternatives
attributes:

@@ -0,0 +1,71 @@
name: 'Setup Python with Poetry'
description: 'Setup Python environment with Poetry and install dependencies'
author: 'Prowler'

inputs:
python-version:
description: 'Python version to use'
required: true
working-directory:
description: 'Working directory for Poetry'
required: false
default: '.'
poetry-version:
description: 'Poetry version to install'
required: false
default: '2.1.1'
install-dependencies:
description: 'Install Python dependencies with Poetry'
required: false
default: 'true'

runs:
using: 'composite'
steps:
- name: Replace @master with current branch in pyproject.toml
if: github.event_name == 'pull_request' && github.base_ref == 'master'
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
echo "Using branch: $BRANCH_NAME"
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml

- name: Install poetry
shell: bash
run: |
python -m pip install --upgrade pip
pipx install poetry==${{ inputs.poetry-version }}

- name: Update SDK resolved_reference to latest commit
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
echo "Latest commit hash: $LATEST_COMMIT"
sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
}' poetry.lock
echo "Updated resolved_reference:"
grep -A2 -B2 "resolved_reference" poetry.lock

- name: Update poetry.lock
shell: bash
working-directory: ${{ inputs.working-directory }}
run: poetry lock

- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ inputs.python-version }}
cache: 'poetry'
cache-dependency-path: ${{ inputs.working-directory }}/poetry.lock

- name: Install Python dependencies
if: inputs.install-dependencies == 'true'
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
poetry install --no-root
poetry run pip list

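For reference, a caller would consume this composite action roughly as sketched below. This is a minimal illustration, not part of the diff: the step list is hypothetical, although the `./api` working directory and Python 3.12 match how the API workflow later in this compare invokes the action.

```yaml
steps:
  - name: Checkout repository
    uses: actions/checkout@v5 # this repo pins actions to full commit SHAs instead

  # Invoke the composite action from the repository checkout
  - name: Setup Python with Poetry
    uses: ./.github/actions/setup-python-poetry
    with:
      python-version: '3.12'   # required input
      working-directory: ./api # optional, defaults to '.'
      # poetry-version ('2.1.1') and install-dependencies ('true') keep their defaults
```
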
@@ -0,0 +1,152 @@
name: 'Container Security Scan with Trivy'
description: 'Scans container images for vulnerabilities using Trivy and reports results'
author: 'Prowler'

inputs:
image-name:
description: 'Container image name to scan'
required: true
image-tag:
description: 'Container image tag to scan'
required: true
default: ${{ github.sha }}
severity:
description: 'Severities to scan for (comma-separated)'
required: false
default: 'CRITICAL,HIGH,MEDIUM,LOW'
fail-on-critical:
description: 'Fail the build if critical vulnerabilities are found'
required: false
default: 'false'
upload-sarif:
description: 'Upload results to GitHub Security tab'
required: false
default: 'true'
create-pr-comment:
description: 'Create a comment on the PR with scan results'
required: false
default: 'true'
artifact-retention-days:
description: 'Days to retain the Trivy report artifact'
required: false
default: '2'

outputs:
critical-count:
description: 'Number of critical vulnerabilities found'
value: ${{ steps.security-check.outputs.critical }}
high-count:
description: 'Number of high vulnerabilities found'
value: ${{ steps.security-check.outputs.high }}
total-count:
description: 'Total number of vulnerabilities found'
value: ${{ steps.security-check.outputs.total }}

runs:
using: 'composite'
steps:
- name: Run Trivy vulnerability scan (SARIF)
if: inputs.upload-sarif == 'true'
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
with:
image-ref: ${{ inputs.image-name }}:${{ inputs.image-tag }}
format: 'sarif'
output: 'trivy-results.sarif'
severity: 'CRITICAL,HIGH'
exit-code: '0'

- name: Upload Trivy results to GitHub Security tab
if: inputs.upload-sarif == 'true'
uses: github/codeql-action/upload-sarif@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
sarif_file: 'trivy-results.sarif'
category: 'trivy-container'

- name: Run Trivy vulnerability scan (JSON)
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
with:
image-ref: ${{ inputs.image-name }}:${{ inputs.image-tag }}
format: 'json'
output: 'trivy-report.json'
severity: ${{ inputs.severity }}
exit-code: '0'

- name: Upload Trivy report artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: trivy-scan-report-${{ inputs.image-name }}
path: trivy-report.json
retention-days: ${{ inputs.artifact-retention-days }}

- name: Generate security summary
id: security-check
shell: bash
run: |
CRITICAL=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="CRITICAL")] | length' trivy-report.json)
HIGH=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="HIGH")] | length' trivy-report.json)
TOTAL=$(jq '[.Results[]?.Vulnerabilities[]?] | length' trivy-report.json)

echo "critical=$CRITICAL" >> $GITHUB_OUTPUT
echo "high=$HIGH" >> $GITHUB_OUTPUT
echo "total=$TOTAL" >> $GITHUB_OUTPUT

echo "### 🔒 Container Security Scan" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Image:** \`${{ inputs.image-name }}:${{ inputs.image-tag }}\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- 🔴 Critical: $CRITICAL" >> $GITHUB_STEP_SUMMARY
echo "- 🟠 High: $HIGH" >> $GITHUB_STEP_SUMMARY
echo "- **Total**: $TOTAL" >> $GITHUB_STEP_SUMMARY

- name: Comment scan results on PR
if: inputs.create-pr-comment == 'true' && github.event_name == 'pull_request'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
IMAGE_NAME: ${{ inputs.image-name }}
GITHUB_SHA: ${{ inputs.image-tag }}
SEVERITY: ${{ inputs.severity }}
with:
script: |
const comment = require('./.github/scripts/trivy-pr-comment.js');

// Unique identifier to find our comment
const marker = '<!-- trivy-scan-comment:${{ inputs.image-name }} -->';
const body = marker + '\n' + comment;

// Find existing comment
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
});

const existingComment = comments.find(c => c.body?.includes(marker));

if (existingComment) {
// Update existing comment
await github.rest.issues.updateComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: existingComment.id,
body: body
});
console.log('✅ Updated existing Trivy scan comment');
} else {
// Create new comment
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body: body
});
console.log('✅ Created new Trivy scan comment');
}

- name: Check for critical vulnerabilities
if: inputs.fail-on-critical == 'true' && steps.security-check.outputs.critical != '0'
shell: bash
run: |
echo "::error::Found ${{ steps.security-check.outputs.critical }} critical vulnerabilities"
echo "::warning::Please update packages or use a different base image"
exit 1

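A job could call this action after building an image locally and read back its outputs; a hedged sketch follows (the image name and the follow-up step are illustrative, while the input and output names come from the action definition above). The calling job also needs `security-events: write` for the SARIF upload and `pull-requests: write` for the PR comment, as the workflows later in this compare set.

```yaml
steps:
  - name: Scan container with Trivy
    id: trivy
    uses: ./.github/actions/trivy-scan
    with:
      image-name: prowler-api        # assumes the image was built and loaded earlier
      image-tag: ${{ github.sha }}
      fail-on-critical: 'true'       # make the job fail on CRITICAL findings

  # The action exposes the jq-parsed counts as step outputs
  - name: Report vulnerability counts
    if: always()
    run: |
      echo "Critical: ${{ steps.trivy.outputs.critical-count }}"
      echo "High: ${{ steps.trivy.outputs.high-count }}"
      echo "Total: ${{ steps.trivy.outputs.total-count }}"
```
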
@@ -1,3 +1,12 @@
name: "API - CodeQL Config"
name: 'API: CodeQL Config'
paths:
- "api/"
- 'api/'

paths-ignore:
- 'api/tests/**'
- 'api/**/__pycache__/**'
- 'api/**/migrations/**'
- 'api/**/*.md'

queries:
- uses: security-and-quality

@@ -1,4 +1,18 @@
name: "SDK - CodeQL Config"
name: 'SDK: CodeQL Config'
paths:
- 'prowler/'

paths-ignore:
- "api/"
- "ui/"
- 'api/'
- 'ui/'
- 'dashboard/'
- 'mcp_server/'
- 'tests/**'
- 'util/**'
- 'contrib/**'
- 'examples/**'
- 'prowler/**/__pycache__/**'
- 'prowler/**/*.md'

queries:
- uses: security-and-quality

@@ -1,3 +1,17 @@
name: "UI - CodeQL Config"
name: 'UI: CodeQL Config'
paths:
- "ui/"
- 'ui/'

paths-ignore:
- 'ui/node_modules/**'
- 'ui/.next/**'
- 'ui/out/**'
- 'ui/tests/**'
- 'ui/**/*.test.ts'
- 'ui/**/*.test.tsx'
- 'ui/**/*.spec.ts'
- 'ui/**/*.spec.tsx'
- 'ui/**/*.md'

queries:
- uses: security-and-quality

@@ -0,0 +1,102 @@
const fs = require('fs');

// Configuration from environment variables
const REPORT_FILE = process.env.TRIVY_REPORT_FILE || 'trivy-report.json';
const IMAGE_NAME = process.env.IMAGE_NAME || 'container-image';
const GITHUB_SHA = process.env.GITHUB_SHA || 'unknown';
const GITHUB_REPOSITORY = process.env.GITHUB_REPOSITORY || '';
const GITHUB_RUN_ID = process.env.GITHUB_RUN_ID || '';
const SEVERITY = process.env.SEVERITY || 'CRITICAL,HIGH,MEDIUM,LOW';

// Parse severities to scan
const scannedSeverities = SEVERITY.split(',').map(s => s.trim());

// Read and parse the Trivy report
const report = JSON.parse(fs.readFileSync(REPORT_FILE, 'utf-8'));

let vulnCount = 0;
let vulnsByType = { CRITICAL: 0, HIGH: 0, MEDIUM: 0, LOW: 0 };
let affectedPackages = new Set();

if (report.Results && Array.isArray(report.Results)) {
for (const result of report.Results) {
if (result.Vulnerabilities && Array.isArray(result.Vulnerabilities)) {
for (const vuln of result.Vulnerabilities) {
vulnCount++;
if (vulnsByType[vuln.Severity] !== undefined) {
vulnsByType[vuln.Severity]++;
}
if (vuln.PkgName) {
affectedPackages.add(vuln.PkgName);
}
}
}
}
}

const shortSha = GITHUB_SHA.substring(0, 7);
const timestamp = new Date().toISOString().replace('T', ' ').substring(0, 19) + ' UTC';

// Severity icons and labels
const severityConfig = {
CRITICAL: { icon: '🔴', label: 'Critical' },
HIGH: { icon: '🟠', label: 'High' },
MEDIUM: { icon: '🟡', label: 'Medium' },
LOW: { icon: '🔵', label: 'Low' }
};

let comment = '## 🔒 Container Security Scan\n\n';
comment += `**Image:** \`${IMAGE_NAME}:${shortSha}\`\n`;
comment += `**Last scan:** ${timestamp}\n\n`;

if (vulnCount === 0) {
comment += '### ✅ No Vulnerabilities Detected\n\n';
comment += 'The container image passed all security checks. No known CVEs were found.\n';
} else {
comment += '### 📊 Vulnerability Summary\n\n';
comment += '| Severity | Count |\n';
comment += '|----------|-------|\n';

// Only show severities that were scanned
for (const severity of scannedSeverities) {
const config = severityConfig[severity];
const count = vulnsByType[severity] || 0;
const isBold = (severity === 'CRITICAL' || severity === 'HIGH') && count > 0;
const countDisplay = isBold ? `**${count}**` : count;
comment += `| ${config.icon} ${config.label} | ${countDisplay} |\n`;
}

comment += `| **Total** | **${vulnCount}** |\n\n`;

if (affectedPackages.size > 0) {
comment += `**${affectedPackages.size}** package(s) affected\n\n`;
}

if (vulnsByType.CRITICAL > 0) {
comment += '### ⚠️ Action Required\n\n';
comment += '**Critical severity vulnerabilities detected.** These should be addressed before merging:\n';
comment += '- Review the detailed scan results\n';
comment += '- Update affected packages to patched versions\n';
comment += '- Consider using a different base image if updates are unavailable\n\n';
} else if (vulnsByType.HIGH > 0) {
comment += '### ⚠️ Attention Needed\n\n';
comment += '**High severity vulnerabilities found.** Please review and plan remediation:\n';
comment += '- Assess the risk and exploitability\n';
comment += '- Prioritize updates in the next maintenance cycle\n\n';
} else {
comment += '### ℹ️ Review Recommended\n\n';
comment += 'Medium/Low severity vulnerabilities found. Consider addressing during regular maintenance.\n\n';
}
}

comment += '---\n';
comment += '📋 **Resources:**\n';

if (GITHUB_REPOSITORY && GITHUB_RUN_ID) {
comment += `- [Download full report](https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}) (see artifacts)\n`;
}

comment += '- [View in Security tab](https://github.com/' + (GITHUB_REPOSITORY || 'repository') + '/security/code-scanning)\n';
comment += '- Scanned with [Trivy](https://github.com/aquasecurity/trivy)\n';

module.exports = comment;

@@ -1,36 +1,34 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: API - CodeQL
name: 'API: CodeQL'

on:
push:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- "api/**"
- 'api/**'
- '.github/workflows/api-codeql.yml'
- '.github/codeql/api-codeql-config.yml'
pull_request:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- "api/**"
- 'api/**'
- '.github/workflows/api-codeql.yml'
- '.github/codeql/api-codeql-config.yml'
schedule:
- cron: '00 12 * * *'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
analyze:
name: Analyze
name: CodeQL Security Analysis
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
actions: read
contents: read
@@ -39,21 +37,20 @@ jobs:
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
language:
- 'python'

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml
- name: Initialize CodeQL
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: "/language:${{matrix.language}}"
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: '/language:${{ matrix.language }}'

@@ -1,20 +1,30 @@
name: API - Pull Request
name: 'API: Pull Request'

on:
push:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- ".github/workflows/api-pull-request.yml"
- "api/**"
- '.github/workflows/api-pull-request.yml'
- 'api/**'
- '!api/docs/**'
- '!api/README.md'
- '!api/CHANGELOG.md'
pull_request:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- ".github/workflows/api-pull-request.yml"
- "api/**"
- '.github/workflows/api-pull-request.yml'
- 'api/**'
- '!api/docs/**'
- '!api/README.md'
- '!api/CHANGELOG.md'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

env:
POSTGRES_HOST: localhost
@@ -29,21 +39,94 @@ env:
VALKEY_DB: 0
API_WORKING_DIR: ./api
IMAGE_NAME: prowler-api
IGNORE_FILES: |
api/docs/**
api/README.md
api/CHANGELOG.md

jobs:
test:
code-quality:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
strategy:
matrix:
python-version: ["3.12"]
python-version:
- '3.12'
defaults:
run:
working-directory: ./api

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api

- name: Poetry check
run: poetry check --lock

- name: Ruff lint
run: poetry run ruff check . --exclude contrib

- name: Ruff format
run: poetry run ruff format --check . --exclude contrib

- name: Pylint
run: poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/

security-scans:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
strategy:
matrix:
python-version:
- '3.12'
defaults:
run:
working-directory: ./api

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api

- name: Bandit
run: poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .

- name: Safety
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
run: poetry run safety check --ignore 70612,66963,74429,76352,76353,77323,77744,77745

- name: Vulture
run: poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .

tests:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
strategy:
matrix:
python-version:
- '3.12'
defaults:
run:
working-directory: ./api

# Service containers to run with `test`
services:
# Label used to access the service container
postgres:
image: postgres
env:
@@ -52,7 +135,6 @@ jobs:
POSTGRES_USER: ${{ env.POSTGRES_USER }}
POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
POSTGRES_DB: ${{ env.POSTGRES_DB }}
# Set health checks to wait until postgres has started
ports:
- 5432:5432
options: >-
@@ -66,7 +148,6 @@ jobs:
VALKEY_HOST: ${{ env.VALKEY_HOST }}
VALKEY_PORT: ${{ env.VALKEY_PORT }}
VALKEY_DB: ${{ env.VALKEY_DB }}
# Set health checks to wait until postgres has started
ports:
- 6379:6379
options: >-
@@ -76,158 +157,72 @@ jobs:
--health-retries 5

steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
api/**
.github/workflows/api-pull-request.yml
files_ignore: ${{ env.IGNORE_FILES }}

- name: Replace @master with current branch in pyproject.toml - Only for pull requests to `master`
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'pull_request' && github.base_ref == 'master'
run: |
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
echo "Using branch: $BRANCH_NAME"
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml

- name: Install poetry
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry==2.1.1

- name: Update SDK's poetry.lock resolved_reference to latest commit - Only for push events to `master`
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/master'
run: |
# Get the latest commit hash from the prowler-cloud/prowler repository
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
echo "Latest commit hash: $LATEST_COMMIT"

# Update the resolved_reference specifically for prowler-cloud/prowler repository
sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
}' poetry.lock

# Verify the change was made
echo "Updated resolved_reference:"
grep -A2 -B2 "resolved_reference" poetry.lock

- name: Update poetry.lock
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry lock

- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
working-directory: ./api

- name: Install dependencies
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry install --no-root
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
sed -E 's/.*"v([^"]+)".*/\1/' \
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
&& chmod +x /tmp/hadolint

- name: Poetry check
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry check --lock

- name: Lint with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff check . --exclude contrib

- name: Check Format with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff format --check . --exclude contrib

- name: Lint with pylint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/

- name: Bandit
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .

- name: Safety
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
run: |
poetry run safety check --ignore 70612,66963,74429,76352,76353,77323,77744,77745

- name: Vulture
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .

- name: Hadolint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013

- name: Test with pytest
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest --cov=./src/backend --cov-report=xml src/backend
- name: Run tests with pytest
run: poetry run pytest --cov=./src/backend --cov-report=xml src/backend

- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: api
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Lint Dockerfile with Hadolint
uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 # v3.3.0
with:
files: api/**
files_ignore: ${{ env.IGNORE_FILES }}
dockerfile: api/Dockerfile
ignore: DL3013

container-build-and-scan:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
security-events: write
pull-requests: write

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Set up Docker Buildx
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build Container
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'

- name: Build container
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.API_WORKING_DIR }}
push: false
tags: ${{ env.IMAGE_NAME }}:latest
outputs: type=docker
load: true
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max

- name: Scan container with Trivy
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}
fail-on-critical: 'false'
severity: 'CRITICAL'

@@ -1,28 +1,35 @@
name: Prowler - Automatic Backport
name: 'Tools: Backport'

on:
pull_request_target:
branches: ['master']
types: ['labeled', 'closed']
branches:
- 'master'
types:
- 'labeled'
- 'closed'
paths:
- '.github/workflows/backport.yml'

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: false

env:
# The prefix of the label that triggers the backport must not contain the branch name
# so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_IGNORE: was-backported

jobs:
backport:
name: Backport PR
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
id-token: write
pull-requests: write
contents: write
pull-requests: write

steps:
- name: Check labels
id: preview_label_check
id: label_check
uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
with:
allow_failure: true
@@ -31,17 +38,17 @@ jobs:
none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
repo_token: ${{ secrets.GITHUB_TOKEN }}

- name: Backport Action
if: steps.preview_label_check.outputs.label_check == 'success'
- name: Backport PR
if: steps.label_check.outputs.label_check == 'success'
uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}

- name: Info log
if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
- name: Display backport info log
if: success() && steps.label_check.outputs.label_check == 'success'
run: cat ~/.backport/backport.info.log

- name: Debug log
if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
- name: Display backport debug log
if: failure() && steps.label_check.outputs.label_check == 'success'
run: cat ~/.backport/backport.debug.log

@@ -1,24 +1,31 @@
name: Prowler - Conventional Commit
name: 'Tools: Conventional Commit'

on:
pull_request:
types:
- "opened"
- "edited"
- "synchronize"
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
- 'master'
- 'v3'
- 'v4.*'
- 'v5.*'
types:
- 'opened'
- 'edited'
- 'synchronize'

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true

jobs:
conventional-commit-check:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: read

steps:
- name: conventional-commit-check
id: conventional-commit-check
- name: Check PR title format
uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
with:
pr-title-regex: '^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert)(\([^)]+\))?!?: .+'

pr-title-regex: '^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert)(\([^)]+\))?!?: .+'

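To make the regex concrete, here are a few hypothetical PR titles and how they fare against `pr-title-regex` (the examples are illustrative, not taken from this diff):

```yaml
# feat(api): add scan scheduling endpoint  -> passes (type + scope)
# chore!: drop Python 3.9 support          -> passes (breaking-change '!')
# docs: clarify AWS authentication guide   -> passes (type only)
# Update README                            -> fails (no conventional-commit type)
# Feat: add new check                      -> fails (the allowed types are lower-case)
```
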
@@ -1,67 +1,70 @@
name: Prowler - Create Backport Label
name: 'Tools: Backport Label'

on:
release:
types: [published]
types:
- 'published'

concurrency:
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
cancel-in-progress: false

env:
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_COLOR: B60205

jobs:
create_label:
create-label:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: write
contents: read
issues: write

steps:
- name: Create backport label
- name: Create backport label for minor releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RELEASE_TAG: ${{ github.event.release.tag_name }}
OWNER_REPO: ${{ github.repository }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
VERSION_ONLY=${RELEASE_TAG#v} # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
RELEASE_TAG="${{ github.event.release.tag_name }}"

if [ -z "$RELEASE_TAG" ]; then
echo "Error: No release tag provided"
exit 1
fi

echo "Processing release tag: $RELEASE_TAG"

# Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
VERSION_ONLY="${RELEASE_TAG#v}"

# Check if it's a minor version (X.Y.0)
if [[ "$VERSION_ONLY" =~ ^[0-9]+\.[0-9]+\.0$ ]]; then
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is a minor version. Proceeding to create backport label."
if [[ "$VERSION_ONLY" =~ ^([0-9]+)\.([0-9]+)\.0$ ]]; then
echo "Release $RELEASE_TAG (version $VERSION_ONLY) is a minor version. Proceeding to create backport label."

TWO_DIGIT_VERSION=${VERSION_ONLY%.0} # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
# Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
MAJOR="${BASH_REMATCH[1]}"
MINOR="${BASH_REMATCH[2]}"
TWO_DIGIT_VERSION="${MAJOR}.${MINOR}"

FINAL_LABEL_NAME="backport-to-v${TWO_DIGIT_VERSION}"
FINAL_DESCRIPTION="Backport PR to the v${TWO_DIGIT_VERSION} branch"
LABEL_NAME="${BACKPORT_LABEL_PREFIX}v${TWO_DIGIT_VERSION}"
LABEL_DESC="Backport PR to the v${TWO_DIGIT_VERSION} branch"
LABEL_COLOR="$BACKPORT_LABEL_COLOR"

echo "Effective label name will be: ${FINAL_LABEL_NAME}"
echo "Effective description will be: ${FINAL_DESCRIPTION}"
echo "Label name: $LABEL_NAME"
echo "Label description: $LABEL_DESC"

# Check if the label already exists
STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" "https://api.github.com/repos/${OWNER_REPO}/labels/${FINAL_LABEL_NAME}")

if [ "${STATUS_CODE}" -eq 200 ]; then
echo "Label '${FINAL_LABEL_NAME}' already exists."
elif [ "${STATUS_CODE}" -eq 404 ]; then
echo "Label '${FINAL_LABEL_NAME}' does not exist. Creating it..."
# Prepare JSON data payload
JSON_DATA=$(printf '{"name":"%s","description":"%s","color":"B60205"}' "${FINAL_LABEL_NAME}" "${FINAL_DESCRIPTION}")

CREATE_STATUS_CODE=$(curl -s -o /tmp/curl_create_response.json -w "%{http_code}" -X POST \
-H "Accept: application/vnd.github.v3+json" \
-H "Authorization: token ${GITHUB_TOKEN}" \
--data "${JSON_DATA}" \
"https://api.github.com/repos/${OWNER_REPO}/labels")

CREATE_RESPONSE_BODY=$(cat /tmp/curl_create_response.json)
rm -f /tmp/curl_create_response.json

if [ "$CREATE_STATUS_CODE" -eq 201 ]; then
echo "Label '${FINAL_LABEL_NAME}' created successfully."
else
echo "Error creating label '${FINAL_LABEL_NAME}'. Status: $CREATE_STATUS_CODE"
echo "Response: $CREATE_RESPONSE_BODY"
exit 1
fi
# Check if label already exists
if gh label list --repo ${{ github.repository }} --limit 1000 | grep -q "^${LABEL_NAME}[[:space:]]"; then
echo "Label '$LABEL_NAME' already exists."
else
echo "Error checking for label '${FINAL_LABEL_NAME}'. HTTP Status: ${STATUS_CODE}"
exit 1
echo "Label '$LABEL_NAME' does not exist. Creating it..."
gh label create "$LABEL_NAME" \
--description "$LABEL_DESC" \
--color "$LABEL_COLOR" \
--repo ${{ github.repository }}
echo "Label '$LABEL_NAME' created successfully."
fi
else
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is not a minor version. Skipping backport label creation."
exit 0
echo "Release $RELEASE_TAG (version $VERSION_ONLY) is not a minor version. Skipping backport label creation."
fi

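The rewritten step derives the label name from the release tag via the `^([0-9]+)\.([0-9]+)\.0$` match; a few hypothetical tags and their outcomes under that logic:

```yaml
# v5.6.0 -> creates label 'backport-to-v5.6' (minor release)
# 5.7.0  -> creates label 'backport-to-v5.7' (the 'v' prefix is optional)
# v5.6.1 -> skipped, patch releases do not match the X.Y.0 pattern
# v6.0.0 -> creates label 'backport-to-v6.0'
```
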
@@ -1,19 +1,33 @@
name: Prowler - Find secrets
name: 'Tools: TruffleHog'

on: pull_request
on:
push:
branches:
- 'master'
- 'v5.*'
pull_request:
branches:
- 'master'
- 'v5.*'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
trufflehog:
scan-secrets:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read

steps:
- name: Checkout
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@466da5b0bb161144f6afca9afe5d57975828c410 # v3.90.8

- name: Scan for secrets with TruffleHog
uses: trufflesecurity/trufflehog@ad6fc8fb446b8fafbf7ea8193d2d6bfd42f45690 # v3.90.11
with:
path: ./
base: ${{ github.event.repository.default_branch }}
head: HEAD
extra_args: --only-verified
extra_args: '--results=verified,unknown'

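If the flag change is unfamiliar: as I read the TruffleHog options, `--results` selects which classes of findings are reported, so the new value widens the old `--only-verified` behavior rather than replacing it with something unrelated (an interpretation of the flags, not stated in this diff):

```yaml
# extra_args: --only-verified               -> report only secrets whose validity
#                                              was confirmed against the provider
# extra_args: '--results=verified,unknown'  -> additionally report hits whose
#                                              verification attempt was inconclusive
```
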
@@ -0,0 +1,34 @@
name: Label Community Contributors PRs

on:
pull_request:
types: [opened]

jobs:
add-community-label:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write

steps:
- name: Label community contributors
env:
GH_TOKEN: ${{ github.token }}
run: |
# Fetch fresh PR data to get current author_association
ASSOCIATION=$(gh api /repos/${{ github.repository }}/pulls/${{ github.event.number }} --jq '.author_association')
AUTHOR=$(gh api /repos/${{ github.repository }}/pulls/${{ github.event.number }} --jq '.user.login')

echo "Author: $AUTHOR, Association: $ASSOCIATION"

# Members have associations like: OWNER, MEMBER, COLLABORATOR
# Non-members have: CONTRIBUTOR, FIRST_TIME_CONTRIBUTOR, FIRST_TIMER, NONE
if [[ "$ASSOCIATION" != "OWNER" && "$ASSOCIATION" != "MEMBER" && "$ASSOCIATION" != "COLLABORATOR" ]]; then
gh api /repos/${{ github.repository }}/issues/${{ github.event.number }}/labels \
-X POST \
-f labels[]='community'
echo "Added 'community' label for $ASSOCIATION contributor"
else
echo "Skipped labeling for $ASSOCIATION"
fi

@@ -1,17 +1,29 @@
name: Prowler - PR Labeler
name: 'Tools: PR Labeler'

on:
pull_request_target:
branches:
- "master"
- "v3"
- "v4.*"
pull_request_target:
branches:
- 'master'
- 'v5.*'
types:
- 'opened'
- 'reopened'
- 'synchronize'

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true

jobs:
labeler:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest

steps:
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
- name: Apply labels to PR
uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
with:
sync-labels: true

@@ -3,21 +3,13 @@ name: 'MCP: Container Build and Push'
on:
push:
branches:
- "master"
- 'master'
paths:
- "mcp_server/**"
- ".github/workflows/mcp-container-build-push.yml"

# Uncomment to test this workflow on PRs
# pull_request:
#   branches:
#     - "master"
#   paths:
#     - "mcp_server/**"
#     - ".github/workflows/mcp-container-build-push.yml"

- 'mcp_server/**'
- '.github/workflows/mcp-container-build-push.yml'
release:
types: [published]
types:
- 'published'

permissions:
contents: read
@@ -41,6 +33,7 @@ jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 5
outputs:
short-sha: ${{ steps.set-short-sha.outputs.short-sha }}
steps:
@@ -51,8 +44,12 @@ jobs:
container-build-push:
needs: setup
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
packages: write
steps:
- name: Checkout
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Login to DockerHub
@@ -64,7 +61,7 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build and push container (latest)
- name: Build and push MCP container (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
@@ -83,7 +80,7 @@ jobs:
cache-from: type=gha
cache-to: type=gha,mode=max

- name: Build and push container (release)
- name: Build and push MCP container (release)
if: github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
@@ -103,7 +100,7 @@ jobs:
cache-from: type=gha
cache-to: type=gha,mode=max

- name: Trigger deployment
- name: Trigger MCP deployment
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
with:

@@ -0,0 +1,80 @@
name: 'MCP: Pull Request'

on:
push:
branches:
- 'master'
- 'v5.*'
paths:
- '.github/workflows/mcp-pull-request.yml'
- 'mcp_server/**'
- '!mcp_server/README.md'
- '!mcp_server/CHANGELOG.md'
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- '.github/workflows/mcp-pull-request.yml'
- 'mcp_server/**'
- '!mcp_server/README.md'
- '!mcp_server/CHANGELOG.md'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

env:
MCP_WORKING_DIR: ./mcp_server
IMAGE_NAME: prowler-mcp

jobs:
dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Lint Dockerfile with Hadolint
uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 # v3.3.0
with:
dockerfile: mcp_server/Dockerfile

container-build-and-scan:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
security-events: write
pull-requests: write

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build MCP container
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.MCP_WORKING_DIR }}
push: false
load: true
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max

- name: Scan MCP container with Trivy
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}
fail-on-critical: 'false'
severity: 'CRITICAL'

@@ -0,0 +1,103 @@
name: 'Tools: Check Changelog'

on:
pull_request:
types:
- 'opened'
- 'synchronize'
- 'reopened'
- 'labeled'
- 'unlabeled'
branches:
- 'master'
- 'v5.*'

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true

jobs:
check-changelog:
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
env:
MONITORED_FOLDERS: 'api ui prowler mcp_server'

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0

- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
api/**
ui/**
prowler/**
mcp_server/**

- name: Check for folder changes and changelog presence
id: check-folders
run: |
missing_changelogs=""

# Check api folder
if [[ "${{ steps.changed-files.outputs.any_changed }}" == "true" ]]; then
for folder in $MONITORED_FOLDERS; do
# Get files changed in this folder
changed_in_folder=$(echo "${{ steps.changed-files.outputs.all_changed_files }}" | tr ' ' '\n' | grep "^${folder}/" || true)

if [ -n "$changed_in_folder" ]; then
echo "Detected changes in ${folder}/"

# Check if CHANGELOG.md was updated
if ! echo "$changed_in_folder" | grep -q "^${folder}/CHANGELOG.md$"; then
echo "No changelog update found for ${folder}/"
missing_changelogs="${missing_changelogs}- \`${folder}\`"$'\n'
fi
fi
done
fi

{
echo "missing_changelogs<<EOF"
echo -e "${missing_changelogs}"
echo "EOF"
} >> $GITHUB_OUTPUT

- name: Find existing changelog comment
if: github.event.pull_request.head.repo.full_name == github.repository
id: find-comment
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- changelog-check -->'

- name: Update PR comment with changelog status
if: github.event.pull_request.head.repo.full_name == github.repository
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.find-comment.outputs.comment-id }}
edit-mode: replace
body: |
<!-- changelog-check -->
${{ steps.check-folders.outputs.missing_changelogs != '' && format('⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**

{0}

Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.', steps.check-folders.outputs.missing_changelogs) || '✅ All necessary `CHANGELOG.md` files have been updated.' }}

- name: Fail if changelog is missing
if: steps.check-folders.outputs.missing_changelogs != ''
run: |
echo "::error::Missing changelog updates in some folders"
exit 1

@@ -1,42 +1,40 @@
name: Prowler - PR Conflict Checker
name: 'Tools: PR Conflict Checker'

on:
pull_request:
pull_request_target:
types:
- opened
- synchronize
- reopened
- 'opened'
- 'synchronize'
- 'reopened'
branches:
- "master"
- "v5.*"
# Leaving this commented until we find a way to run it for forks but in Prowler's context
# pull_request_target:
# types:
# - opened
# - synchronize
# - reopened
# branches:
# - "master"
# - "v5.*"
- 'master'
- 'v5.*'

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true

jobs:
conflict-checker:
check-conflicts:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
issues: write

steps:
- name: Checkout repository
- name: Checkout PR head
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0

- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
**
files: '**'

- name: Check for conflict markers
id: conflict-check
@@ -51,10 +49,10 @@
if [ -f "$file" ]; then
echo "Checking file: $file"

# Look for conflict markers
if grep -l "^<<<<<<<\|^=======\|^>>>>>>>" "$file" 2>/dev/null; then
# Look for conflict markers (more precise regex)
if grep -qE '^(<<<<<<<|=======|>>>>>>>)' "$file" 2>/dev/null; then
echo "Conflict markers found in: $file"
CONFLICT_FILES="$CONFLICT_FILES$file "
CONFLICT_FILES="${CONFLICT_FILES}- \`${file}\`"$'\n'
HAS_CONFLICTS=true
fi
fi
@@ -62,114 +60,64 @@

if [ "$HAS_CONFLICTS" = true ]; then
echo "has_conflicts=true" >> $GITHUB_OUTPUT
echo "conflict_files=$CONFLICT_FILES" >> $GITHUB_OUTPUT
echo "Conflict markers detected in files: $CONFLICT_FILES"
{
echo "conflict_files<<EOF"
echo "$CONFLICT_FILES"
echo "EOF"
} >> $GITHUB_OUTPUT
echo "Conflict markers detected"
else
echo "has_conflicts=false" >> $GITHUB_OUTPUT
echo "No conflict markers found in changed files"
fi

- name: Add conflict label
if: steps.conflict-check.outputs.has_conflicts == 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
script: |
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
});
- name: Manage conflict label
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
HAS_CONFLICTS: ${{ steps.conflict-check.outputs.has_conflicts }}
run: |
LABEL_NAME="has-conflicts"

const hasConflictLabel = labels.some(label => label.name === 'has-conflicts');
# Add or remove label based on conflict status
if [ "$HAS_CONFLICTS" = "true" ]; then
echo "Adding conflict label to PR #${PR_NUMBER}..."
gh pr edit "$PR_NUMBER" --add-label "$LABEL_NAME" --repo ${{ github.repository }} || true
else
echo "Removing conflict label from PR #${PR_NUMBER}..."
gh pr edit "$PR_NUMBER" --remove-label "$LABEL_NAME" --repo ${{ github.repository }} || true
fi

if (!hasConflictLabel) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
labels: ['has-conflicts']
});
console.log('Added has-conflicts label');
} else {
console.log('has-conflicts label already exists');
}

- name: Remove conflict label
if: steps.conflict-check.outputs.has_conflicts == 'false'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
script: |
try {
await github.rest.issues.removeLabel({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
name: 'has-conflicts'
});
console.log('Removed has-conflicts label');
} catch (error) {
if (error.status === 404) {
console.log('has-conflicts label was not present');
} else {
throw error;
}
}

- name: Find existing conflict comment
if: steps.conflict-check.outputs.has_conflicts == 'true'
- name: Find existing comment
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
id: find-comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-regex: '(⚠️ \*\*Conflict Markers Detected\*\*|✅ \*\*Conflict Markers Resolved\*\*)'
body-includes: '<!-- conflict-checker-comment -->'

- name: Create or update conflict comment
if: steps.conflict-check.outputs.has_conflicts == 'true'
- name: Create or update comment
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
⚠️ **Conflict Markers Detected**
<!-- conflict-checker-comment -->
${{ steps.conflict-check.outputs.has_conflicts == 'true' && '⚠️ **Conflict Markers Detected**' || '✅ **Conflict Markers Resolved**' }}

This pull request contains unresolved conflict markers in the following files:
```
${{ steps.conflict-check.outputs.conflict_files }}
```
${{ steps.conflict-check.outputs.has_conflicts == 'true' && format('This pull request contains unresolved conflict markers in the following files:

{0}

Please resolve these conflicts by:
1. Locating the conflict markers: `<<<<<<<`, `=======`, and `>>>>>>>`
2. Manually editing the files to resolve the conflicts
3. Removing all conflict markers
4. Committing and pushing the changes

- name: Find existing conflict comment when resolved
if: steps.conflict-check.outputs.has_conflicts == 'false'
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
id: find-resolved-comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-regex: '(⚠️ \*\*Conflict Markers Detected\*\*|✅ \*\*Conflict Markers Resolved\*\*)'

- name: Update comment when conflicts resolved
if: steps.conflict-check.outputs.has_conflicts == 'false' && steps.find-resolved-comment.outputs.comment-id != ''
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
comment-id: ${{ steps.find-resolved-comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
✅ **Conflict Markers Resolved**

All conflict markers have been successfully resolved in this pull request.
4. Committing and pushing the changes', steps.conflict-check.outputs.conflict_files) || 'All conflict markers have been successfully resolved in this pull request.' }}

- name: Fail workflow if conflicts detected
if: steps.conflict-check.outputs.has_conflicts == 'true'
run: |
echo "::error::Workflow failed due to conflict markers in files: ${{ steps.conflict-check.outputs.conflict_files }}"
echo "::error::Workflow failed due to conflict markers detected in the PR"
exit 1

@@ -1,27 +1,31 @@
name: Prowler - Merged Pull Request
name: 'Tools: PR Merged'

on:
pull_request_target:
branches: ['master']
types: ['closed']
branches:
- 'master'
types:
- 'closed'

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: false

jobs:
trigger-cloud-pull-request:
name: Trigger Cloud Pull Request
if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 10
permissions:
contents: read
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.merge_commit_sha }}

- name: Set short git commit SHA
- name: Calculate short commit SHA
id: vars
run: |
shortSha=$(git rev-parse --short ${{ github.event.pull_request.merge_commit_sha }})
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
SHORT_SHA="${{ github.event.pull_request.merge_commit_sha }}"
echo "SHORT_SHA=${SHORT_SHA::7}" >> $GITHUB_ENV

- name: Trigger pull request
- name: Trigger Cloud repository pull request
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
@@ -31,8 +35,12 @@ jobs:
{
"PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
"PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
"PROWLER_PR_NUMBER": "${{ github.event.pull_request.number }}",
"PROWLER_PR_TITLE": ${{ toJson(github.event.pull_request.title) }},
"PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
"PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
"PROWLER_PR_URL": ${{ toJson(github.event.pull_request.html_url) }}
"PROWLER_PR_URL": ${{ toJson(github.event.pull_request.html_url) }},
"PROWLER_PR_MERGED_BY": "${{ github.event.pull_request.merged_by.login }}",
"PROWLER_PR_BASE_BRANCH": "${{ github.event.pull_request.base.ref }}",
"PROWLER_PR_HEAD_BRANCH": "${{ github.event.pull_request.head.ref }}"
}

+37
-34
@@ -1,6 +1,6 @@
name: Prowler - Release Preparation
name: 'Tools: Prepare Release'

run-name: Prowler Release Preparation for ${{ inputs.prowler_version }}
run-name: 'Prepare Release for Prowler ${{ inputs.prowler_version }}'

on:
workflow_dispatch:
@@ -10,18 +10,23 @@ on:
required: true
type: string

concurrency:
group: ${{ github.workflow }}-${{ inputs.prowler_version }}
cancel-in-progress: false

env:
PROWLER_VERSION: ${{ github.event.inputs.prowler_version }}
PROWLER_VERSION: ${{ inputs.prowler_version }}

jobs:
prepare-release:
if: github.repository == 'prowler-cloud/prowler'
if: github.event_name == 'workflow_dispatch' && github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout code
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
@@ -34,15 +39,15 @@ jobs:

- name: Install Poetry
run: |
python3 -m pip install --user poetry
python3 -m pip install --user poetry==2.1.1
echo "$HOME/.local/bin" >> $GITHUB_PATH

- name: Configure Git
run: |
git config --global user.name "prowler-bot"
git config --global user.email "179230569+prowler-bot@users.noreply.github.com"
git config --global user.name 'prowler-bot'
git config --global user.email '179230569+prowler-bot@users.noreply.github.com'

- name: Parse version and determine branch
- name: Parse version and read changelogs
run: |
# Validate version format (reusing pattern from sdk-bump-version.yml)
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
@@ -119,7 +124,7 @@
exit 1
fi

- name: Extract changelog entries
- name: Extract and combine changelog entries
run: |
set -e

@@ -145,8 +150,8 @@
# Remove --- separators
sed -i '/^---$/d' "$output_file"

# Remove trailing empty lines
sed -i '/^$/d' "$output_file"
# Remove only trailing empty lines (not all empty lines)
sed -i -e :a -e '/^\s*$/d;N;ba' "$output_file"
}

# Calculate expected versions for this release
@@ -242,10 +247,15 @@
echo "" >> combined_changelog.md
fi

# Add fallback message if no changelogs were added
if [ ! -s combined_changelog.md ]; then
echo "No component changes detected for this release." >> combined_changelog.md
fi

echo "Combined changelog preview:"
cat combined_changelog.md

- name: Checkout existing branch for patch release
- name: Checkout release branch for patch release
if: ${{ env.PATCH_VERSION != '0' }}
run: |
echo "Patch release detected, checking out existing branch $BRANCH_NAME..."
@@ -260,7 +270,7 @@
exit 1
fi

- name: Verify version in pyproject.toml
- name: Verify SDK version in pyproject.toml
run: |
CURRENT_VERSION=$(grep '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
@@ -270,7 +280,7 @@
fi
echo "✓ pyproject.toml version: $CURRENT_VERSION"

- name: Verify version in prowler/config/config.py
- name: Verify SDK version in prowler/config/config.py
run: |
CURRENT_VERSION=$(grep '^prowler_version = ' prowler/config/config.py | sed -E 's/prowler_version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
@@ -280,7 +290,7 @@
fi
echo "✓ prowler/config/config.py version: $CURRENT_VERSION"

- name: Verify version in api/pyproject.toml
- name: Verify API version in api/pyproject.toml
if: ${{ env.HAS_API_CHANGES == 'true' }}
run: |
CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
@@ -291,7 +301,7 @@
fi
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"

- name: Verify prowler dependency in api/pyproject.toml
- name: Verify API prowler dependency in api/pyproject.toml
if: ${{ env.PATCH_VERSION != '0' && env.HAS_API_CHANGES == 'true' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
@@ -302,7 +312,7 @@
fi
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"

- name: Verify version in api/src/backend/api/v1/views.py
- name: Verify API version in api/src/backend/api/v1/views.py
if: ${{ env.HAS_API_CHANGES == 'true' }}
run: |
CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
@@ -313,7 +323,7 @@
fi
echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"

- name: Checkout existing release branch for minor release
- name: Checkout release branch for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |
echo "Minor release detected (patch = 0), checking out existing branch $BRANCH_NAME..."
@@ -325,19 +335,12 @@
exit 1
fi

- name: Prepare prowler dependency update for minor release
- name: Update API prowler dependency for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')

# Create a temporary branch for the PR from the minor version branch
TEMP_BRANCH="update-api-dependency-$BRANCH_NAME_TRIMMED-$(date +%s)"
echo "TEMP_BRANCH=$TEMP_BRANCH" >> $GITHUB_ENV

# Create temp branch from the current minor version branch
git checkout -b "$TEMP_BRANCH"

# Minor release: update the dependency to use the release branch
echo "Updating prowler dependency from '$CURRENT_PROWLER_REF' to '$BRANCH_NAME_TRIMMED'"
sed -i "s|prowler @ git+https://github.com/prowler-cloud/prowler.git@[^\"]*\"|prowler @ git+https://github.com/prowler-cloud/prowler.git@$BRANCH_NAME_TRIMMED\"|" api/pyproject.toml
@@ -355,20 +358,19 @@
poetry lock
cd ..

# Commit and push the temporary branch
git add api/pyproject.toml api/poetry.lock
git commit -m "chore(api): update prowler dependency to $BRANCH_NAME_TRIMMED for release $PROWLER_VERSION"
git push origin "$TEMP_BRANCH"

echo "✓ Prepared prowler dependency update to: $UPDATED_PROWLER_REF"

- name: Create Pull Request against release branch
- name: Create PR for API dependency update
if: ${{ env.PATCH_VERSION == '0' }}
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
branch: ${{ env.TEMP_BRANCH }}
commit-message: 'chore(api): update prowler dependency to ${{ env.BRANCH_NAME }} for release ${{ env.PROWLER_VERSION }}'
branch: update-api-dependency-${{ env.BRANCH_NAME }}-${{ github.run_number }}
base: ${{ env.BRANCH_NAME }}
add-paths: |
api/pyproject.toml
api/poetry.lock
title: "chore(api): Update prowler dependency to ${{ env.BRANCH_NAME }} for release ${{ env.PROWLER_VERSION }}"
body: |
### Description
@@ -401,5 +403,6 @@
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

- name: Clean up temporary files
if: always()
run: |
rm -f prowler_changelog.md api_changelog.md ui_changelog.md mcp_changelog.md combined_changelog.md

@@ -1,77 +0,0 @@
name: Prowler - Check Changelog

on:
pull_request:
types: [opened, synchronize, reopened, labeled, unlabeled]

jobs:
check-changelog:
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
pull-requests: write
env:
MONITORED_FOLDERS: "api ui prowler mcp_server"

steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0

- name: Get list of changed files
id: changed_files
run: |
git fetch origin ${{ github.base_ref }}
git diff --name-only origin/${{ github.base_ref }}...HEAD > changed_files.txt
cat changed_files.txt

- name: Check for folder changes and changelog presence
id: check_folders
run: |
missing_changelogs=""

for folder in $MONITORED_FOLDERS; do
if grep -q "^${folder}/" changed_files.txt; then
echo "Detected changes in ${folder}/"
if ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
echo "No changelog update found for ${folder}/"
missing_changelogs="${missing_changelogs}- \`${folder}\`\n"
fi
fi
done

echo "missing_changelogs<<EOF" >> $GITHUB_OUTPUT
echo -e "${missing_changelogs}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT

- name: Find existing changelog comment
if: github.event.pull_request.head.repo.full_name == github.repository
id: find_comment
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad #v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- changelog-check -->'

- name: Update PR comment with changelog status
if: github.event.pull_request.head.repo.full_name == github.repository
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.find_comment.outputs.comment-id }}
edit-mode: replace
body: |
<!-- changelog-check -->
${{ steps.check_folders.outputs.missing_changelogs != '' && format('⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**

{0}

Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.', steps.check_folders.outputs.missing_changelogs) || '✅ All necessary `CHANGELOG.md` files have been updated. Great job! 🎉' }}

- name: Fail if changelog is missing
if: steps.check_folders.outputs.missing_changelogs != ''
run: |
echo "ERROR: Missing changelog updates in some folders."
exit 1

@@ -1,44 +1,40 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: SDK - CodeQL
name: 'SDK: CodeQL'

on:
push:
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
paths-ignore:
- 'ui/**'
- 'api/**'
- '.github/**'
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- '.github/workflows/sdk-codeql.yml'
- '.github/codeql/sdk-codeql-config.yml'
- '!prowler/CHANGELOG.md'
pull_request:
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
paths-ignore:
- 'ui/**'
- 'api/**'
- '.github/**'
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- '.github/workflows/sdk-codeql.yml'
- '.github/codeql/sdk-codeql-config.yml'
- '!prowler/CHANGELOG.md'
schedule:
- cron: '00 12 * * *'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
analyze:
name: Analyze
name: CodeQL Security Analysis
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
actions: read
contents: read
@@ -47,21 +43,20 @@ jobs:
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
language:
- 'python'

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/sdk-codeql-config.yml
- name: Initialize CodeQL
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/sdk-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: "/language:${{matrix.language}}"
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: '/language:${{ matrix.language }}'

@@ -1,36 +1,36 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: UI - CodeQL
name: 'UI: CodeQL'

on:
push:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- "ui/**"
- 'ui/**'
- '.github/workflows/ui-codeql.yml'
- '.github/codeql/ui-codeql-config.yml'
- '!ui/CHANGELOG.md'
pull_request:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- "ui/**"
- 'ui/**'
- '.github/workflows/ui-codeql.yml'
- '.github/codeql/ui-codeql-config.yml'
- '!ui/CHANGELOG.md'
schedule:
- cron: "00 12 * * *"
- cron: '00 12 * * *'

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true

jobs:
analyze:
name: Analyze
name: CodeQL Security Analysis
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
actions: read
contents: read
@@ -39,14 +39,13 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ["javascript"]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
language:
- 'javascript-typescript'

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
@@ -56,4 +55,4 @@ jobs:
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: "/language:${{matrix.language}}"
category: '/language:${{ matrix.language }}'

@@ -18,6 +18,7 @@ jobs:
AUTH_TRUST_HOST: true
NEXTAUTH_URL: 'http://localhost:3000'
NEXT_PUBLIC_API_BASE_URL: 'http://localhost:8080/api/v1'
E2E_NEW_PASSWORD: ${{ secrets.E2E_NEW_PASSWORD }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

@@ -39,6 +39,12 @@ secrets-*/
# JUnit Reports
junit-reports/

# Test and coverage artifacts
*_coverage.xml
pytest_*.xml
.coverage
htmlcov/

# VSCode files
.vscode/

@@ -83,3 +89,6 @@ CLAUDE.md
# MCP Server
mcp_server/prowler_mcp_server/prowler_app/server.py
mcp_server/prowler_mcp_server/prowler_app/utils/schema.yaml

# Compliance report
*.pdf

@@ -46,6 +46,14 @@ help: ## Show this help.
@echo "Prowler Makefile"
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n  make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf "  \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

##@ Build no cache
build-no-cache-dev:
docker compose -f docker-compose-dev.yml build --no-cache api-dev worker-dev worker-beat

##@ Development Environment
run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, and workers
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat --build
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat

##@ Development Environment
build-and-run-api-dev: build-no-cache-dev run-api-dev

+8
-1
@@ -2,7 +2,12 @@

All notable changes to the **Prowler API** are documented in this file.

## [1.14.0] (Prowler UNRELEASED)
## [1.15.0] (Prowler UNRELEASED)

### Added
- Support for configuring multiple LLM providers [(#8772)](https://github.com/prowler-cloud/prowler/pull/8772)

## [1.14.0] (Prowler 5.13.0)

### Added
- Default JWT keys are generated and stored if they are missing from configuration [(#8655)](https://github.com/prowler-cloud/prowler/pull/8655)
@@ -12,8 +17,10 @@ All notable changes to the **Prowler API** are documented in this file.
- API Key support [(#8805)](https://github.com/prowler-cloud/prowler/pull/8805)
- SAML role mapping protection for single-admin tenants to prevent accidental lockout [(#8882)](https://github.com/prowler-cloud/prowler/pull/8882)
- Support for `passed_findings` and `total_findings` fields in compliance requirement overview for accurate Prowler ThreatScore calculation [(#8582)](https://github.com/prowler-cloud/prowler/pull/8582)
- PDF reporting for Prowler ThreatScore [(#8867)](https://github.com/prowler-cloud/prowler/pull/8867)
- Database read replica support [(#8869)](https://github.com/prowler-cloud/prowler/pull/8869)
- Support Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
- Add `provider_id__in` filter support to findings and findings severity overview endpoints [(#8951)](https://github.com/prowler-cloud/prowler/pull/8951)

### Changed
- Now the MANAGE_ACCOUNT permission is required to modify or read user permissions instead of MANAGE_USERS [(#8281)](https://github.com/prowler-cloud/prowler/pull/8281)

Generated
+528
-1
@@ -1256,6 +1256,98 @@ files = [
{file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"},
]

[[package]]
name = "contourpy"
version = "1.3.3"
description = "Python library for calculating contours of 2D quadrilateral grids"
optional = false
python-versions = ">=3.11"
groups = ["main"]
files = [
{file = "contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1"},
{file = "contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381"},
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7"},
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1"},
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a"},
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db"},
{file = "contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620"},
{file = "contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f"},
{file = "contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff"},
{file = "contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42"},
{file = "contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470"},
{file = "contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb"},
{file = "contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6"},
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7"},
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8"},
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea"},
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1"},
{file = "contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7"},
{file = "contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411"},
{file = "contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69"},
{file = "contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b"},
{file = "contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc"},
{file = "contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5"},
{file = "contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1"},
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286"},
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5"},
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67"},
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9"},
{file = "contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659"},
{file = "contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7"},
{file = "contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d"},
{file = "contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263"},
{file = "contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9"},
{file = "contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d"},
{file = "contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216"},
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae"},
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20"},
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99"},
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b"},
{file = "contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a"},
{file = "contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e"},
{file = "contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3"},
{file = "contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8"},
{file = "contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301"},
{file = "contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a"},
{file = "contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77"},
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5"},
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4"},
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36"},
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3"},
{file = "contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b"},
{file = "contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36"},
{file = "contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d"},
{file = "contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd"},
{file = "contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339"},
{file = "contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772"},
{file = "contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77"},
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13"},
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe"},
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f"},
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0"},
{file = "contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4"},
{file = "contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f"},
{file = "contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae"},
{file = "contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc"},
{file = "contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77"},
{file = "contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880"},
]

[package.dependencies]
numpy = ">=1.25"

[package.extras]
bokeh = ["bokeh", "selenium"]
docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"]
mypy = ["bokeh", "contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.17.0)", "types-Pillow"]
test = ["Pillow", "contourpy[test-no-images]", "matplotlib"]
test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"]

[[package]]
name = "coverage"
version = "7.5.4"
@@ -1390,6 +1482,22 @@ ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]

[[package]]
name = "cycler"
version = "0.12.1"
description = "Composable style cycles"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
{file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
]

[package.extras]
docs = ["ipython", "matplotlib", "numpydoc", "sphinx"]
tests = ["pytest", "pytest-cov", "pytest-xdist"]

[[package]]
name = "dash"
version = "3.1.1"
@@ -2120,6 +2228,87 @@ werkzeug = ">=3.1.0"
async = ["asgiref (>=3.2)"]
dotenv = ["python-dotenv"]

[[package]]
name = "fonttools"
version = "4.60.1"
description = "Tools to manipulate font files"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "fonttools-4.60.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9a52f254ce051e196b8fe2af4634c2d2f02c981756c6464dc192f1b6050b4e28"},
{file = "fonttools-4.60.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7420a2696a44650120cdd269a5d2e56a477e2bfa9d95e86229059beb1c19e15"},
{file = "fonttools-4.60.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee0c0b3b35b34f782afc673d503167157094a16f442ace7c6c5e0ca80b08f50c"},
{file = "fonttools-4.60.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:282dafa55f9659e8999110bd8ed422ebe1c8aecd0dc396550b038e6c9a08b8ea"},
{file = "fonttools-4.60.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4ba4bd646e86de16160f0fb72e31c3b9b7d0721c3e5b26b9fa2fc931dfdb2652"},
{file = "fonttools-4.60.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0b0835ed15dd5b40d726bb61c846a688f5b4ce2208ec68779bc81860adb5851a"},
{file = "fonttools-4.60.1-cp310-cp310-win32.whl", hash = "sha256:1525796c3ffe27bb6268ed2a1bb0dcf214d561dfaf04728abf01489eb5339dce"},
{file = "fonttools-4.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:268ecda8ca6cb5c4f044b1fb9b3b376e8cd1b361cef275082429dc4174907038"},
{file = "fonttools-4.60.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7b4c32e232a71f63a5d00259ca3d88345ce2a43295bb049d21061f338124246f"},
{file = "fonttools-4.60.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3630e86c484263eaac71d117085d509cbcf7b18f677906824e4bace598fb70d2"},
{file = "fonttools-4.60.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c1015318e4fec75dd4943ad5f6a206d9727adf97410d58b7e32ab644a807914"},
{file = "fonttools-4.60.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e6c58beb17380f7c2ea181ea11e7db8c0ceb474c9dd45f48e71e2cb577d146a1"},
{file = "fonttools-4.60.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec3681a0cb34c255d76dd9d865a55f260164adb9fa02628415cdc2d43ee2c05d"},
{file = "fonttools-4.60.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4b5c37a5f40e4d733d3bbaaef082149bee5a5ea3156a785ff64d949bd1353fa"},
{file = "fonttools-4.60.1-cp311-cp311-win32.whl", hash = "sha256:398447f3d8c0c786cbf1209711e79080a40761eb44b27cdafffb48f52bcec258"},
{file = "fonttools-4.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:d066ea419f719ed87bc2c99a4a4bfd77c2e5949cb724588b9dd58f3fd90b92bf"},
{file = "fonttools-4.60.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7b0c6d57ab00dae9529f3faf187f2254ea0aa1e04215cf2f1a8ec277c96661bc"},
{file = "fonttools-4.60.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:839565cbf14645952d933853e8ade66a463684ed6ed6c9345d0faf1f0e868877"},
{file = "fonttools-4.60.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8177ec9676ea6e1793c8a084a90b65a9f778771998eb919d05db6d4b1c0b114c"},
{file = "fonttools-4.60.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:996a4d1834524adbb423385d5a629b868ef9d774670856c63c9a0408a3063401"},
{file = "fonttools-4.60.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a46b2f450bc79e06ef3b6394f0c68660529ed51692606ad7f953fc2e448bc903"},
{file = "fonttools-4.60.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ec722ee589e89a89f5b7574f5c45604030aa6ae24cb2c751e2707193b466fed"},
{file = "fonttools-4.60.1-cp312-cp312-win32.whl", hash = "sha256:b2cf105cee600d2de04ca3cfa1f74f1127f8455b71dbad02b9da6ec266e116d6"},
{file = "fonttools-4.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:992775c9fbe2cf794786fa0ffca7f09f564ba3499b8fe9f2f80bd7197db60383"},
{file = "fonttools-4.60.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f68576bb4bbf6060c7ab047b1574a1ebe5c50a17de62830079967b211059ebb"},
{file = "fonttools-4.60.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:eedacb5c5d22b7097482fa834bda0dafa3d914a4e829ec83cdea2a01f8c813c4"},
{file = "fonttools-4.60.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b33a7884fabd72bdf5f910d0cf46be50dce86a0362a65cfc746a4168c67eb96c"},
{file = "fonttools-4.60.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2409d5fb7b55fd70f715e6d34e7a6e4f7511b8ad29a49d6df225ee76da76dd77"},
{file = "fonttools-4.60.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c8651e0d4b3bdeda6602b85fdc2abbefc1b41e573ecb37b6779c4ca50753a199"},
{file = "fonttools-4.60.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:145daa14bf24824b677b9357c5e44fd8895c2a8f53596e1b9ea3496081dc692c"},
{file = "fonttools-4.60.1-cp313-cp313-win32.whl", hash = "sha256:2299df884c11162617a66b7c316957d74a18e3758c0274762d2cc87df7bc0272"},
{file = "fonttools-4.60.1-cp313-cp313-win_amd64.whl", hash = "sha256:a3db56f153bd4c5c2b619ab02c5db5192e222150ce5a1bc10f16164714bc39ac"},
{file = "fonttools-4.60.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:a884aef09d45ba1206712c7dbda5829562d3fea7726935d3289d343232ecb0d3"},
{file = "fonttools-4.60.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8a44788d9d91df72d1a5eac49b31aeb887a5f4aab761b4cffc4196c74907ea85"},
{file = "fonttools-4.60.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e852d9dda9f93ad3651ae1e3bb770eac544ec93c3807888798eccddf84596537"},
{file = "fonttools-4.60.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:154cb6ee417e417bf5f7c42fe25858c9140c26f647c7347c06f0cc2d47eff003"},
{file = "fonttools-4.60.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5664fd1a9ea7f244487ac8f10340c4e37664675e8667d6fee420766e0fb3cf08"},
{file = "fonttools-4.60.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:583b7f8e3c49486e4d489ad1deacfb8d5be54a8ef34d6df824f6a171f8511d99"},
{file = "fonttools-4.60.1-cp314-cp314-win32.whl", hash = "sha256:66929e2ea2810c6533a5184f938502cfdaea4bc3efb7130d8cc02e1c1b4108d6"},
{file = "fonttools-4.60.1-cp314-cp314-win_amd64.whl", hash = "sha256:f3d5be054c461d6a2268831f04091dc82753176f6ea06dc6047a5e168265a987"},
{file = "fonttools-4.60.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:b6379e7546ba4ae4b18f8ae2b9bc5960936007a1c0e30b342f662577e8bc3299"},
{file = "fonttools-4.60.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9d0ced62b59e0430b3690dbc5373df1c2aa7585e9a8ce38eff87f0fd993c5b01"},
{file = "fonttools-4.60.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:875cb7764708b3132637f6c5fb385b16eeba0f7ac9fa45a69d35e09b47045801"},
{file = "fonttools-4.60.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a184b2ea57b13680ab6d5fbde99ccef152c95c06746cb7718c583abd8f945ccc"},
{file = "fonttools-4.60.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:026290e4ec76583881763fac284aca67365e0be9f13a7fb137257096114cb3bc"},
{file = "fonttools-4.60.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f0e8817c7d1a0c2eedebf57ef9a9896f3ea23324769a9a2061a80fe8852705ed"},
{file = "fonttools-4.60.1-cp314-cp314t-win32.whl", hash = "sha256:1410155d0e764a4615774e5c2c6fc516259fe3eca5882f034eb9bfdbee056259"},
{file = "fonttools-4.60.1-cp314-cp314t-win_amd64.whl", hash = "sha256:022beaea4b73a70295b688f817ddc24ed3e3418b5036ffcd5658141184ef0d0c"},
{file = "fonttools-4.60.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:122e1a8ada290423c493491d002f622b1992b1ab0b488c68e31c413390dc7eb2"},
{file = "fonttools-4.60.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a140761c4ff63d0cb9256ac752f230460ee225ccef4ad8f68affc723c88e2036"},
{file = "fonttools-4.60.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0eae96373e4b7c9e45d099d7a523444e3554360927225c1cdae221a58a45b856"},
{file = "fonttools-4.60.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:596ecaca36367027d525b3b426d8a8208169d09edcf8c7506aceb3a38bfb55c7"},
{file = "fonttools-4.60.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ee06fc57512144d8b0445194c2da9f190f61ad51e230f14836286470c99f854"},
{file = "fonttools-4.60.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b42d86938e8dda1cd9a1a87a6d82f1818eaf933348429653559a458d027446da"},
{file = "fonttools-4.60.1-cp39-cp39-win32.whl", hash = "sha256:8b4eb332f9501cb1cd3d4d099374a1e1306783ff95489a1026bde9eb02ccc34a"},
{file = "fonttools-4.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:7473a8ed9ed09aeaa191301244a5a9dbe46fe0bf54f9d6cd21d83044c3321217"},
{file = "fonttools-4.60.1-py3-none-any.whl", hash = "sha256:906306ac7afe2156fcf0042173d6ebbb05416af70f6b370967b47f8f00103bbb"},
{file = "fonttools-4.60.1.tar.gz", hash = "sha256:ef00af0439ebfee806b25f24c8f92109157ff3fac5731dc7867957812e87b8d9"},
]

[package.extras]
all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"]
graphite = ["lz4 (>=1.7.4.2)"]
interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""]
lxml = ["lxml (>=4.0)"]
pathops = ["skia-pathops (>=0.5.0)"]
plot = ["matplotlib"]
repacker = ["uharfbuzz (>=0.23.0)"]
symfont = ["sympy"]
type1 = ["xattr ; sys_platform == \"darwin\""]
unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""]
woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"]

[[package]]
name = "freezegun"
version = "1.5.1"
@@ -2787,6 +2976,117 @@ files = [
|
||||
[package.dependencies]
|
||||
referencing = ">=0.31.0"
|
||||
|
||||
[[package]]
|
||||
name = "kiwisolver"
|
||||
version = "1.4.9"
|
||||
description = "A fast implementation of the Cassowary constraint solver"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "kiwisolver-1.4.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b4b4d74bda2b8ebf4da5bd42af11d02d04428b2c32846e4c2c93219df8a7987b"},
    {file = "kiwisolver-1.4.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fb3b8132019ea572f4611d770991000d7f58127560c4889729248eb5852a102f"},
    {file = "kiwisolver-1.4.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84fd60810829c27ae375114cd379da1fa65e6918e1da405f356a775d49a62bcf"},
    {file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b78efa4c6e804ecdf727e580dbb9cba85624d2e1c6b5cb059c66290063bd99a9"},
    {file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4efec7bcf21671db6a3294ff301d2fc861c31faa3c8740d1a94689234d1b415"},
    {file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:90f47e70293fc3688b71271100a1a5453aa9944a81d27ff779c108372cf5567b"},
    {file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fdca1def57a2e88ef339de1737a1449d6dbf5fab184c54a1fca01d541317154"},
    {file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cf554f21be770f5111a1690d42313e140355e687e05cf82cb23d0a721a64a48"},
    {file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fc1795ac5cd0510207482c3d1d3ed781143383b8cfd36f5c645f3897ce066220"},
    {file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ccd09f20ccdbbd341b21a67ab50a119b64a403b09288c27481575105283c1586"},
    {file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:540c7c72324d864406a009d72f5d6856f49693db95d1fbb46cf86febef873634"},
    {file = "kiwisolver-1.4.9-cp310-cp310-win_amd64.whl", hash = "sha256:ede8c6d533bc6601a47ad4046080d36b8fc99f81e6f1c17b0ac3c2dc91ac7611"},
    {file = "kiwisolver-1.4.9-cp310-cp310-win_arm64.whl", hash = "sha256:7b4da0d01ac866a57dd61ac258c5607b4cd677f63abaec7b148354d2b2cdd536"},
    {file = "kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16"},
    {file = "kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089"},
    {file = "kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543"},
    {file = "kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61"},
    {file = "kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1"},
    {file = "kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872"},
    {file = "kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26"},
    {file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028"},
    {file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771"},
    {file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a"},
    {file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464"},
    {file = "kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2"},
    {file = "kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7"},
    {file = "kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999"},
    {file = "kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2"},
    {file = "kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14"},
    {file = "kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04"},
    {file = "kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752"},
    {file = "kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77"},
    {file = "kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198"},
    {file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d"},
    {file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab"},
    {file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2"},
    {file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145"},
    {file = "kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54"},
    {file = "kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60"},
    {file = "kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8"},
    {file = "kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2"},
    {file = "kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f"},
    {file = "kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098"},
    {file = "kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed"},
    {file = "kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525"},
    {file = "kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78"},
    {file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b"},
    {file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799"},
    {file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3"},
    {file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c"},
    {file = "kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d"},
    {file = "kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c"},
    {file = "kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386"},
    {file = "kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552"},
    {file = "kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3"},
    {file = "kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58"},
    {file = "kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4"},
    {file = "kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df"},
    {file = "kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6"},
    {file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5"},
    {file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf"},
    {file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5"},
    {file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce"},
    {file = "kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7"},
    {file = "kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4d1d9e582ad4d63062d34077a9a1e9f3c34088a2ec5135b1f7190c07cf366527"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:deed0c7258ceb4c44ad5ec7d9918f9f14fd05b2be86378d86cf50e63d1e7b771"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a590506f303f512dff6b7f75fd2fd18e16943efee932008fe7140e5fa91d80e"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e09c2279a4d01f099f52d5c4b3d9e208e91edcbd1a175c9662a8b16e000fece9"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c9e7cdf45d594ee04d5be1b24dd9d49f3d1590959b2271fb30b5ca2b262c00fb"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1"},
    {file = "kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d"},
]

[[package]]
name = "kombu"
version = "5.5.4"
@@ -3137,6 +3437,85 @@ dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
tests = ["pytest", "simplejson"]

[[package]]
name = "matplotlib"
version = "3.10.6"
description = "Python plotting package"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "matplotlib-3.10.6-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bc7316c306d97463a9866b89d5cc217824e799fa0de346c8f68f4f3d27c8693d"},
    {file = "matplotlib-3.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d00932b0d160ef03f59f9c0e16d1e3ac89646f7785165ce6ad40c842db16cc2e"},
    {file = "matplotlib-3.10.6-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8fa4c43d6bfdbfec09c733bca8667de11bfa4970e8324c471f3a3632a0301c15"},
    {file = "matplotlib-3.10.6-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea117a9c1627acaa04dbf36265691921b999cbf515a015298e54e1a12c3af837"},
    {file = "matplotlib-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:08fc803293b4e1694ee325896030de97f74c141ccff0be886bb5915269247676"},
    {file = "matplotlib-3.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:2adf92d9b7527fbfb8818e050260f0ebaa460f79d61546374ce73506c9421d09"},
    {file = "matplotlib-3.10.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:905b60d1cb0ee604ce65b297b61cf8be9f4e6cfecf95a3fe1c388b5266bc8f4f"},
    {file = "matplotlib-3.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bac38d816637343e53d7185d0c66677ff30ffb131044a81898b5792c956ba76"},
    {file = "matplotlib-3.10.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:942a8de2b5bfff1de31d95722f702e2966b8a7e31f4e68f7cd963c7cd8861cf6"},
    {file = "matplotlib-3.10.6-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3276c85370bc0dfca051ec65c5817d1e0f8f5ce1b7787528ec8ed2d524bbc2f"},
    {file = "matplotlib-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9df5851b219225731f564e4b9e7f2ac1e13c9e6481f941b5631a0f8e2d9387ce"},
    {file = "matplotlib-3.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:abb5d9478625dd9c9eb51a06d39aae71eda749ae9b3138afb23eb38824026c7e"},
    {file = "matplotlib-3.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:886f989ccfae63659183173bb3fced7fd65e9eb793c3cc21c273add368536951"},
    {file = "matplotlib-3.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31ca662df6a80bd426f871105fdd69db7543e28e73a9f2afe80de7e531eb2347"},
    {file = "matplotlib-3.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1678bb61d897bb4ac4757b5ecfb02bfb3fddf7f808000fb81e09c510712fda75"},
    {file = "matplotlib-3.10.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:56cd2d20842f58c03d2d6e6c1f1cf5548ad6f66b91e1e48f814e4fb5abd1cb95"},
    {file = "matplotlib-3.10.6-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:662df55604a2f9a45435566d6e2660e41efe83cd94f4288dfbf1e6d1eae4b0bb"},
    {file = "matplotlib-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08f141d55148cd1fc870c3387d70ca4df16dee10e909b3b038782bd4bda6ea07"},
    {file = "matplotlib-3.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:590f5925c2d650b5c9d813c5b3b5fc53f2929c3f8ef463e4ecfa7e052044fb2b"},
    {file = "matplotlib-3.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:f44c8d264a71609c79a78d50349e724f5d5fc3684ead7c2a473665ee63d868aa"},
    {file = "matplotlib-3.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:819e409653c1106c8deaf62e6de6b8611449c2cd9939acb0d7d4e57a3d95cc7a"},
    {file = "matplotlib-3.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:59c8ac8382fefb9cb71308dde16a7c487432f5255d8f1fd32473523abecfecdf"},
    {file = "matplotlib-3.10.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:84e82d9e0fd70c70bc55739defbd8055c54300750cbacf4740c9673a24d6933a"},
    {file = "matplotlib-3.10.6-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25f7a3eb42d6c1c56e89eacd495661fc815ffc08d9da750bca766771c0fd9110"},
    {file = "matplotlib-3.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f9c862d91ec0b7842920a4cfdaaec29662195301914ea54c33e01f1a28d014b2"},
    {file = "matplotlib-3.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:1b53bd6337eba483e2e7d29c5ab10eee644bc3a2491ec67cc55f7b44583ffb18"},
    {file = "matplotlib-3.10.6-cp313-cp313-win_arm64.whl", hash = "sha256:cbd5eb50b7058b2892ce45c2f4e92557f395c9991f5c886d1bb74a1582e70fd6"},
    {file = "matplotlib-3.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:acc86dd6e0e695c095001a7fccff158c49e45e0758fdf5dcdbb0103318b59c9f"},
    {file = "matplotlib-3.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e228cd2ffb8f88b7d0b29e37f68ca9aaf83e33821f24a5ccc4f082dd8396bc27"},
    {file = "matplotlib-3.10.6-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:658bc91894adeab669cf4bb4a186d049948262987e80f0857216387d7435d833"},
    {file = "matplotlib-3.10.6-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8913b7474f6dd83ac444c9459c91f7f0f2859e839f41d642691b104e0af056aa"},
    {file = "matplotlib-3.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:091cea22e059b89f6d7d1a18e2c33a7376c26eee60e401d92a4d6726c4e12706"},
    {file = "matplotlib-3.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:491e25e02a23d7207629d942c666924a6b61e007a48177fdd231a0097b7f507e"},
    {file = "matplotlib-3.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3d80d60d4e54cda462e2cd9a086d85cd9f20943ead92f575ce86885a43a565d5"},
    {file = "matplotlib-3.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:70aaf890ce1d0efd482df969b28a5b30ea0b891224bb315810a3940f67182899"},
    {file = "matplotlib-3.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1565aae810ab79cb72e402b22facfa6501365e73ebab70a0fdfb98488d2c3c0c"},
    {file = "matplotlib-3.10.6-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3b23315a01981689aa4e1a179dbf6ef9fbd17143c3eea77548c2ecfb0499438"},
    {file = "matplotlib-3.10.6-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:30fdd37edf41a4e6785f9b37969de57aea770696cb637d9946eb37470c94a453"},
    {file = "matplotlib-3.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bc31e693da1c08012c764b053e702c1855378e04102238e6a5ee6a7117c53a47"},
    {file = "matplotlib-3.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:05be9bdaa8b242bc6ff96330d18c52f1fc59c6fb3a4dd411d953d67e7e1baf98"},
    {file = "matplotlib-3.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:f56a0d1ab05d34c628592435781d185cd99630bdfd76822cd686fb5a0aecd43a"},
    {file = "matplotlib-3.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:94f0b4cacb23763b64b5dace50d5b7bfe98710fed5f0cef5c08135a03399d98b"},
    {file = "matplotlib-3.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cc332891306b9fb39462673d8225d1b824c89783fee82840a709f96714f17a5c"},
    {file = "matplotlib-3.10.6-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee1d607b3fb1590deb04b69f02ea1d53ed0b0bf75b2b1a5745f269afcbd3cdd3"},
    {file = "matplotlib-3.10.6-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:376a624a218116461696b27b2bbf7a8945053e6d799f6502fc03226d077807bf"},
    {file = "matplotlib-3.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:83847b47f6524c34b4f2d3ce726bb0541c48c8e7692729865c3df75bfa0f495a"},
    {file = "matplotlib-3.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c7e0518e0d223683532a07f4b512e2e0729b62674f1b3a1a69869f98e6b1c7e3"},
    {file = "matplotlib-3.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:4dd83e029f5b4801eeb87c64efd80e732452781c16a9cf7415b7b63ec8f374d7"},
    {file = "matplotlib-3.10.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:13fcd07ccf17e354398358e0307a1f53f5325dca22982556ddb9c52837b5af41"},
    {file = "matplotlib-3.10.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:470fc846d59d1406e34fa4c32ba371039cd12c2fe86801159a965956f2575bd1"},
    {file = "matplotlib-3.10.6-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f7173f8551b88f4ef810a94adae3128c2530e0d07529f7141be7f8d8c365f051"},
    {file = "matplotlib-3.10.6-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f2d684c3204fa62421bbf770ddfebc6b50130f9cad65531eeba19236d73bb488"},
    {file = "matplotlib-3.10.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6f4a69196e663a41d12a728fab8751177215357906436804217d6d9cf0d4d6cf"},
    {file = "matplotlib-3.10.6-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d6ca6ef03dfd269f4ead566ec6f3fb9becf8dab146fb999022ed85ee9f6b3eb"},
    {file = "matplotlib-3.10.6.tar.gz", hash = "sha256:ec01b645840dd1996df21ee37f208cd8ba57644779fa20464010638013d3203c"},
]

[package.dependencies]
contourpy = ">=1.0.1"
cycler = ">=0.10"
fonttools = ">=4.22.0"
kiwisolver = ">=1.3.1"
numpy = ">=1.23"
packaging = ">=20.0"
pillow = ">=8"
pyparsing = ">=2.3.1"
python-dateutil = ">=2.7"

[package.extras]
dev = ["meson-python (>=0.13.1,<0.17.0)", "pybind11 (>=2.13.2,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"]

[[package]]
name = "mccabe"
version = "0.7.0"
@@ -3857,6 +4236,131 @@ files = [
[package.dependencies]
setuptools = "*"

[[package]]
name = "pillow"
version = "11.3.0"
description = "Python Imaging Library (Fork)"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"},
    {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"},
    {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"},
    {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"},
    {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"},
    {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"},
    {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"},
    {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"},
    {file = "pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"},
    {file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"},
    {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"},
    {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"},
    {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"},
    {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"},
    {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"},
    {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"},
    {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"},
    {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"},
    {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"},
    {file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"},
    {file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"},
    {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"},
    {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"},
    {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"},
    {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"},
    {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"},
    {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"},
    {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"},
    {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"},
    {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"},
    {file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"},
    {file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"},
    {file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"},
    {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"},
    {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"},
    {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"},
    {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"},
    {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"},
    {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"},
    {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"},
    {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"},
    {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"},
    {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"},
    {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"},
    {file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"},
    {file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"},
    {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"},
    {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"},
    {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"},
    {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"},
    {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"},
    {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"},
    {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"},
    {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"},
    {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"},
    {file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"},
    {file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"},
    {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"},
    {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"},
    {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"},
    {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"},
    {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"},
    {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"},
    {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"},
    {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"},
    {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"},
    {file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"},
    {file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"},
    {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"},
    {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"},
    {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"},
    {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"},
    {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"},
    {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"},
    {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"},
    {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"},
    {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"},
    {file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"},
    {file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"},
    {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"},
    {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"},
    {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"},
    {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"},
    {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"},
    {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"},
    {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"},
    {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"},
    {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"},
    {file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"},
    {file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"},
    {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"},
    {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"},
]

[package.extras]
docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"]
fpx = ["olefile"]
mic = ["olefile"]
test-arrow = ["pyarrow"]
tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"]
typing = ["typing-extensions ; python_version < \"3.10\""]
xmp = ["defusedxml"]

[[package]]
name = "platformdirs"
version = "4.3.8"
@@ -5016,6 +5520,29 @@ attrs = ">=22.2.0"
rpds-py = ">=0.7.0"
typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""}

[[package]]
name = "reportlab"
version = "4.4.4"
description = "The Reportlab Toolkit"
optional = false
python-versions = "<4,>=3.9"
groups = ["main"]
files = [
    {file = "reportlab-4.4.4-py3-none-any.whl", hash = "sha256:299b3b0534e7202bb94ed2ddcd7179b818dcda7de9d8518a57c85a58a1ebaadb"},
    {file = "reportlab-4.4.4.tar.gz", hash = "sha256:cb2f658b7f4a15be2cc68f7203aa67faef67213edd4f2d4bdd3eb20dab75a80d"},
]

[package.dependencies]
charset-normalizer = "*"
pillow = ">=9.0.0"

[package.extras]
accel = ["rl_accel (>=0.9.0,<1.1)"]
bidi = ["rlbidi"]
pycairo = ["freetype-py (>=2.3.0,<2.4)", "rlPyCairo (>=0.2.0,<1)"]
renderpm = ["rl_renderPM (>=4.0.3,<4.1)"]
shaping = ["uharfbuzz"]

[[package]]
name = "requests"
version = "2.32.5"
@@ -6259,4 +6786,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
-content-hash = "03442fd4673006c5a74374f90f53621fd1c9d117279fe6cc0355ef833eb7f9bb"
+content-hash = "3c9164d668d37d6373eb5200bbe768232ead934d9312b9c68046b1df922789f3"

+4 -2
@@ -33,7 +33,9 @@ dependencies = [
    "xmlsec==1.3.14",
    "h2 (==4.3.0)",
    "markdown (>=3.9,<4.0)",
-   "drf-simple-apikey (==2.2.1)"
+   "drf-simple-apikey (==2.2.1)",
+   "matplotlib (>=3.10.6,<4.0.0)",
+   "reportlab (>=4.4.4,<5.0.0)"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
@@ -41,7 +43,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
-version = "1.14.0"
+version = "1.15.0"

[project.scripts]
celery = "src.backend.config.settings.celery"
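The two new runtime dependencies above pair chart rendering (matplotlib) with PDF assembly (reportlab). As a minimal, hedged sketch of how they compose, with none of the names below taken from the Prowler codebase: render a figure to an in-memory PNG using the headless Agg backend, then place it on a reportlab canvas.

import io

import matplotlib

matplotlib.use("Agg")  # headless backend; no display server needed
import matplotlib.pyplot as plt
from reportlab.lib.pagesizes import letter
from reportlab.lib.utils import ImageReader
from reportlab.pdfgen import canvas


def chart_to_pdf(severity_counts: dict, pdf_path: str) -> None:
    # Render the chart into an in-memory PNG buffer.
    fig, ax = plt.subplots(figsize=(4, 3))
    ax.bar(list(severity_counts), list(severity_counts.values()))
    ax.set_title("Findings by severity")  # hypothetical report content
    buffer = io.BytesIO()
    fig.savefig(buffer, format="png", dpi=150)
    plt.close(fig)
    buffer.seek(0)

    # Embed the PNG into a single-page PDF.
    page = canvas.Canvas(pdf_path, pagesize=letter)
    page.drawImage(ImageReader(buffer), x=72, y=450, width=288, height=216)
    page.save()


chart_to_pdf({"critical": 3, "high": 11, "low": 25}, "scan_report.pdf")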
@@ -27,6 +27,8 @@ from api.models import (
    Finding,
    Integration,
    Invitation,
    LighthouseProviderConfiguration,
    LighthouseProviderModels,
    Membership,
    OverviewStatusChoices,
    PermissionChoices,
@@ -765,6 +767,7 @@ class ComplianceOverviewFilter(FilterSet):
class ScanSummaryFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider_id__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
    provider_type = ChoiceFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
@@ -927,3 +930,45 @@ class TenantApiKeyFilter(FilterSet):
        "revoked": ["exact"],
        "name": ["exact", "icontains"],
    }


class LighthouseProviderConfigFilter(FilterSet):
    provider_type = ChoiceFilter(
        choices=LighthouseProviderConfiguration.LLMProviderChoices.choices
    )
    provider_type__in = ChoiceInFilter(
        choices=LighthouseProviderConfiguration.LLMProviderChoices.choices,
        field_name="provider_type",
        lookup_expr="in",
    )
    is_active = BooleanFilter()

    class Meta:
        model = LighthouseProviderConfiguration
        fields = {
            "provider_type": ["exact", "in"],
            "is_active": ["exact"],
        }


class LighthouseProviderModelsFilter(FilterSet):
    provider_type = ChoiceFilter(
        choices=LighthouseProviderConfiguration.LLMProviderChoices.choices,
        field_name="provider_configuration__provider_type",
    )
    provider_type__in = ChoiceInFilter(
        choices=LighthouseProviderConfiguration.LLMProviderChoices.choices,
        field_name="provider_configuration__provider_type",
        lookup_expr="in",
    )

    # Allow filtering by model id
    model_id = CharFilter(field_name="model_id", lookup_expr="exact")
    model_id__icontains = CharFilter(field_name="model_id", lookup_expr="icontains")
    model_id__in = CharInFilter(field_name="model_id", lookup_expr="in")

    class Meta:
        model = LighthouseProviderModels
        fields = {
            "model_id": ["exact", "icontains", "in"],
        }
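For orientation, a short usage sketch of how django-filter resolves these declarations at request time; the parameter values below are invented for illustration.

params = {"provider_type": "openai", "model_id__icontains": "gpt"}
filterset = LighthouseProviderModelsFilter(
    params, queryset=LighthouseProviderModels.objects.all()
)
# .qs applies provider_configuration__provider_type="openai" and
# model_id__icontains="gpt" as ORM lookups on the queryset.
matching_models = filterset.qs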
@@ -24,7 +24,7 @@ class Migration(migrations.Migration):
(
    "name",
    models.CharField(
-        max_length=255,
+        max_length=100,
        validators=[django.core.validators.MinLengthValidator(3)],
    ),
),
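Tightening max_length from 255 to 100 inside an existing migration rewrites Django's historical state rather than issuing an ALTER on already-deployed databases; fresh installs get the narrower column, while any separate schema change would need existing rows to fit. A hedged pre-flight sketch, with ApiKeyModel as a hypothetical stand-in for whichever model owns this field:

from django.db.models.functions import Length

# ApiKeyModel is hypothetical; substitute the model that owns the "name" field.
too_long = ApiKeyModel.objects.annotate(name_len=Length("name")).filter(
    name_len__gt=100
)
assert not too_long.exists(), "rows exceed the new 100-character limit"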
@@ -0,0 +1,266 @@
# Generated by Django 5.1.12 on 2025-10-09 07:50

import json
import logging
import uuid

import django.db.models.deletion
from config.custom_logging import BackendLogger
from cryptography.fernet import Fernet
from django.conf import settings
from django.db import migrations, models

import api.rls
from api.db_router import MainRouter

logger = logging.getLogger(BackendLogger.API)


def migrate_lighthouse_configs_forward(apps, schema_editor):
    """
    Migrate data from old LighthouseConfiguration to new multi-provider models.
    Old system: one LighthouseConfiguration per tenant (always OpenAI).
    """
    LighthouseConfiguration = apps.get_model("api", "LighthouseConfiguration")
    LighthouseProviderConfiguration = apps.get_model(
        "api", "LighthouseProviderConfiguration"
    )
    LighthouseTenantConfiguration = apps.get_model(
        "api", "LighthouseTenantConfiguration"
    )
    LighthouseProviderModels = apps.get_model("api", "LighthouseProviderModels")

    fernet = Fernet(settings.SECRETS_ENCRYPTION_KEY.encode())

    # Migrate only tenants that actually have a LighthouseConfiguration
    for old_config in (
        LighthouseConfiguration.objects.using(MainRouter.admin_db)
        .select_related("tenant")
        .all()
    ):
        tenant = old_config.tenant
        tenant_id = str(tenant.id)

        try:
            # Create OpenAI provider configuration for this tenant
            api_key_decrypted = fernet.decrypt(bytes(old_config.api_key)).decode()
            credentials_encrypted = fernet.encrypt(
                json.dumps({"api_key": api_key_decrypted}).encode()
            )
            provider_config = LighthouseProviderConfiguration.objects.using(
                MainRouter.admin_db
            ).create(
                tenant=tenant,
                provider_type="openai",
                credentials=credentials_encrypted,
                is_active=old_config.is_active,
            )

            # Create tenant configuration from old values
            LighthouseTenantConfiguration.objects.using(MainRouter.admin_db).create(
                tenant=tenant,
                business_context=old_config.business_context or "",
                default_provider="openai",
                default_models={"openai": old_config.model},
            )

            # Create initial provider model record
            LighthouseProviderModels.objects.using(MainRouter.admin_db).create(
                tenant=tenant,
                provider_configuration=provider_config,
                model_id=old_config.model,
                model_name=old_config.model,
                default_parameters={},
            )

        except Exception:
            logger.exception(
                "Failed to migrate lighthouse config for tenant %s", tenant_id
            )
            continue

class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0049_compliancerequirementoverview_passed_failed_findings"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="LighthouseProviderConfiguration",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"provider_type",
|
||||
models.CharField(
|
||||
choices=[("openai", "OpenAI")],
|
||||
help_text="LLM provider name",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
("base_url", models.URLField(blank=True, null=True)),
|
||||
(
|
||||
"credentials",
|
||||
models.BinaryField(
|
||||
help_text="Encrypted JSON credentials for the provider"
|
||||
),
|
||||
),
|
||||
("is_active", models.BooleanField(default=True)),
|
||||
],
|
||||
options={
|
||||
"db_table": "lighthouse_provider_configurations",
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="LighthouseProviderModels",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("model_id", models.CharField(max_length=100)),
|
||||
("model_name", models.CharField(max_length=100)),
|
||||
("default_parameters", models.JSONField(blank=True, default=dict)),
|
||||
],
|
||||
options={
|
||||
"db_table": "lighthouse_provider_models",
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="LighthouseTenantConfiguration",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("inserted_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("business_context", models.TextField(blank=True, default="")),
|
||||
("default_provider", models.CharField(blank=True, max_length=50)),
|
||||
("default_models", models.JSONField(blank=True, default=dict)),
|
||||
],
|
||||
options={
|
||||
"db_table": "lighthouse_tenant_config",
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="lighthouseproviderconfiguration",
|
||||
name="tenant",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="lighthouseprovidermodels",
|
||||
name="provider_configuration",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="available_models",
|
||||
to="api.lighthouseproviderconfiguration",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="lighthouseprovidermodels",
|
||||
name="tenant",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="lighthousetenantconfiguration",
|
||||
name="tenant",
|
||||
field=models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="lighthouseproviderconfiguration",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "provider_type"], name="lh_pc_tenant_type_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="lighthouseproviderconfiguration",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_lighthouseproviderconfiguration",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="lighthouseproviderconfiguration",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant_id", "provider_type"),
|
||||
name="unique_provider_config_per_tenant",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="lighthouseprovidermodels",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "provider_configuration"],
|
||||
name="lh_prov_models_cfg_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="lighthouseprovidermodels",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_lighthouseprovidermodels",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="lighthouseprovidermodels",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant_id", "provider_configuration", "model_id"),
|
||||
name="unique_provider_model_per_configuration",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="lighthousetenantconfiguration",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_lighthousetenantconfiguration",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="lighthousetenantconfiguration",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant_id",), name="unique_tenant_lighthouse_config"
|
||||
),
|
||||
),
|
||||
# Migrate data from old LighthouseConfiguration to new tables
|
||||
# This runs after all tables, indexes, and constraints are created
|
||||
# The old Lighthouse configuration table is not removed, so reverse_code is noop
|
||||
# During rollbacks, the old Lighthouse configuration remains intact while the new tables are removed
|
||||
migrations.RunPython(
|
||||
migrate_lighthouse_configs_forward,
|
||||
reverse_code=migrations.RunPython.noop,
|
||||
),
|
||||
]
|
||||
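The RunPython step above calls migrate_lighthouse_configs_forward, which this excerpt does not show. A minimal sketch, assuming the old LighthouseConfiguration row carries the encrypted api_key and a business_context field (names, defaults, and field handling here are illustrative, not the commit's actual implementation):

def migrate_lighthouse_configs_forward(apps, schema_editor):
    # Historical models, as required inside RunPython steps
    OldConfig = apps.get_model("api", "LighthouseConfiguration")
    ProviderConfig = apps.get_model("api", "LighthouseProviderConfiguration")
    TenantConfig = apps.get_model("api", "LighthouseTenantConfiguration")

    for old in OldConfig.objects.all():
        # One provider configuration per tenant; the real migration likely
        # re-encodes the key into the encrypted JSON credentials shape
        ProviderConfig.objects.get_or_create(
            tenant_id=old.tenant_id,
            provider_type="openai",
            defaults={"credentials": old.api_key, "is_active": True},
        )
        # One tenant-level record carrying business context and defaults
        TenantConfig.objects.get_or_create(
            tenant_id=old.tenant_id,
            defaults={
                "business_context": getattr(old, "business_context", ""),
                "default_provider": "openai",
            },
        )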
+181
-25
@@ -1873,22 +1873,6 @@ class LighthouseConfiguration(RowLevelSecurityProtectedModel):
    def clean(self):
        super().clean()

        # Validate temperature
        if not 0 <= self.temperature <= 1:
            raise ModelValidationError(
                detail="Temperature must be between 0 and 1",
                code="invalid_temperature",
                pointer="/data/attributes/temperature",
            )

        # Validate max_tokens
        if not 500 <= self.max_tokens <= 5000:
            raise ModelValidationError(
                detail="Max tokens must be between 500 and 5000",
                code="invalid_max_tokens",
                pointer="/data/attributes/max_tokens",
            )

    @property
    def api_key_decoded(self):
        """Return the decrypted API key, or None if unavailable or invalid."""
@@ -1913,15 +1897,6 @@ class LighthouseConfiguration(RowLevelSecurityProtectedModel):
                code="invalid_api_key",
                pointer="/data/attributes/api_key",
            )

        # Validate OpenAI API key format
        openai_key_pattern = r"^sk-[\w-]+T3BlbkFJ[\w-]+$"
        if not re.match(openai_key_pattern, value):
            raise ModelValidationError(
                detail="Invalid OpenAI API key format.",
                code="invalid_api_key",
                pointer="/data/attributes/api_key",
            )
        self.api_key = fernet.encrypt(value.encode())

    def save(self, *args, **kwargs):
@@ -1984,3 +1959,184 @@ class Processor(RowLevelSecurityProtectedModel):

    class JSONAPIMeta:
        resource_name = "processors"


class LighthouseProviderConfiguration(RowLevelSecurityProtectedModel):
    """
    Per-tenant configuration for an LLM provider (credentials, base URL, activation).

    One configuration per provider type per tenant.
    """

    class LLMProviderChoices(models.TextChoices):
        OPENAI = "openai", _("OpenAI")

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    provider_type = models.CharField(
        max_length=50,
        choices=LLMProviderChoices.choices,
        help_text="LLM provider name",
    )

    # For OpenAI-compatible providers
    base_url = models.URLField(blank=True, null=True)

    # Encrypted JSON for provider-specific auth
    credentials = models.BinaryField(
        blank=False, null=False, help_text="Encrypted JSON credentials for the provider"
    )

    is_active = models.BooleanField(default=True)

    def __str__(self):
        return f"{self.get_provider_type_display()} ({self.tenant_id})"

    def clean(self):
        super().clean()

    @property
    def credentials_decoded(self):
        if not self.credentials:
            return None
        try:
            decrypted_data = fernet.decrypt(bytes(self.credentials))
            return json.loads(decrypted_data.decode())
        except (InvalidToken, json.JSONDecodeError) as e:
            logger.warning("Failed to decrypt provider credentials: %s", e)
            return None
        except Exception as e:
            logger.exception(
                "Unexpected error while decrypting provider credentials: %s", e
            )
            return None

    @credentials_decoded.setter
    def credentials_decoded(self, value):
        """
        Set and encrypt credentials (assumes serializer performed validation).
        """
        if not value:
            raise ModelValidationError(
                detail="Credentials are required",
                code="invalid_credentials",
                pointer="/data/attributes/credentials",
            )
        self.credentials = fernet.encrypt(json.dumps(value).encode())

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "lighthouse_provider_configurations"

        constraints = [
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
            models.UniqueConstraint(
                fields=["tenant_id", "provider_type"],
                name="unique_provider_config_per_tenant",
            ),
        ]

        indexes = [
            models.Index(
                fields=["tenant_id", "provider_type"],
                name="lh_pc_tenant_type_idx",
            ),
        ]

    class JSONAPIMeta:
        resource_name = "lighthouse-providers"


class LighthouseTenantConfiguration(RowLevelSecurityProtectedModel):
    """
    Tenant-level Lighthouse settings (business context and defaults).
    One record per tenant.
    """

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    business_context = models.TextField(blank=True, default="")

    # Preferred provider key (e.g., "openai", "bedrock", "openai_compatible")
    default_provider = models.CharField(max_length=50, blank=True)

    # Mapping of provider -> model id, e.g., {"openai": "gpt-4o", "bedrock": "anthropic.claude-v2"}
    default_models = models.JSONField(default=dict, blank=True)

    def __str__(self):
        return f"Lighthouse Tenant Config for {self.tenant_id}"

    def clean(self):
        super().clean()

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "lighthouse_tenant_config"

        constraints = [
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
            models.UniqueConstraint(
                fields=["tenant_id"], name="unique_tenant_lighthouse_config"
            ),
        ]

    class JSONAPIMeta:
        resource_name = "lighthouse-config"


class LighthouseProviderModels(RowLevelSecurityProtectedModel):
    """
    Per-tenant, per-provider configuration list of available LLM models.
    RLS-protected; populated via provider API using tenant-scoped credentials.
    """

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
    updated_at = models.DateTimeField(auto_now=True, editable=False)

    # Scope to a specific provider configuration within a tenant
    provider_configuration = models.ForeignKey(
        LighthouseProviderConfiguration,
        on_delete=models.CASCADE,
        related_name="available_models",
    )
    model_id = models.CharField(max_length=100)

    # Human-friendly model name
    model_name = models.CharField(max_length=100)

    # Model-specific default parameters (e.g., temperature, max_tokens)
    default_parameters = models.JSONField(default=dict, blank=True)

    def __str__(self):
        return f"{self.provider_configuration.provider_type}:{self.model_id} ({self.tenant_id})"

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "lighthouse_provider_models"
        constraints = [
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
            models.UniqueConstraint(
                fields=["tenant_id", "provider_configuration", "model_id"],
                name="unique_provider_model_per_configuration",
            ),
        ]
        indexes = [
            models.Index(
                fields=["tenant_id", "provider_configuration"],
                name="lh_prov_models_cfg_idx",
            ),
        ]
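For reference, the credentials_decoded property and setter above round-trip a JSON object through Fernet. A standalone sketch of that round trip (the API derives fernet from a configured secret; the key below is generated only for illustration):

import json

from cryptography.fernet import Fernet, InvalidToken

fernet = Fernet(Fernet.generate_key())

encrypted = fernet.encrypt(json.dumps({"api_key": "sk-example"}).encode())
try:
    decoded = json.loads(fernet.decrypt(encrypted).decode())
except (InvalidToken, json.JSONDecodeError):
    decoded = None  # mirrors the property returning None on undecryptable data

assert decoded == {"api_key": "sk-example"}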
@@ -6,7 +6,14 @@ from django.dispatch import receiver
from django_celery_results.backends.database import DatabaseBackend

from api.db_utils import delete_related_daily_task
from api.models import Membership, Provider, TenantAPIKey, User
from api.models import (
    LighthouseProviderConfiguration,
    LighthouseTenantConfiguration,
    Membership,
    Provider,
    TenantAPIKey,
    User,
)


def create_task_result_on_publish(sender=None, headers=None, **kwargs):  # noqa: F841
@@ -56,3 +63,33 @@ def revoke_membership_api_keys(sender, instance, **kwargs):  # noqa: F841
    TenantAPIKey.objects.filter(
        entity=instance.user, tenant_id=instance.tenant.id
    ).update(revoked=True)


@receiver(pre_delete, sender=LighthouseProviderConfiguration)
def cleanup_lighthouse_defaults_before_delete(sender, instance, **kwargs):  # noqa: F841
    """
    Ensure tenant Lighthouse defaults do not reference a soon-to-be-deleted provider.

    This runs for both per-instance deletes and queryset (bulk) deletes.
    """
    try:
        tenant_cfg = LighthouseTenantConfiguration.objects.get(
            tenant_id=instance.tenant_id
        )
    except LighthouseTenantConfiguration.DoesNotExist:
        return

    updated = False
    defaults = tenant_cfg.default_models or {}

    if instance.provider_type in defaults:
        defaults.pop(instance.provider_type, None)
        tenant_cfg.default_models = defaults
        updated = True

    if tenant_cfg.default_provider == instance.provider_type:
        tenant_cfg.default_provider = ""
        updated = True

    if updated:
        tenant_cfg.save()
File diff suppressed because it is too large
@@ -35,6 +35,9 @@ from api.db_router import MainRouter
from api.models import (
    Integration,
    Invitation,
    LighthouseProviderConfiguration,
    LighthouseProviderModels,
    LighthouseTenantConfiguration,
    Membership,
    Processor,
    Provider,
@@ -46,6 +49,7 @@ from api.models import (
    SAMLConfiguration,
    SAMLToken,
    Scan,
    ScanSummary,
    StateChoices,
    Task,
    TenantAPIKey,
@@ -2688,6 +2692,55 @@ class TestScanViewSet:
            == "There is a problem with credentials."
        )

    @patch("api.v1.views.ScanViewSet._get_task_status")
    @patch("api.v1.views.get_s3_client")
    @patch("api.v1.views.env.str")
    def test_threatscore_s3_wildcard(
        self,
        mock_env_str,
        mock_get_s3_client,
        mock_get_task_status,
        authenticated_client,
        scans_fixture,
    ):
        """
        When the threatscore endpoint is called with an S3 output_location,
        the view should list objects in S3 using wildcard pattern matching,
        retrieve the matching PDF file, and return it with HTTP 200 and proper headers.
        """
        scan = scans_fixture[0]
        scan.state = StateChoices.COMPLETED
        bucket = "test-bucket"
        zip_key = "tenant-id/scan-id/prowler-output-foo.zip"
        scan.output_location = f"s3://{bucket}/{zip_key}"
        scan.save()

        pdf_key = os.path.join(
            os.path.dirname(zip_key),
            "threatscore",
            "prowler-output-123_threatscore_report.pdf",
        )

        mock_s3_client = Mock()
        mock_s3_client.list_objects_v2.return_value = {"Contents": [{"Key": pdf_key}]}
        mock_s3_client.get_object.return_value = {"Body": io.BytesIO(b"pdf-bytes")}

        mock_env_str.return_value = bucket
        mock_get_s3_client.return_value = mock_s3_client
        mock_get_task_status.return_value = None

        url = reverse("scan-threatscore", kwargs={"pk": scan.id})
        response = authenticated_client.get(url)

        assert response.status_code == status.HTTP_200_OK
        assert response["Content-Type"] == "application/pdf"
        assert response["Content-Disposition"].endswith(
            '"prowler-output-123_threatscore_report.pdf"'
        )
        assert response.content == b"pdf-bytes"
        mock_s3_client.list_objects_v2.assert_called_once()
        mock_s3_client.get_object.assert_called_once_with(Bucket=bucket, Key=pdf_key)

    def test_report_s3_success(self, authenticated_client, scans_fixture, monkeypatch):
        """
        When output_location is an S3 URL and the S3 client returns the file successfully,
@@ -5766,6 +5819,171 @@ class TestOverviewViewSet:
        assert service1_data["attributes"]["muted"] == 1
        assert service2_data["attributes"]["muted"] == 0

    def test_overview_findings_provider_id_in_filter(
        self, authenticated_client, tenants_fixture, providers_fixture
    ):
        tenant = tenants_fixture[0]
        provider1, provider2, *_ = providers_fixture

        scan1 = Scan.objects.create(
            name="scan-one",
            provider=provider1,
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.COMPLETED,
            tenant=tenant,
        )
        scan2 = Scan.objects.create(
            name="scan-two",
            provider=provider2,
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.COMPLETED,
            tenant=tenant,
        )

        ScanSummary.objects.create(
            tenant=tenant,
            scan=scan1,
            check_id="check-provider-one",
            service="service-a",
            severity="high",
            region="region-a",
            _pass=5,
            fail=1,
            muted=2,
            total=8,
            new=5,
            changed=2,
            unchanged=1,
            fail_new=1,
            fail_changed=0,
            pass_new=3,
            pass_changed=2,
            muted_new=1,
            muted_changed=1,
        )

        ScanSummary.objects.create(
            tenant=tenant,
            scan=scan2,
            check_id="check-provider-two",
            service="service-b",
            severity="medium",
            region="region-b",
            _pass=2,
            fail=3,
            muted=1,
            total=6,
            new=3,
            changed=2,
            unchanged=1,
            fail_new=2,
            fail_changed=1,
            pass_new=1,
            pass_changed=1,
            muted_new=1,
            muted_changed=0,
        )

        single_response = authenticated_client.get(
            reverse("overview-findings"),
            {"filter[provider_id__in]": str(provider1.id)},
        )
        assert single_response.status_code == status.HTTP_200_OK
        single_attributes = single_response.json()["data"]["attributes"]
        assert single_attributes["pass"] == 5
        assert single_attributes["fail"] == 1
        assert single_attributes["muted"] == 2
        assert single_attributes["total"] == 8

        combined_response = authenticated_client.get(
            reverse("overview-findings"),
            {"filter[provider_id__in]": f"{provider1.id},{provider2.id}"},
        )
        assert combined_response.status_code == status.HTTP_200_OK
        combined_attributes = combined_response.json()["data"]["attributes"]
        assert combined_attributes["pass"] == 7
        assert combined_attributes["fail"] == 4
        assert combined_attributes["muted"] == 3
        assert combined_attributes["total"] == 14

    def test_overview_findings_severity_provider_id_in_filter(
        self, authenticated_client, tenants_fixture, providers_fixture
    ):
        tenant = tenants_fixture[0]
        provider1, provider2, *_ = providers_fixture

        scan1 = Scan.objects.create(
            name="severity-scan-one",
            provider=provider1,
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.COMPLETED,
            tenant=tenant,
        )
        scan2 = Scan.objects.create(
            name="severity-scan-two",
            provider=provider2,
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.COMPLETED,
            tenant=tenant,
        )

        ScanSummary.objects.create(
            tenant=tenant,
            scan=scan1,
            check_id="severity-check-one",
            service="service-a",
            severity="high",
            region="region-a",
            _pass=4,
            fail=4,
            muted=0,
            total=8,
        )
        ScanSummary.objects.create(
            tenant=tenant,
            scan=scan1,
            check_id="severity-check-two",
            service="service-a",
            severity="medium",
            region="region-b",
            _pass=2,
            fail=2,
            muted=0,
            total=4,
        )
        ScanSummary.objects.create(
            tenant=tenant,
            scan=scan2,
            check_id="severity-check-three",
            service="service-b",
            severity="critical",
            region="region-c",
            _pass=1,
            fail=2,
            muted=0,
            total=3,
        )

        single_response = authenticated_client.get(
            reverse("overview-findings_severity"),
            {"filter[provider_id__in]": str(provider1.id)},
        )
        assert single_response.status_code == status.HTTP_200_OK
        single_attributes = single_response.json()["data"]["attributes"]
        assert single_attributes["high"] == 8
        assert single_attributes["medium"] == 4
        assert single_attributes["critical"] == 0

        combined_response = authenticated_client.get(
            reverse("overview-findings_severity"),
            {"filter[provider_id__in]": f"{provider1.id},{provider2.id}"},
        )
        assert combined_response.status_code == status.HTTP_200_OK
        combined_attributes = combined_response.json()["data"]["attributes"]
        assert combined_attributes["high"] == 8
        assert combined_attributes["medium"] == 4
        assert combined_attributes["critical"] == 3


@pytest.mark.django_db
class TestScheduleViewSet:
@@ -8488,3 +8706,483 @@ class TestTenantApiKeyViewSet:
        # Verify error object structure
        error = response_data["errors"][0]
        assert "detail" in error or "title" in error


@pytest.mark.django_db
class TestLighthouseTenantConfigViewSet:
    """Test Lighthouse tenant configuration endpoint (singleton pattern)"""

    def test_lighthouse_tenant_config_create_via_patch(self, authenticated_client):
        """Test creating a tenant config successfully via PATCH (upsert)"""
        payload = {
            "data": {
                "type": "lighthouse-config",
                "attributes": {
                    "business_context": "Test business context for security analysis",
                    "default_provider": "",
                    "default_models": {},
                },
            }
        }
        response = authenticated_client.patch(
            reverse("lighthouse-config"),
            data=payload,
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]
        assert (
            data["attributes"]["business_context"]
            == "Test business context for security analysis"
        )
        assert data["attributes"]["default_provider"] == ""
        assert data["attributes"]["default_models"] == {}

    def test_lighthouse_tenant_config_upsert_behavior(self, authenticated_client):
        """Test that PATCH creates config if not exists and updates if exists (upsert)"""
        payload = {
            "data": {
                "type": "lighthouse-config",
                "attributes": {
                    "business_context": "First config",
                },
            }
        }

        # First PATCH creates the config
        response = authenticated_client.patch(
            reverse("lighthouse-config"),
            data=payload,
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert response.status_code == status.HTTP_200_OK
        first_data = response.json()["data"]
        assert first_data["attributes"]["business_context"] == "First config"

        # Second PATCH updates the same config (not creating a duplicate)
        payload["data"]["attributes"]["business_context"] = "Updated config"
        response = authenticated_client.patch(
            reverse("lighthouse-config"),
            data=payload,
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert response.status_code == status.HTTP_200_OK
        second_data = response.json()["data"]
        assert second_data["attributes"]["business_context"] == "Updated config"
        # Verify it's the same config (same ID)
        assert first_data["id"] == second_data["id"]

    @patch("openai.OpenAI")
    def test_lighthouse_tenant_config_retrieve(
        self, mock_openai_client, authenticated_client, tenants_fixture
    ):
        """Test retrieving the singleton tenant config with proper provider and model validation"""

        # Mock OpenAI client and models response
        mock_models_response = Mock()
        mock_models_response.data = [
            Mock(id="gpt-4o"),
            Mock(id="gpt-4o-mini"),
            Mock(id="gpt-5"),
        ]
        mock_openai_client.return_value.models.list.return_value = mock_models_response

        # Create OpenAI provider configuration
        provider_config = LighthouseProviderConfiguration.objects.create(
            tenant_id=tenants_fixture[0].id,
            provider_type="openai",
            credentials=b'{"api_key": "sk-test1234567890T3BlbkFJtest1234567890"}',
            is_active=True,
        )

        # Create provider models (simulating refresh)
        LighthouseProviderModels.objects.create(
            tenant_id=tenants_fixture[0].id,
            provider_configuration=provider_config,
            model_id="gpt-4o",
            default_parameters={},
        )
        LighthouseProviderModels.objects.create(
            tenant_id=tenants_fixture[0].id,
            provider_configuration=provider_config,
            model_id="gpt-4o-mini",
            default_parameters={},
        )

        # Create tenant configuration with valid provider and model
        config = LighthouseTenantConfiguration.objects.create(
            tenant_id=tenants_fixture[0].id,
            business_context="Test context",
            default_provider="openai",
            default_models={"openai": "gpt-4o"},
        )

        # Retrieve and verify the configuration
        response = authenticated_client.get(reverse("lighthouse-config"))
        assert response.status_code == status.HTTP_200_OK
        data = response.json()["data"]
        assert data["id"] == str(config.id)
        assert data["attributes"]["business_context"] == "Test context"
        assert data["attributes"]["default_provider"] == "openai"
        assert data["attributes"]["default_models"] == {"openai": "gpt-4o"}

    def test_lighthouse_tenant_config_retrieve_not_found(self, authenticated_client):
        """Test GET when config doesn't exist returns 404"""
        response = authenticated_client.get(reverse("lighthouse-config"))
        assert response.status_code == status.HTTP_404_NOT_FOUND
        assert "not found" in response.json()["errors"][0]["detail"].lower()

    def test_lighthouse_tenant_config_partial_update(
        self, authenticated_client, tenants_fixture
    ):
        """Test updating tenant config fields"""
        from api.models import LighthouseTenantConfiguration

        # Create config first
        config = LighthouseTenantConfiguration.objects.create(
            tenant_id=tenants_fixture[0].id,
            business_context="Original context",
            default_provider="",
            default_models={},
        )

        # Update it
        payload = {
            "data": {
                "type": "lighthouse-config",
                "attributes": {
                    "business_context": "Updated context for cloud security",
                },
            }
        }
        response = authenticated_client.patch(
            reverse("lighthouse-config"),
            data=payload,
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert response.status_code == status.HTTP_200_OK

        # Verify update
        config.refresh_from_db()
        assert config.business_context == "Updated context for cloud security"

    def test_lighthouse_tenant_config_update_invalid_provider(
        self, authenticated_client, tenants_fixture
    ):
        """Test validation fails when default_provider is not configured and active"""
        from api.models import LighthouseTenantConfiguration

        # Create config first
        LighthouseTenantConfiguration.objects.create(
            tenant_id=tenants_fixture[0].id,
            business_context="Test",
        )

        # Try to set invalid provider
        payload = {
            "data": {
                "type": "lighthouse-config",
                "attributes": {
                    "default_provider": "nonexistent-provider",
                },
            }
        }
        response = authenticated_client.patch(
            reverse("lighthouse-config"),
            data=payload,
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert "provider" in response.json()["errors"][0]["detail"].lower()

    def test_lighthouse_tenant_config_update_invalid_json_format(
        self, authenticated_client, tenants_fixture
    ):
        """Test that invalid JSON payload is rejected"""
        from api.models import LighthouseTenantConfiguration

        # Create config first
        LighthouseTenantConfiguration.objects.create(
            tenant_id=tenants_fixture[0].id,
            business_context="Test",
        )

        # Send invalid JSON
        response = authenticated_client.patch(
            reverse("lighthouse-config"),
            data="invalid json",
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert response.status_code == status.HTTP_400_BAD_REQUEST


@pytest.mark.django_db
class TestLighthouseProviderConfigViewSet:
    """Tests for LighthouseProviderConfiguration create validations"""

    def test_invalid_provider_type(self, authenticated_client):
"""Add invalid provider (testprovider) should error"""
|
||||
        payload = {
            "data": {
                "type": "lighthouse-providers",
                "attributes": {
                    "provider_type": "testprovider",
                    "credentials": {"api_key": "sk-testT3BlbkFJkey"},
                },
            }
        }
        resp = authenticated_client.post(
            reverse("lighthouse-providers-list"),
            data=payload,
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert resp.status_code == status.HTTP_400_BAD_REQUEST

    def test_openai_missing_credentials(self, authenticated_client):
        """OpenAI provider without credentials should error"""
        payload = {
            "data": {
                "type": "lighthouse-providers",
                "attributes": {
                    "provider_type": "openai",
                },
            }
        }
        resp = authenticated_client.post(
            reverse("lighthouse-providers-list"),
            data=payload,
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert resp.status_code == status.HTTP_400_BAD_REQUEST

    @pytest.mark.parametrize(
        "credentials",
        [
            {},  # empty credentials
            {"token": "sk-testT3BlbkFJkey"},  # wrong key name
            {"api_key": "ks-invalid-format"},  # wrong format
        ],
    )
    def test_openai_invalid_credentials(self, authenticated_client, credentials):
        """OpenAI provider with invalid credentials should error"""
        payload = {
            "data": {
                "type": "lighthouse-providers",
                "attributes": {
                    "provider_type": "openai",
                    "credentials": credentials,
                },
            }
        }
        resp = authenticated_client.post(
            reverse("lighthouse-providers-list"),
            data=payload,
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert resp.status_code == status.HTTP_400_BAD_REQUEST

    def test_openai_valid_credentials_success(self, authenticated_client):
        """OpenAI provider with valid sk-xxx format should succeed"""
        valid_key = "sk-abc123T3BlbkFJxyz456"
        payload = {
            "data": {
                "type": "lighthouse-providers",
                "attributes": {
                    "provider_type": "openai",
                    "credentials": {"api_key": valid_key},
                },
            }
        }
        resp = authenticated_client.post(
            reverse("lighthouse-providers-list"),
            data=payload,
            content_type=API_JSON_CONTENT_TYPE,
        )
        assert resp.status_code == status.HTTP_201_CREATED
        data = resp.json()["data"]

        masked_creds = data["attributes"].get("credentials")
        assert masked_creds is not None
        assert "api_key" in masked_creds
        assert masked_creds["api_key"] == ("*" * len(valid_key))

    def test_openai_provider_duplicate_per_tenant(self, authenticated_client):
"""If an OpenAI provider exists for tenant, creating again should error"""
|
||||
valid_key = "sk-dup123T3BlbkFJdup456"
|
||||
payload = {
|
||||
"data": {
|
||||
"type": "lighthouse-providers",
|
||||
"attributes": {
|
||||
"provider_type": "openai",
|
||||
"credentials": {"api_key": valid_key},
|
||||
},
|
||||
}
|
||||
}
|
||||
# First creation succeeds
|
||||
resp1 = authenticated_client.post(
|
||||
reverse("lighthouse-providers-list"),
|
||||
data=payload,
|
||||
content_type=API_JSON_CONTENT_TYPE,
|
||||
)
|
||||
assert resp1.status_code == status.HTTP_201_CREATED
|
||||
|
||||
# Second creation should fail with validation error
|
||||
resp2 = authenticated_client.post(
|
||||
reverse("lighthouse-providers-list"),
|
||||
data=payload,
|
||||
content_type=API_JSON_CONTENT_TYPE,
|
||||
)
|
||||
assert resp2.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert "already exists" in str(resp2.json()).lower()
|
||||
|
||||
def test_openai_patch_base_url_and_is_active(self, authenticated_client):
|
||||
"""After creating, should be able to patch base_url and is_active"""
|
||||
valid_key = "sk-patch123T3BlbkFJpatch456"
|
||||
create_payload = {
|
||||
"data": {
|
||||
"type": "lighthouse-providers",
|
||||
"attributes": {
|
||||
"provider_type": "openai",
|
||||
"credentials": {"api_key": valid_key},
|
||||
},
|
||||
}
|
||||
}
|
||||
create_resp = authenticated_client.post(
|
||||
reverse("lighthouse-providers-list"),
|
||||
data=create_payload,
|
||||
content_type=API_JSON_CONTENT_TYPE,
|
||||
)
|
||||
assert create_resp.status_code == status.HTTP_201_CREATED
|
||||
provider_id = create_resp.json()["data"]["id"]
|
||||
|
||||
patch_payload = {
|
||||
"data": {
|
||||
"type": "lighthouse-providers",
|
||||
"id": provider_id,
|
||||
"attributes": {
|
||||
"base_url": "https://api.example.com/v1",
|
||||
"is_active": False,
|
||||
},
|
||||
}
|
||||
}
|
||||
patch_resp = authenticated_client.patch(
|
||||
reverse("lighthouse-providers-detail", kwargs={"pk": provider_id}),
|
||||
data=patch_payload,
|
||||
content_type=API_JSON_CONTENT_TYPE,
|
||||
)
|
||||
assert patch_resp.status_code == status.HTTP_200_OK
|
||||
updated = patch_resp.json()["data"]["attributes"]
|
||||
assert updated["base_url"] == "https://api.example.com/v1"
|
||||
assert updated["is_active"] is False
|
||||
|
||||
def test_openai_patch_invalid_credentials(self, authenticated_client):
|
||||
"""PATCH with invalid credentials.api_key should error (400)"""
|
||||
valid_key = "sk-ok123T3BlbkFJok456"
|
||||
create_payload = {
|
||||
"data": {
|
||||
"type": "lighthouse-providers",
|
||||
"attributes": {
|
||||
"provider_type": "openai",
|
||||
"credentials": {"api_key": valid_key},
|
||||
},
|
||||
}
|
||||
}
|
||||
create_resp = authenticated_client.post(
|
||||
reverse("lighthouse-providers-list"),
|
||||
data=create_payload,
|
||||
content_type=API_JSON_CONTENT_TYPE,
|
||||
)
|
||||
assert create_resp.status_code == status.HTTP_201_CREATED
|
||||
provider_id = create_resp.json()["data"]["id"]
|
||||
|
||||
# Try patch with invalid api_key format
|
||||
patch_payload = {
|
||||
"data": {
|
||||
"type": "lighthouse-providers",
|
||||
"id": provider_id,
|
||||
"attributes": {
|
||||
"credentials": {"api_key": "ks-invalid-format"},
|
||||
},
|
||||
}
|
||||
}
|
||||
patch_resp = authenticated_client.patch(
|
||||
reverse("lighthouse-providers-detail", kwargs={"pk": provider_id}),
|
||||
data=patch_payload,
|
||||
content_type=API_JSON_CONTENT_TYPE,
|
||||
)
|
||||
assert patch_resp.status_code == status.HTTP_400_BAD_REQUEST
|
||||
|
||||
def test_openai_get_masking_and_fields_filter(self, authenticated_client):
|
||||
valid_key = "sk-get123T3BlbkFJget456"
|
||||
create_payload = {
|
||||
"data": {
|
||||
"type": "lighthouse-providers",
|
||||
"attributes": {
|
||||
"provider_type": "openai",
|
||||
"credentials": {"api_key": valid_key},
|
||||
},
|
||||
}
|
||||
}
|
||||
create_resp = authenticated_client.post(
|
||||
reverse("lighthouse-providers-list"),
|
||||
data=create_payload,
|
||||
content_type=API_JSON_CONTENT_TYPE,
|
||||
)
|
||||
assert create_resp.status_code == status.HTTP_201_CREATED
|
||||
provider_id = create_resp.json()["data"]["id"]
|
||||
|
||||
# Default GET should return masked credentials
|
||||
get_resp = authenticated_client.get(
|
||||
reverse("lighthouse-providers-detail", kwargs={"pk": provider_id})
|
||||
)
|
||||
assert get_resp.status_code == status.HTTP_200_OK
|
||||
masked = get_resp.json()["data"]["attributes"]["credentials"]["api_key"]
|
||||
assert masked == ("*" * len(valid_key))
|
||||
|
||||
# Fields filter should return decrypted credentials structure
|
||||
get_full = authenticated_client.get(
|
||||
reverse("lighthouse-providers-detail", kwargs={"pk": provider_id})
|
||||
+ "?fields[lighthouse-providers]=credentials"
|
||||
)
|
||||
assert get_full.status_code == status.HTTP_200_OK
|
||||
creds = get_full.json()["data"]["attributes"]["credentials"]
|
||||
assert creds["api_key"] == valid_key
|
||||
|
||||
def test_delete_provider_updates_tenant_defaults(
|
||||
self, authenticated_client, tenants_fixture
|
||||
):
|
||||
"""Deleting a provider config should clear tenant default_provider and its default_model entry."""
|
||||
|
||||
tenant = tenants_fixture[0]
|
||||
|
||||
# Create provider configuration to delete
|
||||
provider = LighthouseProviderConfiguration.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider_type="openai",
|
||||
credentials=b'{"api_key":"sk-test123T3BlbkFJ"}',
|
||||
is_active=True,
|
||||
)
|
||||
|
||||
# Seed tenant defaults referencing the provider we will delete
|
||||
cfg = LighthouseTenantConfiguration.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
business_context="Test",
|
||||
default_provider="openai",
|
||||
default_models={"openai": "gpt-4o", "other": "model-x"},
|
||||
)
|
||||
|
||||
# Delete via API and validate response
|
||||
url = reverse("lighthouse-providers-detail", kwargs={"pk": str(provider.id)})
|
||||
resp = authenticated_client.delete(url)
|
||||
assert resp.status_code in (
|
||||
status.HTTP_204_NO_CONTENT,
|
||||
status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
# Tenant defaults should be updated
|
||||
cfg.refresh_from_db()
|
||||
assert cfg.default_provider == ""
|
||||
assert "openai" not in cfg.default_models
|
||||
|
||||
# Unrelated entries should remain untouched
|
||||
assert cfg.default_models.get("other") == "model-x"
|
||||
|
||||
@@ -0,0 +1,13 @@
import re

from rest_framework_json_api import serializers


class OpenAICredentialsSerializer(serializers.Serializer):
    api_key = serializers.CharField()

    def validate_api_key(self, value: str) -> str:
        pattern = r"^sk-[\w-]+$"
        if not re.match(pattern, value or ""):
            raise serializers.ValidationError("Invalid OpenAI API key format.")
        return value
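A short usage sketch of this serializer, matching how the create and update serializers later in this diff invoke it (the key value is made up):

from api.v1.serializer_utils.lighthouse import OpenAICredentialsSerializer

serializer = OpenAICredentialsSerializer(data={"api_key": "sk-abc123T3BlbkFJxyz456"})
serializer.is_valid(raise_exception=True)  # raises ValidationError on a malformed key
assert serializer.validated_data["api_key"].startswith("sk-")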
@@ -6,8 +6,10 @@ from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.models import update_last_login
from django.contrib.auth.password_validation import validate_password
from django.db import IntegrityError
from drf_spectacular.utils import extend_schema_field
from jwt.exceptions import InvalidKeyError
from rest_framework.reverse import reverse
from rest_framework.validators import UniqueTogetherValidator
from rest_framework_json_api import serializers
from rest_framework_json_api.relations import SerializerMethodResourceRelatedField
@@ -25,6 +27,9 @@ from api.models import (
    Invitation,
    InvitationRoleRelationship,
    LighthouseConfiguration,
    LighthouseProviderConfiguration,
    LighthouseProviderModels,
    LighthouseTenantConfiguration,
    Membership,
    Processor,
    Provider,
@@ -54,6 +59,7 @@ from api.v1.serializer_utils.integrations import (
    S3ConfigSerializer,
    SecurityHubConfigSerializer,
)
from api.v1.serializer_utils.lighthouse import OpenAICredentialsSerializer
from api.v1.serializer_utils.processors import ProcessorConfigField
from api.v1.serializer_utils.providers import ProviderSecretField
from prowler.lib.mutelist.mutelist import Mutelist
@@ -2750,6 +2756,16 @@ class LighthouseConfigCreateSerializer(RLSSerializer, BaseWriteSerializer):
            "updated_at": {"read_only": True},
        }

    def validate_temperature(self, value):
        if not 0 <= value <= 1:
            raise ValidationError("Temperature must be between 0 and 1.")
        return value

    def validate_max_tokens(self, value):
        if not 500 <= value <= 5000:
            raise ValidationError("Max tokens must be between 500 and 5000.")
        return value

    def validate(self, attrs):
        tenant_id = self.context.get("request").tenant_id
        if LighthouseConfiguration.objects.filter(tenant_id=tenant_id).exists():
@@ -2758,6 +2774,11 @@ class LighthouseConfigCreateSerializer(RLSSerializer, BaseWriteSerializer):
                    "tenant_id": "Lighthouse configuration already exists for this tenant."
                }
            )
        api_key = attrs.get("api_key")
        if api_key is not None:
            OpenAICredentialsSerializer(data={"api_key": api_key}).is_valid(
                raise_exception=True
            )
        return super().validate(attrs)

    def create(self, validated_data):
@@ -2802,6 +2823,24 @@ class LighthouseConfigUpdateSerializer(BaseWriteSerializer):
            "max_tokens": {"required": False},
        }

    def validate_temperature(self, value):
        if not 0 <= value <= 1:
            raise ValidationError("Temperature must be between 0 and 1.")
        return value

    def validate_max_tokens(self, value):
        if not 500 <= value <= 5000:
            raise ValidationError("Max tokens must be between 500 and 5000.")
        return value

    def validate(self, attrs):
        api_key = attrs.get("api_key", None)
        if api_key is not None:
            OpenAICredentialsSerializer(data={"api_key": api_key}).is_valid(
                raise_exception=True
            )
        return super().validate(attrs)

    def update(self, instance, validated_data):
        api_key = validated_data.pop("api_key", None)
        instance = super().update(instance, validated_data)
@@ -2931,3 +2970,352 @@ class TenantApiKeyUpdateSerializer(RLSSerializer, BaseWriteSerializer):
        ):
            raise ValidationError("An API key with this name already exists.")
        return value


# Lighthouse: Provider configurations


class LighthouseProviderConfigSerializer(RLSSerializer):
    """
    Read serializer for LighthouseProviderConfiguration.
    """

    # Decrypted credentials are only returned in to_representation when requested
    credentials = serializers.JSONField(required=False, read_only=True)

    class Meta:
        model = LighthouseProviderConfiguration
        fields = [
            "id",
            "inserted_at",
            "updated_at",
            "provider_type",
            "base_url",
            "is_active",
            "credentials",
            "url",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
            "is_active": {"read_only": True},
            "url": {"read_only": True, "view_name": "lighthouse-providers-detail"},
        }

    class JSONAPIMeta:
        resource_name = "lighthouse-providers"

    def to_representation(self, instance):
        data = super().to_representation(instance)
        # Support JSON:API fields filter: fields[lighthouse-providers]=credentials,base_url
        fields_param = self.context.get("request", None) and self.context[
            "request"
        ].query_params.get("fields[lighthouse-providers]", "")

        creds = instance.credentials_decoded

        requested_fields = (
            [f.strip() for f in fields_param.split(",")] if fields_param else []
        )

        if "credentials" in requested_fields:
            # Return full decrypted credentials JSON
            data["credentials"] = creds
        else:
            # Return masked credentials by default
            def mask_value(value):
                if isinstance(value, str):
                    return "*" * len(value)
                if isinstance(value, dict):
                    return {k: mask_value(v) for k, v in value.items()}
                if isinstance(value, list):
                    return [mask_value(v) for v in value]
                return value

            # Mask the decrypted credentials; fall back to None when decryption fails
            if creds is not None:
                data["credentials"] = mask_value(creds)
            else:
                # credentials_decoded returned None (missing or undecryptable data)
                data["credentials"] = None

        return data


class LighthouseProviderConfigCreateSerializer(RLSSerializer, BaseWriteSerializer):
    """
    Create serializer for LighthouseProviderConfiguration.
    Accepts credentials as JSON; stored encrypted via credentials_decoded.
    """

    credentials = serializers.JSONField(write_only=True, required=True)

    class Meta:
        model = LighthouseProviderConfiguration
        fields = [
            "provider_type",
            "base_url",
            "credentials",
            "is_active",
        ]
        extra_kwargs = {
            "is_active": {"required": False},
            "base_url": {"required": False, "allow_null": True},
        }

    def create(self, validated_data):
        credentials = validated_data.pop("credentials")

        instance = LighthouseProviderConfiguration(**validated_data)
        instance.tenant_id = self.context.get("tenant_id")
        instance.credentials_decoded = credentials

        try:
            instance.save()
            return instance
        except IntegrityError:
            raise ValidationError(
                {
                    "provider_type": "Configuration for this provider already exists for the tenant."
                }
            )

    def validate(self, attrs):
        provider_type = attrs.get("provider_type")
        credentials = attrs.get("credentials") or {}

        if provider_type == LighthouseProviderConfiguration.LLMProviderChoices.OPENAI:
            try:
                OpenAICredentialsSerializer(data=credentials).is_valid(
                    raise_exception=True
                )
            except ValidationError as e:
                details = e.detail.copy()
                for key, value in details.items():
                    e.detail[f"credentials/{key}"] = value
                    del e.detail[key]
                raise e

        return super().validate(attrs)


class LighthouseProviderConfigUpdateSerializer(BaseWriteSerializer):
    """
    Update serializer for LighthouseProviderConfiguration.
    """

    credentials = serializers.JSONField(write_only=True, required=False)

    class Meta:
        model = LighthouseProviderConfiguration
        fields = [
            "id",
            "provider_type",
            "base_url",
            "credentials",
            "is_active",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "provider_type": {"read_only": True},
            "base_url": {"required": False, "allow_null": True},
            "is_active": {"required": False},
        }

    def update(self, instance, validated_data):
        credentials = validated_data.pop("credentials", None)

        for attr, value in validated_data.items():
            setattr(instance, attr, value)

        if credentials is not None:
            instance.credentials_decoded = credentials

        instance.save()
        return instance

    def validate(self, attrs):
        provider_type = getattr(self.instance, "provider_type", None)
        credentials = attrs.get("credentials", None)

        if (
            credentials is not None
            and provider_type
            == LighthouseProviderConfiguration.LLMProviderChoices.OPENAI
        ):
            try:
                OpenAICredentialsSerializer(data=credentials).is_valid(
                    raise_exception=True
                )
            except ValidationError as e:
                details = e.detail.copy()
                for key, value in details.items():
                    e.detail[f"credentials/{key}"] = value
                    del e.detail[key]
                raise e

        return super().validate(attrs)


# Lighthouse: Tenant configuration


class LighthouseTenantConfigSerializer(RLSSerializer):
    """
    Read serializer for LighthouseTenantConfiguration.
    """

    # Build singleton URL without pk
    url = serializers.SerializerMethodField()

    def get_url(self, obj):
        request = self.context.get("request")
        return reverse("lighthouse-config", request=request)

    class Meta:
        model = LighthouseTenantConfiguration
        fields = [
            "id",
            "inserted_at",
            "updated_at",
            "business_context",
            "default_provider",
            "default_models",
            "url",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
            "url": {"read_only": True},
        }


class LighthouseTenantConfigUpdateSerializer(BaseWriteSerializer):
    class Meta:
        model = LighthouseTenantConfiguration
        fields = [
            "id",
            "business_context",
            "default_provider",
            "default_models",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
        }

    def validate(self, attrs):
        request = self.context.get("request")
        tenant_id = self.context.get("tenant_id") or (
            getattr(request, "tenant_id", None) if request else None
        )

        default_provider = attrs.get(
            "default_provider", getattr(self.instance, "default_provider", "")
        )
        default_models = attrs.get(
            "default_models", getattr(self.instance, "default_models", {})
        )

        if default_provider:
            supported = set(LighthouseProviderConfiguration.LLMProviderChoices.values)
            if default_provider not in supported:
                raise ValidationError(
                    {"default_provider": f"Unsupported provider '{default_provider}'."}
                )
            if not LighthouseProviderConfiguration.objects.filter(
                tenant_id=tenant_id, provider_type=default_provider, is_active=True
            ).exists():
                raise ValidationError(
                    {
                        "default_provider": f"No active configuration found for '{default_provider}'."
                    }
                )

        if default_models is not None and not isinstance(default_models, dict):
            raise ValidationError(
                {"default_models": "Must be an object mapping provider -> model_id."}
            )

        for provider_type, model_id in (default_models or {}).items():
            provider_cfg = LighthouseProviderConfiguration.objects.filter(
                tenant_id=tenant_id, provider_type=provider_type, is_active=True
            ).first()
            if not provider_cfg:
                raise ValidationError(
                    {
                        "default_models": f"No active configuration for provider '{provider_type}'."
                    }
                )
            if not LighthouseProviderModels.objects.filter(
                tenant_id=tenant_id,
                provider_configuration=provider_cfg,
                model_id=model_id,
            ).exists():
                raise ValidationError(
                    {
                        "default_models": f"Invalid model '{model_id}' for provider '{provider_type}'."
                    }
                )

        return super().validate(attrs)


# Lighthouse: Provider models


class LighthouseProviderModelsSerializer(RLSSerializer):
    """
    Read serializer for LighthouseProviderModels.
    """

    provider_configuration = serializers.ResourceRelatedField(read_only=True)

    class Meta:
        model = LighthouseProviderModels
        fields = [
            "id",
            "inserted_at",
            "updated_at",
            "provider_configuration",
            "model_id",
            "model_name",
            "default_parameters",
            "url",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
            "inserted_at": {"read_only": True},
            "updated_at": {"read_only": True},
            "url": {"read_only": True, "view_name": "lighthouse-models-detail"},
        }


class LighthouseProviderModelsCreateSerializer(RLSSerializer, BaseWriteSerializer):
    provider_configuration = serializers.ResourceRelatedField(
        queryset=LighthouseProviderConfiguration.objects.all()
    )

    class Meta:
        model = LighthouseProviderModels
        fields = [
            "provider_configuration",
            "model_id",
            "default_parameters",
        ]
        extra_kwargs = {
            "default_parameters": {"required": False},
        }


class LighthouseProviderModelsUpdateSerializer(BaseWriteSerializer):
    class Meta:
        model = LighthouseProviderModels
        fields = [
            "id",
            "default_parameters",
        ]
        extra_kwargs = {
            "id": {"read_only": True},
        }
@@ -17,6 +17,9 @@ from api.v1.views import (
    InvitationAcceptViewSet,
    InvitationViewSet,
    LighthouseConfigViewSet,
    LighthouseProviderConfigViewSet,
    LighthouseProviderModelsViewSet,
    LighthouseTenantConfigViewSet,
    MembershipViewSet,
    OverviewViewSet,
    ProcessorViewSet,
@@ -34,12 +37,12 @@ from api.v1.views import (
    ScheduleViewSet,
    SchemaView,
    TaskViewSet,
    TenantApiKeyViewSet,
    TenantFinishACSView,
    TenantMembersViewSet,
    TenantViewSet,
    UserRoleRelationshipView,
    UserViewSet,
    TenantApiKeyViewSet,
)

router = routers.DefaultRouter(trailing_slash=False)
@@ -67,6 +70,16 @@ router.register(
    basename="lighthouseconfiguration",
)
router.register(r"api-keys", TenantApiKeyViewSet, basename="api-key")
router.register(
    r"lighthouse/providers",
    LighthouseProviderConfigViewSet,
    basename="lighthouse-providers",
)
router.register(
    r"lighthouse/models",
    LighthouseProviderModelsViewSet,
    basename="lighthouse-models",
)

tenants_router = routers.NestedSimpleRouter(router, r"tenants", lookup="tenant")
tenants_router.register(
@@ -137,6 +150,14 @@ urlpatterns = [
        ),
        name="provider_group-providers-relationship",
    ),
    # Lighthouse tenant config as singleton endpoint
    path(
        "lighthouse/configuration",
        LighthouseTenantConfigViewSet.as_view(
            {"get": "list", "patch": "partial_update"}
        ),
        name="lighthouse-config",
    ),
    # API endpoint to start SAML SSO flow
    path(
        "auth/saml/initiate/", SAMLInitiateAPIView.as_view(), name="api_saml_initiate"
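Taken together, the router registrations and the singleton path above should expose endpoints along these lines (method lists assume the viewsets keep the router defaults):

# GET, POST            /lighthouse/providers
# GET, PATCH, DELETE   /lighthouse/providers/<id>
# GET, POST            /lighthouse/models
# GET, PATCH, DELETE   /lighthouse/models/<id>
# GET, PATCH           /lighthouse/configuration   (singleton, no pk in the URL)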
@@ -1,4 +1,6 @@
import fnmatch
import glob
import json
import logging
import os
from datetime import datetime, timedelta, timezone
@@ -59,11 +61,13 @@ from tasks.tasks import (
    backfill_scan_resource_summaries_task,
    check_integration_connection_task,
    check_lighthouse_connection_task,
    check_lighthouse_provider_connection_task,
    check_provider_connection_task,
    delete_provider_task,
    delete_tenant_task,
    jira_integration_task,
    perform_scan_task,
    refresh_lighthouse_provider_models_task,
)

from api.base_views import BaseRLSViewSet, BaseTenantViewset, BaseUserViewset
@@ -83,6 +87,8 @@ from api.filters import (
    InvitationFilter,
    LatestFindingFilter,
    LatestResourceFilter,
    LighthouseProviderConfigFilter,
    LighthouseProviderModelsFilter,
    MembershipFilter,
    ProcessorFilter,
    ProviderFilter,
@@ -105,6 +111,9 @@ from api.models import (
    Integration,
    Invitation,
    LighthouseConfiguration,
    LighthouseProviderConfiguration,
    LighthouseProviderModels,
    LighthouseTenantConfiguration,
    Membership,
    Processor,
    Provider,
@@ -159,6 +168,12 @@ from api.v1.serializers import (
    LighthouseConfigCreateSerializer,
    LighthouseConfigSerializer,
    LighthouseConfigUpdateSerializer,
    LighthouseProviderConfigCreateSerializer,
    LighthouseProviderConfigSerializer,
    LighthouseProviderConfigUpdateSerializer,
    LighthouseProviderModelsSerializer,
    LighthouseTenantConfigSerializer,
    LighthouseTenantConfigUpdateSerializer,
    MembershipSerializer,
    OverviewFindingSerializer,
    OverviewProviderSerializer,
@@ -306,7 +321,7 @@ class SchemaView(SpectacularAPIView):

    def get(self, request, *args, **kwargs):
        spectacular_settings.TITLE = "Prowler API"
        spectacular_settings.VERSION = "1.14.0"
        spectacular_settings.VERSION = "1.15.0"
        spectacular_settings.DESCRIPTION = (
            "Prowler API specification.\n\nThis file is auto-generated."
        )
@@ -1593,6 +1608,25 @@ class ProviderViewSet(BaseRLSViewSet):
        },
        request=None,
    ),
    threatscore=extend_schema(
        tags=["Scan"],
        summary="Retrieve threatscore report",
        description="Download a specific threatscore report (e.g., 'prowler_threatscore_aws') as a PDF file.",
        request=None,
        responses={
            200: OpenApiResponse(
                description="PDF file containing the threatscore report"
            ),
            202: OpenApiResponse(description="The task is in progress"),
            401: OpenApiResponse(
description="API key missing or user not Authenticated"
|
||||
            ),
            403: OpenApiResponse(description="There is a problem with credentials"),
            404: OpenApiResponse(
                description="The scan has no threatscore reports, or the threatscore report generation task has not started yet"
            ),
        },
    ),
)
@method_decorator(CACHE_DECORATOR, name="list")
@method_decorator(CACHE_DECORATOR, name="retrieve")
@@ -1649,6 +1683,9 @@ class ScanViewSet(BaseRLSViewSet):
            if hasattr(self, "response_serializer_class"):
                return self.response_serializer_class
            return ScanComplianceReportSerializer
        elif self.action == "threatscore":
            if hasattr(self, "response_serializer_class"):
                return self.response_serializer_class
        return super().get_serializer_class()

    def partial_update(self, request, *args, **kwargs):
@@ -1753,7 +1790,18 @@ class ScanViewSet(BaseRLSViewSet):
                status=status.HTTP_502_BAD_GATEWAY,
            )
        contents = resp.get("Contents", [])
        keys = [obj["Key"] for obj in contents if obj["Key"].endswith(suffix)]
        keys = []
        for obj in contents:
            key = obj["Key"]
            key_basename = os.path.basename(key)
            if any(ch in suffix for ch in ("*", "?", "[")):
                if fnmatch.fnmatch(key_basename, suffix):
                    keys.append(key)
            elif key_basename == suffix:
                keys.append(key)
            elif key.endswith(suffix):
                # Backward compatibility if suffix already includes directories
                keys.append(key)
        if not keys:
            return Response(
                {
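The branch added above only applies glob matching when the suffix actually contains a wildcard character; a standalone illustration of that check and match:

import fnmatch
import os

suffix = "*_threatscore_report.pdf"
key = "tenant-id/scan-id/threatscore/prowler-output-123_threatscore_report.pdf"

# Same wildcard detection as the view, followed by a basename match
if any(ch in suffix for ch in ("*", "?", "[")):
    assert fnmatch.fnmatch(os.path.basename(key), suffix)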
@@ -1880,6 +1928,45 @@ class ScanViewSet(BaseRLSViewSet):
            content, filename = loader
            return self._serve_file(content, filename, "text/csv")

+    @action(
+        detail=True,
+        methods=["get"],
+        url_name="threatscore",
+    )
+    def threatscore(self, request, pk=None):
+        scan = self.get_object()
+        running_resp = self._get_task_status(scan)
+        if running_resp:
+            return running_resp
+
+        if not scan.output_location:
+            return Response(
+                {
+                    "detail": "The scan has no reports, or the threatscore report generation task has not started yet."
+                },
+                status=status.HTTP_404_NOT_FOUND,
+            )
+
+        if scan.output_location.startswith("s3://"):
+            bucket = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET", "")
+            key_prefix = scan.output_location.removeprefix(f"s3://{bucket}/")
+            prefix = os.path.join(
+                os.path.dirname(key_prefix),
+                "threatscore",
+                "*_threatscore_report.pdf",
+            )
+            loader = self._load_file(prefix, s3=True, bucket=bucket, list_objects=True)
+        else:
+            base = os.path.dirname(scan.output_location)
+            pattern = os.path.join(base, "threatscore", "*_threatscore_report.pdf")
+            loader = self._load_file(pattern, s3=False)
+
+        if isinstance(loader, Response):
+            return loader
+
+        content, filename = loader
+        return self._serve_file(content, filename, "application/pdf")
+
    def create(self, request, *args, **kwargs):
        input_serializer = self.get_serializer(data=request.data)
        input_serializer.is_valid(raise_exception=True)
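Client-side, the action is polled until the report exists; a hedged sketch (the URL shape and auth scheme are assumptions, not taken from this diff):

import requests

def download_threatscore(api_base: str, token: str, scan_id: str) -> bytes | None:
    # Hypothetical endpoint path; adjust to the deployed API routing.
    resp = requests.get(
        f"{api_base}/scans/{scan_id}/threatscore",
        headers={"Authorization": f"Bearer {token}"},
    )
    if resp.status_code == 202:
        return None  # generation task still in progress; retry later
    resp.raise_for_status()
    return resp.content  # the PDF served via _serve_file(..., "application/pdf")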
@@ -4104,21 +4191,25 @@ class IntegrationJiraViewSet(BaseRLSViewSet):
        tags=["Lighthouse AI"],
        summary="List all Lighthouse AI configurations",
        description="Retrieve a list of all Lighthouse AI configurations.",
+        deprecated=True,
    ),
    create=extend_schema(
        tags=["Lighthouse AI"],
        summary="Create a new Lighthouse AI configuration",
        description="Create a new Lighthouse AI configuration with the specified details.",
+        deprecated=True,
    ),
    partial_update=extend_schema(
        tags=["Lighthouse AI"],
        summary="Partially update a Lighthouse AI configuration",
        description="Update certain fields of an existing Lighthouse AI configuration.",
+        deprecated=True,
    ),
    destroy=extend_schema(
        tags=["Lighthouse AI"],
        summary="Delete a Lighthouse AI configuration",
        description="Remove a Lighthouse AI configuration by its ID.",
+        deprecated=True,
    ),
    connection=extend_schema(
        tags=["Lighthouse AI"],
@@ -4126,6 +4217,7 @@ class IntegrationJiraViewSet(BaseRLSViewSet):
        description="Verify the connection to the OpenAI API for a specific Lighthouse AI configuration.",
        request=None,
        responses={202: OpenApiResponse(response=TaskSerializer)},
+        deprecated=True,
    ),
)
class LighthouseConfigViewSet(BaseRLSViewSet):
@@ -4176,6 +4268,273 @@ class LighthouseConfigViewSet(BaseRLSViewSet):
        )


@extend_schema_view(
    list=extend_schema(
        tags=["Lighthouse AI"],
        summary="List all LLM provider configs",
        description="Retrieve all LLM provider configurations for the current tenant",
    ),
    retrieve=extend_schema(
        tags=["Lighthouse AI"],
        summary="Retrieve LLM provider config",
        description="Get details for a specific provider configuration in the current tenant.",
    ),
    create=extend_schema(
        tags=["Lighthouse AI"],
        summary="Create LLM provider config",
        description="Create a per-tenant configuration for an LLM provider. Only one configuration per provider type is allowed per tenant.",
    ),
    partial_update=extend_schema(
        tags=["Lighthouse AI"],
        summary="Update LLM provider config",
        description="Partially update a provider configuration (e.g., base_url, is_active).",
    ),
    destroy=extend_schema(
        tags=["Lighthouse AI"],
        summary="Delete LLM provider config",
        description="Delete a provider configuration. Any tenant defaults that reference this provider are cleared during deletion.",
    ),
)
class LighthouseProviderConfigViewSet(BaseRLSViewSet):
    queryset = LighthouseProviderConfiguration.objects.all()
    serializer_class = LighthouseProviderConfigSerializer
    http_method_names = ["get", "post", "patch", "delete"]
    filterset_class = LighthouseProviderConfigFilter

    def get_queryset(self):
        if getattr(self, "swagger_fake_view", False):
            return LighthouseProviderConfiguration.objects.none()
        return LighthouseProviderConfiguration.objects.filter(
            tenant_id=self.request.tenant_id
        )

    def get_serializer_class(self):
        if self.action == "create":
            return LighthouseProviderConfigCreateSerializer
        elif self.action == "partial_update":
            return LighthouseProviderConfigUpdateSerializer
        elif self.action in ["connection", "refresh_models"]:
            return TaskSerializer
        return super().get_serializer_class()

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        instance = serializer.save()

        read_serializer = LighthouseProviderConfigSerializer(
            instance, context=self.get_serializer_context()
        )
        headers = self.get_success_headers(read_serializer.data)
        return Response(
            data=read_serializer.data,
            status=status.HTTP_201_CREATED,
            headers=headers,
        )

    def partial_update(self, request, *args, **kwargs):
        instance = self.get_object()
        serializer = self.get_serializer(
            instance,
            data=request.data,
            partial=True,
            context=self.get_serializer_context(),
        )
        serializer.is_valid(raise_exception=True)
        serializer.save()
        read_serializer = LighthouseProviderConfigSerializer(
            instance, context=self.get_serializer_context()
        )
        return Response(data=read_serializer.data, status=status.HTTP_200_OK)

    @extend_schema(
        tags=["Lighthouse AI"],
        summary="Check LLM provider connection",
        description="Validate provider credentials asynchronously and toggle is_active.",
        request=None,
        responses={202: OpenApiResponse(response=TaskSerializer)},
    )
    @action(detail=True, methods=["post"], url_name="connection")
    def connection(self, request, pk=None):
        instance = self.get_object()
        if (
            instance.provider_type
            != LighthouseProviderConfiguration.LLMProviderChoices.OPENAI
        ):
            return Response(
                data={
                    "errors": [{"detail": "Only 'openai' provider supported in MVP"}]
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        with transaction.atomic():
            task = check_lighthouse_provider_connection_task.delay(
                provider_config_id=str(instance.id), tenant_id=self.request.tenant_id
            )

        prowler_task = Task.objects.get(id=task.id)
        serializer = TaskSerializer(prowler_task)
        return Response(
            data=serializer.data,
            status=status.HTTP_202_ACCEPTED,
            headers={
                "Content-Location": reverse(
                    "task-detail", kwargs={"pk": prowler_task.id}
                )
            },
        )

    @extend_schema(
        tags=["Lighthouse AI"],
        summary="Refresh LLM models catalog",
        description="Fetch available models for this provider configuration and upsert into catalog.",
        request=None,
        responses={202: OpenApiResponse(response=TaskSerializer)},
    )
    @action(
        detail=True,
        methods=["post"],
        url_path="refresh-models",
        url_name="refresh-models",
    )
    def refresh_models(self, request, pk=None):
        instance = self.get_object()
        if (
            instance.provider_type
            != LighthouseProviderConfiguration.LLMProviderChoices.OPENAI
        ):
            return Response(
                data={
                    "errors": [{"detail": "Only 'openai' provider supported in MVP"}]
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        with transaction.atomic():
            task = refresh_lighthouse_provider_models_task.delay(
                provider_config_id=str(instance.id), tenant_id=self.request.tenant_id
            )

        prowler_task = Task.objects.get(id=task.id)
        serializer = TaskSerializer(prowler_task)
        return Response(
            data=serializer.data,
            status=status.HTTP_202_ACCEPTED,
            headers={
                "Content-Location": reverse(
                    "task-detail", kwargs={"pk": prowler_task.id}
                )
            },
        )


@extend_schema_view(
    list=extend_schema(
        tags=["Lighthouse AI"],
        summary="Get Lighthouse AI Tenant config",
        description="Retrieve current tenant-level Lighthouse AI settings. Returns a single configuration object.",
    ),
    partial_update=extend_schema(
        tags=["Lighthouse AI"],
        summary="Update Lighthouse AI Tenant config",
        description="Update tenant-level settings. Validates that the default provider is configured and active and that default model IDs exist for the chosen providers. Auto-creates configuration if it doesn't exist.",
    ),
)
class LighthouseTenantConfigViewSet(BaseRLSViewSet):
    """
    Singleton endpoint for tenant-level Lighthouse AI configuration.

    This viewset implements a true singleton pattern:
    - GET returns the single configuration object (or 404 if not found)
    - PATCH updates/creates the configuration (upsert semantics)
    - No ID is required in the URL
    """

    queryset = LighthouseTenantConfiguration.objects.all()
    serializer_class = LighthouseTenantConfigSerializer
    http_method_names = ["get", "patch"]

    def get_queryset(self):
        if getattr(self, "swagger_fake_view", False):
            return LighthouseTenantConfiguration.objects.none()
        return LighthouseTenantConfiguration.objects.filter(
            tenant_id=self.request.tenant_id
        )

    def get_serializer_class(self):
        if self.action == "partial_update":
            return LighthouseTenantConfigUpdateSerializer
        return super().get_serializer_class()

    def get_object(self):
        """Retrieve the singleton instance for the current tenant."""
        obj = LighthouseTenantConfiguration.objects.filter(
            tenant_id=self.request.tenant_id
        ).first()
        if obj is None:
            raise NotFound("Tenant Lighthouse configuration not found")
        self.check_object_permissions(self.request, obj)
        return obj

    def list(self, request, *args, **kwargs):
        """GET endpoint for singleton - returns single object, not an array."""
        instance = self.get_object()
        serializer = self.get_serializer(instance)
        return Response(serializer.data)

    def partial_update(self, request, *args, **kwargs):
        """PATCH endpoint for singleton - no pk required. Auto-creates if not exists."""
        # Auto-create tenant config if it doesn't exist (upsert semantics)
        instance, created = LighthouseTenantConfiguration.objects.get_or_create(
            tenant_id=self.request.tenant_id,
            defaults={},
        )

        # Extract attributes from JSON:API payload
        try:
            payload = json.loads(request.body)
            attributes = payload.get("data", {}).get("attributes", {})
        except (json.JSONDecodeError, AttributeError):
            raise ValidationError("Invalid JSON:API payload")

        serializer = self.get_serializer(instance, data=attributes, partial=True)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        read_serializer = LighthouseTenantConfigSerializer(
            instance, context=self.get_serializer_context()
        )
        return Response(read_serializer.data, status=status.HTTP_200_OK)

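Since partial_update parses the raw request body itself, a PATCH against this singleton nests its fields under data.attributes in JSON:API style. A sketch of the envelope (the attribute names are hypothetical; only the data/attributes shape is taken from the code above):

# Hypothetical JSON:API PATCH body for the tenant-level singleton.
payload = {
    "data": {
        "type": "lighthouse-tenant-configurations",  # assumed resource type
        "attributes": {
            "default_provider": "openai",       # hypothetical field name
            "default_model_id": "gpt-4o-mini",  # hypothetical field name
        },
    }
}
# The view reads json.loads(request.body)["data"]["attributes"] and
# upserts the configuration with get_or_create before validating.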
@extend_schema_view(
    list=extend_schema(
        tags=["Lighthouse AI"],
        summary="List all LLM models",
        description="List available LLM models per configured provider for the current tenant.",
    ),
    retrieve=extend_schema(
        tags=["Lighthouse AI"],
        summary="Retrieve LLM model details",
        description="Get details for a specific LLM model.",
    ),
)
class LighthouseProviderModelsViewSet(BaseRLSViewSet):
    queryset = LighthouseProviderModels.objects.all()
    serializer_class = LighthouseProviderModelsSerializer
    filterset_class = LighthouseProviderModelsFilter
    # Expose as read-only catalog collection
    http_method_names = ["get"]

    def get_queryset(self):
        if getattr(self, "swagger_fake_view", False):
            return LighthouseProviderModels.objects.none()
        return LighthouseProviderModels.objects.filter(tenant_id=self.request.tenant_id)

    def get_serializer_class(self):
        return super().get_serializer_class()


@extend_schema_view(
    list=extend_schema(
        tags=["Processor"],
Binary file not shown.
Binary file not shown.
Binary file not shown.
Image diff not shown (after: 24 KiB).
@@ -20,10 +20,10 @@ from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
    AWSWellArchitected,
)
+from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.ccc.ccc_aws import CCC_AWS
from prowler.lib.outputs.compliance.ccc.ccc_azure import CCC_Azure
from prowler.lib.outputs.compliance.ccc.ccc_gcp import CCC_GCP
-from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
@@ -183,18 +183,21 @@ def get_s3_client():
    return s3_client


-def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str | None:
+def _upload_to_s3(
+    tenant_id: str, scan_id: str, local_path: str, relative_key: str
+) -> str | None:
    """
-    Upload the specified ZIP file to an S3 bucket.
-    If the S3 bucket environment variables are not configured,
-    the function returns None without performing an upload.
+    Upload a local artifact to an S3 bucket under the tenant/scan prefix.

    Args:
-        tenant_id (str): The tenant identifier, used as part of the S3 key prefix.
-        zip_path (str): The local file system path to the ZIP file to be uploaded.
-        scan_id (str): The scan identifier, used as part of the S3 key prefix.
+        tenant_id (str): The tenant identifier used as the first segment of the S3 key.
+        scan_id (str): The scan identifier used as the second segment of the S3 key.
+        local_path (str): Filesystem path to the artifact to upload.
+        relative_key (str): Object key relative to `<tenant_id>/<scan_id>/`.

    Returns:
-        str: The S3 URI of the uploaded file (e.g., "s3://<bucket>/<key>") if successful.
-        None: If the required environment variables for the S3 bucket are not set.
+        str | None: S3 URI of the uploaded artifact, or None if the upload is skipped.

    Raises:
        botocore.exceptions.ClientError: If the upload attempt to S3 fails for any reason.
    """
@@ -202,34 +205,26 @@ def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str | None:
    if not bucket:
        return

+    if not relative_key:
+        return
+
+    if not os.path.isfile(local_path):
+        return
+
    try:
        s3 = get_s3_client()

-        # Upload the ZIP file (outputs) to the S3 bucket
-        zip_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"
-        s3.upload_file(
-            Filename=zip_path,
-            Bucket=bucket,
-            Key=zip_key,
-        )
+        s3_key = f"{tenant_id}/{scan_id}/{relative_key}"
+        s3.upload_file(Filename=local_path, Bucket=bucket, Key=s3_key)

-        # Upload the compliance directory to the S3 bucket
-        compliance_dir = os.path.join(os.path.dirname(zip_path), "compliance")
-        for filename in os.listdir(compliance_dir):
-            local_path = os.path.join(compliance_dir, filename)
-            if not os.path.isfile(local_path):
-                continue
-            file_key = f"{tenant_id}/{scan_id}/compliance/{filename}"
-            s3.upload_file(Filename=local_path, Bucket=bucket, Key=file_key)
-
-        return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{zip_key}"
+        return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{s3_key}"
    except (ClientError, NoCredentialsError, ParamValidationError, ValueError) as e:
        logger.error(f"S3 upload failed: {str(e)}")

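The refactor reduces the helper to one artifact per call; the object key is always assembled as tenant/scan/relative_key. A tiny sketch of the resulting layout (bucket and ids are hypothetical):

def s3_key(tenant_id: str, scan_id: str, relative_key: str) -> str:
    # Mirrors the key layout used by _upload_to_s3 above.
    return f"{tenant_id}/{scan_id}/{relative_key}"

assert s3_key("tenant-1", "scan-9", "outputs.zip") == "tenant-1/scan-9/outputs.zip"
assert (
    s3_key("tenant-1", "scan-9", "compliance/cis.csv")
    == "tenant-1/scan-9/compliance/cis.csv"
)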
def _generate_output_directory(
    output_directory, prowler_provider: object, tenant_id: str, scan_id: str
-) -> tuple[str, str]:
+) -> tuple[str, str, str]:
    """
    Generate a file system path for the output directory of a prowler scan.

@@ -256,6 +251,7 @@ def _generate_output_directory(
    >>> _generate_output_directory("/tmp", "aws", "tenant-1234", "scan-5678")
    '/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56',
    '/tmp/tenant-1234/aws/scan-5678/compliance/prowler-output-2023-02-15T12:34:56'
+    '/tmp/tenant-1234/aws/scan-5678/threatscore/prowler-output-2023-02-15T12:34:56'
    """
    # Sanitize the prowler provider name to ensure it is a valid directory name
    prowler_provider_sanitized = re.sub(r"[^\w\-]", "-", prowler_provider)
@@ -276,4 +272,10 @@ def _generate_output_directory(
    )
    os.makedirs("/".join(compliance_path.split("/")[:-1]), exist_ok=True)

-    return path, compliance_path
+    threatscore_path = (
+        f"{output_directory}/{tenant_id}/{scan_id}/threatscore/prowler-output-"
+        f"{prowler_provider_sanitized}-{timestamp}"
+    )
+    os.makedirs("/".join(threatscore_path.split("/")[:-1]), exist_ok=True)
+
+    return path, compliance_path, threatscore_path

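Callers now unpack three sibling directories and may discard the ones they do not need, as generate_outputs_task does further down. A sketch with hypothetical arguments:

out_dir, comp_dir, threatscore_dir = _generate_output_directory(
    "/tmp/prowler", "aws", "tenant-1234", "scan-5678"  # hypothetical values
)
# out_dir         -> .../scan-5678/prowler-output-aws-<timestamp>
# comp_dir        -> .../scan-5678/compliance/prowler-output-aws-<timestamp>
# threatscore_dir -> .../scan-5678/threatscore/prowler-output-aws-<timestamp>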
@@ -0,0 +1,163 @@
from typing import Dict, Set

import openai
from celery.utils.log import get_task_logger

from api.models import LighthouseProviderConfiguration, LighthouseProviderModels

logger = get_task_logger(__name__)


def _extract_openai_api_key(
    provider_cfg: LighthouseProviderConfiguration,
) -> str | None:
    """
    Safely extract the OpenAI API key from a provider configuration.

    Args:
        provider_cfg (LighthouseProviderConfiguration): The provider configuration instance
            containing the credentials.

    Returns:
        str | None: The API key string if present and valid, otherwise None.
    """
    creds = provider_cfg.credentials_decoded
    if not isinstance(creds, dict):
        return None
    api_key = creds.get("api_key")
    if not isinstance(api_key, str) or not api_key:
        return None
    return api_key


def check_lighthouse_provider_connection(provider_config_id: str) -> Dict:
    """
    Validate a Lighthouse provider configuration by calling the provider API and
    toggle its active state accordingly.

    Currently supports the OpenAI provider by invoking `models.list` to verify that
    the provided credentials are valid.

    Args:
        provider_config_id (str): The primary key of the `LighthouseProviderConfiguration`
            to validate.

    Returns:
        dict: A result dictionary with the following keys:
            - "connected" (bool): Whether the provider credentials are valid.
            - "error" (str | None): The error message when not connected, otherwise None.

    Side Effects:
        - Updates and persists `is_active` on the `LighthouseProviderConfiguration`.

    Raises:
        LighthouseProviderConfiguration.DoesNotExist: If no configuration exists with the given ID.
    """
    provider_cfg = LighthouseProviderConfiguration.objects.get(pk=provider_config_id)

    # TODO: Add support for other providers
    if (
        provider_cfg.provider_type
        != LighthouseProviderConfiguration.LLMProviderChoices.OPENAI
    ):
        return {"connected": False, "error": "Unsupported provider type"}

    api_key = _extract_openai_api_key(provider_cfg)
    if not api_key:
        provider_cfg.is_active = False
        provider_cfg.save()
        return {"connected": False, "error": "API key is invalid or missing"}

    try:
        client = openai.OpenAI(api_key=api_key)
        _ = client.models.list()
        provider_cfg.is_active = True
        provider_cfg.save()
        return {"connected": True, "error": None}
    except Exception as e:
        logger.warning("OpenAI connection check failed: %s", str(e))
        provider_cfg.is_active = False
        provider_cfg.save()
        return {"connected": False, "error": str(e)}

|
||||
"""
|
||||
Refresh the catalog of models for a Lighthouse provider configuration.
|
||||
|
||||
For the OpenAI provider, this fetches the current list of models, upserts entries
|
||||
into `LighthouseProviderModels`, and deletes stale entries no longer returned by
|
||||
the provider.
|
||||
|
||||
Args:
|
||||
provider_config_id (str): The primary key of the `LighthouseProviderConfiguration`
|
||||
whose models should be refreshed.
|
||||
|
||||
Returns:
|
||||
dict: A result dictionary with the following keys on success:
|
||||
- "created" (int): Number of new model rows created.
|
||||
- "updated" (int): Number of existing model rows updated.
|
||||
- "deleted" (int): Number of stale model rows removed.
|
||||
If an error occurs, the dictionary will contain an "error" (str) field instead.
|
||||
|
||||
Raises:
|
||||
LighthouseProviderConfiguration.DoesNotExist: If no configuration exists with the given ID.
|
||||
"""
|
||||
provider_cfg = LighthouseProviderConfiguration.objects.get(pk=provider_config_id)
|
||||
|
||||
if (
|
||||
provider_cfg.provider_type
|
||||
!= LighthouseProviderConfiguration.LLMProviderChoices.OPENAI
|
||||
):
|
||||
return {
|
||||
"created": 0,
|
||||
"updated": 0,
|
||||
"deleted": 0,
|
||||
"error": "Unsupported provider type",
|
||||
}
|
||||
|
||||
api_key = _extract_openai_api_key(provider_cfg)
|
||||
if not api_key:
|
||||
return {
|
||||
"created": 0,
|
||||
"updated": 0,
|
||||
"deleted": 0,
|
||||
"error": "API key is invalid or missing",
|
||||
}
|
||||
|
||||
try:
|
||||
client = openai.OpenAI(api_key=api_key)
|
||||
models = client.models.list()
|
||||
fetched_ids: Set[str] = {m.id for m in getattr(models, "data", [])}
|
||||
except Exception as e: # noqa: BLE001
|
||||
logger.warning("OpenAI models refresh failed: %s", str(e))
|
||||
return {"created": 0, "updated": 0, "deleted": 0, "error": str(e)}
|
||||
|
||||
created = 0
|
||||
updated = 0
|
||||
|
||||
for model_id in fetched_ids:
|
||||
obj, was_created = LighthouseProviderModels.objects.update_or_create(
|
||||
tenant_id=provider_cfg.tenant_id,
|
||||
provider_configuration=provider_cfg,
|
||||
model_id=model_id,
|
||||
defaults={
|
||||
"model_name": model_id, # OpenAI doesn't return a separate display name
|
||||
"default_parameters": {},
|
||||
},
|
||||
)
|
||||
if was_created:
|
||||
created += 1
|
||||
else:
|
||||
updated += 1
|
||||
|
||||
# Delete stale models not present anymore
|
||||
deleted, _ = (
|
||||
LighthouseProviderModels.objects.filter(
|
||||
tenant_id=provider_cfg.tenant_id, provider_configuration=provider_cfg
|
||||
)
|
||||
.exclude(model_id__in=fetched_ids)
|
||||
.delete()
|
||||
)
|
||||
|
||||
return {"created": created, "updated": updated, "deleted": deleted}
|
||||
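The refresh follows a catalog-sync pattern: upsert every model the provider returns, delete the rows it no longer returns, and report the counts. A usage sketch (the id and counts are hypothetical):

stats = refresh_lighthouse_provider_models(provider_config_id="<config-uuid>")
if "error" in stats:
    print(f"Refresh failed: {stats['error']}")
else:
    print(f"Catalog synced: {stats}")  # e.g. {"created": 2, "updated": 5, "deleted": 1}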
File diff suppressed because it is too large
@@ -1,3 +1,4 @@
+import os
from datetime import datetime, timedelta, timezone
from pathlib import Path
from shutil import rmtree
@@ -26,6 +27,11 @@ from tasks.jobs.integrations import (
    upload_s3_integration,
    upload_security_hub_integration,
)
+from tasks.jobs.lighthouse_providers import (
+    check_lighthouse_provider_connection,
+    refresh_lighthouse_provider_models,
+)
+from tasks.jobs.report import generate_threatscore_report_job
from tasks.jobs.scan import (
    aggregate_findings,
    create_compliance_requirements,
@@ -64,10 +70,15 @@ def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str)
        generate_outputs_task.si(
            scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
        ),
-        check_integrations_task.si(
-            tenant_id=tenant_id,
-            provider_id=provider_id,
-            scan_id=scan_id,
+        group(
+            generate_threatscore_report_task.si(
+                tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
+            ),
+            check_integrations_task.si(
+                tenant_id=tenant_id,
+                provider_id=provider_id,
+                scan_id=scan_id,
+            ),
        ),
    ).apply_async()

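The change turns the tail of the completion workflow into a fan-out: once outputs are generated, the threatscore report and the integrations check run in parallel. A minimal standalone sketch of the same shape (assuming the surrounding code builds a Celery chain, which this hunk does not show):

from celery import chain, group

workflow = chain(
    generate_outputs_task.si(
        scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
    ),
    group(
        generate_threatscore_report_task.si(
            tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
        ),
        check_integrations_task.si(
            tenant_id=tenant_id, provider_id=provider_id, scan_id=scan_id
        ),
    ),
)
workflow.apply_async()  # group members start only after the chained task succeeds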
@@ -304,7 +315,7 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):

    frameworks_bulk = Compliance.get_bulk(provider_type)
    frameworks_avail = get_compliance_frameworks(provider_type)
-    out_dir, comp_dir = _generate_output_directory(
+    out_dir, comp_dir, _ = _generate_output_directory(
        DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
    )

@@ -407,7 +418,24 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
        writer._data.clear()

    compressed = _compress_output_files(out_dir)
-    upload_uri = _upload_to_s3(tenant_id, compressed, scan_id)
+
+    upload_uri = _upload_to_s3(
+        tenant_id,
+        scan_id,
+        compressed,
+        os.path.basename(compressed),
+    )
+
+    compliance_dir_path = Path(comp_dir).parent
+    if compliance_dir_path.exists():
+        for artifact_path in sorted(compliance_dir_path.iterdir()):
+            if artifact_path.is_file():
+                _upload_to_s3(
+                    tenant_id,
+                    scan_id,
+                    str(artifact_path),
+                    f"compliance/{artifact_path.name}",
+                )

    # S3 integrations (need output_directory)
    with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
@@ -500,6 +528,24 @@ def check_lighthouse_connection_task(lighthouse_config_id: str, tenant_id: str =
    return check_lighthouse_connection(lighthouse_config_id=lighthouse_config_id)


+@shared_task(base=RLSTask, name="lighthouse-provider-connection-check")
+@set_tenant
+def check_lighthouse_provider_connection_task(
+    provider_config_id: str, tenant_id: str | None = None
+) -> dict:
+    """Task wrapper to validate provider credentials and set is_active."""
+    return check_lighthouse_provider_connection(provider_config_id=provider_config_id)
+
+
+@shared_task(base=RLSTask, name="lighthouse-provider-models-refresh")
+@set_tenant
+def refresh_lighthouse_provider_models_task(
+    provider_config_id: str, tenant_id: str | None = None
+) -> dict:
+    """Task wrapper to refresh provider models catalog for the given configuration."""
+    return refresh_lighthouse_provider_models(provider_config_id=provider_config_id)
+
+
@shared_task(name="integration-check")
def check_integrations_task(tenant_id: str, provider_id: str, scan_id: str = None):
    """
@@ -617,3 +663,21 @@ def jira_integration_task(
    return send_findings_to_jira(
        tenant_id, integration_id, project_key, issue_type, finding_ids
    )
+
+
+@shared_task(
+    base=RLSTask,
+    name="scan-threatscore-report",
+    queue="scan-reports",
+)
+def generate_threatscore_report_task(tenant_id: str, scan_id: str, provider_id: str):
+    """
+    Task to generate a threatscore report for a given scan.
+    Args:
+        tenant_id (str): The tenant identifier.
+        scan_id (str): The scan identifier.
+        provider_id (str): The provider identifier.
+    """
+    return generate_threatscore_report_job(
+        tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
+    )

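The task can also be enqueued on its own, outside the scan-completion workflow; a sketch with hypothetical identifiers:

generate_threatscore_report_task.delay(
    tenant_id="tenant-1234", scan_id="scan-5678", provider_id="provider-42"
)
# Or, inside another canvas, use .si(...) to build an immutable signature.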
@@ -72,17 +72,26 @@ class TestOutputs:
        client_mock = MagicMock()
        mock_get_client.return_value = client_mock

-        result = _upload_to_s3("tenant-id", str(zip_path), "scan-id")
+        result = _upload_to_s3(
+            "tenant-id",
+            "scan-id",
+            str(zip_path),
+            "outputs.zip",
+        )

        expected_uri = "s3://test-bucket/tenant-id/scan-id/outputs.zip"
        assert result == expected_uri
-        assert client_mock.upload_file.call_count == 2
+        client_mock.upload_file.assert_called_once_with(
+            Filename=str(zip_path),
+            Bucket="test-bucket",
+            Key="tenant-id/scan-id/outputs.zip",
+        )

    @patch("tasks.jobs.export.get_s3_client")
    @patch("tasks.jobs.export.base")
    def test_upload_to_s3_missing_bucket(self, mock_base, mock_get_client):
        mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = ""
-        result = _upload_to_s3("tenant", "/tmp/fake.zip", "scan")
+        result = _upload_to_s3("tenant", "scan", "/tmp/fake.zip", "fake.zip")
        assert result is None

    @patch("tasks.jobs.export.get_s3_client")
@@ -101,11 +110,15 @@ class TestOutputs:
        client_mock = MagicMock()
        mock_get_client.return_value = client_mock

-        result = _upload_to_s3("tenant", str(zip_path), "scan")
+        result = _upload_to_s3(
+            "tenant",
+            "scan",
+            str(compliance_dir / "subdir"),
+            "compliance/subdir",
+        )

-        expected_uri = "s3://test-bucket/tenant/scan/results.zip"
-        assert result == expected_uri
-        client_mock.upload_file.assert_called_once()
+        assert result is None
+        client_mock.upload_file.assert_not_called()

    @patch(
        "tasks.jobs.export.get_s3_client",
@@ -126,7 +139,12 @@ class TestOutputs:
        compliance_dir.mkdir()
        (compliance_dir / "report.csv").write_text("csv")

-        _upload_to_s3("tenant", str(zip_path), "scan")
+        _upload_to_s3(
+            "tenant",
+            "scan",
+            str(zip_path),
+            "zipfile.zip",
+        )
        mock_logger.assert_called()

    @patch("tasks.jobs.export.rls_transaction")
@@ -150,15 +168,17 @@ class TestOutputs:
        provider = "aws"
        expected_timestamp = "20230615103045"

-        path, compliance = _generate_output_directory(
+        path, compliance, threatscore = _generate_output_directory(
            base_dir, provider, tenant_id, scan_id
        )

        assert os.path.isdir(os.path.dirname(path))
        assert os.path.isdir(os.path.dirname(compliance))
+        assert os.path.isdir(os.path.dirname(threatscore))

        assert path.endswith(f"{provider}-{expected_timestamp}")
        assert compliance.endswith(f"{provider}-{expected_timestamp}")
+        assert threatscore.endswith(f"{provider}-{expected_timestamp}")

    @patch("tasks.jobs.export.rls_transaction")
    @patch("tasks.jobs.export.Scan")
@@ -181,12 +201,14 @@ class TestOutputs:
        provider = "aws/test@check"
        expected_timestamp = "20230615103045"

-        path, compliance = _generate_output_directory(
+        path, compliance, threatscore = _generate_output_directory(
            base_dir, provider, tenant_id, scan_id
        )

        assert os.path.isdir(os.path.dirname(path))
        assert os.path.isdir(os.path.dirname(compliance))
+        assert os.path.isdir(os.path.dirname(threatscore))

        assert path.endswith(f"aws-test-check-{expected_timestamp}")
        assert compliance.endswith(f"aws-test-check-{expected_timestamp}")
+        assert threatscore.endswith(f"aws-test-check-{expected_timestamp}")

@@ -0,0 +1,963 @@
import uuid
from pathlib import Path
from unittest.mock import MagicMock, patch

import matplotlib
import pytest
from tasks.jobs.report import (
    _aggregate_requirement_statistics_from_database,
    _calculate_requirements_data_from_statistics,
    _load_findings_for_requirement_checks,
    generate_threatscore_report,
    generate_threatscore_report_job,
)
from tasks.tasks import generate_threatscore_report_task

from api.models import Finding, StatusChoices
from prowler.lib.check.models import Severity

matplotlib.use("Agg")  # Use non-interactive backend for tests


@pytest.mark.django_db
class TestGenerateThreatscoreReport:
    def setup_method(self):
        self.scan_id = str(uuid.uuid4())
        self.provider_id = str(uuid.uuid4())
        self.tenant_id = str(uuid.uuid4())

    def test_no_findings_returns_early(self):
        with patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter:
            mock_filter.return_value.exists.return_value = False

            result = generate_threatscore_report_job(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                provider_id=self.provider_id,
            )

            assert result == {"upload": False}
            mock_filter.assert_called_once_with(scan_id=self.scan_id)

    @patch("tasks.jobs.report.rmtree")
    @patch("tasks.jobs.report._upload_to_s3")
    @patch("tasks.jobs.report.generate_threatscore_report")
    @patch("tasks.jobs.report._generate_output_directory")
    @patch("tasks.jobs.report.Provider.objects.get")
    @patch("tasks.jobs.report.ScanSummary.objects.filter")
    def test_generate_threatscore_report_happy_path(
        self,
        mock_scan_summary_filter,
        mock_provider_get,
        mock_generate_output_directory,
        mock_generate_report,
        mock_upload,
        mock_rmtree,
    ):
        mock_scan_summary_filter.return_value.exists.return_value = True

        mock_provider = MagicMock()
        mock_provider.uid = "provider-uid"
        mock_provider.provider = "aws"
        mock_provider_get.return_value = mock_provider

        mock_generate_output_directory.return_value = (
            "/tmp/output",
            "/tmp/compressed",
            "/tmp/threatscore_path",
        )

        mock_upload.return_value = "s3://bucket/threatscore_report.pdf"

        result = generate_threatscore_report_job(
            tenant_id=self.tenant_id,
            scan_id=self.scan_id,
            provider_id=self.provider_id,
        )

        assert result == {"upload": True}
        mock_generate_report.assert_called_once_with(
            tenant_id=self.tenant_id,
            scan_id=self.scan_id,
            compliance_id="prowler_threatscore_aws",
            output_path="/tmp/threatscore_path_threatscore_report.pdf",
            provider_id=self.provider_id,
            only_failed=True,
            min_risk_level=4,
        )
        mock_upload.assert_called_once_with(
            self.tenant_id,
            self.scan_id,
            "/tmp/threatscore_path_threatscore_report.pdf",
            "threatscore/threatscore_path_threatscore_report.pdf",
        )
        mock_rmtree.assert_called_once_with(
            Path("/tmp/threatscore_path_threatscore_report.pdf").parent,
            ignore_errors=True,
        )

    def test_generate_threatscore_report_fails_upload(self):
        with (
            patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
            patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
            patch("tasks.jobs.report.generate_threatscore_report"),
            patch("tasks.jobs.report._upload_to_s3", return_value=None),
        ):
            mock_filter.return_value.exists.return_value = True

            # Mock provider
            mock_provider = MagicMock()
            mock_provider.uid = "aws-provider-uid"
            mock_provider.provider = "aws"
            mock_provider_get.return_value = mock_provider

            mock_gen_dir.return_value = (
                "/tmp/output",
                "/tmp/compressed",
                "/tmp/threatscore_path",
            )

            result = generate_threatscore_report_job(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                provider_id=self.provider_id,
            )

            assert result == {"upload": False}

    def test_generate_threatscore_report_logs_rmtree_exception(self, caplog):
        with (
            patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
            patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
            patch("tasks.jobs.report.generate_threatscore_report"),
            patch(
                "tasks.jobs.report._upload_to_s3", return_value="s3://bucket/report.pdf"
            ),
            patch(
                "tasks.jobs.report.rmtree", side_effect=Exception("Test deletion error")
            ),
        ):
            mock_filter.return_value.exists.return_value = True

            # Mock provider
            mock_provider = MagicMock()
            mock_provider.uid = "aws-provider-uid"
            mock_provider.provider = "aws"
            mock_provider_get.return_value = mock_provider

            mock_gen_dir.return_value = (
                "/tmp/output",
                "/tmp/compressed",
                "/tmp/threatscore_path",
            )

            with caplog.at_level("ERROR"):
                generate_threatscore_report_job(
                    tenant_id=self.tenant_id,
                    scan_id=self.scan_id,
                    provider_id=self.provider_id,
                )
            assert "Error deleting output files" in caplog.text

    def test_generate_threatscore_report_azure_provider(self):
        with (
            patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
            patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
            patch("tasks.jobs.report.generate_threatscore_report") as mock_generate,
            patch(
                "tasks.jobs.report._upload_to_s3", return_value="s3://bucket/report.pdf"
            ),
            patch("tasks.jobs.report.rmtree"),
        ):
            mock_filter.return_value.exists.return_value = True

            mock_provider = MagicMock()
            mock_provider.uid = "azure-provider-uid"
            mock_provider.provider = "azure"
            mock_provider_get.return_value = mock_provider

            mock_gen_dir.return_value = (
                "/tmp/output",
                "/tmp/compressed",
                "/tmp/threatscore_path",
            )

            generate_threatscore_report_job(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                provider_id=self.provider_id,
            )

            mock_generate.assert_called_once_with(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                compliance_id="prowler_threatscore_azure",
                output_path="/tmp/threatscore_path_threatscore_report.pdf",
                provider_id=self.provider_id,
                only_failed=True,
                min_risk_level=4,
            )


@pytest.mark.django_db
class TestAggregateRequirementStatistics:
    """Test suite for _aggregate_requirement_statistics_from_database function."""

    def test_aggregates_findings_correctly(self, tenants_fixture, scans_fixture):
        """Verify correct pass/total counts per check are aggregated from database."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        # Create findings with different check_ids and statuses
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-1",
            check_id="check_1",
            status=StatusChoices.PASS,
            severity=Severity.high,
            impact=Severity.high,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-2",
            check_id="check_1",
            status=StatusChoices.FAIL,
            severity=Severity.high,
            impact=Severity.high,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-3",
            check_id="check_2",
            status=StatusChoices.PASS,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        assert result == {
            "check_1": {"passed": 1, "total": 2},
            "check_2": {"passed": 1, "total": 1},
        }

    def test_handles_empty_scan(self, tenants_fixture, scans_fixture):
        """Return empty dict when no findings exist for the scan."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        assert result == {}

    def test_multiple_findings_same_check(self, tenants_fixture, scans_fixture):
        """Aggregate multiple findings for same check_id correctly."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        # Create 5 findings for same check, 3 passed
        for i in range(3):
            Finding.objects.create(
                tenant_id=tenant.id,
                scan=scan,
                uid=f"finding-pass-{i}",
                check_id="check_same",
                status=StatusChoices.PASS,
                severity=Severity.medium,
                impact=Severity.medium,
                check_metadata={},
                raw_result={},
            )

        for i in range(2):
            Finding.objects.create(
                tenant_id=tenant.id,
                scan=scan,
                uid=f"finding-fail-{i}",
                check_id="check_same",
                status=StatusChoices.FAIL,
                severity=Severity.medium,
                impact=Severity.medium,
                check_metadata={},
                raw_result={},
            )

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        assert result == {"check_same": {"passed": 3, "total": 5}}

    def test_only_failed_findings(self, tenants_fixture, scans_fixture):
        """Correctly count when all findings are FAIL status."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-fail-1",
            check_id="check_fail",
            status=StatusChoices.FAIL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-fail-2",
            check_id="check_fail",
            status=StatusChoices.FAIL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        assert result == {"check_fail": {"passed": 0, "total": 2}}

    def test_mixed_statuses(self, tenants_fixture, scans_fixture):
        """Test with PASS, FAIL, and MANUAL statuses mixed."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-pass",
            check_id="check_mixed",
            status=StatusChoices.PASS,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-fail",
            check_id="check_mixed",
            status=StatusChoices.FAIL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-manual",
            check_id="check_mixed",
            status=StatusChoices.MANUAL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        # Only PASS status is counted as passed
        assert result == {"check_mixed": {"passed": 1, "total": 3}}

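The counts these tests assert could be produced with a single grouped query; one plausible shape for the helper (an assumption on our part, since the diff for the report module itself is suppressed above as too large):

from django.db.models import Count, Q

def aggregate_statistics(tenant_id: str, scan_id: str) -> dict:
    # Group findings by check_id and count total vs PASS rows in one query.
    rows = (
        Finding.objects.filter(tenant_id=tenant_id, scan_id=scan_id)
        .values("check_id")
        .annotate(
            total=Count("id"),
            passed=Count("id", filter=Q(status=StatusChoices.PASS)),
        )
    )
    return {r["check_id"]: {"passed": r["passed"], "total": r["total"]} for r in rows}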
@pytest.mark.django_db
class TestLoadFindingsForChecks:
    """Test suite for _load_findings_for_requirement_checks function."""

    def test_loads_only_requested_checks(
        self, tenants_fixture, scans_fixture, providers_fixture
    ):
        """Verify only findings for specified check_ids are loaded."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        providers_fixture[0]

        # Create findings with different check_ids
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-1",
            check_id="check_requested",
            status=StatusChoices.PASS,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-2",
            check_id="check_not_requested",
            status=StatusChoices.FAIL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        mock_provider = MagicMock()

        with patch(
            "tasks.jobs.report.FindingOutput.transform_api_finding"
        ) as mock_transform:
            mock_finding_output = MagicMock()
            mock_finding_output.check_id = "check_requested"
            mock_transform.return_value = mock_finding_output

            result = _load_findings_for_requirement_checks(
                str(tenant.id), str(scan.id), ["check_requested"], mock_provider
            )

            # Only one finding should be loaded
            assert "check_requested" in result
            assert "check_not_requested" not in result
            assert len(result["check_requested"]) == 1
            assert mock_transform.call_count == 1

    def test_empty_check_ids_returns_empty(
        self, tenants_fixture, scans_fixture, providers_fixture
    ):
        """Return empty dict when check_ids list is empty."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        mock_provider = MagicMock()

        result = _load_findings_for_requirement_checks(
            str(tenant.id), str(scan.id), [], mock_provider
        )

        assert result == {}

    def test_groups_by_check_id(
        self, tenants_fixture, scans_fixture, providers_fixture
    ):
        """Multiple findings for same check are grouped correctly."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        # Create multiple findings for same check
        for i in range(3):
            Finding.objects.create(
                tenant_id=tenant.id,
                scan=scan,
                uid=f"finding-{i}",
                check_id="check_group",
                status=StatusChoices.PASS,
                severity=Severity.medium,
                impact=Severity.medium,
                check_metadata={},
                raw_result={},
            )

        mock_provider = MagicMock()

        with patch(
            "tasks.jobs.report.FindingOutput.transform_api_finding"
        ) as mock_transform:
            mock_finding_output = MagicMock()
            mock_finding_output.check_id = "check_group"
            mock_transform.return_value = mock_finding_output

            result = _load_findings_for_requirement_checks(
                str(tenant.id), str(scan.id), ["check_group"], mock_provider
            )

            assert len(result["check_group"]) == 3

    def test_transforms_to_finding_output(
        self, tenants_fixture, scans_fixture, providers_fixture
    ):
        """Findings are transformed using FindingOutput.transform_api_finding."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-transform",
            check_id="check_transform",
            status=StatusChoices.PASS,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        mock_provider = MagicMock()

        with patch(
            "tasks.jobs.report.FindingOutput.transform_api_finding"
        ) as mock_transform:
            mock_finding_output = MagicMock()
            mock_finding_output.check_id = "check_transform"
            mock_transform.return_value = mock_finding_output

            result = _load_findings_for_requirement_checks(
                str(tenant.id), str(scan.id), ["check_transform"], mock_provider
            )

            # Verify transform was called
            mock_transform.assert_called_once()
            # Verify the transformed output is in the result
            assert result["check_transform"][0] == mock_finding_output

    def test_batched_iteration(self, tenants_fixture, scans_fixture, providers_fixture):
        """Works correctly with multiple batches of findings."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        # Create enough findings to ensure batching (assuming batch size > 1)
        for i in range(10):
            Finding.objects.create(
                tenant_id=tenant.id,
                scan=scan,
                uid=f"finding-batch-{i}",
                check_id="check_batch",
                status=StatusChoices.PASS,
                severity=Severity.medium,
                impact=Severity.medium,
                check_metadata={},
                raw_result={},
            )

        mock_provider = MagicMock()

        with patch(
            "tasks.jobs.report.FindingOutput.transform_api_finding"
        ) as mock_transform:
            mock_finding_output = MagicMock()
            mock_finding_output.check_id = "check_batch"
            mock_transform.return_value = mock_finding_output

            result = _load_findings_for_requirement_checks(
                str(tenant.id), str(scan.id), ["check_batch"], mock_provider
            )

            # All 10 findings should be loaded regardless of batching
            assert len(result["check_batch"]) == 10
            assert mock_transform.call_count == 10


@pytest.mark.django_db
class TestCalculateRequirementsData:
    """Test suite for _calculate_requirements_data_from_statistics function."""

    def test_requirement_status_all_pass(self):
        """Status is PASS when all findings for requirement checks pass."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_requirement = MagicMock()
        mock_requirement.Id = "req_1"
        mock_requirement.Description = "Test requirement"
        mock_requirement.Checks = ["check_1", "check_2"]
        mock_requirement.Attributes = [MagicMock()]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {
            "check_1": {"passed": 5, "total": 5},
            "check_2": {"passed": 3, "total": 3},
        }

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        assert len(requirements_list) == 1
        assert requirements_list[0]["attributes"]["status"] == StatusChoices.PASS
        assert requirements_list[0]["attributes"]["passed_findings"] == 8
        assert requirements_list[0]["attributes"]["total_findings"] == 8

    def test_requirement_status_some_fail(self):
        """Status is FAIL when some findings fail."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_requirement = MagicMock()
        mock_requirement.Id = "req_2"
        mock_requirement.Description = "Test requirement with failures"
        mock_requirement.Checks = ["check_3"]
        mock_requirement.Attributes = [MagicMock()]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {
            "check_3": {"passed": 2, "total": 5},
        }

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        assert len(requirements_list) == 1
        assert requirements_list[0]["attributes"]["status"] == StatusChoices.FAIL
        assert requirements_list[0]["attributes"]["passed_findings"] == 2
        assert requirements_list[0]["attributes"]["total_findings"] == 5

    def test_requirement_status_no_findings(self):
        """Status is MANUAL when no findings exist for requirement."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_requirement = MagicMock()
        mock_requirement.Id = "req_3"
        mock_requirement.Description = "Manual requirement"
        mock_requirement.Checks = ["check_nonexistent"]
        mock_requirement.Attributes = [MagicMock()]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {}

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        assert len(requirements_list) == 1
        assert requirements_list[0]["attributes"]["status"] == StatusChoices.MANUAL
        assert requirements_list[0]["attributes"]["passed_findings"] == 0
        assert requirements_list[0]["attributes"]["total_findings"] == 0

    def test_aggregates_multiple_checks(self):
        """Correctly sum stats across multiple checks in requirement."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_requirement = MagicMock()
        mock_requirement.Id = "req_4"
        mock_requirement.Description = "Multi-check requirement"
        mock_requirement.Checks = ["check_a", "check_b", "check_c"]
        mock_requirement.Attributes = [MagicMock()]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {
            "check_a": {"passed": 10, "total": 15},
            "check_b": {"passed": 5, "total": 10},
            "check_c": {"passed": 0, "total": 5},
        }

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        assert len(requirements_list) == 1
        # 10 + 5 + 0 = 15 passed
        assert requirements_list[0]["attributes"]["passed_findings"] == 15
        # 15 + 10 + 5 = 30 total
        assert requirements_list[0]["attributes"]["total_findings"] == 30
        # Not all passed, so should be FAIL
        assert requirements_list[0]["attributes"]["status"] == StatusChoices.FAIL

    def test_returns_correct_structure(self):
        """Verify tuple structure and dict keys are correct."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_attribute = MagicMock()
        mock_requirement = MagicMock()
        mock_requirement.Id = "req_5"
        mock_requirement.Description = "Structure test"
        mock_requirement.Checks = ["check_struct"]
        mock_requirement.Attributes = [mock_attribute]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {"check_struct": {"passed": 1, "total": 1}}

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        # Verify attributes_by_id structure
        assert "req_5" in attributes_by_id
        assert "attributes" in attributes_by_id["req_5"]
        assert "description" in attributes_by_id["req_5"]
        assert "req_attributes" in attributes_by_id["req_5"]["attributes"]
        assert "checks" in attributes_by_id["req_5"]["attributes"]

        # Verify requirements_list structure
        assert len(requirements_list) == 1
        req = requirements_list[0]
        assert "id" in req
        assert "attributes" in req
        assert "framework" in req["attributes"]
        assert "version" in req["attributes"]
        assert "status" in req["attributes"]
        assert "description" in req["attributes"]
        assert "passed_findings" in req["attributes"]
        assert "total_findings" in req["attributes"]


@pytest.mark.django_db
class TestGenerateThreatscoreReportFunction:
    def setup_method(self):
        self.scan_id = str(uuid.uuid4())
        self.provider_id = str(uuid.uuid4())
        self.tenant_id = str(uuid.uuid4())
        self.compliance_id = "prowler_threatscore_aws"
        self.output_path = "/tmp/test_threatscore_report.pdf"

    @patch("tasks.jobs.report.initialize_prowler_provider")
    @patch("tasks.jobs.report.Provider.objects.get")
    @patch("tasks.jobs.report.Compliance.get_bulk")
    @patch("tasks.jobs.report._aggregate_requirement_statistics_from_database")
    @patch("tasks.jobs.report._calculate_requirements_data_from_statistics")
    @patch("tasks.jobs.report._load_findings_for_requirement_checks")
    @patch("tasks.jobs.report.SimpleDocTemplate")
    @patch("tasks.jobs.report.Image")
    @patch("tasks.jobs.report.Spacer")
    @patch("tasks.jobs.report.Paragraph")
    @patch("tasks.jobs.report.PageBreak")
    @patch("tasks.jobs.report.Table")
    @patch("tasks.jobs.report.TableStyle")
    @patch("tasks.jobs.report.plt.subplots")
    @patch("tasks.jobs.report.plt.savefig")
    @patch("tasks.jobs.report.io.BytesIO")
    def test_generate_threatscore_report_success(
        self,
        mock_bytesio,
        mock_savefig,
        mock_subplots,
        mock_table_style,
        mock_table,
        mock_page_break,
        mock_paragraph,
        mock_spacer,
        mock_image,
        mock_doc_template,
        mock_load_findings,
        mock_calculate_requirements,
        mock_aggregate_statistics,
        mock_compliance_get_bulk,
        mock_provider_get,
        mock_initialize_provider,
    ):
        """Test the updated generate_threatscore_report using new memory-efficient architecture."""
        mock_provider = MagicMock()
        mock_provider.provider = "aws"
        mock_provider_get.return_value = mock_provider

        prowler_provider = MagicMock()
        mock_initialize_provider.return_value = prowler_provider

        # Mock compliance object with requirements
        mock_compliance_obj = MagicMock()
        mock_compliance_obj.Framework = "ProwlerThreatScore"
        mock_compliance_obj.Version = "1.0"
        mock_compliance_obj.Description = "Test Description"

        # Configure requirement with properly set numeric attributes for chart generation
        mock_requirement = MagicMock()
        mock_requirement.Id = "req_1"
        mock_requirement.Description = "Test requirement"
        mock_requirement.Checks = ["check_1"]

        # Create a properly configured attribute mock with numeric values
        mock_requirement_attr = MagicMock()
        mock_requirement_attr.Section = "1. IAM"
        mock_requirement_attr.SubSection = "1.1 Identity"
        mock_requirement_attr.Title = "Test Requirement Title"
        mock_requirement_attr.LevelOfRisk = 3
        mock_requirement_attr.Weight = 100
        mock_requirement_attr.AttributeDescription = "Test requirement description"
        mock_requirement_attr.AdditionalInformation = "Additional test information"

        mock_requirement.Attributes = [mock_requirement_attr]
        mock_compliance_obj.Requirements = [mock_requirement]

        mock_compliance_get_bulk.return_value = {
            self.compliance_id: mock_compliance_obj
        }

        # Mock the aggregated statistics from database
        mock_aggregate_statistics.return_value = {"check_1": {"passed": 5, "total": 10}}

        # Mock the calculated requirements data with properly configured attributes
        mock_attributes_by_id = {
            "req_1": {
                "attributes": {
                    "req_attributes": [mock_requirement_attr],
                    "checks": ["check_1"],
                },
                "description": "Test requirement",
|
||||
}
|
||||
}
|
||||
mock_requirements_list = [
|
||||
{
|
||||
"id": "req_1",
|
||||
"attributes": {
|
||||
"framework": "ProwlerThreatScore",
|
||||
"version": "1.0",
|
||||
"status": StatusChoices.FAIL,
|
||||
"description": "Test requirement",
|
||||
"passed_findings": 5,
|
||||
"total_findings": 10,
|
||||
},
|
||||
}
|
||||
]
|
||||
mock_calculate_requirements.return_value = (
|
||||
mock_attributes_by_id,
|
||||
mock_requirements_list,
|
||||
)
|
||||
|
||||
# Mock the on-demand loaded findings
|
||||
mock_finding_output = MagicMock()
|
||||
mock_finding_output.check_id = "check_1"
|
||||
mock_finding_output.status = "FAIL"
|
||||
mock_finding_output.metadata = MagicMock()
|
||||
mock_finding_output.metadata.CheckTitle = "Test Check"
|
||||
mock_finding_output.metadata.Severity = "HIGH"
|
||||
mock_finding_output.resource_name = "test-resource"
|
||||
mock_finding_output.region = "us-east-1"
|
||||
|
||||
mock_load_findings.return_value = {"check_1": [mock_finding_output]}
|
||||
|
||||
# Mock PDF generation components
|
||||
mock_doc = MagicMock()
|
||||
mock_doc_template.return_value = mock_doc
|
||||
|
||||
mock_fig, mock_ax = MagicMock(), MagicMock()
|
||||
mock_subplots.return_value = (mock_fig, mock_ax)
|
||||
mock_buffer = MagicMock()
|
||||
mock_bytesio.return_value = mock_buffer
|
||||
|
||||
mock_image.return_value = MagicMock()
|
||||
mock_spacer.return_value = MagicMock()
|
||||
mock_paragraph.return_value = MagicMock()
|
||||
mock_page_break.return_value = MagicMock()
|
||||
mock_table.return_value = MagicMock()
|
||||
mock_table_style.return_value = MagicMock()
|
||||
|
||||
# Execute the function
|
||||
generate_threatscore_report(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
compliance_id=self.compliance_id,
|
||||
output_path=self.output_path,
|
||||
provider_id=self.provider_id,
|
||||
only_failed=True,
|
||||
min_risk_level=4,
|
||||
)
|
||||
|
||||
# Verify the new workflow was followed
|
||||
mock_provider_get.assert_called_once_with(id=self.provider_id)
|
||||
mock_initialize_provider.assert_called_once_with(mock_provider)
|
||||
mock_compliance_get_bulk.assert_called_once_with("aws")
|
||||
|
||||
# Verify the new functions were called in correct order with correct parameters
|
||||
mock_aggregate_statistics.assert_called_once_with(self.tenant_id, self.scan_id)
|
||||
mock_calculate_requirements.assert_called_once_with(
|
||||
mock_compliance_obj, {"check_1": {"passed": 5, "total": 10}}
|
||||
)
|
||||
mock_load_findings.assert_called_once_with(
|
||||
self.tenant_id, self.scan_id, ["check_1"], prowler_provider
|
||||
)
|
||||
|
||||
# Verify PDF was built
|
||||
mock_doc_template.assert_called_once()
|
||||
mock_doc.build.assert_called_once()
|
||||
|
||||
@patch("tasks.jobs.report.initialize_prowler_provider")
|
||||
@patch("tasks.jobs.report.Provider.objects.get")
|
||||
@patch("tasks.jobs.report.Compliance.get_bulk")
|
||||
@patch("tasks.jobs.report.Finding.all_objects.filter")
|
||||
def test_generate_threatscore_report_exception_handling(
|
||||
self,
|
||||
mock_finding_filter,
|
||||
mock_compliance_get_bulk,
|
||||
mock_provider_get,
|
||||
mock_initialize_provider,
|
||||
):
|
||||
mock_provider_get.side_effect = Exception("Provider not found")
|
||||
|
||||
with pytest.raises(Exception, match="Provider not found"):
|
||||
generate_threatscore_report(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
compliance_id=self.compliance_id,
|
||||
output_path=self.output_path,
|
||||
provider_id=self.provider_id,
|
||||
only_failed=True,
|
||||
min_risk_level=4,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestGenerateThreatscoreReportTask:
|
||||
def setup_method(self):
|
||||
self.scan_id = str(uuid.uuid4())
|
||||
self.provider_id = str(uuid.uuid4())
|
||||
self.tenant_id = str(uuid.uuid4())
|
||||
|
||||
@patch("tasks.tasks.generate_threatscore_report_job")
|
||||
def test_generate_threatscore_report_task_calls_job(self, mock_generate_job):
|
||||
mock_generate_job.return_value = {"upload": True}
|
||||
|
||||
result = generate_threatscore_report_task(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
|
||||
assert result == {"upload": True}
|
||||
mock_generate_job.assert_called_once_with(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
|
||||
@patch("tasks.tasks.generate_threatscore_report_job")
|
||||
def test_generate_threatscore_report_task_handles_job_exception(
|
||||
self, mock_generate_job
|
||||
):
|
||||
mock_generate_job.side_effect = Exception("Job failed")
|
||||
|
||||
with pytest.raises(Exception, match="Job failed"):
|
||||
generate_threatscore_report_task(
|
||||
tenant_id=self.tenant_id,
|
||||
scan_id=self.scan_id,
|
||||
provider_id=self.provider_id,
|
||||
)
|
||||
@@ -98,7 +98,11 @@ class TestGenerateOutputs:
            ),
            patch(
                "tasks.tasks._generate_output_directory",
-               return_value=("out-dir", "comp-dir"),
+               return_value=(
+                   "/tmp/test/out-dir",
+                   "/tmp/test/comp-dir",
+                   "/tmp/test/threat-dir",
+               ),
            ),
            patch("tasks.tasks.Scan.all_objects.filter") as mock_scan_update,
            patch("tasks.tasks.rmtree"),
@@ -126,7 +130,8 @@ class TestGenerateOutputs:
            patch("tasks.tasks.get_compliance_frameworks"),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
-               "tasks.tasks._generate_output_directory", return_value=("out", "comp")
+               "tasks.tasks._generate_output_directory",
+               return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
            ),
            patch("tasks.tasks.FindingOutput._transform_findings_stats"),
            patch("tasks.tasks.FindingOutput.transform_api_finding"),
@@ -168,15 +173,35 @@ class TestGenerateOutputs:
        mock_finding_output = MagicMock()
        mock_finding_output.compliance = {"cis": ["requirement-1", "requirement-2"]}

+       html_writer_mock = MagicMock()
+       html_writer_mock._data = []
+       html_writer_mock.close_file = False
+       html_writer_mock.transform = MagicMock()
+       html_writer_mock.batch_write_data_to_file = MagicMock()
+
+       compliance_writer_mock = MagicMock()
+       compliance_writer_mock._data = []
+       compliance_writer_mock.close_file = False
+       compliance_writer_mock.transform = MagicMock()
+       compliance_writer_mock.batch_write_data_to_file = MagicMock()
+
+       # Create a mock class that returns our mock instance when called
+       mock_compliance_class = MagicMock(return_value=compliance_writer_mock)
+
+       mock_provider = MagicMock()
+       mock_provider.provider = "aws"
+       mock_provider.uid = "test-provider-uid"
+
        with (
            patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter,
-           patch("tasks.tasks.Provider.objects.get"),
+           patch("tasks.tasks.Provider.objects.get", return_value=mock_provider),
            patch("tasks.tasks.initialize_prowler_provider"),
            patch("tasks.tasks.Compliance.get_bulk", return_value={"cis": MagicMock()}),
            patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
-               "tasks.tasks._generate_output_directory", return_value=("out", "comp")
+               "tasks.tasks._generate_output_directory",
+               return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
            ),
            patch(
                "tasks.tasks.FindingOutput._transform_findings_stats",
@@ -190,6 +215,20 @@ class TestGenerateOutputs:
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/f.zip"),
            patch("tasks.tasks.Scan.all_objects.filter"),
            patch("tasks.tasks.rmtree"),
+           patch(
+               "tasks.tasks.OUTPUT_FORMATS_MAPPING",
+               {
+                   "html": {
+                       "class": lambda *args, **kwargs: html_writer_mock,
+                       "suffix": ".html",
+                       "kwargs": {},
+                   }
+               },
+           ),
+           patch(
+               "tasks.tasks.COMPLIANCE_CLASS_MAP",
+               {"aws": [(lambda x: True, mock_compliance_class)]},
+           ),
        ):
            mock_filter.return_value.exists.return_value = True
            mock_findings.return_value.order_by.return_value.iterator.return_value = [
@@ -197,29 +236,12 @@ class TestGenerateOutputs:
                True,
            ]

-           html_writer_mock = MagicMock()
-           with (
-               patch(
-                   "tasks.tasks.OUTPUT_FORMATS_MAPPING",
-                   {
-                       "html": {
-                           "class": lambda *args, **kwargs: html_writer_mock,
-                           "suffix": ".html",
-                           "kwargs": {},
-                       }
-                   },
-               ),
-               patch(
-                   "tasks.tasks.COMPLIANCE_CLASS_MAP",
-                   {"aws": [(lambda x: True, MagicMock())]},
-               ),
-           ):
-               generate_outputs_task(
-                   scan_id=self.scan_id,
-                   provider_id=self.provider_id,
-                   tenant_id=self.tenant_id,
-               )
-               html_writer_mock.batch_write_data_to_file.assert_called_once()
+           generate_outputs_task(
+               scan_id=self.scan_id,
+               provider_id=self.provider_id,
+               tenant_id=self.tenant_id,
+           )
+           html_writer_mock.batch_write_data_to_file.assert_called_once()

    def test_transform_called_only_on_second_batch(self):
        raw1 = MagicMock()
@@ -256,7 +278,11 @@ class TestGenerateOutputs:
            ),
            patch(
                "tasks.tasks._generate_output_directory",
-               return_value=("outdir", "compdir"),
+               return_value=(
+                   "/tmp/test/outdir",
+                   "/tmp/test/compdir",
+                   "/tmp/test/threatdir",
+               ),
            ),
            patch("tasks.tasks._compress_output_files", return_value="outdir.zip"),
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/outdir.zip"),
@@ -303,12 +329,14 @@ class TestGenerateOutputs:
            def __init__(self, *args, **kwargs):
                self.transform_calls = []
+               self._data = []
+               self.close_file = False
                writer_instances.append(self)

            def transform(self, fos, comp_obj, name):
                self.transform_calls.append((fos, comp_obj, name))

            def batch_write_data_to_file(self):
                # Mock implementation - do nothing
                pass

        two_batches = [
@@ -329,7 +357,11 @@ class TestGenerateOutputs:
            patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
            patch(
                "tasks.tasks._generate_output_directory",
-               return_value=("outdir", "compdir"),
+               return_value=(
+                   "/tmp/test/outdir",
+                   "/tmp/test/compdir",
+                   "/tmp/test/threatdir",
+               ),
            ),
            patch("tasks.tasks.FindingOutput._transform_findings_stats"),
            patch(
@@ -368,15 +400,35 @@ class TestGenerateOutputs:
        mock_finding_output = MagicMock()
        mock_finding_output.compliance = {"cis": ["requirement-1", "requirement-2"]}

+       json_writer_mock = MagicMock()
+       json_writer_mock._data = []
+       json_writer_mock.close_file = False
+       json_writer_mock.transform = MagicMock()
+       json_writer_mock.batch_write_data_to_file = MagicMock()
+
+       compliance_writer_mock = MagicMock()
+       compliance_writer_mock._data = []
+       compliance_writer_mock.close_file = False
+       compliance_writer_mock.transform = MagicMock()
+       compliance_writer_mock.batch_write_data_to_file = MagicMock()
+
+       # Create a mock class that returns our mock instance when called
+       mock_compliance_class = MagicMock(return_value=compliance_writer_mock)
+
+       mock_provider = MagicMock()
+       mock_provider.provider = "aws"
+       mock_provider.uid = "test-provider-uid"
+
        with (
            patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter,
-           patch("tasks.tasks.Provider.objects.get"),
+           patch("tasks.tasks.Provider.objects.get", return_value=mock_provider),
            patch("tasks.tasks.initialize_prowler_provider"),
            patch("tasks.tasks.Compliance.get_bulk", return_value={"cis": MagicMock()}),
            patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
-               "tasks.tasks._generate_output_directory", return_value=("out", "comp")
+               "tasks.tasks._generate_output_directory",
+               return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
            ),
            patch(
                "tasks.tasks.FindingOutput._transform_findings_stats",
@@ -390,6 +442,20 @@ class TestGenerateOutputs:
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/file.zip"),
            patch("tasks.tasks.Scan.all_objects.filter"),
            patch("tasks.tasks.rmtree", side_effect=Exception("Test deletion error")),
+           patch(
+               "tasks.tasks.OUTPUT_FORMATS_MAPPING",
+               {
+                   "json": {
+                       "class": lambda *args, **kwargs: json_writer_mock,
+                       "suffix": ".json",
+                       "kwargs": {},
+                   }
+               },
+           ),
+           patch(
+               "tasks.tasks.COMPLIANCE_CLASS_MAP",
+               {"aws": [(lambda x: True, mock_compliance_class)]},
+           ),
        ):
            mock_filter.return_value.exists.return_value = True
            mock_findings.return_value.order_by.return_value.iterator.return_value = [
@@ -397,29 +463,13 @@ class TestGenerateOutputs:
                True,
            ]

-           with (
-               patch(
-                   "tasks.tasks.OUTPUT_FORMATS_MAPPING",
-                   {
-                       "json": {
-                           "class": lambda *args, **kwargs: MagicMock(),
-                           "suffix": ".json",
-                           "kwargs": {},
-                       }
-                   },
-               ),
-               patch(
-                   "tasks.tasks.COMPLIANCE_CLASS_MAP",
-                   {"aws": [(lambda x: True, MagicMock())]},
-               ),
-           ):
-               with caplog.at_level("ERROR"):
-                   generate_outputs_task(
-                       scan_id=self.scan_id,
-                       provider_id=self.provider_id,
-                       tenant_id=self.tenant_id,
-                   )
-                   assert "Error deleting output files" in caplog.text
+           with caplog.at_level("ERROR"):
+               generate_outputs_task(
+                   scan_id=self.scan_id,
+                   provider_id=self.provider_id,
+                   tenant_id=self.tenant_id,
+               )
+               assert "Error deleting output files" in caplog.text

    @patch("tasks.tasks.rls_transaction")
    @patch("tasks.tasks.Integration.objects.filter")
@@ -435,7 +485,8 @@ class TestGenerateOutputs:
            patch("tasks.tasks.get_compliance_frameworks", return_value=[]),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
-               "tasks.tasks._generate_output_directory", return_value=("out", "comp")
+               "tasks.tasks._generate_output_directory",
+               return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
            ),
            patch("tasks.tasks.FindingOutput._transform_findings_stats"),
            patch("tasks.tasks.FindingOutput.transform_api_finding"),
@@ -476,8 +527,15 @@ class TestScanCompleteTasks:
    @patch("tasks.tasks.create_compliance_requirements_task.apply_async")
    @patch("tasks.tasks.perform_scan_summary_task.si")
    @patch("tasks.tasks.generate_outputs_task.si")
+   @patch("tasks.tasks.generate_threatscore_report_task.si")
+   @patch("tasks.tasks.check_integrations_task.si")
    def test_scan_complete_tasks(
-       self, mock_outputs_task, mock_scan_summary_task, mock_compliance_tasks
+       self,
+       mock_check_integrations_task,
+       mock_threatscore_task,
+       mock_outputs_task,
+       mock_scan_summary_task,
+       mock_compliance_tasks,
    ):
        _perform_scan_complete_tasks("tenant-id", "scan-id", "provider-id")
        mock_compliance_tasks.assert_called_once_with(
@@ -492,6 +550,16 @@ class TestScanCompleteTasks:
            provider_id="provider-id",
            tenant_id="tenant-id",
        )
+       mock_threatscore_task.assert_called_once_with(
+           tenant_id="tenant-id",
+           scan_id="scan-id",
+           provider_id="provider-id",
+       )
+       mock_check_integrations_task.assert_called_once_with(
+           tenant_id="tenant-id",
+           provider_id="provider-id",
+           scan_id="scan-id",
+       )


@pytest.mark.django_db
@@ -662,7 +730,7 @@ class TestCheckIntegrationsTask:
        mock_initialize_provider.return_value = MagicMock()
        mock_compliance_bulk.return_value = {}
        mock_get_frameworks.return_value = []
-       mock_generate_dir.return_value = ("out-dir", "comp-dir")
+       mock_generate_dir.return_value = ("out-dir", "comp-dir", "threat-dir")
        mock_transform_stats.return_value = {"stats": "data"}

        # Mock findings
@@ -787,7 +855,7 @@ class TestCheckIntegrationsTask:
        mock_initialize_provider.return_value = MagicMock()
        mock_compliance_bulk.return_value = {}
        mock_get_frameworks.return_value = []
-       mock_generate_dir.return_value = ("out-dir", "comp-dir")
+       mock_generate_dir.return_value = ("out-dir", "comp-dir", "threat-dir")
        mock_transform_stats.return_value = {"stats": "data"}

        # Mock findings
@@ -903,7 +971,7 @@ class TestCheckIntegrationsTask:
        mock_initialize_provider.return_value = MagicMock()
        mock_compliance_bulk.return_value = {}
        mock_get_frameworks.return_value = []
-       mock_generate_dir.return_value = ("out-dir", "comp-dir")
+       mock_generate_dir.return_value = ("out-dir", "comp-dir", "threat-dir")
        mock_transform_stats.return_value = {"stats": "data"}

        # Mock findings
+3377
-95
File diff suppressed because it is too large
@@ -5,8 +5,7 @@ title: 'Prowler Services'
Here you can find how to create a new service, or to complement an existing one, for a [Prowler Provider](/developer-guide/provider).

<Note>
-First ensure that the provider you want to add the service is already created. It can be checked [here](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers). If the provider is not present, please refer to the [Provider](/developer-guide/provider) documentation to create it from scratch.
-
+First ensure that the provider you want to add the service is already created. It can be checked [here](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers). If the provider is not present, please refer to the [Provider](./provider.md) documentation to create it from scratch.
</Note>
## Introduction

@@ -201,11 +200,11 @@ class <Item>(BaseModel):

#### Service Attributes

-*Optimized Data Storage with Python Dictionaries*
+_Optimized Data Storage with Python Dictionaries_

Each group of resources within a service should be structured as a Python [dictionary](https://docs.python.org/3/tutorial/datastructures.html#dictionaries) to enable efficient lookups. The dictionary lookup operation has [O(1) complexity](https://en.wikipedia.org/wiki/Big_O_notation#Orders_of_common_functions), and lookups are constantly executed.

-*Assigning Unique Identifiers*
+_Assigning Unique Identifiers_

Each dictionary key must be a unique ID to identify the resource in a univocal way.

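As a minimal illustration of this pattern (all names here are hypothetical, not taken from the Prowler codebase):

```python
# Hypothetical service fragment: resources are stored in a dictionary
# keyed by a unique identifier (for AWS, typically the ARN), so checks
# can test membership and fetch a resource in O(1).
class ExampleService:
    def __init__(self):
        self.buckets = {}  # unique resource ID -> resource data

    def _list_buckets(self):
        for arn, name in [
            ("arn:aws:s3:::bucket-a", "bucket-a"),
            ("arn:aws:s3:::bucket-b", "bucket-b"),
        ]:
            self.buckets[arn] = {"name": name}


service = ExampleService()
service._list_buckets()
# Constant-time lookup by unique ID
print("arn:aws:s3:::bucket-a" in service.buckets)  # True
```
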
@@ -241,6 +240,301 @@ Provider-Specific Permissions Documentation:
- [M365](/user-guide/providers/microsoft365/authentication#required-permissions)
- [GitHub](/user-guide/providers/github/authentication)

## Service Architecture and Cross-Service Communication

### Core Principle: Service Isolation with Client Communication

Each service must contain **ONLY** the information unique to that specific service. When a check requires information from multiple services, it must use the **client objects** of other services rather than directly accessing their data structures.

This architecture ensures:

- **Loose coupling** between services
- **Clear separation of concerns**
- **Maintainable and testable code**
- **Consistent data access patterns**

### Cross-Service Communication Pattern

Instead of services directly accessing each other's internal data, checks should import and use client objects:

**❌ INCORRECT - Direct data access:**

```python
# DON'T DO THIS
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import cloudtrail_service
from prowler.providers.aws.services.s3.s3_service import s3_service

class cloudtrail_bucket_requires_mfa_delete(Check):
    def execute(self):
        # WRONG: Directly accessing service data
        for trail in cloudtrail_service.trails.values():
            for bucket in s3_service.buckets.values():
                # Direct access violates separation of concerns
```

**✅ CORRECT - Client-based communication:**

```python
# DO THIS INSTEAD
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import cloudtrail_client
from prowler.providers.aws.services.s3.s3_client import s3_client

class cloudtrail_bucket_requires_mfa_delete(Check):
    def execute(self):
        # CORRECT: Using client objects for cross-service communication
        for trail in cloudtrail_client.trails.values():
            trail_bucket = trail.s3_bucket
            for bucket in s3_client.buckets.values():
                if trail_bucket == bucket.name:
                    # Use bucket properties through s3_client
                    if bucket.mfa_delete:
                        # Implementation logic
```

### Real-World Example: CloudTrail + S3 Integration

This example demonstrates how CloudTrail checks validate S3 bucket configurations:

```python
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import cloudtrail_client
from prowler.providers.aws.services.s3.s3_client import s3_client

class cloudtrail_bucket_requires_mfa_delete(Check):
    def execute(self):
        findings = []
        if cloudtrail_client.trails is not None:
            for trail in cloudtrail_client.trails.values():
                if trail.is_logging:
                    trail_bucket_is_in_account = False
                    trail_bucket = trail.s3_bucket
                    # Create the report for this trail
                    report = Check_Report_AWS(metadata=self.metadata(), resource=trail)

                    # Cross-service communication: CloudTrail check uses S3 client
                    for bucket in s3_client.buckets.values():
                        if trail_bucket == bucket.name:
                            trail_bucket_is_in_account = True
                            if bucket.mfa_delete:
                                report.status = "PASS"
                                report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) has MFA delete enabled."

                    # Handle cross-account scenarios
                    if not trail_bucket_is_in_account:
                        report.status = "MANUAL"
                        report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket or out of Prowler's audit scope, please check it manually."

                    findings.append(report)
        return findings
```

**Key Benefits:**

- **CloudTrail service** only contains CloudTrail-specific data (trails, configurations)
- **S3 service** only contains S3-specific data (buckets, policies, ACLs)
- **Check logic** orchestrates between services using their public client interfaces
- **Cross-account detection** is handled gracefully when resources span accounts

### Service Consolidation Guidelines

**When to combine services in the same file:**

Implement multiple services as **separate classes in the same file** when two services are **practically the same** or one is a **direct extension** of another.

**Example: S3 and S3Control**

S3Control is an extension of S3 that provides account-level controls and access points. Both are implemented in `s3_service.py`:

```python
# File: prowler/providers/aws/services/s3/s3_service.py

class S3(AWSService):
    """Standard S3 service for bucket operations"""
    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        self.buckets = {}
        self.regions_with_buckets = []

        # S3-specific initialization
        self._list_buckets(provider)
        self._get_bucket_versioning()
        # ... other S3-specific operations

class S3Control(AWSService):
    """S3Control service for account-level and access point operations"""
    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        self.account_public_access_block = None
        self.access_points = {}

        # S3Control-specific initialization
        self._get_public_access_block()
        self._list_access_points()
        # ... other S3Control-specific operations
```

**Separate client files:**

```python
# File: prowler/providers/aws/services/s3/s3_client.py
from prowler.providers.aws.services.s3.s3_service import S3
s3_client = S3(Provider.get_global_provider())

# File: prowler/providers/aws/services/s3/s3control_client.py
from prowler.providers.aws.services.s3.s3_service import S3Control
s3control_client = S3Control(Provider.get_global_provider())
```

**When NOT to consolidate services:**

Keep services separate when they:

- **Operate on different resource types** (EC2 vs RDS)
- **Have different authentication mechanisms** (different API endpoints)
- **Serve different operational domains** (IAM vs CloudTrail)
- **Have different regional behaviors** (global vs regional services)

### Cross-Service Dependencies Guidelines

**1. Always use client imports:**

```python
# Correct pattern
from prowler.providers.aws.services.service_a.service_a_client import service_a_client
from prowler.providers.aws.services.service_b.service_b_client import service_b_client
```

**2. Handle missing resources gracefully:**

```python
# Handle cross-service scenarios
resource_found_in_account = False
for external_resource in other_service_client.resources.values():
    if target_resource_id == external_resource.id:
        resource_found_in_account = True
        # Process found resource
        break

if not resource_found_in_account:
    # Handle cross-account or missing resource scenarios
    report.status = "MANUAL"
    report.status_extended = "Resource is cross-account or out of audit scope"
```

**3. Document cross-service dependencies:**

```python
class check_with_dependencies(Check):
    """
    Check Description

    Dependencies:
    - service_a_client: For primary resource information
    - service_b_client: For related resource validation
    - service_c_client: For policy analysis
    """
```

## Regional Service Implementation

When implementing services for regional providers (like AWS, Azure, GCP), special considerations are needed to handle resource discovery across multiple geographic locations. This section provides a complete guide using AWS as the reference example.

### Regional vs Non-Regional Services

**Regional Services:** Require iteration across multiple geographic locations where resources may exist (e.g., EC2 instances, VPC, RDS databases).

**Non-Regional/Global Services:** Operate at a global or tenant level without regional concepts (e.g., IAM users, Route53 hosted zones).

### AWS Regional Implementation Example

AWS is the perfect example of a regional provider. Here's how Prowler handles AWS's regional architecture:

```python
# File: prowler/providers/aws/services/ec2/ec2_service.py
class EC2(AWSService):
    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        self.instances = {}
        self.security_groups = {}

        # Regional resource discovery across all AWS regions
        self.__threading_call__(self._describe_instances)
        self.__threading_call__(self._describe_security_groups)

    def _describe_instances(self, regional_client):
        """Discover EC2 instances in a specific region"""
        try:
            describe_instances_paginator = regional_client.get_paginator("describe_instances")
            for page in describe_instances_paginator.paginate():
                for reservation in page["Reservations"]:
                    for instance in reservation["Instances"]:
                        # Each instance includes its region
                        self.instances[instance["InstanceId"]] = Instance(
                            id=instance["InstanceId"],
                            region=regional_client.region,
                            state=instance["State"]["Name"],
                            # ... other properties
                        )
        except Exception as error:
            logger.error(f"Failed to describe instances in {regional_client.region}: {error}")
```

#### Regional Check Execution

```python
# File: prowler/providers/aws/services/ec2/ec2_instance_public_ip/ec2_instance_public_ip.py
class ec2_instance_public_ip(Check):
    def execute(self):
        findings = []

        # Automatically iterates across ALL AWS regions where instances exist
        for instance in ec2_client.instances.values():
            report = Check_Report_AWS(metadata=self.metadata(), resource=instance)
            report.region = instance.region  # Critical: region attribution
            report.resource_arn = f"arn:aws:ec2:{instance.region}:{instance.account_id}:instance/{instance.id}"

            if instance.public_ip:
                report.status = "FAIL"
                report.status_extended = f"Instance {instance.id} in {instance.region} has public IP {instance.public_ip}"
            else:
                report.status = "PASS"
                report.status_extended = f"Instance {instance.id} in {instance.region} does not have a public IP"

            findings.append(report)

        return findings
```

#### Key AWS Regional Features

**Region-Specific ARNs:**

```
arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0
arn:aws:s3:eu-west-1:123456789012:bucket/my-bucket
arn:aws:rds:ap-southeast-2:123456789012:db:my-database
```

**Parallel Processing:**

- Each region processed independently in separate threads
- Failed regions don't affect other regions
- User can filter specific regions: `-f us-east-1`

**Global vs Regional Services:**

- **Regional**: EC2, RDS, VPC (require region iteration)
- **Global**: IAM, Route53, CloudFront (single `us-east-1` call)

This architecture allows Prowler to efficiently scan AWS accounts with resources spread across multiple regions while maintaining performance and error isolation.

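For contrast with the regional EC2 service above, here is a minimal sketch of what a global service can look like: a single client and no region loop. The `provider.session` attribute is an assumption made for illustration; the real base-class wiring differs.

```python
# Hypothetical sketch of a global service: one API endpoint, no
# __threading_call__ over regional clients.
class IAM:
    def __init__(self, provider):
        # Assumes the provider exposes a boto3 session; a global service
        # needs only one client instead of one per region.
        self.client = provider.session.client("iam")
        self.users = {}
        self._list_users()

    def _list_users(self):
        paginator = self.client.get_paginator("list_users")
        for page in paginator.paginate():
            for user in page["Users"]:
                # IAM users are global, so no region attribute is stored
                self.users[user["Arn"]] = {"name": user["UserName"]}
```
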
### Regional Service Best Practices

1. **Use Threading for Regional Discovery**: Leverage the `__threading_call__` method to parallelize resource discovery across regions
2. **Store Region Information**: Always include region metadata in resource objects for proper attribution
3. **Handle Regional Failures Gracefully**: Ensure that failures in one region don't affect others
4. **Optimize for Performance**: Use paginated calls and efficient data structures for large-scale resource discovery
5. **Support Region Filtering**: Allow users to limit scans to specific regions for focused audits

## Best Practices

- When available in the provider, use threading or parallelization utilities for all methods that can be parallelized, to maximize performance and reduce scan time.
@@ -252,3 +546,5 @@ Provider-Specific Permissions Documentation:
- Collect and store resource tags and additional attributes to support richer checks and reporting.
- Leverage shared utility helpers for session setup, identifier parsing, and other cross-cutting concerns to avoid code duplication. This kind of code is typically stored in a `lib` folder in the service folder.
- Keep code modular, maintainable, and well-documented for ease of extension and troubleshooting.
- **Each service should contain only information unique to that specific service** - use client objects for cross-service communication.
- **Handle cross-account and missing resources gracefully** when checks span multiple services.

+22
-10
@@ -98,7 +98,7 @@
        ]
      },
      "user-guide/tutorials/prowler-app-rbac",
-     "user-guide/providers/prowler-app-api-keys",
+     "user-guide/tutorials/prowler-app-api-keys",
      "user-guide/tutorials/prowler-app-mute-findings",
      {
        "group": "Integrations",
@@ -114,7 +114,8 @@
      "group": "Tutorials",
      "pages": [
        "user-guide/tutorials/prowler-app-sso-entra",
-       "user-guide/tutorials/bulk-provider-provisioning"
+       "user-guide/tutorials/bulk-provider-provisioning",
+       "user-guide/tutorials/aws-organizations-bulk-provisioning"
      ]
    }
  ]
@@ -250,6 +251,25 @@
      }
    ]
  },
+ {
+   "tab": "Workshop",
+   "groups": [
+     {
+       "group": "Hands-On Labs",
+       "pages": [
+         "workshop/introduction",
+         "workshop/lab-01-getting-started",
+         "workshop/lab-02-threat-detection",
+         "workshop/lab-03-custom-checks",
+         "workshop/lab-04-azure-multicloud",
+         "workshop/lab-05-gcp-multicloud",
+         "workshop/lab-06-compliance-as-code",
+         "workshop/lab-07-integrations",
+         "workshop/lab-08-prowler-saas"
+       ]
+     }
+   ]
+ },
  {
    "tab": "Developer Guide",
    "groups": [
@@ -404,14 +424,6 @@
    "source": "/projects/prowler-open-source/en/latest/tutorials/gcp/getting-started-gcp",
    "destination": "/user-guide/providers/gcp/getting-started-gcp"
  },
- {
-   "source": "/projects/prowler-open-source/en/latest/tutorials/prowler-app",
-   "destination": "/user-guide/tutorials/prowler-app#step-4-4%3A-kubernetes-credentials%3A"
- },
- {
-   "source": "/projects/prowler-open-source/en/latest/tutorials/prowler-app/#step-3-add-a-provider",
-   "destination": "/user-guide/tutorials/prowler-app#step-3-add-a-provider"
- },
  {
    "source": "/projects/prowler-open-source/en/latest/tutorials/microsoft365/getting-started-m365",
    "destination": "/user-guide/providers/microsoft365/getting-started-m365"

@@ -10,7 +10,7 @@ Configure your MCP client to connect to Prowler MCP Server.
**Authentication is optional**: Prowler Hub and Prowler Documentation features work without authentication. An API key is only required for Prowler Cloud and Prowler App (Self-Managed) features.
</Note>

-To use Prowler Cloud or Prowler App (Self-Managed) features. To get the API key, please refer to the [API Keys](/user-guide/providers/prowler-app-api-keys) guide.
+To use Prowler Cloud or Prowler App (Self-Managed) features. To get the API key, please refer to the [API Keys](/user-guide/tutorials/prowler-app-api-keys) guide.

<Warning>
Keep the API key secure. Never share it publicly or commit it to version control.

@@ -25,6 +25,9 @@ Prowler configuration is based in `.env` files. Every version of Prowler can hav
curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/.env
docker compose up -d
```

+> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
+
</Tab>
<Tab title="GitHub">
_Requirements_:

@@ -182,19 +182,19 @@ Configure the server using environment variables:
|----------|-------------|----------|---------|
| `PROWLER_APP_API_KEY` | Prowler API key | Only for STDIO mode | - |
| `PROWLER_API_BASE_URL` | Custom Prowler API endpoint | No | `https://api.prowler.com` |
-| `PROWLER_MCP_MODE` | Default transport mode (overwritten by `--transport` argument) | No | `stdio` |
+| `PROWLER_MCP_TRANSPORT_MODE` | Default transport mode (overwritten by `--transport` argument) | No | `stdio` |

<CodeGroup>
```bash macOS/Linux
export PROWLER_APP_API_KEY="pk_your_api_key_here"
export PROWLER_API_BASE_URL="https://api.prowler.com"
-export PROWLER_MCP_MODE="http"
+export PROWLER_MCP_TRANSPORT_MODE="http"
```

```bash Windows PowerShell
$env:PROWLER_APP_API_KEY="pk_your_api_key_here"
$env:PROWLER_API_BASE_URL="https://api.prowler.com"
-$env:PROWLER_MCP_MODE="http"
+$env:PROWLER_MCP_TRANSPORT_MODE="http"
```
</CodeGroup>

@@ -209,7 +209,7 @@ For convenience, create a `.env` file in the `mcp_server` directory:
```bash .env
PROWLER_APP_API_KEY=pk_your_api_key_here
PROWLER_API_BASE_URL=https://api.prowler.com
-PROWLER_MCP_MODE=stdio
+PROWLER_MCP_TRANSPORT_MODE=stdio
```

When using Docker, pass the environment file:
@@ -228,6 +228,35 @@
uvx /path/to/prowler/mcp_server/

This is particularly useful when configuring MCP clients that need to launch the server from a specific path.

+## Production Deployment
+
+For production deployments that require customization, it is recommended to use the ASGI application that can be found in `prowler_mcp_server.server`. This can be run with uvicorn:
+
+```bash
+uvicorn prowler_mcp_server.server:app --host 0.0.0.0 --port 8000
+```
+
+For more details on production deployment options, see the [FastMCP production deployment guide](https://gofastmcp.com/deployment/http#production-deployment) and [uvicorn settings](https://www.uvicorn.org/settings/).
+
+### Entrypoint Script
+
+The source tree includes `entrypoint.sh` to simplify switching between the standard CLI runner and the ASGI app. The first argument selects the mode and any additional flags are passed straight through:
+
+```bash
+# Default CLI experience (prowler-mcp console script)
+./entrypoint.sh main --transport http --host 0.0.0.0
+
+# ASGI app via uvicorn
+./entrypoint.sh uvicorn --host 0.0.0.0 --port 9000
+```
+
+Omitting the mode defaults to `main`, matching the `prowler-mcp` console script.
+When `uvicorn` mode is selected, the script exports `PROWLER_MCP_TRANSPORT_MODE=http` automatically.
+
+This is the default entrypoint for the Docker container.
+
## Next Steps

Now that you have the Prowler MCP Server installed, proceed to configure your MCP client:

Binary file not shown (image added, 71 KiB).
+1
-1
@@ -16,7 +16,7 @@ We use encryption everywhere possible. The data and communications used by **Pro

Prowler Cloud is GDPR compliant in regards to personal data and the ["right to be forgotten"](https://gdpr.eu/right-to-be-forgotten/). When a user deletes their account their user information will be deleted from Prowler Cloud online and backup systems within 10 calendar days.

## Software Security

We follow a **security-by-design approach** throughout our software development lifecycle. All changes go through automated checks at every stage, from local development to production deployment.

@@ -0,0 +1,12 @@
export const VersionBadge = ({ version }) => {
  return (
    <code className="version-badge-container">
      <p className="version-badge">
        <span className="version-badge-label">Added in:</span>
        <code className="version-badge-version">{version}</code>
      </p>
    </code>
  );
};
@@ -0,0 +1,51 @@
/* Version Badge Styling */
.version-badge-container {
  display: inline-block;
  margin: 0 0 1rem 0;
  padding: 0;
}

.version-badge {
  display: inline-flex;
  align-items: center;
  margin: 0;
  padding: 0.375rem 0.75rem;
  background: linear-gradient(135deg, #1a1a1a 0%, #000000 100%);
  color: #ffffff;
  border-radius: 1.25rem;
  font-weight: 400;
  font-size: 0.875rem;
  line-height: 1.25rem;
  border: 1px solid rgba(0, 0, 0, 0.15);
  box-shadow: none;
}

.version-badge-label {
  font-weight: 400;
  opacity: 1;
}

.version-badge-version {
  background: rgba(255, 255, 255, 0.12);
  padding: 0.125rem 0.5rem;
  border-radius: 0.875rem;
  font-family: ui-monospace, SFMono-Regular, 'SF Mono', Menlo, Monaco, 'Cascadia Code', 'Roboto Mono', Consolas, 'Courier New', monospace;
  font-weight: 600;
  font-size: 0.875rem;
  color: #ffffff;
  border: none;
}

.dark .version-badge {
  background: #55B685;
  color: #000000;
  border: 2px solid rgba(85, 182, 133, 0.3);
  box-shadow: none;
}

.dark .version-badge-version {
  background: rgba(0, 0, 0, 0.1);
  color: #000000;
  border: none;
}
@@ -0,0 +1,491 @@
---
title: 'AWS Organizations Bulk Provisioning in Prowler'
---

Prowler offers an automated tool to discover and provision all AWS accounts within an AWS Organization. This streamlines onboarding for organizations managing multiple AWS accounts by automatically generating the configuration needed for bulk provisioning.

The tool, `aws_org_generator.py`, complements the [Bulk Provider Provisioning](./bulk-provider-provisioning) tool and is available in the Prowler repository at: [util/prowler-bulk-provisioning](https://github.com/prowler-cloud/prowler/tree/master/util/prowler-bulk-provisioning)

<Note>
Native support for bulk provisioning AWS Organizations and similar multi-account structures directly in the Prowler UI/API is on the official roadmap.

Track progress and vote for this feature at: [Bulk Provisioning in the UI/API for AWS Organizations](https://roadmap.prowler.com/p/builk-provisioning-in-the-uiapi-for-aws-organizations-and-alike)
</Note>

{/* TODO: Add screenshot of the tool in action */}

## Overview

The AWS Organizations Bulk Provisioning tool simplifies multi-account onboarding by:

* Automatically discovering all active accounts in an AWS Organization
* Generating YAML configuration files for bulk provisioning
* Supporting account filtering and custom role configurations
* Eliminating manual entry of account IDs and role ARNs

## Prerequisites

### Requirements

* Python 3.7 or higher
* AWS credentials with Organizations read access
* ProwlerRole (or custom role) deployed across all target accounts
* Prowler API key (from Prowler Cloud or self-hosted Prowler App)
  * For self-hosted Prowler App, remember to [point to your API base URL](./bulk-provider-provisioning#custom-api-endpoints)
  * Learn how to create API keys: [Prowler App API Keys](../tutorials/prowler-app-api-keys)

### Deploying ProwlerRole Across AWS Organizations

Before using the AWS Organizations generator, deploy the ProwlerRole across all accounts in the organization using CloudFormation StackSets.

<Note>
**Follow the official documentation:**
[Deploying Prowler IAM Roles Across AWS Organizations](../providers/aws/organizations#deploying-prowler-iam-roles-across-aws-organizations)

**Key points:**

* Use CloudFormation StackSets from the management account
* Deploy to all organizational units (OUs) or specific OUs
* Use an external ID for enhanced security
* Ensure the role has necessary permissions for Prowler scans
</Note>

### Installation

Clone the repository and install required dependencies:

```bash
git clone https://github.com/prowler-cloud/prowler.git
cd prowler/util/prowler-bulk-provisioning
pip install -r requirements-aws-org.txt
```

### AWS Credentials Setup

Configure AWS credentials with Organizations read access:

* **Management account credentials**, or
* **Delegated administrator account** with `organizations:ListAccounts` permission

Required IAM permissions:

```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "organizations:ListAccounts",
        "organizations:DescribeOrganization"
      ],
      "Resource": "*"
    }
  ]
}
```

### Prowler API Key Setup

Configure your Prowler API key:

```bash
export PROWLER_API_KEY="pk_example-api-key"
```

To create an API key:

1. Log in to Prowler Cloud or Prowler App
2. Click **Profile** → **Account**
3. Click **Create API Key**
4. Provide a descriptive name and optionally set an expiration date
5. Copy the generated API key (it will only be shown once)

For detailed instructions, see: [Prowler App API Keys](../tutorials/prowler-app-api-keys)

## Basic Usage

### Generate Configuration for All Accounts

To generate a YAML configuration file for all active accounts in the organization:

```bash
python aws_org_generator.py -o aws-accounts.yaml --external-id prowler-ext-id-2024
```

This command:

1. Lists all ACTIVE accounts in the organization
2. Generates YAML entries for each account
3. Saves the configuration to `aws-accounts.yaml`

**Output:**

```
Fetching accounts from AWS Organizations...
Found 47 active accounts in organization
Generated configuration for 47 accounts

Configuration written to: aws-accounts.yaml

Next steps:
1. Review the generated file: cat aws-accounts.yaml | head -n 20
2. Run bulk provisioning: python prowler_bulk_provisioning.py aws-accounts.yaml
```

### Review Generated Configuration

Review the generated YAML configuration:

```bash
head -n 20 aws-accounts.yaml
```

**Example output:**

```yaml
- provider: aws
  uid: '111111111111'
  alias: Production-Account
  auth_method: role
  credentials:
    role_arn: arn:aws:iam::111111111111:role/ProwlerRole
    external_id: prowler-ext-id-2024

- provider: aws
  uid: '222222222222'
  alias: Development-Account
  auth_method: role
  credentials:
    role_arn: arn:aws:iam::222222222222:role/ProwlerRole
    external_id: prowler-ext-id-2024
```

### Dry Run Mode

Test the configuration without writing a file:

```bash
python aws_org_generator.py \
  --external-id prowler-ext-id-2024 \
  --dry-run
```

## Advanced Configuration

### Using a Specific AWS Profile

Specify an AWS profile when multiple profiles are configured:

```bash
python aws_org_generator.py \
  -o aws-accounts.yaml \
  --profile org-management-admin \
  --external-id prowler-ext-id-2024
```

### Excluding Specific Accounts

Exclude the management account or other accounts from provisioning:

```bash
python aws_org_generator.py \
  -o aws-accounts.yaml \
  --external-id prowler-ext-id-2024 \
  --exclude 123456789012,210987654321
```

Common exclusion scenarios:

* Management account (requires different permissions)
* Break-glass accounts (emergency access)
* Suspended or archived accounts

### Including Only Specific Accounts

Generate configuration for specific accounts only:

```bash
python aws_org_generator.py \
  -o aws-accounts.yaml \
  --external-id prowler-ext-id-2024 \
  --include 111111111111,222222222222,333333333333
```

### Custom Role Name

Specify a custom role name if not using the default `ProwlerRole`:

```bash
python aws_org_generator.py \
  -o aws-accounts.yaml \
  --role-name ProwlerExecutionRole \
  --external-id prowler-ext-id-2024
```

### Custom Alias Format

Customize account aliases using template variables:

```bash
# Use account name and ID
python aws_org_generator.py \
  -o aws-accounts.yaml \
  --alias-format "{name}-{id}" \
  --external-id prowler-ext-id-2024

# Use email prefix
python aws_org_generator.py \
  -o aws-accounts.yaml \
  --alias-format "{email}" \
  --external-id prowler-ext-id-2024
```

Available template variables:

* `{name}` - Account name
* `{id}` - Account ID
* `{email}` - Account email

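The template expansion itself is plain Python string formatting; a minimal sketch of how such aliases can be produced (illustrative only, not the tool's actual code):

```python
# Hypothetical illustration of alias templating with str.format_map
account = {
    "name": "Production-Account",
    "id": "111111111111",
    "email": "prod@example.com",
}

alias_format = "{name}-{id}"
print(alias_format.format_map(account))  # Production-Account-111111111111
```
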
### Additional Role Assumption Options
|
||||
|
||||
Configure optional role assumption parameters:
|
||||
|
||||
```bash
|
||||
python aws_org_generator.py \
|
||||
-o aws-accounts.yaml \
|
||||
--role-name ProwlerRole \
|
||||
--external-id prowler-ext-id-2024 \
|
||||
--session-name prowler-scan-session \
|
||||
--duration-seconds 3600
|
||||
```
|
||||
|
||||
## Complete Workflow Example

<Steps>
<Step title="Deploy ProwlerRole Using StackSets">
1. Log in to the AWS management account
2. Open CloudFormation → StackSets
3. Create a new StackSet using the [Prowler role template](https://github.com/prowler-cloud/prowler/blob/master/permissions/templates/cloudformation/prowler-scan-role.yml)
4. Deploy to all organizational units
5. Use a unique external ID (e.g., `prowler-org-2024-abc123`)

{/* TODO: Add screenshot of CloudFormation StackSets deployment */}
</Step>

<Step title="Generate YAML Configuration">
Configure AWS credentials and generate the YAML file:

```bash
# Using management account credentials
export AWS_PROFILE=org-management

# Generate configuration
python aws_org_generator.py \
  -o aws-org-accounts.yaml \
  --external-id prowler-org-2024-abc123 \
  --exclude 123456789012
```

**Output:**

```
Fetching accounts from AWS Organizations...
Using AWS profile: org-management
Found 47 active accounts in organization
Generated configuration for 46 accounts

Configuration written to: aws-org-accounts.yaml

Next steps:
1. Review the generated file: cat aws-org-accounts.yaml | head -n 20
2. Run bulk provisioning: python prowler_bulk_provisioning.py aws-org-accounts.yaml
```
</Step>

<Step title="Review Generated Configuration">
Verify the generated YAML configuration:

```bash
# View first 20 lines
head -n 20 aws-org-accounts.yaml

# Check for unexpected accounts
grep "uid:" aws-org-accounts.yaml

# Verify role ARNs
grep "role_arn:" aws-org-accounts.yaml | head -5

# Count accounts
grep "provider: aws" aws-org-accounts.yaml | wc -l
```
</Step>

<Step title="Run Bulk Provisioning">
Provision all accounts to Prowler Cloud or Prowler App:

```bash
# Set Prowler API key
export PROWLER_API_KEY="pk_example-api-key"

# Run bulk provisioning with connection testing
python prowler_bulk_provisioning.py aws-org-accounts.yaml
```

**With custom options:**

```bash
python prowler_bulk_provisioning.py aws-org-accounts.yaml \
  --concurrency 10 \
  --timeout 120
```

**Successful output:**

```
[1] ✅ Created provider (id=db9a8985-f9ec-4dd8-b5a0-e05ab3880bed)
[1] ✅ Created secret (id=466f76c6-5878-4602-a4bc-13f9522c1fd2)
[1] ✅ Connection test: Connected

[2] ✅ Created provider (id=7a99f789-0cf5-4329-8279-2d443a962676)
[2] ✅ Created secret (id=c5702180-f7c4-40fd-be0e-f6433479b126)
[2] ✅ Connection test: Connected

Done. Success: 46 Failures: 0
```

{/* TODO: Add screenshot of successful bulk provisioning output */}
</Step>
</Steps>

## Command Reference

### Full Command-Line Options

```bash
python aws_org_generator.py \
  -o OUTPUT_FILE \
  --role-name ROLE_NAME \
  --external-id EXTERNAL_ID \
  --session-name SESSION_NAME \
  --duration-seconds SECONDS \
  --alias-format FORMAT \
  --exclude ACCOUNT_IDS \
  --include ACCOUNT_IDS \
  --profile AWS_PROFILE \
  --region AWS_REGION \
  --dry-run
```

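For example, to preview the entries for a specific set of accounts before committing to a full run (assuming `--dry-run` prints what would be generated without writing the file, which is the usual behavior for such a flag):

```bash
python aws_org_generator.py \
  -o aws-accounts.yaml \
  --external-id prowler-ext-id-2024 \
  --include 123456789012,111111111111 \
  --dry-run
```
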
## Troubleshooting

### Error: "No AWS credentials found"

**Solution:** Configure AWS credentials using one of these methods:

```bash
# Method 1: AWS CLI configure
aws configure

# Method 2: Environment variables
export AWS_ACCESS_KEY_ID=your-key-id
export AWS_SECRET_ACCESS_KEY=your-secret-key

# Method 3: Use AWS profile
export AWS_PROFILE=org-management
```

### Error: "Access denied to AWS Organizations API"

**Cause:** Current credentials don't have permission to list organization accounts.

**Solution:**

* Ensure management account credentials are used
* Verify IAM permissions include `organizations:ListAccounts` (a minimal policy sketch follows this list)
* Check IAM policies for Organizations access

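A minimal identity policy granting just the required read access might look like this (attach it to the identity running the generator; statement ID and scoping are up to you):

```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Sid": "AllowListOrgAccounts",
      "Effect": "Allow",
      "Action": "organizations:ListAccounts",
      "Resource": "*"
    }
  ]
}
```
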
### Error: "AWS Organizations is not enabled"

**Cause:** The account is not part of an organization.

**Solution:** This tool requires an AWS Organization. Create one in the AWS Organizations console or use standard bulk provisioning for standalone accounts.

### No Accounts Generated After Filters

**Cause:** All accounts were filtered out by `--exclude` or `--include` options.

**Solution:** Review filter options and verify account IDs are correct:

```bash
# List all accounts in organization
aws organizations list-accounts --query "Accounts[?Status=='ACTIVE'].[Id,Name]" --output table
```

### Connection Test Failures During Bulk Provisioning

**Cause:** ProwlerRole may not be deployed correctly or credentials are invalid.

**Solution:**

* Verify StackSet deployment status in CloudFormation
* Check role trust policy includes correct external ID
* Test role assumption manually:

```bash
aws sts assume-role \
  --role-arn arn:aws:iam::123456789012:role/ProwlerRole \
  --role-session-name test \
  --external-id prowler-ext-id-2024
```

## Security Best Practices

### Use External ID

Always use an external ID when assuming cross-account roles:

```bash
python aws_org_generator.py \
  -o aws-accounts.yaml \
  --external-id $(uuidgen | tr '[:upper:]' '[:lower:]')
```

The external ID must match the one configured in the ProwlerRole trust policy across all accounts.

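In the role's trust policy, that value appears as an `sts:ExternalId` condition. A standard AWS trust-policy fragment looks like this (the principal ARN is a placeholder):

```json
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Principal": { "AWS": "arn:aws:iam::<prowler-account-id>:root" },
      "Action": "sts:AssumeRole",
      "Condition": {
        "StringEquals": { "sts:ExternalId": "prowler-ext-id-2024" }
      }
    }
  ]
}
```
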
### Exclude Sensitive Accounts

Exclude accounts that shouldn't be scanned or require special handling:

```bash
python aws_org_generator.py \
  -o aws-accounts.yaml \
  --external-id prowler-ext-id \
  --exclude 123456789012,111111111111  # management, break-glass accounts
```

### Review Generated Configuration

Always review the generated YAML before provisioning:

```bash
# Check for unexpected accounts
grep "uid:" aws-org-accounts.yaml

# Verify role ARNs
grep "role_arn:" aws-org-accounts.yaml | head -5

# Count accounts
grep "provider: aws" aws-org-accounts.yaml | wc -l
```

## Next Steps

<Columns cols={2}>
<Card title="Bulk Provider Provisioning" icon="terminal" href="/user-guide/tutorials/bulk-provider-provisioning">
Learn how to bulk provision providers in Prowler.
</Card>
<Card title="Prowler App" icon="pen-to-square" href="/user-guide/tutorials/prowler-app">
Detailed instructions on how to use Prowler.
</Card>
</Columns>

@@ -17,14 +17,18 @@ The Bulk Provider Provisioning tool automates the creation of cloud providers in
* Testing connections to verify successful authentication
* Processing multiple providers concurrently for efficiency

<Tip>
**Using AWS Organizations?** For organizations with many AWS accounts, use the automated [AWS Organizations Bulk Provisioning](./aws-organizations-bulk-provisioning) tool to automatically discover and generate configuration for all accounts in your organization.
</Tip>

## Prerequisites

### Requirements

* Python 3.7 or higher
* Prowler API token (from Prowler Cloud or self-hosted Prowler App)
* Prowler API key (from Prowler Cloud or self-hosted Prowler App)
* For self-hosted Prowler App, remember to [point to your API base URL](#custom-api-endpoints)
* Learn how to create API keys: [Prowler App API Keys](../tutorials/prowler-app-api-keys)
* Authentication credentials for target cloud providers

### Installation

@@ -39,28 +43,21 @@ pip install -r requirements.txt

### Authentication Setup

Configure your Prowler API token:
Configure your Prowler API key:

```bash
export PROWLER_API_TOKEN="your-prowler-api-token"
export PROWLER_API_KEY="pk_example-api-key"
```

To obtain an API token programmatically:
To create an API key:

```bash
export PROWLER_API_TOKEN=$(curl --location 'https://api.prowler.com/api/v1/tokens' \
  --header 'Content-Type: application/vnd.api+json' \
  --header 'Accept: application/vnd.api+json' \
  --data-raw '{
    "data": {
      "type": "tokens",
      "attributes": {
        "email": "your@email.com",
        "password": "your-password"
      }
    }
  }' | jq -r .data.attributes.access)
```
1. Log in to Prowler Cloud or Prowler App
2. Click **Profile** → **Account**
3. Click **Create API Key**
4. Provide a descriptive name and optionally set an expiration date
5. Copy the generated API key (it will only be shown once)

For detailed instructions, see: [Prowler App API Keys](../tutorials/prowler-app-api-keys)

## Configuration File Structure

@@ -340,11 +337,11 @@ Done. Success: 2 Failures: 0

## Troubleshooting

### Invalid API Token
### Invalid API Key

```
Error: 401 Unauthorized
Solution: Verify your PROWLER_API_TOKEN or --token parameter
Solution: Verify your PROWLER_API_KEY environment variable or --api-key parameter
```

### Network Timeouts

+4
@@ -2,6 +2,10 @@
title: 'API Keys'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.13.0" />

API key authentication in Prowler App provides an alternative to JWT tokens and empowers automation, CI/CD pipelines, and third-party integrations. This guide explains how to create, manage, and safeguard API keys when working with the Prowler API.

## API Key Advantages
@@ -1,6 +1,9 @@
---
title: "Jira Integration"
---
import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.12.0" />

Prowler App enables automatic export of security findings to Jira, providing seamless integration with Atlassian's work item tracking and project management platform. This comprehensive guide demonstrates how to configure and manage Jira integrations to streamline security incident management and enhance team collaboration across security workflows.

@@ -2,6 +2,10 @@
title: 'Prowler Lighthouse AI'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.8.0" />

Prowler Lighthouse AI is a Cloud Security Analyst chatbot that helps you understand, prioritize, and remediate security findings in your cloud environments. It's designed to provide security expertise for teams without dedicated resources, acting as your 24/7 virtual cloud security analyst.

<img src="/images/prowler-app/lighthouse-intro.png" alt="Prowler Lighthouse" />

@@ -1,6 +1,9 @@
---
title: 'Mute Findings (Mutelist)'
---
import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.9.0" />

Prowler App allows users to mute specific findings to focus on the most critical security issues. This comprehensive guide demonstrates how to effectively use the Mutelist feature to manage and prioritize security findings.

@@ -2,6 +2,10 @@
title: 'Managing Users and Role-Based Access Control (RBAC)'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.1.0" />

**Prowler App** supports multiple users within a single tenant, enabling seamless collaboration by allowing team members to easily share insights and manage security findings.

[Roles](#roles) help you control user permissions, determining what actions each user can perform and the data they can access within Prowler. By default, each account includes an immutable **admin** role, ensuring that your account always retains administrative access.

@@ -2,6 +2,10 @@
title: 'Amazon S3 Integration'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.10.0" />

**Prowler App** allows automatic export of scan results to Amazon S3 buckets, providing seamless integration with existing data workflows and storage infrastructure. This comprehensive guide demonstrates configuration and management of Amazon S3 integrations to streamline security finding management and reporting.

When enabled and configured, scan results are automatically stored in the configured bucket. Results are provided in `csv`, `html` and `json-ocsf` formats, offering flexibility for custom integrations:

@@ -1,6 +1,9 @@
---
title: "AWS Security Hub Integration"
---
import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.11.0" />

Prowler App enables automatic export of security findings to AWS Security Hub, providing seamless integration with AWS's native security and compliance service. This comprehensive guide demonstrates how to configure and manage AWS Security Hub integrations to centralize security findings and enhance compliance tracking across AWS environments.

@@ -2,6 +2,10 @@
title: 'Social Login Configuration'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.5.0" />

**Prowler App** supports social login using Google and GitHub OAuth providers. This document guides you through configuring the required environment variables to enable social authentication.

<img src="/images/prowler-app/social-login/social_login_buttons.png" alt="Social login buttons" width="700" />

@@ -2,6 +2,10 @@
title: 'SAML Single Sign-On (SSO)'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.9.0" />

This guide provides comprehensive instructions to configure SAML-based Single Sign-On (SSO) in Prowler App. This configuration allows users to authenticate using the organization's Identity Provider (IdP).

This document is divided into two main sections:

@@ -0,0 +1,54 @@
---
title: "Workshop Introduction"
description: "Hands-on labs to master Prowler's cloud security capabilities across AWS, Azure, and GCP"
---

# Prowler Workshop

Welcome to the Prowler Workshop. This hands-on training provides practical experience with Prowler's cloud security monitoring and compliance automation capabilities across multiple cloud platforms.

## Workshop Overview

This workshop consists of eight progressive labs designed to guide you through Prowler's core features and advanced capabilities:

* **Lab 1:** Getting Started with Prowler CLI
* **Lab 2:** Threat Detection with Prowler
* **Lab 3:** Custom Checks with Prowler
* **Lab 4:** Multi-Cloud Security with Prowler (Azure)
* **Lab 5:** Multi-Cloud Security with Prowler (GCP)
* **Lab 6:** Compliance as Code with Prowler
* **Lab 7:** Integrations with Prowler (AWS Security Hub)
* **Lab 8:** Prowler SaaS Platform

## Lab Structure

Each lab follows the same structure and includes:

* **Prerequisites:** Required cloud accounts, tools, and prior lab dependencies
* **Objectives:** Clear learning goals for the lab
* **Step-by-step instructions:** Detailed guidance through each task
* **Expected outcomes:** What you should achieve by completing the lab
* **Verification steps:** How to confirm successful completion

## Prerequisites Approach

Each lab specifies its own prerequisites, as different labs require different cloud provider accounts, tools, and access levels. Review the prerequisites section at the beginning of each lab before starting.

## How to Use This Workshop

* Labs are designed to be completed sequentially, as later labs may build on concepts from earlier ones
* Estimated time to complete varies by lab (typically 30-60 minutes each)
* You can pause between labs and resume later
* Some labs can be completed independently if you have the necessary prerequisites

## Getting Help

If you encounter issues during the workshop:

* Refer to the [Troubleshooting](/troubleshooting) guide
* Join the [Prowler Slack community](https://goto.prowler.com/slack)
* Visit the [Prowler GitHub repository](https://github.com/prowler-cloud/prowler) for documentation and issues

## Ready to Start?

Begin with [Lab 1: Getting Started with Prowler CLI](/workshop/lab-01-getting-started) to set up your environment and run your first security scan.

@@ -0,0 +1,203 @@
---
title: "Lab 1: Getting Started with Prowler CLI"
description: "Install Prowler CLI and run your first cloud security assessment on AWS"
---

<Note>
**Tags:** `workshop` `aws` `getting-started` `beginner` `cli`
</Note>

# Lab 1: Getting Started with Prowler CLI

Learn to install Prowler CLI and perform your first cloud security assessment on AWS.

## Prerequisites

* AWS account with active resources
* AWS CLI installed and configured
* IAM credentials with appropriate permissions (see [AWS Authentication](/user-guide/providers/aws/authentication))
* Python 3.9 or higher
* Basic command-line experience

**Estimated Time:** 30 minutes

## Lab Objectives

By completing this lab, you will:

* Install Prowler CLI using pip
* Configure AWS credentials for Prowler
* Execute your first security scan
* Understand Prowler's output formats
* Review security findings

## Step 1: Install Prowler CLI

Install Prowler using pip:

```bash
pip install prowler
```

Verify the installation:

```bash
prowler -v
```

Expected output:
```
Prowler X.X.X
```

<Tip>
For alternative installation methods (Docker, from source), see [Prowler CLI Installation](/getting-started/installation/prowler-cli).
</Tip>

## Step 2: Configure AWS Credentials

Ensure AWS credentials are configured. Prowler uses the same credential chain as AWS CLI.

Verify credentials:

```bash
aws sts get-caller-identity
```

Expected output:
```json
{
    "UserId": "AIDACKCEVSQ6C2EXAMPLE",
    "Account": "123456789012",
    "Arn": "arn:aws:iam::123456789012:user/username"
}
```

<Note>
[Note: Screenshot of slide 8 showing AWS credential verification - to be added]
</Note>

## Step 3: Run Your First Scan

Execute a basic Prowler scan:

```bash
prowler aws
```

This command:
* Scans all enabled AWS regions
* Runs all available security checks
* Generates output in the current directory

<Note>
The scan may take 5-15 minutes depending on the number of resources in your AWS account.
</Note>

## Step 4: Understanding the Output

Prowler generates multiple output formats in the `output` directory:

* **CSV:** Detailed findings (`prowler-output-*.csv`)
* **JSON:** Machine-readable format (`prowler-output-*.json`)
* **HTML:** Human-readable report (`prowler-output-*.html`)

Review the HTML report:

```bash
open output/prowler-output-*.html
```

<Note>
[Note: Screenshot of slide 10 showing HTML report - to be added]
</Note>

## Step 5: Analyze Security Findings

Examine the findings structure in the HTML report:

* **Status:** PASS, FAIL, or MANUAL
* **Severity:** critical, high, medium, low, informational
* **Service:** AWS service affected (e.g., S3, IAM, EC2)
* **Check ID:** Unique identifier for each check
* **Region:** AWS region where the resource exists
* **Resource:** Specific resource ARN or identifier

Example finding structure:

```json
{
    "Status": "FAIL",
    "Severity": "high",
    "Service": "s3",
    "CheckID": "s3_bucket_public_access",
    "Region": "us-east-1",
    "Resource": "arn:aws:s3:::my-bucket"
}
```

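The JSON output also lends itself to quick command-line triage. A minimal sketch with `jq` (the key names follow the simplified example above; adjust them to the actual schema of your Prowler version's JSON output):

```bash
# Count FAIL findings in the JSON report
jq '[.[] | select(.Status == "FAIL")] | length' output/prowler-output-*.json
```
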
## Step 6: Filter Scan by Service

Run a targeted scan for specific AWS services:

```bash
prowler aws --services s3 iam
```

This scans only S3 and IAM services, reducing execution time.

## Step 7: Run Checks by Severity

Scan for critical and high-severity findings only:

```bash
prowler aws --severity critical high
```

This focuses on the most important security issues.

<Note>
[Note: Screenshot of slide 13 showing severity filtering - to be added]
</Note>

## Verification Steps

Confirm successful lab completion:

1. Prowler CLI installed and version verified
2. AWS credentials properly configured
3. First scan completed successfully
4. Output files generated in the `output` directory
5. HTML report reviewed and findings understood
6. Filtered scans executed by service and severity

## Expected Outcomes

After completing this lab, you should have:

* Working Prowler CLI installation
* Understanding of basic Prowler commands
* Knowledge of output formats
* Ability to run targeted scans
* Familiarity with finding severity levels

## Troubleshooting

**Issue:** `prowler: command not found`
* **Solution:** Ensure Python's bin directory is in your PATH, or use `python3 -m prowler`

**Issue:** AWS credentials error
* **Solution:** Run `aws configure` to set up credentials, or use environment variables

**Issue:** Scan takes too long
* **Solution:** Use `--services` to scan specific services or `--regions` to limit regions

## Next Steps

Continue to [Lab 2: Threat Detection with Prowler](/workshop/lab-02-threat-detection) to learn about identifying security threats in your AWS environment.

## Additional Resources

* [Prowler CLI Documentation](/getting-started/basic-usage/prowler-cli)
* [AWS Authentication Methods](/user-guide/providers/aws/authentication)
* [Output Formats](/user-guide/cli/tutorials/reporting)

@@ -0,0 +1,263 @@
---
title: "Lab 2: Threat Detection with Prowler"
description: "Identify and analyze security threats in AWS environments using Prowler's threat detection capabilities"
---

<Note>
**Tags:** `workshop` `aws` `threat-detection` `intermediate` `security`
</Note>

# Lab 2: Threat Detection with Prowler

Learn to identify security threats, exposed resources, and potential attack vectors in AWS environments using Prowler's threat detection features.

## Prerequisites

* Completion of [Lab 1: Getting Started with Prowler CLI](/workshop/lab-01-getting-started)
* AWS account with resources (EC2 instances, S3 buckets, security groups)
* Prowler CLI installed and configured
* Basic understanding of AWS security concepts

**Estimated Time:** 45 minutes

## Lab Objectives

By completing this lab, you will:

* Understand Prowler's threat detection capabilities
* Identify publicly exposed resources
* Detect insecure configurations
* Analyze CloudTrail events for suspicious activity
* Prioritize security findings by risk

## Step 1: Understanding Threat Detection Checks

Prowler includes checks that identify:

* Public exposure (S3 buckets, EC2 instances, RDS databases)
* Insecure network configurations (security groups, NACLs)
* Weak encryption settings
* Suspicious IAM permissions
* CloudTrail anomalies

List threat detection checks:

```bash
prowler aws --list-checks | grep -i "public\|exposed\|open"
```

## Step 2: Scan for Publicly Exposed Resources

Run a scan focusing on public exposure:

```bash
prowler aws --checks s3_bucket_public_access ec2_instance_public_ip rds_instance_publicly_accessible
```

This identifies:
* S3 buckets with public access
* EC2 instances with public IPs
* RDS databases accessible from the internet

<Note>
[Note: Screenshot of slide 17 showing public exposure findings - to be added]
</Note>

## Step 3: Analyze Security Group Misconfigurations

Security groups control network access. Scan for insecure rules:

```bash
prowler aws --services ec2 --checks ec2_securitygroup*
```

Look for findings related to:
* `0.0.0.0/0` ingress rules (any IP can connect)
* Open high-risk ports (22, 3389, 3306, 5432)
* Overly permissive egress rules

Example vulnerable security group:
```
Port 22 (SSH) open to 0.0.0.0/0
Port 3389 (RDP) open to 0.0.0.0/0
```

<Warning>
Security groups with `0.0.0.0/0` on sensitive ports expose resources to the entire internet and should be restricted immediately.
</Warning>

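To enumerate offending groups directly with the AWS CLI, independent of Prowler, the `ip-permission.cidr` filter is handy:

```bash
# List security groups with at least one ingress rule open to the whole internet
aws ec2 describe-security-groups \
  --filters Name=ip-permission.cidr,Values='0.0.0.0/0' \
  --query "SecurityGroups[].{ID:GroupId,Name:GroupName}" \
  --output table
```
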
## Step 4: Check for Unencrypted Data

Scan for unencrypted storage and data transmission:

```bash
prowler aws --checks s3_bucket_default_encryption ebs_volume_encryption rds_instance_storage_encrypted
```

Key checks:
* S3 bucket default encryption disabled
* EBS volumes without encryption
* RDS instances with unencrypted storage

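To spot-check individual resources manually, the corresponding AWS CLI calls are (resource names are illustrative):

```bash
# Shows the bucket's default encryption configuration, or errors if none is set
aws s3api get-bucket-encryption --bucket my-bucket

# Shows whether each EBS volume is encrypted
aws ec2 describe-volumes --query "Volumes[].{ID:VolumeId,Encrypted:Encrypted}" --output table
```
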
<Note>
[Note: Screenshot of slide 20 showing encryption findings - to be added]
</Note>

## Step 5: CloudTrail Threat Detection

Assess your CloudTrail configuration so that suspicious activity can be detected:

```bash
prowler aws --services cloudtrail
```

Prowler checks for:
* CloudTrail disabled in regions
* Log file validation disabled
* Trail S3 bucket not encrypted
* CloudWatch logging not configured

<Tip>
CloudTrail provides audit logs of API calls. Proper configuration is essential for threat detection and incident response.
</Tip>

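You can cross-check a flagged trail from the AWS CLI (the trail name is illustrative):

```bash
# List trails, then confirm the suspect one is actually logging
aws cloudtrail describe-trails --output table
aws cloudtrail get-trail-status --name my-org-trail
```
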
## Step 6: Analyze IAM Security Risks

Identify IAM misconfigurations that could lead to privilege escalation:

```bash
prowler aws --services iam --severity critical high
```

Look for:
* Root account usage
* IAM users without MFA
* Overly permissive IAM policies (e.g., `*:*`)
* Inactive credentials not rotated

Example critical finding:
```
IAM user with administrative privileges without MFA enabled
```

## Step 7: Generate a Threat-Focused Report

Create a filtered report with only security threats:

```bash
prowler aws --severity critical high --status FAIL --output-formats html json-ocsf
```

This generates reports containing only:
* Critical and high-severity findings
* Failed checks (PASS checks excluded)

Review the HTML report:

```bash
open output/prowler-output-*.html
```

<Note>
[Note: Screenshot of slide 25 showing threat-focused report - to be added]
</Note>

## Step 8: Prioritize Findings

Categorize findings by risk level:

**Critical Priority (Address Immediately):**
* S3 buckets with public write access
* Root account without MFA
* Database instances publicly accessible
* Security groups open to `0.0.0.0/0` on sensitive ports

**High Priority (Address Soon):**
* Unencrypted storage volumes
* CloudTrail logging disabled
* IAM users without MFA
* Overly permissive IAM policies

**Medium Priority (Address as Resources Allow):**
* Old access keys not rotated
* S3 bucket logging disabled
* VPC flow logs not enabled

## Step 9: Export Findings for Remediation

Export findings to CSV for tracking:

```bash
prowler aws --severity critical high --status FAIL --output-formats csv
```

Share the CSV with your security team for remediation tracking.

## Verification Steps

Confirm successful lab completion:

1. Identified publicly exposed resources
2. Detected insecure security group configurations
3. Found unencrypted data storage
4. Reviewed CloudTrail security settings
5. Analyzed IAM security risks
6. Generated threat-focused reports
7. Prioritized findings by risk level

## Expected Outcomes

After completing this lab, you should:

* Understand common AWS security threats
* Know how to identify exposed resources
* Be able to prioritize security findings
* Have generated threat detection reports

## Remediation Examples

**Example 1: Remove public access from S3 bucket**
```bash
aws s3api put-public-access-block \
  --bucket my-bucket \
  --public-access-block-configuration \
  "BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true"
```

**Example 2: Restrict security group rule**
```bash
aws ec2 revoke-security-group-ingress \
  --group-id sg-12345678 \
  --protocol tcp \
  --port 22 \
  --cidr 0.0.0.0/0
```

**Example 3: Enable S3 bucket encryption**
```bash
aws s3api put-bucket-encryption \
  --bucket my-bucket \
  --server-side-encryption-configuration \
  '{"Rules":[{"ApplyServerSideEncryptionByDefault":{"SSEAlgorithm":"AES256"}}]}'
```

## Troubleshooting

**Issue:** Too many findings to review
* **Solution:** Use `--severity critical high` to focus on the most important issues first

**Issue:** Don't understand a finding
* **Solution:** Look the check up on [Prowler Hub](https://hub.prowler.com) for detailed information and remediation guidance

**Issue:** Need to share findings with team
* **Solution:** Export to CSV or JSON and use collaboration tools

## Next Steps

Continue to [Lab 3: Custom Checks with Prowler](/workshop/lab-03-custom-checks) to learn how to create organization-specific security checks.

## Additional Resources

* [AWS Threat Detection Guide](/user-guide/providers/aws/threat-detection)
* [Security Best Practices](/user-guide/providers/aws/getting-started-aws)
* [Prowler Check Reference](https://hub.prowler.com)

@@ -0,0 +1,359 @@
---
title: "Lab 3: Custom Checks with Prowler"
description: "Create organization-specific security checks and customize Prowler for your security requirements"
---

<Note>
**Tags:** `workshop` `aws` `custom-checks` `advanced` `development`
</Note>

# Lab 3: Custom Checks with Prowler

Learn to create custom security checks tailored to your organization's specific security policies and compliance requirements.

## Prerequisites

* Completion of [Lab 1: Getting Started with Prowler CLI](/workshop/lab-01-getting-started)
* Prowler CLI installed from source (for custom check development)
* Python 3.9 or higher
* Basic Python programming knowledge
* Understanding of AWS SDK (boto3)
* Text editor or IDE (VS Code, PyCharm)

**Estimated Time:** 60 minutes

## Lab Objectives

By completing this lab, you will:

* Understand Prowler's check structure
* Create a custom security check
* Test and validate custom checks
* Use custom check metadata
* Integrate custom checks into scans

## Step 1: Install Prowler from Source

To develop custom checks, install Prowler from source:

```bash
git clone https://github.com/prowler-cloud/prowler
cd prowler
pip install poetry
poetry install
```

Activate the virtual environment:

```bash
poetry shell
```

Verify installation:

```bash
prowler -v
```

<Note>
[Note: Screenshot of slide 29 showing source installation - to be added]
</Note>

## Step 2: Understand Check Structure

Prowler checks are Python files located in:
```
prowler/providers/<provider>/services/<service>/
```

Example check structure:
```
prowler/providers/aws/services/s3/s3_bucket_custom_check/
├── s3_bucket_custom_check.py             # Check logic
└── s3_bucket_custom_check.metadata.json  # Check metadata
```

## Step 3: Create a Custom Check Directory

Create a custom check to verify S3 buckets have specific naming conventions:

```bash
mkdir -p prowler/providers/aws/services/s3/s3_bucket_naming_convention
cd prowler/providers/aws/services/s3/s3_bucket_naming_convention
```

## Step 4: Write the Check Logic

Create `s3_bucket_naming_convention.py`:

```python
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.s3.s3_client import s3_client


class s3_bucket_naming_convention(Check):
    def execute(self):
        findings = []
        # Define your organization's naming pattern
        naming_pattern = "company-"

        for bucket in s3_client.buckets:
            report = Check_Report_AWS(self.metadata())
            report.region = bucket.region
            report.resource_id = bucket.name
            report.resource_arn = bucket.arn
            report.resource_tags = bucket.tags

            # Check if bucket name follows naming convention
            if bucket.name.startswith(naming_pattern):
                report.status = "PASS"
                report.status_extended = f"S3 bucket {bucket.name} follows naming convention."
            else:
                report.status = "FAIL"
                report.status_extended = f"S3 bucket {bucket.name} does not follow naming convention (should start with '{naming_pattern}')."

            findings.append(report)

        return findings
```

<Tip>
Customize the `naming_pattern` variable to match your organization's requirements (e.g., "prod-", "dev-", "projectname-").
</Tip>

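If a single prefix is too coarse, the comparison can be swapped for a regular expression. A minimal sketch (the pattern itself is a hypothetical convention, not part of Prowler):

```python
import re

# Hypothetical convention: environment prefix plus lowercase dash-separated words,
# e.g. "prod-payments-logs" or "dev-data"
NAMING_REGEX = re.compile(r"^(prod|dev|staging)-[a-z0-9]+(-[a-z0-9]+)*$")

def follows_convention(bucket_name: str) -> bool:
    """Return True when the bucket name matches the organization's pattern."""
    return bool(NAMING_REGEX.match(bucket_name))
```

Inside the check, `bucket.name.startswith(naming_pattern)` would then become `follows_convention(bucket.name)`.
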
## Step 5: Create Check Metadata

Create `s3_bucket_naming_convention.metadata.json`:

```json
{
  "Provider": "aws",
  "CheckID": "s3_bucket_naming_convention",
  "CheckTitle": "Check if S3 buckets follow naming convention",
  "CheckType": ["Software and Configuration Checks"],
  "ServiceName": "s3",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:aws:s3:::bucket_name",
  "Severity": "low",
  "ResourceType": "AwsS3Bucket",
  "Description": "Ensure S3 buckets follow the organization's naming convention for consistency and management.",
  "Risk": "S3 buckets not following naming conventions may lead to management difficulties and confusion.",
  "RelatedUrl": "https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html",
  "Remediation": {
    "Code": {
      "CLI": "",
      "NativeIaC": "",
      "Other": "Rename the S3 bucket to follow the organization's naming convention or update bucket policies.",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Ensure all S3 buckets follow the defined naming convention for your organization.",
      "Url": "https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html"
    }
  },
  "Categories": [
    "forensics-ready"
  ],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "This is a custom check created for organization-specific requirements."
}
```

<Note>
[Note: Screenshot of slide 33 showing metadata structure - to be added]
</Note>

## Step 6: Test the Custom Check

Run only your custom check:

```bash
prowler aws --checks s3_bucket_naming_convention
```

Review the output to verify:
* Check executes without errors
* Findings are generated for each S3 bucket
* Status is correct (PASS/FAIL) based on naming convention

## Step 7: Create a Custom Check for EC2 Instance Tags

Create another custom check to enforce EC2 tagging policies:

```bash
mkdir -p prowler/providers/aws/services/ec2/ec2_instance_required_tags
cd prowler/providers/aws/services/ec2/ec2_instance_required_tags
```

Create `ec2_instance_required_tags.py`:

```python
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.ec2.ec2_client import ec2_client


class ec2_instance_required_tags(Check):
    def execute(self):
        findings = []
        # Define required tags
        required_tags = ["Environment", "Owner", "CostCenter"]

        for instance in ec2_client.instances:
            report = Check_Report_AWS(self.metadata())
            report.region = instance.region
            report.resource_id = instance.id
            report.resource_arn = instance.arn
            report.resource_tags = instance.tags

            # Get instance tag keys
            instance_tag_keys = [tag["Key"] for tag in instance.tags] if instance.tags else []

            # Check if all required tags are present
            missing_tags = [tag for tag in required_tags if tag not in instance_tag_keys]

            if not missing_tags:
                report.status = "PASS"
                report.status_extended = f"EC2 instance {instance.id} has all required tags."
            else:
                report.status = "FAIL"
                report.status_extended = f"EC2 instance {instance.id} is missing required tags: {', '.join(missing_tags)}."

            findings.append(report)

        return findings
```

Create `ec2_instance_required_tags.metadata.json`:

```json
{
  "Provider": "aws",
  "CheckID": "ec2_instance_required_tags",
  "CheckTitle": "Check if EC2 instances have required tags",
  "CheckType": ["Software and Configuration Checks"],
  "ServiceName": "ec2",
  "SubServiceName": "",
  "ResourceIdTemplate": "arn:aws:ec2:region:account-id:instance/instance-id",
  "Severity": "medium",
  "ResourceType": "AwsEc2Instance",
  "Description": "Ensure EC2 instances have required tags for proper resource management and cost allocation.",
  "Risk": "EC2 instances without required tags may lead to difficulties in cost tracking, ownership identification, and resource management.",
  "RelatedUrl": "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html",
  "Remediation": {
    "Code": {
      "CLI": "aws ec2 create-tags --resources <instance-id> --tags Key=Environment,Value=<value> Key=Owner,Value=<value> Key=CostCenter,Value=<value>",
      "NativeIaC": "",
      "Other": "",
      "Terraform": "resource \"aws_ec2_tag\" \"example\" {\n resource_id = aws_instance.example.id\n key = \"Environment\"\n value = \"Production\"\n}"
    },
    "Recommendation": {
      "Text": "Add the required tags (Environment, Owner, CostCenter) to all EC2 instances.",
      "Url": "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html"
    }
  },
  "Categories": [
    "tagging"
  ],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Customize the required_tags list in the check code to match your organization's tagging policy."
}
```

## Step 8: Test Multiple Custom Checks

Run both custom checks together:

```bash
prowler aws --checks s3_bucket_naming_convention ec2_instance_required_tags
```

## Step 9: Create a Custom Checks Group

Create a file to group your custom checks:

Create `prowler/config/custom_checks.yaml`:

```yaml
custom-checks:
  - s3_bucket_naming_convention
  - ec2_instance_required_tags
```

Run all custom checks:

```bash
prowler aws --checks-file prowler/config/custom_checks.yaml
```

<Note>
[Note: Screenshot of slide 38 showing custom checks output - to be added]
</Note>

## Step 10: Validate Check Metadata

Prowler includes metadata validation. Ensure your metadata follows guidelines:

```bash
python -m prowler.lib.check.check_metadata_validator
```

This validates:
* Required metadata fields are present
* Severity values are valid
* URLs are properly formatted
* JSON structure is correct

## Verification Steps

Confirm successful lab completion:

1. Prowler installed from source
2. Custom S3 naming convention check created
3. Custom EC2 tagging check created
4. Both checks execute successfully
5. Metadata files are properly formatted
6. Custom checks grouped for easy execution

## Expected Outcomes

After completing this lab, you should:

* Understand Prowler's check architecture
* Be able to create custom security checks
* Know how to write check metadata
* Be capable of testing and validating checks
* Have created reusable custom security policies

## Best Practices for Custom Checks

1. **Follow naming conventions:** Use descriptive check IDs (e.g., `service_resource_requirement`)
2. **Set appropriate severity:** Match severity to the security impact
3. **Provide clear descriptions:** Help users understand what the check validates
4. **Include remediation guidance:** Provide actionable steps to fix findings
5. **Test thoroughly:** Verify checks work across different AWS regions and account configurations
6. **Document assumptions:** Note any specific requirements or limitations

## Troubleshooting

**Issue:** Check not found when running
* **Solution:** Ensure the check directory and files follow the correct naming convention and location

**Issue:** Import errors in check code
* **Solution:** Verify you're using the Poetry virtual environment (`poetry shell`)

**Issue:** Metadata validation fails
* **Solution:** Review the metadata format against Prowler's schema requirements

**Issue:** Check returns no findings
* **Solution:** Add print statements or use a debugger to verify the service client has data

## Next Steps

Continue to [Lab 4: Multi-Cloud Security with Prowler (Azure)](/workshop/lab-04-azure-multicloud) to extend security monitoring to Azure environments.

## Additional Resources

* [Custom Checks Development Guide](/developer-guide/checks)
* [Check Metadata Guidelines](/developer-guide/check-metadata-guidelines)
* [Prowler Development Documentation](/developer-guide/introduction)
* [Prowler Check Kreator](/user-guide/cli/tutorials/prowler-check-kreator)

@@ -0,0 +1,346 @@
---
title: "Lab 4: Multi-Cloud Security with Prowler (Azure)"
description: "Extend security monitoring to Azure environments using Prowler's multi-cloud capabilities"
---

<Note>
**Tags:** `workshop` `azure` `multi-cloud` `intermediate` `authentication`
</Note>

# Lab 4: Multi-Cloud Security with Prowler (Azure)

Learn to secure Azure environments using Prowler's multi-cloud security assessment capabilities.

## Prerequisites

* Prowler CLI installed ([Lab 1](/workshop/lab-01-getting-started))
* Active Azure subscription
* Azure CLI installed
* Azure account with appropriate permissions (Reader role minimum)
* Basic understanding of Azure services

**Estimated Time:** 45 minutes

## Lab Objectives

By completing this lab, you will:

* Configure Azure authentication for Prowler
* Run security assessments on Azure subscriptions
* Understand Azure-specific security checks
* Compare security findings across cloud providers
* Implement multi-cloud security strategies

## Step 1: Install Azure CLI

Install Azure CLI if not already present:

**macOS:**
```bash
brew install azure-cli
```

**Linux:**
```bash
curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
```

**Windows:**
```powershell
winget install Microsoft.AzureCLI
```

Verify installation:
```bash
az --version
```

## Step 2: Authenticate to Azure

Sign in to Azure:

```bash
az login
```

This opens a browser window for authentication.

Verify authentication:
```bash
az account show
```

Expected output:
```json
{
    "id": "12345678-1234-1234-1234-123456789012",
    "name": "My Subscription",
    "tenantId": "87654321-4321-4321-4321-210987654321",
    "state": "Enabled"
}
```

<Note>
[Note: Screenshot of slide 43 showing Azure authentication - to be added]
</Note>

## Step 3: List Azure Subscriptions

If you have multiple subscriptions, list them:

```bash
az account list --output table
```

Set the active subscription:
```bash
az account set --subscription "subscription-id"
```

## Step 4: Configure Azure Service Principal (Optional)

For automated scans, create a service principal:

```bash
az ad sp create-for-rbac --name "prowler-scanner" --role Reader --scopes /subscriptions/{subscription-id}
```

This returns:
```json
{
    "appId": "app-id",
    "displayName": "prowler-scanner",
    "password": "password",
    "tenant": "tenant-id"
}
```

<Warning>
Store service principal credentials securely. These provide programmatic access to your Azure subscription.
</Warning>

Export credentials as environment variables:
```bash
export AZURE_CLIENT_ID="app-id"
export AZURE_CLIENT_SECRET="password"
export AZURE_TENANT_ID="tenant-id"
export AZURE_SUBSCRIPTION_ID="subscription-id"
```

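Prowler can then pick these variables up through its service-principal authentication mode; in recent Prowler versions the flag is `--sp-env-auth` (confirm against `prowler azure -h` for your version):

```bash
prowler azure --sp-env-auth
```
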
## Step 5: Run Your First Azure Scan

Execute Prowler against Azure:

```bash
prowler azure
```

This command:
* Uses Azure CLI credentials (or service principal if configured)
* Scans the active subscription
* Runs all Azure security checks
* Generates output in multiple formats

<Note>
Azure scans typically take 5-10 minutes depending on resource count.
</Note>

<Note>
[Note: Screenshot of slide 47 showing Azure scan execution - to be added]
</Note>

## Step 6: Scan Specific Azure Services

Run targeted scans for specific services:

```bash
prowler azure --services storage network
```

This focuses on:
* Azure Storage accounts
* Virtual networks
* Network security groups

## Step 7: Analyze Azure Security Findings

Review Azure-specific security checks:

**Storage Account Security:**
* Public blob access disabled
* Secure transfer required (HTTPS)
* Storage encryption enabled
* Soft delete enabled

**Network Security:**
* Network security groups properly configured
* No overly permissive rules
* DDoS protection enabled
* Network watcher enabled

**Identity and Access:**
* Multi-factor authentication enabled
* Conditional access policies configured
* Privileged identity management enabled

Open the HTML report:
```bash
open output/prowler-output-azure-*.html
```

<Note>
[Note: Screenshot of slide 50 showing Azure findings report - to be added]
</Note>

## Step 8: Compare AWS and Azure Security Posture

If you completed Lab 1, compare security findings:

**AWS findings:**
```bash
cat output/prowler-output-aws-*.csv | wc -l   # includes the CSV header row
```

**Azure findings:**
```bash
cat output/prowler-output-azure-*.csv | wc -l   # includes the CSV header row
```

Key comparison metrics:
* Total findings by severity
* Service coverage
* Compliance status
* Resource exposure

## Step 9: Multi-Cloud Security Dashboard

Generate a combined security view:

Create a directory for multi-cloud reports:
```bash
mkdir -p multi-cloud-reports
cp output/prowler-output-aws-*.json multi-cloud-reports/
cp output/prowler-output-azure-*.json multi-cloud-reports/
```

<Tip>
Use Prowler Cloud or custom dashboards to visualize multi-cloud security posture in a unified interface.
</Tip>

## Step 10: Azure-Specific Remediation

Example remediations for common Azure findings:

**Enable secure transfer for storage account:**
```bash
az storage account update \
  --name mystorageaccount \
  --resource-group myresourcegroup \
  --https-only true
```

**Enable storage encryption:**
```bash
az storage account update \
  --name mystorageaccount \
  --resource-group myresourcegroup \
  --encryption-services blob
```

**Disable public blob access:**
```bash
az storage account update \
  --name mystorageaccount \
  --resource-group myresourcegroup \
  --allow-blob-public-access false
```

**Update network security group rule:**
```bash
az network nsg rule update \
  --resource-group myresourcegroup \
  --nsg-name mynsg \
  --name mynsgrule \
  --source-address-prefixes 10.0.0.0/16
```

## Step 11: Scan Multiple Azure Subscriptions

Scan specific subscriptions in your tenant:

```bash
prowler azure --subscription-ids subscription-id-1 subscription-id-2
```

Or scan all accessible subscriptions:
```bash
prowler azure --az-cli-auth
```

<Note>
[Note: Screenshot of slide 56 showing multi-subscription scan - to be added]
</Note>

## Verification Steps

Confirm successful lab completion:

1. Azure CLI installed and authenticated
2. First Azure scan completed successfully
3. Azure security findings reviewed
4. Service-specific scans executed
5. Multi-cloud comparison performed
6. Azure-specific remediations understood

## Expected Outcomes

After completing this lab, you should:

* Be able to authenticate Prowler with Azure
* Understand Azure security checks
* Know how to scan multiple subscriptions
* Have compared security posture across AWS and Azure
* Be familiar with Azure-specific remediation commands

## Common Azure Security Findings

**Storage Accounts:**
* Public blob access enabled
* Secure transfer (HTTPS) not required
* Storage encryption disabled
* Logging not configured

**Virtual Networks:**
* Network security groups allow 0.0.0.0/0 access
* DDoS protection not enabled
* Network watcher not configured

**Identity:**
* MFA not enabled for all users
* Guest users have excessive permissions
* Password policies are weak

## Troubleshooting

**Issue:** Azure authentication fails
* **Solution:** Run `az login` and ensure you have the correct subscription selected

**Issue:** Permission errors during scan
* **Solution:** Ensure your account or service principal has Reader role at subscription level

**Issue:** Subscription not found
* **Solution:** Verify subscription ID with `az account list` and check it's enabled

**Issue:** Slow scan performance
* **Solution:** Use `--services` flag to scan specific services instead of all

## Next Steps

Continue to [Lab 5: Multi-Cloud Security with Prowler (GCP)](/workshop/lab-05-gcp-multicloud) to add Google Cloud Platform to your multi-cloud security monitoring.

## Additional Resources

* [Azure Getting Started Guide](/user-guide/providers/azure/getting-started-azure)
* [Azure Authentication Methods](/user-guide/providers/azure/authentication)
* [Create Prowler Service Principal](/user-guide/providers/azure/create-prowler-service-principal)
* [Azure Subscriptions Management](/user-guide/providers/azure/subscriptions)

@@ -0,0 +1,377 @@
|
||||
---
|
||||
title: "Lab 5: Multi-Cloud Security with Prowler (GCP)"
|
||||
description: "Complete your multi-cloud security coverage by adding Google Cloud Platform assessments"
|
||||
---
|
||||
|
||||
<Note>
|
||||
**Tags:** `workshop` `gcp` `multi-cloud` `intermediate` `authentication`
|
||||
</Note>
|
||||
|
||||
# Lab 5: Multi-Cloud Security with Prowler (GCP)
|
||||
|
||||
Learn to secure Google Cloud Platform environments and achieve comprehensive multi-cloud security coverage with Prowler.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* Prowler CLI installed ([Lab 1](/workshop/lab-01-getting-started))
|
||||
* Active GCP project
|
||||
* Google Cloud SDK (gcloud) installed
|
||||
* GCP account with appropriate permissions (Viewer role minimum)
|
||||
* Basic understanding of GCP services
|
||||
|
||||
**Estimated Time:** 45 minutes
|
||||
|
||||
## Lab Objectives
|
||||
|
||||
By completing this lab, you will:
|
||||
|
||||
* Configure GCP authentication for Prowler
|
||||
* Run security assessments on GCP projects
|
||||
* Understand GCP-specific security checks
|
||||
* Achieve comprehensive multi-cloud security coverage (AWS, Azure, GCP)
|
||||
* Implement unified security policies across cloud providers
|
||||
|
||||
## Step 1: Install Google Cloud SDK
|
||||
|
||||
Install gcloud CLI if not already present:
|
||||
|
||||
**macOS:**
|
||||
```bash
|
||||
brew install google-cloud-sdk
|
||||
```
|
||||
|
||||
**Linux:**
|
||||
```bash
|
||||
curl https://sdk.cloud.google.com | bash
|
||||
exec -l $SHELL
|
||||
```
|
||||
|
||||
**Windows:**
|
||||
Download and install from: https://cloud.google.com/sdk/docs/install
|
||||
|
||||
Verify installation:
|
||||
```bash
|
||||
gcloud --version
|
||||
```
|
||||
|
||||
## Step 2: Authenticate to GCP
|
||||
|
||||
Initialize gcloud and authenticate:
|
||||
|
||||
```bash
|
||||
gcloud init
|
||||
```
|
||||
|
||||
This prompts you to:
|
||||
1. Log in to your Google account
|
||||
2. Select or create a GCP project
|
||||
3. Configure default region/zone (optional)
|
||||
|
||||
Verify authentication:
|
||||
```bash
|
||||
gcloud auth list
|
||||
```
|
||||
|
||||
Display active project:
|
||||
```bash
|
||||
gcloud config get-value project
|
||||
```
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 60 showing GCP authentication - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 3: Configure Application Default Credentials
|
||||
|
||||
Prowler uses Application Default Credentials (ADC):
|
||||
|
||||
```bash
|
||||
gcloud auth application-default login
|
||||
```
|
||||
|
||||
This creates credentials file at:
|
||||
* **Linux/macOS:** `~/.config/gcloud/application_default_credentials.json`
|
||||
* **Windows:** `%APPDATA%\gcloud\application_default_credentials.json`
|
||||
|
||||
## Step 4: Set Up Service Account (Optional)
|
||||
|
||||
For automated scans, create a service account:
|
||||
|
||||
```bash
|
||||
# Create service account
|
||||
gcloud iam service-accounts create prowler-scanner \
|
||||
--display-name="Prowler Security Scanner"
|
||||
|
||||
# Get project ID
|
||||
PROJECT_ID=$(gcloud config get-value project)
|
||||
|
||||
# Grant Viewer role
|
||||
gcloud projects add-iam-policy-binding $PROJECT_ID \
|
||||
--member="serviceAccount:prowler-scanner@${PROJECT_ID}.iam.gserviceaccount.com" \
|
||||
--role="roles/viewer"
|
||||
|
||||
# Generate key file
|
||||
gcloud iam service-accounts keys create ~/prowler-credentials.json \
|
||||
--iam-account=prowler-scanner@${PROJECT_ID}.iam.gserviceaccount.com
|
||||
```
|
||||
|
||||
<Warning>
|
||||
Store service account key files securely. These provide programmatic access to your GCP project.
|
||||
</Warning>
|
||||
|
||||
Use service account credentials:
|
||||
```bash
|
||||
export GOOGLE_APPLICATION_CREDENTIALS=~/prowler-credentials.json
|
||||
```
|
||||
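
Service account keys should also be rotated periodically (Prowler flags stale keys in Step 8); listing the keys and their creation dates shows what is due for rotation:

```bash
# List keys for the scanner service account, with creation dates
gcloud iam service-accounts keys list \
  --iam-account=prowler-scanner@${PROJECT_ID}.iam.gserviceaccount.com
```
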
|
||||
## Step 5: Run Your First GCP Scan
|
||||
|
||||
Execute Prowler against GCP:
|
||||
|
||||
```bash
|
||||
prowler gcp
|
||||
```
|
||||
|
||||
This command:
|
||||
* Uses Application Default Credentials (or service account)
|
||||
* Scans the active project
|
||||
* Runs all GCP security checks
|
||||
* Generates output in multiple formats
|
||||
|
||||
<Note>
|
||||
GCP scans typically take 5-10 minutes depending on resource count.
|
||||
</Note>
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 65 showing GCP scan execution - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 6: Scan Specific GCP Projects
|
||||
|
||||
Scan a specific project:
|
||||
|
||||
```bash
|
||||
prowler gcp --project-id my-project-id
|
||||
```
|
||||
|
||||
Scan multiple projects:
|
||||
```bash
|
||||
prowler gcp --project-id project-1 project-2 project-3
|
||||
```
|
||||
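
To cover every project visible to your credentials, a small shell loop over `gcloud projects list` works; this is a sketch and assumes the authenticated account has at least the Viewer role on each project:

```bash
# Scan each project the current account can list
for project in $(gcloud projects list --format='value(projectId)'); do
  prowler gcp --project-id "$project"
done
```
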
|
||||
## Step 7: Scan Specific GCP Services
|
||||
|
||||
Run targeted scans for specific services:
|
||||
|
||||
```bash
|
||||
prowler gcp --services storage compute iam
|
||||
```
|
||||
|
||||
This focuses on:
|
||||
* Cloud Storage buckets
|
||||
* Compute Engine instances
|
||||
* IAM policies and permissions
|
||||
|
||||
## Step 8: Analyze GCP Security Findings
|
||||
|
||||
Review GCP-specific security checks:
|
||||
|
||||
**Cloud Storage Security:**
|
||||
* Buckets not publicly accessible
|
||||
* Uniform bucket-level access enabled
|
||||
* Encryption at rest enabled
|
||||
* Versioning enabled
|
||||
|
||||
**Compute Engine Security:**
|
||||
* OS Login enabled
|
||||
* Serial port access disabled
|
||||
* Shielded VMs enabled
|
||||
* IP forwarding disabled
|
||||
|
||||
**IAM Security:**
|
||||
* Service accounts with minimal permissions
|
||||
* No primitive roles (Owner, Editor, Viewer) assigned to users
|
||||
* Service account keys rotated regularly
|
||||
* Cloud Identity-Aware Proxy (IAP) enabled
|
||||
|
||||
Open the HTML report:
|
||||
```bash
|
||||
open output/prowler-output-gcp-*.html
|
||||
```
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 69 showing GCP findings report - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 9: Multi-Cloud Security Overview
|
||||
|
||||
You now have security coverage across the three major cloud providers.
|
||||
|
||||
Create a comprehensive multi-cloud report directory:
|
||||
|
||||
```bash
|
||||
mkdir -p multi-cloud-security-reports
|
||||
cp output/prowler-output-aws-*.json multi-cloud-security-reports/
|
||||
cp output/prowler-output-azure-*.json multi-cloud-security-reports/
|
||||
cp output/prowler-output-gcp-*.json multi-cloud-security-reports/
|
||||
```
|
||||
|
||||
Compare security posture metrics:
|
||||
|
||||
```bash
# Count findings by provider (assumes each report is a top-level JSON array of findings)
echo "AWS findings:"
jq 'length' multi-cloud-security-reports/prowler-output-aws-*.json

echo "Azure findings:"
jq 'length' multi-cloud-security-reports/prowler-output-azure-*.json

echo "GCP findings:"
jq 'length' multi-cloud-security-reports/prowler-output-gcp-*.json
```
|
||||
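
Raw counts hide severity; the sketch below groups failed findings by severity for one report. It assumes a flat JSON array whose items carry `status` and `severity` fields, so adjust the keys to match your Prowler version's output format:

```bash
# Failed findings per severity for the GCP report
jq '[.[] | select(.status == "FAIL")]
    | group_by(.severity)
    | map({severity: .[0].severity, count: length})' \
  multi-cloud-security-reports/prowler-output-gcp-*.json
```
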
|
||||
## Step 10: GCP-Specific Remediation
|
||||
|
||||
Example remediations for common GCP findings:
|
||||
|
||||
**Enable uniform bucket-level access:**
|
||||
```bash
|
||||
gsutil uniformbucketlevelaccess set on gs://bucket-name
|
||||
```
|
||||
|
||||
**Disable public access to bucket:**
|
||||
```bash
|
||||
gsutil iam ch -d allUsers gs://bucket-name
|
||||
gsutil iam ch -d allAuthenticatedUsers gs://bucket-name
|
||||
```
|
||||
|
||||
**Enable OS Login on project:**
|
||||
```bash
|
||||
gcloud compute project-info add-metadata \
|
||||
--metadata enable-oslogin=TRUE
|
||||
```
|
||||
|
||||
**Disable serial port access:**
|
||||
```bash
|
||||
gcloud compute instances add-metadata instance-name \
|
||||
--metadata serial-port-enable=FALSE
|
||||
```
|
||||
|
||||
**Remove primitive role binding:**
|
||||
```bash
|
||||
gcloud projects remove-iam-policy-binding PROJECT_ID \
|
||||
--member='user:email@example.com' \
|
||||
--role='roles/editor'
|
||||
```
|
||||
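
Fixes like the serial-port one usually need to be applied fleet-wide rather than per instance; here is a sketch of a project-wide loop (test against non-production instances first):

```bash
# Disable serial port access on every instance in the active project
gcloud compute instances list --format='value(name,zone.basename())' |
while read -r name zone; do
  gcloud compute instances add-metadata "$name" --zone "$zone" \
    --metadata serial-port-enable=FALSE
done
```
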
|
||||
## Step 11: Scan GCP Organization
|
||||
|
||||
If you have organization-level access:
|
||||
|
||||
```bash
|
||||
prowler gcp --organization-id org-id
|
||||
```
|
||||
|
||||
This scans all projects within the organization.
|
||||
|
||||
<Tip>
|
||||
Organization-level scanning requires `resourcemanager.organizations.get` permission at the organization level.
|
||||
</Tip>
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 74 showing organization scan - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 12: Multi-Cloud Security Strategy
|
||||
|
||||
Establish consistent security controls across clouds:
|
||||
|
||||
**Identity and Access:**
|
||||
* Enforce MFA across all providers
|
||||
* Implement least privilege access
|
||||
* Regular access reviews
|
||||
* Centralized identity management
|
||||
|
||||
**Data Protection:**
|
||||
* Encryption at rest and in transit
|
||||
* Regular backups
|
||||
* Data retention policies
|
||||
* Access logging enabled
|
||||
|
||||
**Network Security:**
|
||||
* Zero-trust network architecture
|
||||
* Network segmentation
|
||||
* DDoS protection
|
||||
* Traffic inspection
|
||||
|
||||
**Monitoring and Compliance:**
|
||||
* Centralized logging
|
||||
* Security information and event management (SIEM)
|
||||
* Regular compliance scans
|
||||
* Automated remediation where possible
|
||||
|
||||
## Verification Steps
|
||||
|
||||
Confirm successful lab completion:
|
||||
|
||||
1. Google Cloud SDK installed and authenticated
|
||||
2. First GCP scan completed successfully
|
||||
3. GCP security findings reviewed
|
||||
4. Service-specific scans executed
|
||||
5. Multi-cloud reports collected (AWS, Azure, GCP)
|
||||
6. GCP-specific remediations understood
|
||||
|
||||
## Expected Outcomes
|
||||
|
||||
After completing this lab, you should:
|
||||
|
||||
* Be able to authenticate Prowler with GCP
|
||||
* Understand GCP security checks
|
||||
* Know how to scan multiple projects and organizations
|
||||
* Have achieved multi-cloud security coverage
|
||||
* Be familiar with GCP-specific remediation commands
|
||||
|
||||
## Common GCP Security Findings
|
||||
|
||||
**Cloud Storage:**
|
||||
* Buckets with public access
|
||||
* Uniform bucket-level access not enabled
|
||||
* Versioning disabled
|
||||
* Logging not configured
|
||||
|
||||
**Compute Engine:**
|
||||
* OS Login not enabled
|
||||
* Legacy metadata endpoints enabled
|
||||
* Serial port access enabled
|
||||
* IP forwarding enabled on instances
|
||||
|
||||
**IAM:**
|
||||
* Primitive roles assigned to users
|
||||
* Service account keys not rotated
|
||||
* Over-permissive service accounts
|
||||
* No organization policies enforced
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Issue:** GCP authentication fails
|
||||
* **Solution:** Run `gcloud auth application-default login` and ensure project is set
|
||||
|
||||
**Issue:** Permission errors during scan
|
||||
* **Solution:** Ensure account has Viewer role at project or organization level
|
||||
|
||||
**Issue:** Project not found
|
||||
* **Solution:** Verify project ID with `gcloud projects list` and check it's active
|
||||
|
||||
**Issue:** API not enabled errors
|
||||
* **Solution:** Enable required APIs: `gcloud services enable cloudresourcemanager.googleapis.com`
|
||||
|
||||
## Next Steps
|
||||
|
||||
Continue to [Lab 6: Compliance as Code with Prowler](/workshop/lab-06-compliance-as-code) to learn how to automate compliance reporting across all cloud providers.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
* [GCP Getting Started Guide](/user-guide/providers/gcp/getting-started-gcp)
|
||||
* [GCP Authentication Methods](/user-guide/providers/gcp/authentication)
|
||||
* [GCP Projects Management](/user-guide/providers/gcp/projects)
|
||||
* [GCP Organization Scanning](/user-guide/providers/gcp/organization)
|
||||
@@ -0,0 +1,465 @@
|
||||
---
|
||||
title: "Lab 6: Compliance as Code with Prowler"
|
||||
description: "Automate compliance reporting and validation against industry standards and regulatory frameworks"
|
||||
---
|
||||
|
||||
<Note>
|
||||
**Tags:** `workshop` `aws` `compliance` `intermediate` `automation` `frameworks`
|
||||
</Note>
|
||||
|
||||
# Lab 6: Compliance as Code with Prowler
|
||||
|
||||
Learn to automate compliance validation and reporting against industry standards such as CIS, PCI-DSS, HIPAA, and custom compliance frameworks.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* Completion of [Lab 1: Getting Started with Prowler CLI](/workshop/lab-01-getting-started)
|
||||
* AWS account with resources
|
||||
* Prowler CLI installed and configured
|
||||
* Understanding of compliance frameworks (CIS, PCI-DSS, HIPAA)
|
||||
|
||||
**Estimated Time:** 50 minutes
|
||||
|
||||
## Lab Objectives
|
||||
|
||||
By completing this lab, you will:
|
||||
|
||||
* Understand compliance frameworks in Prowler
|
||||
* Generate compliance reports for industry standards
|
||||
* Validate compliance status programmatically
|
||||
* Create custom compliance frameworks
|
||||
* Automate compliance reporting in CI/CD pipelines
|
||||
|
||||
## Step 1: List Available Compliance Frameworks
|
||||
|
||||
View all supported compliance frameworks:
|
||||
|
||||
```bash
|
||||
prowler aws --list-compliance
|
||||
```
|
||||
|
||||
This displays frameworks such as:
|
||||
* CIS AWS Foundations Benchmark (multiple versions)
|
||||
* PCI-DSS v4.0
|
||||
* HIPAA
|
||||
* SOC2
|
||||
* GDPR
|
||||
* ISO 27001
|
||||
* NIST 800-53
|
||||
* AWS Foundational Security Best Practices
|
||||
* Custom frameworks
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 78 showing compliance frameworks list - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 2: Run CIS Benchmark Compliance Scan
|
||||
|
||||
Execute a CIS AWS Foundations Benchmark scan:
|
||||
|
||||
```bash
|
||||
prowler aws --compliance cis_2.0_aws
|
||||
```
|
||||
|
||||
This command:
|
||||
* Runs only checks mapped to CIS Benchmark v2.0
|
||||
* Generates a compliance report
|
||||
* Shows compliance percentage
|
||||
* Identifies non-compliant controls
|
||||
|
||||
Review compliance summary:
|
||||
```bash
|
||||
open output/compliance/prowler-compliance-cis_2.0_aws-*.html
|
||||
```
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 80 showing CIS compliance report - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 3: Analyze Compliance Requirements
|
||||
|
||||
Understanding compliance report structure:
|
||||
|
||||
* **Requirement ID:** Control identifier (e.g., 1.1, 1.2)
* **Requirement Description:** What the control validates
* **Status:** PASS or FAIL
* **Related Checks:** Prowler checks that map to this requirement
* **Resources Affected:** Specific resources that failed
|
||||
|
||||
Example CIS requirement:
|
||||
|
||||
```
|
||||
ID: 1.4
|
||||
Description: Ensure no root account access key exists
|
||||
Status: FAIL
|
||||
Checks: iam_root_user_no_access_keys
|
||||
Resources: Root account has 1 active access key
|
||||
```
|
||||
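
Prowler also writes per-framework files under `output/compliance/`; for quick triage you can inspect the CSV export and count failing rows. The filename pattern and column layout vary by Prowler version, so check the header first:

```bash
# Inspect the compliance CSV header, then count rows marked FAIL
head -1 output/compliance/*cis_2.0_aws*.csv | tr ';' '\n'
grep -c 'FAIL' output/compliance/*cis_2.0_aws*.csv
```
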
|
||||
## Step 4: Generate Multiple Compliance Reports
|
||||
|
||||
Run scans for multiple frameworks:
|
||||
|
||||
```bash
|
||||
prowler aws --compliance cis_2.0_aws pci_dss_v4.0_aws hipaa_aws
|
||||
```
|
||||
|
||||
This generates three separate compliance reports:
|
||||
* `prowler-compliance-cis_2.0_aws-*.html`
|
||||
* `prowler-compliance-pci_dss_v4.0_aws-*.html`
|
||||
* `prowler-compliance-hipaa_aws-*.html`
|
||||
|
||||
Compare compliance posture across frameworks:
|
||||
```bash
|
||||
grep "Compliance Status" output/compliance/*.html
|
||||
```
|
||||
|
||||
## Step 5: Export Compliance Data
|
||||
|
||||
Export compliance results to JSON for automation:
|
||||
|
||||
```bash
|
||||
prowler aws --compliance cis_2.0_aws -o json-ocsf
|
||||
```
|
||||
|
||||
The JSON output includes:
|
||||
* Compliance score (percentage)
|
||||
* Passed requirements
|
||||
* Failed requirements
|
||||
* Resource-level details
|
||||
* Remediation guidance
|
||||
|
||||
Query compliance status programmatically:
|
||||
```bash
|
||||
jq '.compliance["cis_2.0_aws"].score' output/prowler-output-*.json-ocsf
|
||||
```
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 84 showing JSON compliance output - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 6: Create a Custom Compliance Framework
|
||||
|
||||
Create a custom framework for organization-specific requirements:
|
||||
|
||||
Create `custom_compliance.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"Framework": "custom_security_baseline",
|
||||
"Version": "1.0",
|
||||
"Provider": "aws",
|
||||
"Description": "Organization Security Baseline Requirements",
|
||||
"Requirements": [
|
||||
{
|
||||
"Id": "1.1",
|
||||
"Description": "S3 buckets must have encryption enabled",
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Data Protection",
|
||||
"SubSection": "Encryption at Rest",
|
||||
"Type": "automated",
|
||||
"Service": "s3"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_secure_transport_policy"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "1.2",
|
||||
"Description": "CloudTrail must be enabled in all regions",
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Logging and Monitoring",
|
||||
"SubSection": "Audit Logging",
|
||||
"Type": "automated",
|
||||
"Service": "cloudtrail"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_log_file_validation_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "2.1",
|
||||
"Description": "IAM users must have MFA enabled",
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Identity and Access Management",
|
||||
"SubSection": "Multi-Factor Authentication",
|
||||
"Type": "automated",
|
||||
"Service": "iam"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_root_mfa_enabled"
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "3.1",
|
||||
"Description": "Security groups must not allow unrestricted access",
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Network Security",
|
||||
"SubSection": "Firewall Rules",
|
||||
"Type": "automated",
|
||||
"Service": "ec2"
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Save it to Prowler's AWS compliance directory (here `~/.prowler/compliance/aws/`, matching the command below):
|
||||
```bash
|
||||
cp custom_compliance.json ~/.prowler/compliance/aws/
|
||||
```
|
||||
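
Before running the framework, validate the JSON syntax; a malformed file is the most common reason a custom framework fails to load (see Troubleshooting below):

```bash
# Fail fast on malformed JSON before Prowler tries to load the framework
jq empty custom_compliance.json && echo "custom_compliance.json is valid JSON"
```
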
|
||||
## Step 7: Run Custom Compliance Framework
|
||||
|
||||
Execute scan against custom framework:
|
||||
|
||||
```bash
|
||||
prowler aws --compliance-framework custom_compliance.json
|
||||
```
|
||||
|
||||
Or if placed in Prowler's compliance directory:
|
||||
```bash
|
||||
prowler aws --compliance custom_security_baseline
|
||||
```
|
||||
|
||||
Review custom compliance report:
|
||||
```bash
|
||||
open output/compliance/prowler-compliance-custom_security_baseline-*.html
|
||||
```
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 88 showing custom compliance report - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 8: Compliance Reporting for Audits
|
||||
|
||||
Generate audit-ready compliance reports:
|
||||
|
||||
```bash
|
||||
prowler aws \
|
||||
--compliance cis_2.0_aws \
|
||||
-o html csv json \
|
||||
--output-directory ./audit-reports-$(date +%Y%m%d)
|
||||
```
|
||||
|
||||
This creates:
|
||||
* HTML report for human review
|
||||
* CSV for spreadsheet analysis
|
||||
* JSON for programmatic processing
|
||||
|
||||
Package for auditors:
|
||||
```bash
|
||||
tar -czf compliance-audit-$(date +%Y%m%d).tar.gz audit-reports-*
|
||||
```
|
||||
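
Auditors often want evidence that reports were not altered after generation; shipping a checksum alongside the archive is a cheap way to provide it:

```bash
# Record a SHA-256 checksum so auditors can verify archive integrity
shasum -a 256 compliance-audit-$(date +%Y%m%d).tar.gz \
  > compliance-audit-$(date +%Y%m%d).tar.gz.sha256
```
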
|
||||
## Step 9: Automate Compliance Validation
|
||||
|
||||
Create a compliance validation script:
|
||||
|
||||
Create `compliance-check.sh`:
|
||||
|
||||
```bash
|
||||
#!/bin/bash
|
||||
|
||||
# Configuration
|
||||
COMPLIANCE_FRAMEWORK="cis_2.0_aws"
|
||||
REQUIRED_SCORE=85
|
||||
OUTPUT_DIR="./compliance-reports"
|
||||
|
||||
# Run Prowler
|
||||
prowler aws \
|
||||
--compliance $COMPLIANCE_FRAMEWORK \
|
||||
-o json \
|
||||
--output-directory $OUTPUT_DIR
|
||||
|
||||
# Extract compliance score
|
||||
SCORE=$(jq -r ".compliance.${COMPLIANCE_FRAMEWORK}.score" \
|
||||
$OUTPUT_DIR/prowler-output-*.json | head -1)
|
||||
|
||||
echo "Compliance Score: ${SCORE}%"
|
||||
|
||||
# Validate compliance threshold
|
||||
if (( $(echo "$SCORE >= $REQUIRED_SCORE" | bc -l) )); then
|
||||
echo "✓ Compliance check PASSED (score: ${SCORE}% >= ${REQUIRED_SCORE}%)"
|
||||
exit 0
|
||||
else
|
||||
echo "✗ Compliance check FAILED (score: ${SCORE}% < ${REQUIRED_SCORE}%)"
|
||||
exit 1
|
||||
fi
|
||||
```
|
||||
|
||||
Make executable:
|
||||
```bash
|
||||
chmod +x compliance-check.sh
|
||||
```
|
||||
|
||||
Run validation:
|
||||
```bash
|
||||
./compliance-check.sh
|
||||
```
|
||||
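
Because the script exits non-zero on failure, it plugs into anything that understands exit codes; for example, a nightly crontab entry that keeps a log:

```bash
# crontab -e: run nightly at 03:00 and append output to a log
0 3 * * * /path/to/compliance-check.sh >> /var/log/compliance-check.log 2>&1
```
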
|
||||
## Step 10: Integrate with CI/CD Pipeline
|
||||
|
||||
Example GitHub Actions workflow:
|
||||
|
||||
Create `.github/workflows/compliance-check.yml`:
|
||||
|
||||
```yaml
|
||||
name: Compliance Validation
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 0 * * *' # Daily at midnight
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
prowler-compliance:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Install Prowler
|
||||
run: pip install prowler
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@v2
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-east-1
|
||||
|
||||
- name: Run compliance scan
|
||||
run: |
|
||||
prowler aws \
|
||||
--compliance cis_2.0_aws \
|
||||
-o html json \
|
||||
--output-directory ./reports
|
||||
|
||||
- name: Upload compliance reports
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: compliance-reports
|
||||
path: ./reports/
|
||||
|
||||
- name: Check compliance threshold
|
||||
run: |
|
||||
SCORE=$(jq -r '.compliance["cis_2.0_aws"].score' reports/prowler-output-*.json)
|
||||
if (( $(echo "$SCORE < 85" | bc -l) )); then
|
||||
echo "Compliance score ${SCORE}% is below threshold"
|
||||
exit 1
|
||||
fi
|
||||
```
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 92 showing CI/CD integration - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 11: Continuous Compliance Monitoring
|
||||
|
||||
Implement continuous compliance monitoring:
|
||||
|
||||
**Daily Scans:**
|
||||
* Schedule automated scans
|
||||
* Track compliance trends over time
|
||||
* Alert on compliance score drops
|
||||
|
||||
**Drift Detection:**
|
||||
* Compare current state vs. baseline
|
||||
* Identify new non-compliant resources
|
||||
* Generate remediation tickets automatically
|
||||
|
||||
**Compliance Dashboard:**
|
||||
* Visualize compliance status
|
||||
* Track remediation progress
|
||||
* Generate executive reports
|
||||
|
||||
## Verification Steps
|
||||
|
||||
Confirm successful lab completion:
|
||||
|
||||
1. Listed available compliance frameworks
|
||||
2. Generated CIS compliance report
|
||||
3. Created multiple framework reports
|
||||
4. Built custom compliance framework
|
||||
5. Automated compliance validation
|
||||
6. Integrated compliance checks in CI/CD
|
||||
|
||||
## Expected Outcomes
|
||||
|
||||
After completing this lab, you should:
|
||||
|
||||
* Understand Prowler compliance capabilities
|
||||
* Be able to generate compliance reports
|
||||
* Know how to create custom frameworks
|
||||
* Have automated compliance validation
|
||||
* Be ready for audit processes
|
||||
|
||||
## Compliance Framework Mapping
|
||||
|
||||
Common frameworks supported:
|
||||
|
||||
**AWS:**
|
||||
* CIS AWS Foundations Benchmark v1.4, v1.5, v2.0, v3.0
|
||||
* AWS Foundational Security Best Practices
|
||||
* PCI-DSS v4.0
|
||||
* HIPAA
|
||||
* SOC2
|
||||
* GDPR
|
||||
* ISO 27001
|
||||
* NIST 800-53
|
||||
* FedRAMP
|
||||
* ENS (Spanish National Security Scheme)
|
||||
|
||||
**Azure:**
|
||||
* CIS Microsoft Azure Foundations Benchmark
|
||||
* Azure Security Benchmark
|
||||
|
||||
**GCP:**
|
||||
* CIS Google Cloud Platform Foundation Benchmark
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Issue:** Compliance framework not found
|
||||
* **Solution:** Use `--list-compliance` to see exact framework names
|
||||
|
||||
**Issue:** Low compliance score
|
||||
* **Solution:** Review failed checks and prioritize remediation by severity
|
||||
|
||||
**Issue:** Missing compliance report
|
||||
* **Solution:** Check `output/compliance/` directory for framework-specific reports
|
||||
|
||||
**Issue:** Custom framework not loading
|
||||
* **Solution:** Validate JSON syntax and ensure file is in correct directory
|
||||
|
||||
## Next Steps
|
||||
|
||||
Continue to [Lab 7: Integrations with Prowler](/workshop/lab-07-integrations) to learn how to integrate Prowler with AWS Security Hub and other security tools.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
* [Compliance Reporting Guide](/user-guide/cli/tutorials/compliance)
|
||||
* [Compliance Frameworks Documentation](/user-guide/cli/tutorials/compliance)
|
||||
* [Custom Compliance Framework Guide](/developer-guide/security-compliance-framework)
|
||||
* [Prowler Hub Compliance Frameworks](https://hub.prowler.com/compliance)
|
||||
@@ -0,0 +1,425 @@
|
||||
---
|
||||
title: "Lab 7: Integrations with Prowler"
|
||||
description: "Integrate Prowler findings with AWS Security Hub and other security tools for centralized security management"
|
||||
---
|
||||
|
||||
<Note>
|
||||
**Tags:** `workshop` `aws` `integrations` `intermediate` `security-hub` `automation`
|
||||
</Note>
|
||||
|
||||
# Lab 7: Integrations with Prowler
|
||||
|
||||
Learn to integrate Prowler with AWS Security Hub and other security tools to centralize security findings and automate remediation workflows.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* Completion of [Lab 1: Getting Started with Prowler CLI](/workshop/lab-01-getting-started)
|
||||
* AWS account with Security Hub enabled
|
||||
* IAM permissions for Security Hub operations
|
||||
* Prowler CLI installed and configured
|
||||
* Basic understanding of AWS Security Hub
|
||||
|
||||
**Estimated Time:** 45 minutes
|
||||
|
||||
## Lab Objectives
|
||||
|
||||
By completing this lab, you will:
|
||||
|
||||
* Enable AWS Security Hub integration
|
||||
* Send Prowler findings to Security Hub
|
||||
* Understand finding formats and mapping
|
||||
* Configure automated finding synchronization
|
||||
* Integrate with third-party security tools
|
||||
* Implement centralized security dashboards
|
||||
|
||||
## Step 1: Enable AWS Security Hub
|
||||
|
||||
Enable Security Hub in your AWS account:
|
||||
|
||||
**Via AWS Console:**
|
||||
1. Navigate to AWS Security Hub
|
||||
2. Click "Go to Security Hub"
|
||||
3. Click "Enable Security Hub"
|
||||
|
||||
**Via AWS CLI:**
|
||||
```bash
|
||||
aws securityhub enable-security-hub
|
||||
```
|
||||
|
||||
Verify Security Hub is enabled:
|
||||
```bash
|
||||
aws securityhub get-enabled-standards
|
||||
```
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 96 showing Security Hub enablement - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 2: Configure IAM Permissions
|
||||
|
||||
Ensure your IAM role/user has Security Hub permissions:
|
||||
|
||||
Required permissions:
|
||||
```json
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": [
|
||||
"securityhub:BatchImportFindings",
|
||||
"securityhub:GetFindings"
|
||||
],
|
||||
"Resource": "*"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Create and attach policy:
|
||||
```bash
|
||||
aws iam create-policy \
|
||||
--policy-name ProwlerSecurityHubIntegration \
|
||||
--policy-document file://securityhub-policy.json
|
||||
|
||||
aws iam attach-user-policy \
|
||||
--user-name prowler-user \
|
||||
--policy-arn arn:aws:iam::ACCOUNT_ID:policy/ProwlerSecurityHubIntegration
|
||||
```
|
||||
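
Confirm the policy actually landed on the user before running the integration; a missing attachment is the usual cause of the permission errors listed under Troubleshooting:

```bash
# Verify the Security Hub policy is attached
aws iam list-attached-user-policies --user-name prowler-user
```
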
|
||||
## Step 3: Run Prowler with Security Hub Integration
|
||||
|
||||
Execute Prowler and send findings to Security Hub:
|
||||
|
||||
```bash
|
||||
prowler aws --security-hub
|
||||
```
|
||||
|
||||
This command:
|
||||
* Runs all security checks
|
||||
* Transforms findings to AWS Security Finding Format (ASFF)
|
||||
* Sends findings to Security Hub via `BatchImportFindings` API
|
||||
* Generates local reports
|
||||
|
||||
<Warning>
|
||||
Security Hub has API rate limits. For large environments, findings are sent in batches automatically.
|
||||
</Warning>
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 99 showing Prowler sending findings to Security Hub - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 4: View Findings in Security Hub
|
||||
|
||||
Navigate to AWS Security Hub console and review Prowler findings:
|
||||
|
||||
**Filter by Product:**
|
||||
1. Go to "Findings" in Security Hub
|
||||
2. Add filter: `Product name is Prowler`
|
||||
3. Review findings by severity
|
||||
|
||||
**View Finding Details:**
|
||||
* Severity (CRITICAL, HIGH, MEDIUM, LOW, INFORMATIONAL)
|
||||
* Affected resource
|
||||
* Compliance framework mapping
|
||||
* Remediation guidance
|
||||
* Workflow status
|
||||
|
||||
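
The same findings are available from the CLI, which is useful for scripting on top of the console view:

```bash
# Fetch a few Prowler findings from Security Hub
aws securityhub get-findings \
  --filters '{"ProductName": [{"Value": "Prowler", "Comparison": "EQUALS"}]}' \
  --max-items 5
```
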
<Note>
|
||||
[Note: Screenshot of slide 101 showing Security Hub findings view - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 5: Understanding ASFF Mapping
|
||||
|
||||
Prowler findings are mapped to AWS Security Finding Format:
|
||||
|
||||
**Prowler Status → Security Hub Compliance Status:**
|
||||
* PASS → PASSED
|
||||
* FAIL → FAILED
|
||||
* MANUAL → NOT_AVAILABLE
|
||||
|
||||
**Prowler Severity → Security Hub Severity:**
|
||||
* critical → CRITICAL (90-100)
|
||||
* high → HIGH (70-89)
|
||||
* medium → MEDIUM (40-69)
|
||||
* low → LOW (1-39)
|
||||
* informational → INFORMATIONAL (0)
|
||||
|
||||
Example ASFF finding structure:
|
||||
```json
|
||||
{
|
||||
"SchemaVersion": "2018-10-08",
|
||||
"Id": "prowler-aws/account/region/check/resource",
|
||||
"ProductArn": "arn:aws:securityhub:region::product/prowler/prowler",
|
||||
"GeneratorId": "prowler-check-id",
|
||||
"AwsAccountId": "123456789012",
|
||||
"Types": ["Software and Configuration Checks"],
|
||||
"CreatedAt": "2024-01-01T00:00:00.000Z",
|
||||
"UpdatedAt": "2024-01-01T00:00:00.000Z",
|
||||
"Severity": {
|
||||
"Label": "HIGH"
|
||||
},
|
||||
"Title": "Check title",
|
||||
"Description": "Check description",
|
||||
"Resources": [
|
||||
{
|
||||
"Type": "AwsS3Bucket",
|
||||
"Id": "arn:aws:s3:::bucket-name"
|
||||
}
|
||||
],
|
||||
"Compliance": {
|
||||
"Status": "FAILED"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Step 6: Update Existing Findings
|
||||
|
||||
Run subsequent scans to update Security Hub findings:
|
||||
|
||||
```bash
|
||||
prowler aws --security-hub
|
||||
```
|
||||
|
||||
Prowler automatically:
|
||||
* Updates existing findings (same resource, same check)
|
||||
* Marks remediated issues as PASSED
|
||||
* Creates new findings for new resources
|
||||
* Archives findings for deleted resources
|
||||
|
||||
## Step 7: Regional Security Hub Integration
|
||||
|
||||
Send findings to Security Hub in specific regions:
|
||||
|
||||
```bash
|
||||
prowler aws --security-hub --region us-east-1 us-west-2
|
||||
```
|
||||
|
||||
Or enable aggregation in a single region:
|
||||
|
||||
```bash
|
||||
# Configure finding aggregator in Security Hub
|
||||
aws securityhub create-finding-aggregator \
|
||||
--region-linking-mode ALL_REGIONS
|
||||
```
|
||||
|
||||
<Tip>
|
||||
Use Security Hub finding aggregation to centralize findings from multiple regions in a single dashboard.
|
||||
</Tip>
|
||||
|
||||
## Step 8: Filter Findings Sent to Security Hub
|
||||
|
||||
Send only critical and high-severity findings:
|
||||
|
||||
```bash
|
||||
prowler aws --security-hub --severity critical high
|
||||
```
|
||||
|
||||
Send findings for specific compliance frameworks:
|
||||
|
||||
```bash
|
||||
prowler aws --security-hub --compliance cis_2.0_aws
|
||||
```
|
||||
|
||||
## Step 9: Integrate with S3 for Long-Term Storage
|
||||
|
||||
Store Prowler reports in S3 alongside Security Hub integration:
|
||||
|
||||
```bash
|
||||
prowler aws \
|
||||
--security-hub \
|
||||
-o html json csv \
|
||||
--output-bucket-no-assume s3://my-security-reports-bucket
|
||||
```
|
||||
|
||||
This enables:
|
||||
* Long-term retention of historical reports
|
||||
* Compliance audit trails
|
||||
* Trend analysis over time
|
||||
* Cost-effective storage
|
||||
|
||||
Configure S3 bucket lifecycle policies:
|
||||
```bash
|
||||
aws s3api put-bucket-lifecycle-configuration \
|
||||
--bucket my-security-reports-bucket \
|
||||
--lifecycle-configuration file://lifecycle.json
|
||||
```
|
||||
|
||||
`lifecycle.json`:
|
||||
```json
|
||||
{
|
||||
"Rules": [
|
||||
{
|
||||
"Id": "ArchiveOldReports",
|
||||
"Status": "Enabled",
|
||||
"Transitions": [
|
||||
{
|
||||
"Days": 90,
|
||||
"StorageClass": "GLACIER"
|
||||
}
|
||||
],
|
||||
"Expiration": {
|
||||
"Days": 365
|
||||
},
|
||||
"Filter": {
|
||||
"Prefix": "prowler-reports/"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
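
After applying the configuration, confirm S3 accepted it and that the `Prefix` above matches where Prowler actually writes reports in your setup:

```bash
# Verify the lifecycle rules were applied
aws s3api get-bucket-lifecycle-configuration \
  --bucket my-security-reports-bucket
```
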
|
||||
<Note>
|
||||
[Note: Screenshot of slide 107 showing S3 integration - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 10: Integrate with Third-Party Tools
|
||||
|
||||
**Send to Slack:**
|
||||
```bash
|
||||
prowler aws --security-hub | \
|
||||
jq -r '.[] | select(.status=="FAIL" and .severity=="critical")' | \
|
||||
curl -X POST -H 'Content-type: application/json' \
|
||||
--data @- https://hooks.slack.com/services/YOUR/WEBHOOK/URL
|
||||
```
|
||||
|
||||
**Send to Jira:**
|
||||
Create Jira tickets for critical findings using Jira API:
|
||||
|
||||
```bash
#!/bin/bash
JIRA_URL="https://your-domain.atlassian.net"
JIRA_API_TOKEN="your-api-token"
JIRA_PROJECT="SEC"

# Extract critical findings (assumes the JSON output is a top-level array;
# adjust the filter to match your Prowler version's output format)
FINDINGS=$(prowler aws -o json-ocsf | \
  jq '.[] | select(.status=="FAIL" and .severity=="critical")')

# Create one Jira ticket per finding
echo "$FINDINGS" | jq -c '.' | while read -r finding; do
  TITLE=$(echo "$finding" | jq -r '.check_title')
  DESCRIPTION=$(echo "$finding" | jq -r '.status_extended')

  curl -X POST "$JIRA_URL/rest/api/2/issue" \
    -H "Authorization: Bearer $JIRA_API_TOKEN" \
    -H "Content-Type: application/json" \
    -d "{
      \"fields\": {
        \"project\": {\"key\": \"$JIRA_PROJECT\"},
        \"summary\": \"$TITLE\",
        \"description\": \"$DESCRIPTION\",
        \"issuetype\": {\"name\": \"Task\"}
      }
    }"
done
```
|
||||
|
||||
**Send to Splunk:**
|
||||
```bash
|
||||
prowler aws -o json-ocsf | \
|
||||
curl -k https://splunk-server:8088/services/collector/event \
|
||||
-H "Authorization: Splunk YOUR-HEC-TOKEN" \
|
||||
-d @-
|
||||
```
|
||||
|
||||
## Step 11: Automate Security Hub Updates
|
||||
|
||||
Create a Lambda function to run Prowler periodically:
|
||||
|
||||
**Lambda Function (Python):**
|
||||
```python
# Note: the Prowler CLI must be packaged with the function (for example via a
# container image), since it is not included in the default Lambda runtimes.
import subprocess
|
||||
|
||||
def lambda_handler(event, context):
|
||||
# Run Prowler with Security Hub integration
|
||||
result = subprocess.run(
|
||||
['prowler', 'aws', '--security-hub'],
|
||||
capture_output=True,
|
||||
text=True
|
||||
)
|
||||
|
||||
return {
|
||||
'statusCode': 200,
|
||||
'body': f'Prowler scan completed. Output: {result.stdout}'
|
||||
}
|
||||
```
|
||||
|
||||
**Schedule with EventBridge:**
|
||||
```bash
|
||||
aws events put-rule \
|
||||
--name DailyProwlerScan \
|
||||
--schedule-expression "cron(0 2 * * ? *)"
|
||||
|
||||
aws events put-targets \
|
||||
--rule DailyProwlerScan \
|
||||
--targets "Id"="1","Arn"="arn:aws:lambda:region:account:function:ProwlerScanFunction"
|
||||
```
|
||||
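
EventBridge also needs permission to invoke the function; without this resource-based policy the rule fires but the Lambda never runs (region and account below are placeholders):

```bash
# Allow the EventBridge rule to invoke the Lambda function
aws lambda add-permission \
  --function-name ProwlerScanFunction \
  --statement-id AllowEventBridgeInvoke \
  --action lambda:InvokeFunction \
  --principal events.amazonaws.com \
  --source-arn arn:aws:events:region:account:rule/DailyProwlerScan
```
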
|
||||
<Note>
|
||||
[Note: Screenshot of slide 111 showing automated integration - to be added]
|
||||
</Note>
|
||||
|
||||
## Verification Steps
|
||||
|
||||
Confirm successful lab completion:
|
||||
|
||||
1. AWS Security Hub enabled
|
||||
2. Prowler findings sent to Security Hub
|
||||
3. Findings visible in Security Hub console
|
||||
4. Subsequent scans update existing findings
|
||||
5. S3 integration configured for report storage
|
||||
6. Third-party integration examples tested
|
||||
|
||||
## Expected Outcomes
|
||||
|
||||
After completing this lab, you should:
|
||||
|
||||
* Understand Security Hub integration
|
||||
* Know how to send findings to Security Hub
|
||||
* Be able to configure automated synchronization
|
||||
* Have integrated with S3 for storage
|
||||
* Be familiar with third-party tool integrations
|
||||
|
||||
## Security Hub Benefits
|
||||
|
||||
**Centralized Security:**
|
||||
* Aggregate findings from multiple tools
|
||||
* Unified view across AWS accounts and regions
|
||||
* Compliance dashboard
|
||||
|
||||
**Automated Workflows:**
|
||||
* Trigger remediation workflows
|
||||
* Create incidents automatically
|
||||
* Integrate with SIEM tools
|
||||
|
||||
**Prioritization:**
|
||||
* Filter by severity and compliance status
|
||||
* Track remediation progress
|
||||
* Generate executive reports
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Issue:** Security Hub not enabled
|
||||
* **Solution:** Run `aws securityhub enable-security-hub` to enable
|
||||
|
||||
**Issue:** Permission denied sending findings
|
||||
* **Solution:** Ensure IAM role has `securityhub:BatchImportFindings` permission
|
||||
|
||||
**Issue:** Findings not appearing in Security Hub
|
||||
* **Solution:** Check Prowler output for errors, verify region configuration
|
||||
|
||||
**Issue:** Rate limit errors
|
||||
* **Solution:** Prowler batches findings automatically; retry if transient failures occur
|
||||
|
||||
## Next Steps
|
||||
|
||||
Continue to [Lab 8: Prowler SaaS Platform](/workshop/lab-08-prowler-saas) to explore the managed Prowler Cloud platform with advanced features.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
* [Security Hub Integration Guide](/user-guide/providers/aws/securityhub)
|
||||
* [S3 Integration Guide](/user-guide/providers/aws/s3)
|
||||
* [Integrations Documentation](/user-guide/cli/tutorials/integrations)
|
||||
* [AWS Security Hub Documentation](https://docs.aws.amazon.com/securityhub/)
|
||||
@@ -0,0 +1,440 @@
|
||||
---
|
||||
title: "Lab 8: Prowler SaaS Platform"
|
||||
description: "Explore Prowler Cloud's managed platform with advanced features, team collaboration, and continuous monitoring"
|
||||
---
|
||||
|
||||
<Note>
|
||||
**Tags:** `workshop` `prowler-cloud` `saas` `intermediate` `platform` `collaboration`
|
||||
</Note>
|
||||
|
||||
# Lab 8: Prowler SaaS Platform
|
||||
|
||||
Learn to use Prowler Cloud, the managed SaaS platform that provides advanced security monitoring, team collaboration, compliance dashboards, and AI-powered security insights.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
* Completion of previous labs (recommended but not required)
|
||||
* Prowler Cloud account (free trial available)
|
||||
* Cloud provider accounts (AWS, Azure, or GCP)
|
||||
* Basic understanding of Prowler concepts
|
||||
|
||||
**Estimated Time:** 60 minutes
|
||||
|
||||
## Lab Objectives
|
||||
|
||||
By completing this lab, you will:
|
||||
|
||||
* Set up Prowler Cloud account
|
||||
* Connect cloud providers to Prowler Cloud
|
||||
* Navigate the Prowler Cloud interface
|
||||
* Use team collaboration features
|
||||
* Leverage AI-powered security insights
|
||||
* Configure continuous monitoring and alerts
|
||||
* Generate executive compliance reports
|
||||
|
||||
## Step 1: Create Prowler Cloud Account
|
||||
|
||||
Sign up for Prowler Cloud:
|
||||
|
||||
1. Visit [https://cloud.prowler.com](https://cloud.prowler.com)
|
||||
2. Click "Start Free Trial"
|
||||
3. Choose authentication method:
|
||||
* Email/password
|
||||
* Google authentication
|
||||
* GitHub authentication
|
||||
* SSO (for enterprise plans)
|
||||
4. Verify email address
|
||||
5. Complete onboarding wizard
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 115 showing Prowler Cloud signup - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 2: Connect Your First Cloud Provider
|
||||
|
||||
**Connect AWS Account:**
|
||||
|
||||
1. Navigate to "Providers" in Prowler Cloud
|
||||
2. Click "Add Provider"
|
||||
3. Select "AWS"
|
||||
4. Choose connection method:
|
||||
* **CloudFormation Stack** (recommended)
|
||||
* **Manual IAM Role**
|
||||
5. Deploy CloudFormation template
|
||||
6. Copy Role ARN and External ID
|
||||
7. Test connection
|
||||
8. Click "Save"
|
||||
|
||||
**CloudFormation Stack Deployment:**
|
||||
```bash
|
||||
aws cloudformation create-stack \
|
||||
--stack-name prowler-integration \
|
||||
--template-url https://prowler-public.s3.amazonaws.com/prowler-role.yaml \
|
||||
--parameters ParameterKey=ExternalId,ParameterValue=<your-external-id> \
|
||||
--capabilities CAPABILITY_NAMED_IAM
|
||||
```
|
||||
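
Stack creation takes a minute or two; you can block until it completes and confirm the status before running the connection test in Prowler Cloud:

```bash
# Wait for the integration stack, then confirm it created successfully
aws cloudformation wait stack-create-complete --stack-name prowler-integration
aws cloudformation describe-stacks --stack-name prowler-integration \
  --query 'Stacks[0].StackStatus' --output text
```
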
|
||||
<Note>
|
||||
[Note: Screenshot of slide 118 showing provider connection - to be added]
|
||||
</Note>
|
||||
|
||||
<Tip>
|
||||
The CloudFormation template creates a read-only IAM role with the minimum permissions required for Prowler scans.
|
||||
</Tip>
|
||||
|
||||
## Step 3: Run Your First Cloud Scan
|
||||
|
||||
Initiate a security scan:
|
||||
|
||||
1. Go to "Scans" page
|
||||
2. Click "New Scan"
|
||||
3. Select provider(s) to scan
|
||||
4. Choose scan type:
|
||||
* **Quick Scan:** Essential security checks
|
||||
* **Full Scan:** Comprehensive assessment
|
||||
* **Compliance Scan:** Framework-specific validation
|
||||
5. Click "Start Scan"
|
||||
|
||||
Monitor scan progress:
|
||||
* Real-time progress indicator
|
||||
* Checks completed
|
||||
* Resources discovered
|
||||
* Findings identified
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 121 showing scan execution - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 4: Explore the Findings Dashboard
|
||||
|
||||
Navigate findings dashboard:
|
||||
|
||||
**Overview Statistics:**
|
||||
* Total findings by severity
|
||||
* Compliance score
|
||||
* Trend over time
|
||||
* Top affected services
|
||||
|
||||
**Filtering Options:**
|
||||
* Severity (Critical, High, Medium, Low)
|
||||
* Status (Open, In Progress, Resolved)
|
||||
* Cloud provider
|
||||
* Service
|
||||
* Compliance framework
|
||||
* Resource tags
|
||||
|
||||
**Finding Details:**
|
||||
* Detailed description
|
||||
* Affected resources
|
||||
* Risk assessment
|
||||
* Remediation steps
|
||||
* Related compliance requirements
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 124 showing findings dashboard - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 5: Use AI-Powered Security Insights
|
||||
|
||||
Leverage Prowler Lighthouse AI features:
|
||||
|
||||
**AI Security Assistant:**
|
||||
1. Click "Lighthouse" in navigation
|
||||
2. Ask questions about your security posture:
|
||||
* "What are my critical security risks?"
|
||||
* "Show me publicly exposed resources"
|
||||
* "How can I improve my compliance score?"
|
||||
* "What encryption issues exist?"
|
||||
|
||||
**AI Remediation Guidance:**
|
||||
* Select any finding
|
||||
* Click "AI Remediation"
|
||||
* Review generated remediation steps
|
||||
* Get customized code/CLI commands
|
||||
* Apply fixes with confidence
|
||||
|
||||
**AI Threat Analysis:**
|
||||
* Identifies attack patterns
|
||||
* Correlates related findings
|
||||
* Suggests priority order for remediation
|
||||
* Explains security impact
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 127 showing Lighthouse AI - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 6: Configure Team Collaboration
|
||||
|
||||
Set up team access and workflows:
|
||||
|
||||
**Invite Team Members:**
|
||||
1. Go to "Settings" → "Team"
|
||||
2. Click "Invite Member"
|
||||
3. Enter email address
|
||||
4. Assign role:
|
||||
* **Admin:** Full access
|
||||
* **Editor:** Scan and remediate
|
||||
* **Viewer:** Read-only access
|
||||
5. Send invitation
|
||||
|
||||
**Assign Findings:**
|
||||
1. Select findings
|
||||
2. Click "Assign"
|
||||
3. Choose team member
|
||||
4. Add due date
|
||||
5. Add comments/notes
|
||||
|
||||
**Workflow States:**
|
||||
* Open → New finding
|
||||
* In Progress → Being investigated/fixed
|
||||
* Resolved → Remediated
|
||||
* False Positive → Not applicable
|
||||
* Risk Accepted → Acknowledged but not fixed
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 130 showing team collaboration - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 7: Configure Continuous Monitoring
|
||||
|
||||
Set up automated scanning:
|
||||
|
||||
**Scheduled Scans:**
|
||||
1. Go to "Scans" → "Schedules"
|
||||
2. Click "Create Schedule"
|
||||
3. Configure:
|
||||
* Name: "Daily Security Scan"
|
||||
* Frequency: Daily, Weekly, or Custom cron
|
||||
* Time: 2:00 AM UTC
|
||||
* Providers: Select all
|
||||
* Notification preferences
|
||||
4. Save schedule
|
||||
|
||||
**Real-Time Monitoring:**
|
||||
* Enable CloudTrail integration
|
||||
* Receive alerts for security events
|
||||
* Detect configuration drift
|
||||
* Identify new resources
|
||||
|
||||
<Tip>
|
||||
Schedule scans during off-peak hours to minimize performance impact on your cloud APIs.
|
||||
</Tip>
|
||||
|
||||
## Step 8: Configure Alerts and Notifications
|
||||
|
||||
Set up security alerts:
|
||||
|
||||
**Alert Rules:**
|
||||
1. Navigate to "Alerts"
|
||||
2. Click "Create Alert Rule"
|
||||
3. Define conditions:
|
||||
* Finding severity ≥ High
|
||||
* Compliance score drops below 80%
|
||||
* New critical findings discovered
|
||||
* Public exposure detected
|
||||
4. Choose notification channels:
|
||||
* Email
|
||||
* Slack
|
||||
* Microsoft Teams
|
||||
* PagerDuty
|
||||
* Webhooks
|
||||
5. Save rule
|
||||
|
||||
**Slack Integration:**
|
||||
1. Go to "Integrations" → "Slack"
|
||||
2. Click "Connect to Slack"
|
||||
3. Authorize Prowler app
|
||||
4. Select channel for notifications
|
||||
5. Configure alert preferences
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 134 showing alert configuration - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 9: Generate Compliance Reports
|
||||
|
||||
Create compliance reports for auditors:
|
||||
|
||||
**Compliance Dashboard:**
|
||||
1. Navigate to "Compliance"
|
||||
2. View compliance scores by framework:
|
||||
* CIS Benchmarks
|
||||
* PCI-DSS
|
||||
* HIPAA
|
||||
* SOC2
|
||||
* ISO 27001
|
||||
3. Drill down into requirements
|
||||
4. View evidence for each control
|
||||
|
||||
**Export Reports:**
|
||||
1. Select compliance framework
|
||||
2. Click "Generate Report"
|
||||
3. Choose format:
|
||||
* PDF (executive summary)
|
||||
* Excel (detailed findings)
|
||||
* CSV (raw data)
|
||||
4. Schedule recurring reports:
|
||||
* Weekly status updates
|
||||
* Monthly compliance reports
|
||||
* Quarterly audit packages
|
||||
|
||||
**Report Customization:**
|
||||
* Add company logo
|
||||
* Include executive summary
|
||||
* Filter by business unit
|
||||
* Show remediation progress
|
||||
* Include trend analysis
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 137 showing compliance reports - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 10: Multi-Account and Multi-Cloud Management
|
||||
|
||||
Manage multiple cloud environments:
|
||||
|
||||
**Add Multiple Providers:**
|
||||
1. Connect AWS accounts (dev, staging, production)
|
||||
2. Connect Azure subscriptions
|
||||
3. Connect GCP projects
|
||||
4. Organize with tags/labels
|
||||
|
||||
**Provider Groups:**
|
||||
1. Create provider groups:
|
||||
* Production environments
|
||||
* Development environments
|
||||
* By business unit
|
||||
* By geographic region
|
||||
2. Run group-wide scans
|
||||
3. Generate consolidated reports
|
||||
|
||||
**Cross-Cloud Insights:**
|
||||
* Compare security posture across providers
|
||||
* Identify configuration inconsistencies
|
||||
* Standardize security policies
|
||||
* Track multi-cloud compliance
|
||||
|
||||
<Note>
|
||||
[Note: Screenshot of slide 140 showing multi-cloud management - to be added]
|
||||
</Note>
|
||||
|
||||
## Step 11: Advanced Features
|
||||
|
||||
Explore advanced Prowler Cloud capabilities:
|
||||
|
||||
**Custom Checks:**
|
||||
* Create organization-specific security policies
|
||||
* Define custom compliance requirements
|
||||
* Share with team
|
||||
|
||||
**API Access:**
|
||||
* Programmatic access to findings
|
||||
* Integrate with internal tools
|
||||
* Automate workflows
|
||||
|
||||
**RBAC (Role-Based Access Control):**
|
||||
* Fine-grained permissions
|
||||
* Provider-level access control
|
||||
* Audit logging
|
||||
|
||||
**Security Integrations:**
|
||||
* AWS Security Hub
|
||||
* Jira
|
||||
* ServiceNow
|
||||
* Splunk
|
||||
* Custom webhooks
|
||||
|
||||
## Verification Steps
|
||||
|
||||
Confirm successful lab completion:
|
||||
|
||||
1. Prowler Cloud account created
|
||||
2. Cloud provider(s) connected
|
||||
3. Security scan completed
|
||||
4. Findings dashboard explored
|
||||
5. AI insights leveraged
|
||||
6. Team collaboration configured
|
||||
7. Continuous monitoring set up
|
||||
8. Compliance reports generated
|
||||
|
||||
## Expected Outcomes
|
||||
|
||||
After completing this lab, you should:
|
||||
|
||||
* Understand Prowler Cloud platform capabilities
|
||||
* Be able to connect and scan cloud providers
|
||||
* Know how to use AI-powered insights
|
||||
* Have configured team collaboration
|
||||
* Be able to generate compliance reports
|
||||
* Have set up continuous monitoring
|
||||
|
||||
## Prowler Cloud vs. Prowler CLI
|
||||
|
||||
**Prowler Cloud Advantages:**
|
||||
* Managed infrastructure (no installation)
|
||||
* Web-based interface
|
||||
* Team collaboration features
|
||||
* AI-powered insights (Lighthouse)
|
||||
* Continuous monitoring
|
||||
* Historical trend analysis
|
||||
* Executive dashboards
|
||||
* Built-in integrations
|
||||
* Scheduled scans
|
||||
* Role-based access control
|
||||
|
||||
**Prowler CLI Advantages:**
|
||||
* Self-hosted (on-premises)
|
||||
* No data leaves your environment
|
||||
* Scriptable and automatable
|
||||
* Free and open source
|
||||
* Custom integrations
|
||||
* Offline scanning
|
||||
|
||||
<Tip>
|
||||
Many organizations use both: Prowler CLI for automated CI/CD pipelines and Prowler Cloud for centralized visibility and team collaboration.
|
||||
</Tip>
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
**Issue:** Cannot connect cloud provider
|
||||
* **Solution:** Verify IAM role permissions and trust relationship, check External ID
|
||||
|
||||
**Issue:** Scan fails or times out
|
||||
* **Solution:** Check provider credentials are valid, ensure APIs are not rate-limited
|
||||
|
||||
**Issue:** No findings appearing
|
||||
* **Solution:** Verify scan completed successfully, check filtering settings
|
||||
|
||||
**Issue:** Alert notifications not received
|
||||
* **Solution:** Verify integration configuration, check notification channel settings
|
||||
|
||||
## Workshop Completion
|
||||
|
||||
Congratulations on completing the Prowler Workshop! You have learned:
|
||||
|
||||
* Prowler CLI installation and basic usage
|
||||
* Threat detection techniques
|
||||
* Custom check development
|
||||
* Multi-cloud security (AWS, Azure, GCP)
|
||||
* Compliance automation
|
||||
* Security tool integrations
|
||||
* Prowler Cloud platform capabilities
|
||||
|
||||
## Next Steps
|
||||
|
||||
Continue your Prowler journey:
|
||||
|
||||
* Join the [Prowler Community](https://goto.prowler.com/slack)
|
||||
* Contribute to [Prowler Open Source](https://github.com/prowler-cloud/prowler)
|
||||
* Explore [Prowler Hub](https://hub.prowler.com) for checks and frameworks
|
||||
* Read the [Prowler Documentation](https://docs.prowler.com)
|
||||
* Follow [Prowler on Twitter](https://twitter.com/prowlercloud)
|
||||
* Subscribe to [Prowler YouTube](https://www.youtube.com/@prowlercloud)
|
||||
|
||||
## Additional Resources
|
||||
|
||||
* [Prowler Cloud Documentation](/getting-started/products/prowler-cloud)
|
||||
* [Prowler Cloud Pricing](/getting-started/products/prowler-cloud-pricing)
|
||||
* [AWS Marketplace Listing](/getting-started/products/prowler-cloud-aws-marketplace)
|
||||
* [Prowler API Reference](/getting-started/goto/prowler-api-reference)
|
||||
* [Prowler Lighthouse AI](/user-guide/tutorials/prowler-app-lighthouse)
|
||||
@@ -1,3 +1,3 @@
|
||||
PROWLER_APP_API_KEY="pk_your_api_key_here"
|
||||
PROWLER_API_BASE_URL="https://api.prowler.com"
|
||||
PROWLER_MCP_MODE="stdio"
|
||||
PROWLER_MCP_TRANSPORT_MODE="stdio"
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
All notable changes to the **Prowler MCP Server** are documented in this file.
|
||||
|
||||
## [0.1.0] (Prowler UNRELEASED)
|
||||
## [0.1.0] (Prowler 5.13.0)
|
||||
|
||||
### Added
|
||||
- Initial release of Prowler MCP Server [(#8695)](https://github.com/prowler-cloud/prowler/pull/8695)
|
||||
@@ -13,4 +13,5 @@ All notable changes to the **Prowler MCP Server** are documented in this file.
|
||||
- Add new MCP Server for Prowler Documentation [(#8795)](https://github.com/prowler-cloud/prowler/pull/8795)
|
||||
- API key support for STDIO mode and enhanced HTTP mode authentication [(#8823)](https://github.com/prowler-cloud/prowler/pull/8823)
|
||||
- Add health check endpoint [(#8905)](https://github.com/prowler-cloud/prowler/pull/8905)
|
||||
- Update Prowler Documentation MCP Server to use Mintlify API [(#8915)](https://github.com/prowler-cloud/prowler/pull/8915)
|
||||
- Update Prowler Documentation MCP Server to use Mintlify API [(#8916)](https://github.com/prowler-cloud/prowler/pull/8916)
|
||||
- Add custom production deployment using uvicorn [(#8958)](https://github.com/prowler-cloud/prowler/pull/8958)
|
||||
|
||||
@@ -47,13 +47,12 @@ COPY --from=builder --chown=prowler /app/prowler_mcp_server /app/prowler_mcp_ser
|
||||
# 3. Project metadata file (may be needed by some packages at runtime)
|
||||
COPY --from=builder --chown=prowler /app/pyproject.toml /app/pyproject.toml
|
||||
|
||||
# 4. Entrypoint helper script for selecting runtime mode
|
||||
COPY --from=builder --chown=prowler /app/entrypoint.sh /app/entrypoint.sh
|
||||
|
||||
# Add virtual environment to PATH so prowler-mcp command is available
|
||||
ENV PATH="/app/.venv/bin:$PATH"
|
||||
|
||||
# Entry point for the MCP server
|
||||
# Default to stdio mode, but allow overriding via command arguments
|
||||
# Examples:
|
||||
# docker run -p 8000:8000 prowler-mcp --transport http --host 0.0.0.0 --port 8000
|
||||
# docker run prowler-mcp --transport stdio
|
||||
ENTRYPOINT ["prowler-mcp"]
|
||||
CMD ["--transport", "stdio"]
|
||||
# Entrypoint wrapper defaults to CLI mode; override with `uvicorn` to run ASGI app
|
||||
ENTRYPOINT ["/app/entrypoint.sh"]
|
||||
CMD ["main"]
|
||||
|
||||
+17
-3
@@ -144,11 +144,11 @@ uv run prowler-mcp --transport http
|
||||
uv run prowler-mcp --transport http --host 0.0.0.0 --port 8080
|
||||
```
|
||||
|
||||
For a self-deployed MCP remote server, you can also configure the server to use a custom API base URL with the environment variable `PROWLER_API_BASE_URL`, and the transport mode with the environment variable `PROWLER_MCP_MODE`.
|
||||
For a self-deployed MCP remote server, you can also configure the server to use a custom API base URL with the environment variable `PROWLER_API_BASE_URL`, and the transport mode with the environment variable `PROWLER_MCP_TRANSPORT_MODE`.
|
||||
|
||||
```bash
|
||||
export PROWLER_API_BASE_URL="https://api.prowler.com"
|
||||
export PROWLER_MCP_MODE="http"
|
||||
export PROWLER_MCP_TRANSPORT_MODE="http"
|
||||
```
|
||||
|
||||
### Using uv directly
|
||||
@@ -190,6 +190,16 @@ docker run --rm --env-file ./.env -p 8000:8000 -it prowler-mcp --transport http
|
||||
docker run --rm --env-file ./.env -p 8080:8080 -it prowler-mcp --transport http --host 0.0.0.0 --port 8080
|
||||
```
|
||||
|
||||
## Production Deployment
|
||||
|
||||
For production deployments that require customization, use the ASGI application exposed in `prowler_mcp_server.server`, which can be run with uvicorn:
|
||||
|
||||
```bash
|
||||
uvicorn prowler_mcp_server.server:app --host 0.0.0.0 --port 8000
|
||||
```
|
||||
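
Standard uvicorn settings apply here, so a production invocation can, for example, add workers and proxy-header support:

```bash
uvicorn prowler_mcp_server.server:app --host 0.0.0.0 --port 8000 \
  --workers 4 --proxy-headers
```
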
|
||||
For more details on production deployment options, see the [FastMCP production deployment guide](https://gofastmcp.com/deployment/http#production-deployment) and [uvicorn settings](https://www.uvicorn.org/settings/).
|
||||
|
||||
## Command Line Arguments
|
||||
|
||||
The Prowler MCP server supports the following command line arguments:
|
||||
@@ -482,6 +492,10 @@ If you want to have it globally available, add the example server to Cursor's co
|
||||
|
||||
If you want to have it only for the current project, add the example server to the project's root in a new `.cursor/mcp.json` file.
|
||||
|
||||
## Documentation
|
||||
|
||||
For detailed documentation about the Prowler MCP Server, including guides, tutorials, and use cases, visit the [official Prowler documentation](https://docs.prowler.com).
|
||||
|
||||
## License
|
||||
|
||||
This project follows the repository’s main license. See the [LICENSE](../LICENSE) file at the repository root.
|
||||
This project follows the repository's main license. See the [LICENSE](../LICENSE) file at the repository root.
|
||||
|
||||
Executable
+50
@@ -0,0 +1,50 @@
|
||||
#!/bin/sh
|
||||
set -eu
|
||||
|
||||
usage() {
|
||||
cat <<'EOF'
|
||||
Usage: ./entrypoint.sh [main|uvicorn] [args...]
|
||||
|
||||
Modes:
|
||||
main (default) Run prowler-mcp
|
||||
uvicorn Run uvicorn prowler_mcp_server.server:app
|
||||
|
||||
All additional arguments are forwarded to the selected command.
|
||||
EOF
|
||||
}
|
||||
|
||||
mode="main"
|
||||
|
||||
if [ "$#" -gt 0 ]; then
|
||||
case "$1" in
|
||||
main|cli)
|
||||
mode="main"
|
||||
shift
|
||||
;;
|
||||
uvicorn|asgi)
|
||||
mode="uvicorn"
|
||||
shift
|
||||
;;
|
||||
-h|--help)
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
mode="main"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
case "$mode" in
|
||||
main)
|
||||
exec prowler-mcp "$@"
|
||||
;;
|
||||
uvicorn)
|
||||
export PROWLER_MCP_TRANSPORT_MODE="http"
|
||||
exec uvicorn prowler_mcp_server.server:app "$@"
|
||||
;;
|
||||
*)
|
||||
usage
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
@@ -1,10 +1,8 @@
 import argparse
-import asyncio
 import os
 import sys

 from prowler_mcp_server.lib.logger import logger
-from prowler_mcp_server.server import setup_main_server


 def parse_arguments():
@@ -13,7 +11,7 @@ def parse_arguments():
     parser.add_argument(
         "--transport",
         choices=["stdio", "http"],
-        default=os.getenv("PROWLER_MCP_MODE", "stdio"),
+        default=None,
         help="Transport method (default: stdio)",
     )
     parser.add_argument(
@@ -35,13 +33,26 @@ def main():
     try:
         args = parse_arguments()

-        # Set up server with configuration
-        prowler_mcp_server = asyncio.run(setup_main_server(transport=args.transport))
+        print(f"args.transport: {args.transport}")
+
+        if args.transport is None:
+            args.transport = os.getenv("PROWLER_MCP_TRANSPORT_MODE", "stdio")
+        else:
+            os.environ["PROWLER_MCP_TRANSPORT_MODE"] = args.transport
+
+        from prowler_mcp_server.server import prowler_mcp_server

         if args.transport == "stdio":
-            prowler_mcp_server.run(transport="stdio")
+            prowler_mcp_server.run(transport=args.transport, show_banner=False)
         elif args.transport == "http":
-            prowler_mcp_server.run(transport="http", host=args.host, port=args.port)
+            prowler_mcp_server.run(
+                transport=args.transport,
+                host=args.host,
+                port=args.port,
+                show_banner=False,
+            )
         else:
             logger.error(f"Invalid transport: {args.transport}")

     except KeyboardInterrupt:
         logger.info("Shutting down Prowler MCP server...")
@@ -14,7 +14,7 @@ class ProwlerAppAuth:

     def __init__(
         self,
-        mode: str = os.getenv("PROWLER_MCP_MODE", "stdio"),
+        mode: str = os.getenv("PROWLER_MCP_TRANSPORT_MODE", "stdio"),
         base_url: str = os.getenv("PROWLER_API_BASE_URL", "https://api.prowler.com"),
     ):
         self.base_url = base_url.rstrip("/")
@@ -33,7 +33,14 @@ class ProwlerAppAuth:
         raise ValueError("Prowler App API key format is incorrect")

     def _parse_jwt(self, token: str) -> Optional[Dict]:
-        """Parse JWT token and return payload, similar to JS parseJwt function."""
+        """Parse JWT token and return payload
+
+        Args:
+            token: JWT token to parse
+
+        Returns:
+            Parsed JWT payload, or None if parsing fails
+        """
         if not token:
             return None
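
The rest of `_parse_jwt` falls outside this hunk; for orientation, a typical unverified payload decode looks roughly like this (an illustrative sketch, not necessarily Prowler's exact implementation):

```python
import base64
import json
from typing import Dict, Optional


def parse_jwt_payload(token: str) -> Optional[Dict]:
    """Decode the payload segment of a JWT without verifying its signature."""
    try:
        payload_b64 = token.split(".")[1]
        # Restore the base64 padding that JWTs strip off
        payload_b64 += "=" * (-len(payload_b64) % 4)
        return json.loads(base64.urlsafe_b64decode(payload_b64))
    except (IndexError, ValueError):
        return None
```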

@@ -1,16 +1,16 @@
 import asyncio
 import os

 from fastmcp import FastMCP
+from prowler_mcp_server import __version__
 from prowler_mcp_server.lib.logger import logger
 from starlette.responses import JSONResponse

+prowler_mcp_server = FastMCP("prowler-mcp-server")
+

-async def setup_main_server(transport: str) -> FastMCP:
+async def setup_main_server():
     """Set up the main Prowler MCP server with all available integrations."""

-    # Initialize main Prowler MCP server
-    prowler_mcp_server = FastMCP("prowler-mcp-server")
-
     # Import Prowler Hub tools with prowler_hub_ prefix
     try:
         logger.info("Importing Prowler Hub server...")
@@ -21,12 +21,10 @@ async def setup_main_server(transport: str) -> FastMCP:
     except Exception as e:
         logger.error(f"Failed to import Prowler Hub server: {e}")

     # Import Prowler App tools with prowler_app_ prefix
     try:
         logger.info("Importing Prowler App server...")

-        if os.getenv("PROWLER_MCP_MODE", None) is None:
-            os.environ["PROWLER_MCP_MODE"] = transport
-
         if not os.path.exists(
             os.path.join(os.path.dirname(__file__), "prowler_app", "server.py")
         ):
@@ -44,6 +42,7 @@ async def setup_main_server(transport: str) -> FastMCP:
     except Exception as e:
         logger.error(f"Failed to import Prowler App server: {e}")

+    # Import Prowler Documentation tools with prowler_docs_ prefix
     try:
         logger.info("Importing Prowler Documentation server...")
         from prowler_mcp_server.prowler_documentation.server import docs_mcp_server
@@ -53,9 +52,23 @@ async def setup_main_server(transport: str) -> FastMCP:
     except Exception as e:
         logger.error(f"Failed to import Prowler Documentation server: {e}")

-    # Add health check endpoint
-    @prowler_mcp_server.custom_route("/health", methods=["GET"])
-    async def health_check(request):
-        return JSONResponse({"status": "healthy", "service": "prowler-mcp-server"})
-
-    return prowler_mcp_server

+# Add health check endpoint
+@prowler_mcp_server.custom_route("/health", methods=["GET"])
+async def health_check(request) -> JSONResponse:
+    """Health check endpoint."""
+    return JSONResponse(
+        {"status": "healthy", "service": "prowler-mcp-server", "version": __version__}
+    )
+
+
+# Get or create the event loop
+try:
+    loop = asyncio.get_running_loop()
+    # If we have a running loop, schedule the setup as a task
+    loop.create_task(setup_main_server())
+except RuntimeError:
+    # No running loop, use asyncio.run (for standalone execution)
+    asyncio.run(setup_main_server())
+
+app = prowler_mcp_server.http_app()
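
Once the HTTP app is served (for example via the entrypoint script above), the new endpoint can be smoke-tested with a plain HTTP request; the port is illustrative:

```bash
curl http://localhost:8000/health
# {"status": "healthy", "service": "prowler-mcp-server", "version": "..."}
```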

@@ -0,0 +1,366 @@
# Prowler SDK Agent Guide

**Complete guide for AI agents and developers working on the Prowler SDK - the core Python security scanning engine.**

## Project Overview

The Prowler SDK is the core Python engine that powers Prowler's cloud security assessment capabilities. It provides:

- **Multi-cloud Security Scanning**: AWS, Azure, GCP, Kubernetes, GitHub, M365, Oracle Cloud, MongoDB Atlas, and more
- **Compliance Frameworks**: 30+ frameworks including CIS, NIST, PCI-DSS, SOC2, GDPR
- **1000+ Security Checks**: Comprehensive coverage across all supported providers
- **Multiple Output Formats**: JSON, CSV, HTML, ASFF, OCSF, and compliance-specific formats

## Mission & Scope

- Maintain and enhance the core Prowler SDK functionality with security and stability as top priorities
- Follow best practices for Python patterns, code style, security, and comprehensive testing
- For more information about development guidelines, refer to the Prowler Developer Guide in `docs/developer-guide/`

---

## Architecture Rules

### 1. Provider Architecture Pattern

All Prowler providers MUST follow the established pattern:

```
prowler/providers/{provider}/
├── {provider}_provider.py       # Main provider class
├── models.py                    # Provider-specific models
├── config.py                    # Provider configuration
├── exceptions/                  # Provider-specific exceptions
├── lib/                         # Provider libraries (at a minimum it must implement the following folders: service, arguments, mutelist)
│   ├── service/                 # Provider-specific service class to be inherited by all services of the provider
│   ├── arguments/               # Provider-specific CLI arguments parser
│   └── mutelist/                # Provider-specific mutelist functionality
└── services/                    # All provider services to be audited
    ├── {service}/               # Individual service
    │   ├── {service}_service.py # Class to fetch the needed resources from the API and store them to be used by the checks
    │   ├── {service}_client.py  # Python instance of the service class to be used by the checks
    │   ├── {check_name}/        # Individual check folder
    │   │   ├── {check_name}.py  # Python class to implement the check logic
    │   │   └── {check_name}.metadata.json # JSON file to store the check metadata
    │   └── {check_name_2}/      # Other checks can be added to the same service folder
    │       ├── {check_name_2}.py
    │       └── {check_name_2}.metadata.json
    └── {service_2}/             # Other services can be added to the same provider folder
        ...
```

### 2. Check Implementation Standards

Every security check MUST implement:

```python
from prowler.lib.check.models import Check, CheckReport<Provider>
from prowler.providers.<provider>.services.<service>.<service>_client import <service>_client


class check_name(Check):
    """Ensure that <resource> meets <security_requirement>."""

    def execute(self) -> list[CheckReport<Provider>]:
        """Execute the check logic.

        Returns:
            A list of reports containing the result of the check.
        """
        findings = []
        # Check implementation here
        for resource in <service>_client.<resources>:
            # Security validation logic
            report = CheckReport<Provider>(metadata=self.metadata(), resource=resource)
            report.status = "PASS"  # or "FAIL"
            report.status_extended = "Detailed explanation"
            findings.append(report)  # Add the report to the list of findings
        return findings
```

### 3. Compliance Framework Integration

All compliance frameworks must be defined in:

- `prowler/compliance/{provider}/{framework}.json`
- Follow the established Compliance model structure
- Include proper requirement mappings and metadata

---

## Tech Stack

- **Language**: Python 3.9+
- **Dependency Management**: Poetry 2+
- **CLI Framework**: Custom argument parser with provider-specific subcommands
- **Testing**: Pytest with extensive unit and integration tests
- **Code Quality**: Pre-commit hooks running Black, Flake8, Pylint, and Bandit for security scanning

## Commands

### Development Environment

```bash
# Core development setup
poetry install --with dev        # Install all dependencies
poetry run pre-commit install    # Install pre-commit hooks

# Code quality
poetry run pre-commit run --all-files

# Run tests
poetry run pytest -n auto -vvv -s -x tests/
```

### Running Prowler CLI

```bash
# Run Prowler
poetry run python prowler-cli.py --help

# Run Prowler with a specific provider
poetry run python prowler-cli.py <provider>

# Run Prowler with error logging
poetry run python prowler-cli.py <provider> --log-level ERROR --verbose

# Run specific checks
poetry run python prowler-cli.py <provider> --checks <check_name_1> <check_name_2>
```

## Project Structure

```
prowler/
├── __main__.py              # Main CLI entry point
├── config/                  # Global configuration
│   ├── config.py            # Core configuration settings
│   └── __init__.py
├── lib/                     # Core library functions
│   ├── check/               # Check execution engine
│   │   ├── check.py         # Check execution logic
│   │   ├── checks_loader.py # Dynamic check loading
│   │   ├── compliance.py    # Compliance framework handling
│   │   └── models.py        # Check and report models
│   ├── cli/                 # Command-line interface
│   │   └── parser.py        # Argument parsing
│   ├── outputs/             # Output format handlers
│   │   ├── csv/             # CSV output
│   │   ├── html/            # HTML reports
│   │   ├── json/            # JSON formats
│   │   └── compliance/      # Compliance reports
│   ├── scan/                # Scan orchestration
│   ├── utils/               # Utility functions
│   └── mutelist/            # Mute list functionality
├── providers/               # Cloud provider implementations
│   ├── aws/                 # AWS provider
│   ├── azure/               # Azure provider
│   ├── gcp/                 # Google Cloud provider
│   ├── kubernetes/          # Kubernetes provider
│   ├── github/              # GitHub provider
│   ├── m365/                # Microsoft 365 provider
│   ├── mongodbatlas/        # MongoDB Atlas provider
│   ├── oci/                 # Oracle Cloud provider
│   ├── ...
│   └── common/              # Shared provider utilities
├── compliance/              # Compliance framework definitions
│   ├── aws/                 # AWS compliance frameworks
│   ├── azure/               # Azure compliance frameworks
│   ├── gcp/                 # GCP compliance frameworks
│   └── ...
└── exceptions/              # Global exception definitions
```
## Key Components

### 1. Provider System

Each cloud provider implements:

```python
class Provider:
    """Base provider class"""

    def __init__(self, arguments):
        self.session = self._setup_session(arguments)
        self.regions = self._get_regions()
        # Initialize all services

    def _setup_session(self, arguments):
        """Provider-specific authentication"""
        pass

    def _get_regions(self):
        """Get available regions for provider"""
        pass
```

### 2. Check Engine

The check execution system provides:

- **Dynamic Loading**: Automatically discovers and loads checks
- **Parallel Execution**: Runs checks in parallel for performance
- **Error Isolation**: Individual check failures don't affect others
- **Comprehensive Reporting**: Detailed findings with remediation guidance

A simplified loading sketch is shown below.
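
This is a minimal sketch of the dynamic-loading idea, assuming the module/class naming convention from the architecture tree above; it is not Prowler's exact `checks_loader.py` implementation:

```python
import importlib


def load_check(provider: str, service: str, check_name: str):
    """Import a check module by convention and return its check class."""
    module_path = (
        f"prowler.providers.{provider}.services.{service}"
        f".{check_name}.{check_name}"
    )
    module = importlib.import_module(module_path)
    # By convention, the class inside the module is named after the check
    return getattr(module, check_name)


# Hypothetical usage (check name is illustrative):
# check_cls = load_check("aws", "s3", "s3_bucket_default_encryption")
# findings = check_cls().execute()
```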

### 3. Compliance Framework Engine

Compliance frameworks are defined as JSON files mapping checks to requirements:

```json
{
  "Framework": "CIS",
  "Name": "CIS Amazon Web Services Foundations Benchmark v2.0.0",
  "Version": "2.0",
  "Provider": "AWS",
  "Description": "The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Amazon Web Services with an emphasis on foundational, testable, and architecture agnostic settings.",
  "Requirements": [
    {
      "Id": "1.1",
      "Description": "Maintain current contact details",
      "Checks": ["account_contact_details_configured"]
    }
  ]
}
```

### 4. Output System

Multiple output formats are supported:

- **JSON**: Machine-readable findings
- **CSV**: Spreadsheet-compatible format
- **HTML**: Interactive web reports
- **ASFF**: AWS Security Finding Format
- **OCSF**: Open Cybersecurity Schema Framework

## Development Patterns

### Adding New Cloud Providers

1. **Create Provider Structure**:
   ```bash
   mkdir -p prowler/providers/{provider}
   mkdir -p prowler/providers/{provider}/services
   mkdir -p prowler/providers/{provider}/lib/{service,arguments,mutelist}
   mkdir -p prowler/providers/{provider}/exceptions
   ```

2. **Implement Provider Class**:
   ```python
   from prowler.providers.common.provider import Provider

   class NewProvider(Provider):
       def __init__(self, arguments):
           super().__init__(arguments)
           # Provider-specific initialization
   ```

3. **Add Provider to CLI**:
   Update `prowler/lib/cli/parser.py` to include the new provider's arguments; a hedged sketch follows.
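
A rough illustration of wiring a provider sub-command into an argparse-style parser (names and structure are illustrative, not the actual Prowler parser API):

```python
import argparse

parser = argparse.ArgumentParser(prog="prowler")
subparsers = parser.add_subparsers(dest="provider")

# Register the new provider as its own sub-command (illustrative)
newprovider_parser = subparsers.add_parser(
    "newprovider", help="Audit resources in NewProvider"
)
newprovider_parser.add_argument(
    "--region",
    nargs="+",
    help="NewProvider region(s) to scan",
)

args = parser.parse_args(["newprovider", "--region", "region-1"])
print(args.provider, args.region)
```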

### Adding New Security Checks

The most common high-level steps to create a new check are:

1. Prerequisites:
   - Verify the check does not already exist by searching in the corresponding service folder, i.e. `prowler/providers/<provider>/services/<service>/<check_name_want_to_implement>/`.
   - Ensure the required provider and service exist. If not, you will need to create them first.
   - Confirm the service has implemented all required methods and attributes for the check (in most cases, you will need to add or modify some methods in the service to get the data you need for the check).
2. Navigate to the service directory. The path should be as follows: `prowler/providers/<provider>/services/<service>`.
3. Create a check-specific folder. The path should follow this pattern: `prowler/providers/<provider>/services/<service>/<check_name_want_to_implement>`. Adhere to the [Naming Format for Checks](/developer-guide/checks#naming-format-for-checks).
4. Create the check files; you can use the following commands (a minimal metadata sketch is shown after this list):
   ```bash
   mkdir -p prowler/providers/<provider>/services/<service>/<check_name_want_to_implement>
   touch prowler/providers/<provider>/services/<service>/<check_name_want_to_implement>/__init__.py
   touch prowler/providers/<provider>/services/<service>/<check_name_want_to_implement>/<check_name_want_to_implement>.py
   touch prowler/providers/<provider>/services/<service>/<check_name_want_to_implement>/<check_name_want_to_implement>.metadata.json
   ```
5. Run the check locally to ensure it works as expected. You can use the CLI as follows:
   - To verify that Prowler has detected the check: `poetry run python prowler-cli.py <provider> --list-checks | grep <check_name>`.
   - To run the check and surface possible issues: `poetry run python prowler-cli.py <provider> --log-level ERROR --verbose --check <check_name>`.
6. Create comprehensive tests for the check that cover multiple scenarios, including both PASS (compliant) and FAIL (non-compliant) cases. For detailed information about test structure and implementation guidelines, refer to the [Testing](/developer-guide/unit-testing) documentation.
7. If the check and its corresponding tests are working as expected, you can submit a PR to Prowler.
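
As referenced in step 4, here is a minimal, illustrative sketch of a check metadata file; the real schema has more required fields, so copy an existing check's `.metadata.json` as your starting point:

```json
{
  "Provider": "<provider>",
  "CheckID": "<check_name_want_to_implement>",
  "CheckTitle": "Ensure that <resource> meets <security_requirement>",
  "ServiceName": "<service>",
  "Severity": "medium",
  "ResourceType": "Other",
  "Description": "What the check validates.",
  "Risk": "What can happen when the check fails.",
  "Remediation": {
    "Recommendation": {
      "Text": "How to fix the finding.",
      "Url": ""
    }
  }
}
```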

### Adding Compliance Frameworks

1. **Create Framework File**:
   ```bash
   # Create prowler/compliance/{provider}/{framework}.json
   ```

2. **Define Requirements**:
   Map framework requirements to existing checks.

3. **Test Compliance**:
   ```bash
   poetry run python -m prowler {provider} --compliance {framework}
   ```

## Code Quality Standards

### 1. Python Style

- **PEP 8 Compliance**: Enforced by Black and Flake8
- **Type Hints**: Required for all public functions
- **Docstrings**: Required for all classes and methods
- **Import Organization**: Use isort for consistent import ordering

```python
import standard_library

from third_party import library

from prowler.lib import internal_module


class ExampleClass:
    """Class docstring."""

    def method(self, param: str) -> dict | list | None:
        """Method docstring.

        Args:
            param: Description of parameter

        Returns:
            Description of return value
        """
        return None
```

### 2. Error Handling

```python
from prowler.lib.logger import logger

try:
    # Risky operation
    result = api_call()
except ProviderSpecificException as e:
    logger.error(f"Provider error: {e}")
    # Graceful handling
except Exception as e:
    logger.error(f"Unexpected error: {e}")
    # Never let checks crash the entire scan
```

### 3. Security Practices

- **No Hardcoded Secrets**: Use environment variables or secure credential management
- **Input Validation**: Validate all external inputs
- **Principle of Least Privilege**: Request minimal necessary permissions
- **Secure Defaults**: Default to secure configurations

## Testing Guidelines

### Unit Tests

- **100% Coverage Goal**: Aim for complete test coverage
- **Mock External Services**: Use mock objects to simulate external services (see the sketch below)
- **Test Edge Cases**: Include error conditions and boundary cases
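
A minimal sketch of the mocking pattern, with hypothetical module paths and check names (they do not point to actual Prowler modules):

```python
from unittest import mock


def test_check_passes_with_no_resources():
    # Fake service client exposing the attribute the check iterates over
    mocked_client = mock.MagicMock()
    mocked_client.resources = []  # no resources -> no findings

    # Patch the module-level client the check imports (hypothetical path)
    with mock.patch(
        "prowler.providers.aws.services.myservice.myservice_client.myservice_client",
        new=mocked_client,
    ):
        from prowler.providers.aws.services.myservice.myservice_resource_secure.myservice_resource_secure import (
            myservice_resource_secure,
        )

        findings = myservice_resource_secure().execute()
        assert findings == []
```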

## References

- **Root Project Guide**: `../AGENTS.md` (takes priority for cross-component guidance)
- **Provider Examples**: Reference existing providers for implementation patterns
- **Check Examples**: Study existing checks for proper implementation patterns
- **Compliance Framework Examples**: Review existing frameworks for structure
+23 -7
@@ -2,7 +2,23 @@

 All notable changes to the **Prowler SDK** are documented in this file.

-## [v5.13.0] (Prowler UNRELEASED)
+## [v5.14.0] (Prowler UNRELEASED)

 ### Added
 - GitHub provider check `organization_default_repository_permission_strict` [(#8785)](https://github.com/prowler-cloud/prowler/pull/8785)
 - Update AWS Direct Connect service metadata to new format [(#8855)](https://github.com/prowler-cloud/prowler/pull/8855)
+
+---
+
+## [v5.13.1] (Prowler UNRELEASED)
+
+### Fixed
+- Add `resource_name` for checks under `logging` for the GCP provider [(#9023)](https://github.com/prowler-cloud/prowler/pull/9023)
+
+---
+
+## [v5.13.0] (Prowler v5.13.0)

 ### Added
 - Support for AdditionalURLs in outputs [(#8651)](https://github.com/prowler-cloud/prowler/pull/8651)
@@ -17,6 +33,8 @@ All notable changes to the **Prowler SDK** are documented in this file.
 - Oracle Cloud provider with CIS 3.0 benchmark [(#8893)](https://github.com/prowler-cloud/prowler/pull/8893)
 - Support for Atlassian Document Format (ADF) in Jira integration [(#8878)](https://github.com/prowler-cloud/prowler/pull/8878)
 - Add Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
+- Improve Provider documentation guide [(#8430)](https://github.com/prowler-cloud/prowler/pull/8430)
+- `cloudstorage_bucket_lifecycle_management_enabled` check for GCP provider [(#8936)](https://github.com/prowler-cloud/prowler/pull/8936)

 ### Changed
@@ -32,10 +50,13 @@ All notable changes to the **Prowler SDK** are documented in this file.
 - Update AWS AppStream service metadata to new format [(#8789)](https://github.com/prowler-cloud/prowler/pull/8789)
 - Update AWS API Gateway service metadata to new format [(#8788)](https://github.com/prowler-cloud/prowler/pull/8788)
 - Update AWS Athena service metadata to new format [(#8790)](https://github.com/prowler-cloud/prowler/pull/8790)
 - Update AWS CloudTrail service metadata to new format [(#8831)](https://github.com/prowler-cloud/prowler/pull/8831)
 - Update AWS Auto Scaling service metadata to new format [(#8824)](https://github.com/prowler-cloud/prowler/pull/8824)
 - Update AWS Backup service metadata to new format [(#8826)](https://github.com/prowler-cloud/prowler/pull/8826)
 - Update AWS CloudFormation service metadata to new format [(#8828)](https://github.com/prowler-cloud/prowler/pull/8828)
 - Update AWS Lambda service metadata to new format [(#8825)](https://github.com/prowler-cloud/prowler/pull/8825)
 - Update AWS DLM service metadata to new format [(#8860)](https://github.com/prowler-cloud/prowler/pull/8860)
 - Update AWS DMS service metadata to new format [(#8861)](https://github.com/prowler-cloud/prowler/pull/8861)
 - Update AWS Directory Service service metadata to new format [(#8859)](https://github.com/prowler-cloud/prowler/pull/8859)
 - Update AWS CloudFront service metadata to new format [(#8829)](https://github.com/prowler-cloud/prowler/pull/8829)
 - Deprecate user authentication for M365 provider [(#8865)](https://github.com/prowler-cloud/prowler/pull/8865)
@@ -47,13 +68,8 @@ All notable changes to the **Prowler SDK** are documented in this file.
 - Prowler ThreatScore scoring calculation CLI [(#8582)](https://github.com/prowler-cloud/prowler/pull/8582)
 - Add missing attributes for Mitre Attack AWS, Azure and GCP [(#8907)](https://github.com/prowler-cloud/prowler/pull/8907)
 - Fix KeyError in CloudSQL and Monitoring services in GCP provider [(#8909)](https://github.com/prowler-cloud/prowler/pull/8909)
 - Fix Value Errors in Entra service for M365 provider [(#8919)](https://github.com/prowler-cloud/prowler/pull/8919)
 - Fix ResourceName in GCP provider [(#8928)](https://github.com/prowler-cloud/prowler/pull/8928)

 ---

 ## [v5.12.4] (Prowler UNRELEASED)

 ### Fixed
 - Fix KeyError in `elb_ssl_listeners_use_acm_certificate` check and handle None cluster version in `eks_cluster_uses_a_supported_version` check [(#8791)](https://github.com/prowler-cloud/prowler/pull/8791)
 - Fix file extension parsing for compliance reports [(#8791)](https://github.com/prowler-cloud/prowler/pull/8791)
 - Added user pagination to Entra and Admincenter services [(#8858)](https://github.com/prowler-cloud/prowler/pull/8858)
+394 -2360 File diff suppressed because it is too large
+396 -2393 File diff suppressed because it is too large
+392 -2374 File diff suppressed because it is too large
@@ -753,7 +753,9 @@
         {
             "Id": "1.3.8",
             "Description": "Base permissions define the permission level automatically granted to all organization members. Define strict base access permissions for all of the repositories in the organization, including new ones.",
-            "Checks": [],
+            "Checks": [
+                "organization_default_repository_permission_strict"
+            ],
             "Attributes": [
                 {
                     "Section": "1 Source Code",
@@ -12,7 +12,7 @@ from prowler.lib.logger import logger

 timestamp = datetime.today()
 timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
-prowler_version = "5.13.0"
+prowler_version = "5.14.0"
 html_logo_url = "https://github.com/prowler-cloud/prowler/"
 square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
 aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -199,6 +199,7 @@
             "aws": [
                 "ap-south-1",
                 "ap-southeast-2",
+                "ca-central-1",
                 "eu-west-1",
                 "eu-west-2",
                 "us-east-1",
@@ -819,18 +820,6 @@
                 "aws-us-gov": []
             }
         },
-        "apptest": {
-            "regions": {
-                "aws": [
-                    "ap-southeast-2",
-                    "eu-central-1",
-                    "sa-east-1",
-                    "us-east-1"
-                ],
-                "aws-cn": [],
-                "aws-us-gov": []
-            }
-        },
         "aps": {
             "regions": {
                 "aws": [
@@ -1223,6 +1212,7 @@
         "b2bi": {
             "regions": {
                 "aws": [
+                    "eu-west-1",
                     "us-east-1",
                     "us-east-2",
                     "us-west-2"
@@ -1574,6 +1564,7 @@
                 "ap-southeast-2",
                 "ap-southeast-3",
                 "ap-southeast-4",
+                "ap-southeast-5",
                 "ca-central-1",
                 "ca-west-1",
                 "eu-central-1",
@@ -2952,6 +2943,7 @@
                 "ap-southeast-3",
                 "ap-southeast-4",
                 "ap-southeast-5",
+                "ap-southeast-6",
                 "ap-southeast-7",
                 "ca-central-1",
                 "ca-west-1",
@@ -3618,6 +3610,7 @@
                 "ap-northeast-1",
                 "ap-northeast-2",
                 "ap-northeast-3",
+                "eu-central-1",
                 "eu-west-1",
                 "eu-west-2",
                 "eu-west-3",
@@ -5194,6 +5187,7 @@
             "aws": [
                 "af-south-1",
                 "ap-east-1",
+                "ap-east-2",
                 "ap-northeast-1",
                 "ap-northeast-2",
                 "ap-northeast-3",
@@ -5204,6 +5198,7 @@
                 "ap-southeast-3",
                 "ap-southeast-4",
                 "ap-southeast-5",
+                "ap-southeast-7",
                 "ca-central-1",
                 "ca-west-1",
                 "eu-central-1",
@@ -7131,6 +7126,7 @@
                 "ap-southeast-1",
                 "ap-southeast-2",
                 "ap-southeast-3",
+                "ap-southeast-5",
                 "ca-central-1",
                 "eu-central-1",
                 "eu-north-1",
@@ -7718,6 +7714,7 @@
                 "ap-southeast-3",
                 "ap-southeast-4",
                 "ap-southeast-5",
+                "ap-southeast-6",
                 "ap-southeast-7",
                 "ca-central-1",
                 "ca-west-1",
@@ -8395,6 +8392,7 @@
         "payment-cryptography": {
             "regions": {
                 "aws": [
+                    "af-south-1",
                     "ap-northeast-1",
                     "ap-northeast-3",
                     "ap-south-1",
@@ -8723,6 +8721,7 @@
                 "ap-southeast-5",
                 "ca-central-1",
                 "eu-central-1",
+                "eu-central-2",
                 "eu-north-1",
                 "eu-south-2",
                 "eu-west-1",
@@ -9207,11 +9206,13 @@
                 "ap-east-2",
                 "ap-northeast-1",
                 "ap-northeast-2",
+                "ap-northeast-3",
                 "ap-south-1",
                 "ap-south-2",
                 "ap-southeast-1",
                 "ap-southeast-2",
                 "ap-southeast-3",
+                "ap-southeast-5",
                 "ap-southeast-7",
                 "ca-central-1",
                 "eu-central-1",
@@ -12436,7 +12437,12 @@
         "workspaces-instances": {
             "regions": {
                 "aws": [
-                    "ap-northeast-2"
+                    "ap-east-1",
+                    "ap-northeast-2",
+                    "ap-southeast-5",
+                    "eu-south-2",
+                    "me-central-1",
+                    "us-east-2"
                 ],
                 "aws-cn": [],
                 "aws-us-gov": []
Some files were not shown because too many files have changed in this diff