Compare commits

...

40 Commits

Author SHA1 Message Date
Andoni A. 0cb4784187 docs: add AWS Orgs tip in bulk provisioning tutorial 2025-10-22 12:46:55 +02:00
Andoni A. 124676e893 docs: remove how to configure aws creds -- too much detailed 2025-10-22 12:40:27 +02:00
Andoni A. e08c2f2605 docs: review with docs styleguide 2025-10-22 12:28:13 +02:00
Andoni A. 9758fc36df chore: use Prowler API key instead of temporal token 2025-10-22 12:22:28 +02:00
Andoni A. 56bb5e92cc docs: include AWS Orgs bulk importer tutorial 2025-10-22 12:12:36 +02:00
Andoni A. 2ef750d133 Merge branch 'master' into DEVREL-99-provision-all-aws-accounts-in-an-organization 2025-10-22 11:49:05 +02:00
César Arroba 18f3bc098c chore(github): trigger only if repository is prowler (#8974) 2025-10-22 09:27:33 +02:00
César Arroba 67b1983d85 chore(github): fix action (#8973) 2025-10-22 09:10:47 +02:00
César Arroba a3db23af7d chore(github): improve conventional commits action (#8969) 2025-10-21 17:57:29 +02:00
César Arroba 3eaa21f06f chore(github): improve backport label action (#8970) 2025-10-21 17:57:04 +02:00
Rubén De la Torre Vico 5d5c109067 chore(aws): enhance metadata for dlm service (#8860)
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
2025-10-21 17:40:19 +02:00
César Arroba c6cb4e4814 chore(github): improve backport action (#8968) 2025-10-21 17:14:40 +02:00
César Arroba ab06a09173 chore(api): improve pull request action (#8963) 2025-10-21 17:10:48 +02:00
Rubén De la Torre Vico 9c6c007f73 fix(mcp): add missing argument to health check (#8967) 2025-10-21 16:45:05 +02:00
Rubén De la Torre Vico 206f23b5a5 chore(aws): enhance metadata for dms service (#8861)
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
2025-10-21 16:31:18 +02:00
Andoni Alonso 5c9e9bc86a docs: fix security heading (#8965) 2025-10-21 16:13:55 +02:00
Rubén De la Torre Vico 34554d6123 feat(mcp): add support for production deployment with uvicorn (#8958) 2025-10-21 16:03:24 +02:00
Pepe Fagoaga 000cb93157 chore: remove security template as it's already there (#8964) 2025-10-21 19:34:42 +05:45
Adrián Jesús Peña Rodríguez 524209bdf2 feat(api): add provider_id__in filter for ScanSummary queries (#8951) 2025-10-21 15:24:09 +02:00
César Arroba c4a0da8204 chore(github): review and update issue templates (#8961) 2025-10-21 13:40:25 +02:00
César Arroba f0cba0321c chore(codeql): improve API CodeQL action and settings (#8962) 2025-10-21 13:40:07 +02:00
dependabot[bot] 79888c9312 chore(deps): bump playwright and @playwright/test in /ui (#8956)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-21 13:22:21 +02:00
Rubén De la Torre Vico a79910a694 chore(aws): enhance metadata for cloudtrail service (#8831)
Co-authored-by: HugoPBrito <hugopbrit@gmail.com>
2025-10-21 12:45:31 +02:00
César Arroba 4cadee7bb1 chore(github): update codeowners file (#8960) 2025-10-21 11:48:21 +02:00
Pedro Martín 756d436a2f feat(compliance): improve CCC catalogs (#8944) 2025-10-21 03:16:05 +02:00
Alejandro Bailo 5e85ef5835 feat(ui): new card components and derivates for overview (#8921)
Co-authored-by: Alan Buscaglia <gentlemanprogramming@gmail.com>
2025-10-20 16:49:09 +02:00
Prowler Bot 0fa9e2da6c chore(regions_update): Changes in regions for AWS services (#8946)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2025-10-20 09:20:29 -04:00
Andoni Alonso ce7510db28 docs: remove anchors from redirects (#8953) 2025-10-20 14:58:53 +02:00
Pepe Fagoaga 8e3d50c807 fix(docs): redirect user-guide-tutorials (#8945) 2025-10-20 14:51:15 +02:00
Pepe Fagoaga d8908d2ccc docs(fix): space in providers table (#8938) 2025-10-20 14:39:03 +02:00
Alejandro Bailo 0b9969a723 feat: update M365 credentials form (#8929)
Co-authored-by: HugoPBrito <hugopbrit@gmail.com>
2025-10-20 13:51:11 +02:00
StylusFrost 985d73f44f test(ui): enhance Playwright test setups for user authentication (#8881)
Co-authored-by: Alejandro Bailo <59607668+alejandrobailo@users.noreply.github.com>
2025-10-20 13:45:20 +02:00
Pedro Martín 1d705e22da feat(util): add from_yaml_to_json.py (#8943) 2025-10-20 12:29:29 +02:00
Rubén De la Torre Vico ca55d4ce86 chore(aws): enhance metadata for directoryservice service (#8859)
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
2025-10-20 12:20:16 +02:00
Hugo Pereira Brito 0201073fcb fix(docs): small enhancement in warning (#8950) 2025-10-20 12:19:49 +02:00
Alejandro Bailo 928c556721 fix: Mutelist view blinks at opening (#8932) 2025-10-17 19:26:57 +02:00
Rubén De la Torre Vico a653ad7852 chore(deps): remove docs group dependency (#8937) 2025-10-17 16:37:32 +02:00
Sergio Garcia a3c811f801 docs(github): clarify GitHub App configuration requirements (#8930) 2025-10-17 09:30:54 -04:00
Hugo Pereira Brito c85d3e9188 feat(docs): add M365 certificate and azure cli authentication methods (#8939) 2025-10-17 13:42:48 +02:00
Andoni A. f734b249a4 chore: create script to generate AWS accounts list from AWS Org for bulk provisioning 2025-10-13 16:07:33 +02:00
131 changed files with 6720 additions and 8561 deletions
+27 -5
View File
@@ -1,6 +1,28 @@
# SDK
/* @prowler-cloud/sdk
/.github/ @prowler-cloud/sdk
prowler @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
tests @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
api @prowler-cloud/api
ui @prowler-cloud/ui
/prowler/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/tests/ @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
/dashboard/ @prowler-cloud/sdk
/docs/ @prowler-cloud/sdk
/examples/ @prowler-cloud/sdk
/util/ @prowler-cloud/sdk
/contrib/ @prowler-cloud/sdk
/permissions/ @prowler-cloud/sdk
/codecov.yml @prowler-cloud/sdk @prowler-cloud/api
# API
/api/ @prowler-cloud/api
# UI
/ui/ @prowler-cloud/ui
# AI
/mcp_server/ @prowler-cloud/ai
# Platform
/.github/ @prowler-cloud/platform
/Makefile @prowler-cloud/platform
/kubernetes/ @prowler-cloud/platform
**/Dockerfile* @prowler-cloud/platform
**/docker-compose*.yml @prowler-cloud/platform
**/docker-compose*.yaml @prowler-cloud/platform
+44
View File
@@ -3,6 +3,41 @@ description: Create a report to help us improve
labels: ["bug", "status/needs-triage"]
body:
- type: checkboxes
id: search
attributes:
label: Issue search
options:
- label: I have searched the existing issues and this bug has not been reported yet
required: true
- type: dropdown
id: component
attributes:
label: Which component is affected?
multiple: true
options:
- Prowler CLI/SDK
- Prowler API
- Prowler UI
- Prowler Dashboard
- Prowler MCP Server
- Documentation
- Other
validations:
required: true
- type: dropdown
id: provider
attributes:
label: Cloud Provider (if applicable)
multiple: true
options:
- AWS
- Azure
- GCP
- Kubernetes
- GitHub
- Microsoft 365
- Not applicable
- type: textarea
id: reproduce
attributes:
@@ -78,6 +113,15 @@ body:
prowler --version
validations:
required: true
- type: input
id: python-version
attributes:
label: Python version
description: Which Python version are you using?
placeholder: |-
python --version
validations:
required: true
- type: input
id: pip-version
attributes:
+10
View File
@@ -1 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: 📖 Documentation
url: https://docs.prowler.com
about: Check our comprehensive documentation for guides and tutorials
- name: 💬 GitHub Discussions
url: https://github.com/prowler-cloud/prowler/discussions
about: Ask questions and discuss with the community
- name: 🌟 Prowler Community
url: https://goto.prowler.com/slack
about: Join our community for support and updates
@@ -3,6 +3,42 @@ description: Suggest an idea for this project
labels: ["feature-request", "status/needs-triage"]
body:
- type: checkboxes
id: search
attributes:
label: Feature search
options:
- label: I have searched the existing issues and this feature has not been requested yet
required: true
- type: dropdown
id: component
attributes:
label: Which component would this feature affect?
multiple: true
options:
- Prowler CLI/SDK
- Prowler API
- Prowler UI
- Prowler Dashboard
- Prowler MCP Server
- Documentation
- New component/Integration
validations:
required: true
- type: dropdown
id: provider
attributes:
label: Related to specific cloud provider?
multiple: true
options:
- AWS
- Azure
- GCP
- Kubernetes
- GitHub
- Microsoft 365
- All providers
- Not provider-specific
- type: textarea
id: Problem
attributes:
@@ -19,6 +55,14 @@ body:
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
id: use-case
attributes:
label: Use case and benefits
description: Who would benefit from this feature and how?
placeholder: This would help security teams by...
validations:
required: true
- type: textarea
id: Alternatives
attributes:
@@ -0,0 +1,71 @@
name: 'Setup Python with Poetry'
description: 'Setup Python environment with Poetry and install dependencies'
author: 'Prowler'
inputs:
python-version:
description: 'Python version to use'
required: true
working-directory:
description: 'Working directory for Poetry'
required: false
default: '.'
poetry-version:
description: 'Poetry version to install'
required: false
default: '2.1.1'
install-dependencies:
description: 'Install Python dependencies with Poetry'
required: false
default: 'true'
runs:
using: 'composite'
steps:
- name: Replace @master with current branch in pyproject.toml
if: github.event_name == 'pull_request' && github.base_ref == 'master'
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
echo "Using branch: $BRANCH_NAME"
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml
- name: Install poetry
shell: bash
run: |
python -m pip install --upgrade pip
pipx install poetry==${{ inputs.poetry-version }}
- name: Update SDK resolved_reference to latest commit
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
echo "Latest commit hash: $LATEST_COMMIT"
sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
}' poetry.lock
echo "Updated resolved_reference:"
grep -A2 -B2 "resolved_reference" poetry.lock
- name: Update poetry.lock
shell: bash
working-directory: ${{ inputs.working-directory }}
run: poetry lock
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ inputs.python-version }}
cache: 'poetry'
cache-dependency-path: ${{ inputs.working-directory }}/poetry.lock
- name: Install Python dependencies
if: inputs.install-dependencies == 'true'
shell: bash
working-directory: ${{ inputs.working-directory }}
run: |
poetry install --no-root
poetry run pip list
+152
View File
@@ -0,0 +1,152 @@
name: 'Container Security Scan with Trivy'
description: 'Scans container images for vulnerabilities using Trivy and reports results'
author: 'Prowler'
inputs:
image-name:
description: 'Container image name to scan'
required: true
image-tag:
description: 'Container image tag to scan'
required: true
default: ${{ github.sha }}
severity:
description: 'Severities to scan for (comma-separated)'
required: false
default: 'CRITICAL,HIGH,MEDIUM,LOW'
fail-on-critical:
description: 'Fail the build if critical vulnerabilities are found'
required: false
default: 'false'
upload-sarif:
description: 'Upload results to GitHub Security tab'
required: false
default: 'true'
create-pr-comment:
description: 'Create a comment on the PR with scan results'
required: false
default: 'true'
artifact-retention-days:
description: 'Days to retain the Trivy report artifact'
required: false
default: '2'
outputs:
critical-count:
description: 'Number of critical vulnerabilities found'
value: ${{ steps.security-check.outputs.critical }}
high-count:
description: 'Number of high vulnerabilities found'
value: ${{ steps.security-check.outputs.high }}
total-count:
description: 'Total number of vulnerabilities found'
value: ${{ steps.security-check.outputs.total }}
runs:
using: 'composite'
steps:
- name: Run Trivy vulnerability scan (SARIF)
if: inputs.upload-sarif == 'true'
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
with:
image-ref: ${{ inputs.image-name }}:${{ inputs.image-tag }}
format: 'sarif'
output: 'trivy-results.sarif'
severity: 'CRITICAL,HIGH'
exit-code: '0'
- name: Upload Trivy results to GitHub Security tab
if: inputs.upload-sarif == 'true'
uses: github/codeql-action/upload-sarif@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
sarif_file: 'trivy-results.sarif'
category: 'trivy-container'
- name: Run Trivy vulnerability scan (JSON)
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1
with:
image-ref: ${{ inputs.image-name }}:${{ inputs.image-tag }}
format: 'json'
output: 'trivy-report.json'
severity: ${{ inputs.severity }}
exit-code: '0'
- name: Upload Trivy report artifact
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: trivy-scan-report-${{ inputs.image-name }}
path: trivy-report.json
retention-days: ${{ inputs.artifact-retention-days }}
- name: Generate security summary
id: security-check
shell: bash
run: |
CRITICAL=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="CRITICAL")] | length' trivy-report.json)
HIGH=$(jq '[.Results[]?.Vulnerabilities[]? | select(.Severity=="HIGH")] | length' trivy-report.json)
TOTAL=$(jq '[.Results[]?.Vulnerabilities[]?] | length' trivy-report.json)
echo "critical=$CRITICAL" >> $GITHUB_OUTPUT
echo "high=$HIGH" >> $GITHUB_OUTPUT
echo "total=$TOTAL" >> $GITHUB_OUTPUT
echo "### 🔒 Container Security Scan" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Image:** \`${{ inputs.image-name }}:${{ inputs.image-tag }}\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- 🔴 Critical: $CRITICAL" >> $GITHUB_STEP_SUMMARY
echo "- 🟠 High: $HIGH" >> $GITHUB_STEP_SUMMARY
echo "- **Total**: $TOTAL" >> $GITHUB_STEP_SUMMARY
- name: Comment scan results on PR
if: inputs.create-pr-comment == 'true' && github.event_name == 'pull_request'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
IMAGE_NAME: ${{ inputs.image-name }}
GITHUB_SHA: ${{ inputs.image-tag }}
SEVERITY: ${{ inputs.severity }}
with:
script: |
const comment = require('./.github/scripts/trivy-pr-comment.js');
// Unique identifier to find our comment
const marker = '<!-- trivy-scan-comment:${{ inputs.image-name }} -->';
const body = marker + '\n' + comment;
// Find existing comment
const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
});
const existingComment = comments.find(c => c.body?.includes(marker));
if (existingComment) {
// Update existing comment
await github.rest.issues.updateComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: existingComment.id,
body: body
});
console.log('✅ Updated existing Trivy scan comment');
} else {
// Create new comment
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body: body
});
console.log('✅ Created new Trivy scan comment');
}
- name: Check for critical vulnerabilities
if: inputs.fail-on-critical == 'true' && steps.security-check.outputs.critical != '0'
shell: bash
run: |
echo "::error::Found ${{ steps.security-check.outputs.critical }} critical vulnerabilities"
echo "::warning::Please update packages or use a different base image"
exit 1
+11 -2
View File
@@ -1,3 +1,12 @@
name: "API - CodeQL Config"
name: 'API: CodeQL Config'
paths:
- "api/"
- 'api/'
paths-ignore:
- 'api/tests/**'
- 'api/**/__pycache__/**'
- 'api/**/migrations/**'
- 'api/**/*.md'
queries:
- uses: security-and-quality
+102
View File
@@ -0,0 +1,102 @@
const fs = require('fs');
// Configuration from environment variables
const REPORT_FILE = process.env.TRIVY_REPORT_FILE || 'trivy-report.json';
const IMAGE_NAME = process.env.IMAGE_NAME || 'container-image';
const GITHUB_SHA = process.env.GITHUB_SHA || 'unknown';
const GITHUB_REPOSITORY = process.env.GITHUB_REPOSITORY || '';
const GITHUB_RUN_ID = process.env.GITHUB_RUN_ID || '';
const SEVERITY = process.env.SEVERITY || 'CRITICAL,HIGH,MEDIUM,LOW';
// Parse severities to scan
const scannedSeverities = SEVERITY.split(',').map(s => s.trim());
// Read and parse the Trivy report
const report = JSON.parse(fs.readFileSync(REPORT_FILE, 'utf-8'));
let vulnCount = 0;
let vulnsByType = { CRITICAL: 0, HIGH: 0, MEDIUM: 0, LOW: 0 };
let affectedPackages = new Set();
if (report.Results && Array.isArray(report.Results)) {
for (const result of report.Results) {
if (result.Vulnerabilities && Array.isArray(result.Vulnerabilities)) {
for (const vuln of result.Vulnerabilities) {
vulnCount++;
if (vulnsByType[vuln.Severity] !== undefined) {
vulnsByType[vuln.Severity]++;
}
if (vuln.PkgName) {
affectedPackages.add(vuln.PkgName);
}
}
}
}
}
const shortSha = GITHUB_SHA.substring(0, 7);
const timestamp = new Date().toISOString().replace('T', ' ').substring(0, 19) + ' UTC';
// Severity icons and labels
const severityConfig = {
CRITICAL: { icon: '🔴', label: 'Critical' },
HIGH: { icon: '🟠', label: 'High' },
MEDIUM: { icon: '🟡', label: 'Medium' },
LOW: { icon: '🔵', label: 'Low' }
};
let comment = '## 🔒 Container Security Scan\n\n';
comment += `**Image:** \`${IMAGE_NAME}:${shortSha}\`\n`;
comment += `**Last scan:** ${timestamp}\n\n`;
if (vulnCount === 0) {
comment += '### ✅ No Vulnerabilities Detected\n\n';
comment += 'The container image passed all security checks. No known CVEs were found.\n';
} else {
comment += '### 📊 Vulnerability Summary\n\n';
comment += '| Severity | Count |\n';
comment += '|----------|-------|\n';
// Only show severities that were scanned
for (const severity of scannedSeverities) {
const config = severityConfig[severity];
const count = vulnsByType[severity] || 0;
const isBold = (severity === 'CRITICAL' || severity === 'HIGH') && count > 0;
const countDisplay = isBold ? `**${count}**` : count;
comment += `| ${config.icon} ${config.label} | ${countDisplay} |\n`;
}
comment += `| **Total** | **${vulnCount}** |\n\n`;
if (affectedPackages.size > 0) {
comment += `**${affectedPackages.size}** package(s) affected\n\n`;
}
if (vulnsByType.CRITICAL > 0) {
comment += '### ⚠️ Action Required\n\n';
comment += '**Critical severity vulnerabilities detected.** These should be addressed before merging:\n';
comment += '- Review the detailed scan results\n';
comment += '- Update affected packages to patched versions\n';
comment += '- Consider using a different base image if updates are unavailable\n\n';
} else if (vulnsByType.HIGH > 0) {
comment += '### ⚠️ Attention Needed\n\n';
comment += '**High severity vulnerabilities found.** Please review and plan remediation:\n';
comment += '- Assess the risk and exploitability\n';
comment += '- Prioritize updates in the next maintenance cycle\n\n';
} else {
comment += '### ️ Review Recommended\n\n';
comment += 'Medium/Low severity vulnerabilities found. Consider addressing during regular maintenance.\n\n';
}
}
comment += '---\n';
comment += '📋 **Resources:**\n';
if (GITHUB_REPOSITORY && GITHUB_RUN_ID) {
comment += `- [Download full report](https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}) (see artifacts)\n`;
}
comment += '- [View in Security tab](https://github.com/' + (GITHUB_REPOSITORY || 'repository') + '/security/code-scanning)\n';
comment += '- Scanned with [Trivy](https://github.com/aquasecurity/trivy)\n';
module.exports = comment;
+30 -33
View File
@@ -1,36 +1,34 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: API - CodeQL
name: 'API: CodeQL'
on:
push:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- "api/**"
- 'api/**'
- '.github/workflows/api-codeql.yml'
- '.github/codeql/api-codeql-config.yml'
pull_request:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- "api/**"
- 'api/**'
- '.github/workflows/api-codeql.yml'
- '.github/codeql/api-codeql-config.yml'
schedule:
- cron: '00 12 * * *'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
analyze:
name: Analyze
name: CodeQL Security Analysis
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
actions: read
contents: read
@@ -39,21 +37,20 @@ jobs:
strategy:
fail-fast: false
matrix:
language: [ 'python' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
language:
- 'python'
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml
- name: Initialize CodeQL
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: "/language:${{matrix.language}}"
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: '/language:${{ matrix.language }}'
+148 -153
View File
@@ -1,20 +1,30 @@
name: API - Pull Request
name: 'API: Pull Request'
on:
push:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- ".github/workflows/api-pull-request.yml"
- "api/**"
- '.github/workflows/api-pull-request.yml'
- 'api/**'
- '!api/docs/**'
- '!api/README.md'
- '!api/CHANGELOG.md'
pull_request:
branches:
- "master"
- "v5.*"
- 'master'
- 'v5.*'
paths:
- ".github/workflows/api-pull-request.yml"
- "api/**"
- '.github/workflows/api-pull-request.yml'
- 'api/**'
- '!api/docs/**'
- '!api/README.md'
- '!api/CHANGELOG.md'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
POSTGRES_HOST: localhost
@@ -29,21 +39,94 @@ env:
VALKEY_DB: 0
API_WORKING_DIR: ./api
IMAGE_NAME: prowler-api
IGNORE_FILES: |
api/docs/**
api/README.md
api/CHANGELOG.md
jobs:
test:
code-quality:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
strategy:
matrix:
python-version: ["3.12"]
python-version:
- '3.12'
defaults:
run:
working-directory: ./api
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api
- name: Poetry check
run: poetry check --lock
- name: Ruff lint
run: poetry run ruff check . --exclude contrib
- name: Ruff format
run: poetry run ruff format --check . --exclude contrib
- name: Pylint
run: poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
security-scans:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
strategy:
matrix:
python-version:
- '3.12'
defaults:
run:
working-directory: ./api
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ matrix.python-version }}
working-directory: ./api
- name: Bandit
run: poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
- name: Safety
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
run: poetry run safety check --ignore 70612,66963,74429,76352,76353,77323,77744,77745
- name: Vulture
run: poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
tests:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
strategy:
matrix:
python-version:
- '3.12'
defaults:
run:
working-directory: ./api
# Service containers to run with `test`
services:
# Label used to access the service container
postgres:
image: postgres
env:
@@ -52,7 +135,6 @@ jobs:
POSTGRES_USER: ${{ env.POSTGRES_USER }}
POSTGRES_PASSWORD: ${{ env.POSTGRES_PASSWORD }}
POSTGRES_DB: ${{ env.POSTGRES_DB }}
# Set health checks to wait until postgres has started
ports:
- 5432:5432
options: >-
@@ -66,7 +148,6 @@ jobs:
VALKEY_HOST: ${{ env.VALKEY_HOST }}
VALKEY_PORT: ${{ env.VALKEY_PORT }}
VALKEY_DB: ${{ env.VALKEY_DB }}
# Set health checks to wait until postgres has started
ports:
- 6379:6379
options: >-
@@ -76,158 +157,72 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
api/**
.github/workflows/api-pull-request.yml
files_ignore: ${{ env.IGNORE_FILES }}
- name: Replace @master with current branch in pyproject.toml - Only for pull requests to `master`
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'pull_request' && github.base_ref == 'master'
run: |
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
echo "Using branch: $BRANCH_NAME"
sed -i "s|@master|@$BRANCH_NAME|g" pyproject.toml
- name: Install poetry
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry==2.1.1
- name: Update SDK's poetry.lock resolved_reference to latest commit - Only for push events to `master`
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' && github.event_name == 'push' && github.ref == 'refs/heads/master'
run: |
# Get the latest commit hash from the prowler-cloud/prowler repository
LATEST_COMMIT=$(curl -s "https://api.github.com/repos/prowler-cloud/prowler/commits/master" | jq -r '.sha')
echo "Latest commit hash: $LATEST_COMMIT"
# Update the resolved_reference specifically for prowler-cloud/prowler repository
sed -i '/url = "https:\/\/github\.com\/prowler-cloud\/prowler\.git"/,/resolved_reference = / {
s/resolved_reference = "[a-f0-9]\{40\}"/resolved_reference = "'"$LATEST_COMMIT"'"/
}' poetry.lock
# Verify the change was made
echo "Updated resolved_reference:"
grep -A2 -B2 "resolved_reference" poetry.lock
- name: Update poetry.lock
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry lock
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- name: Setup Python with Poetry
uses: ./.github/actions/setup-python-poetry
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
working-directory: ./api
- name: Install dependencies
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry install --no-root
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
sed -E 's/.*"v([^"]+)".*/\1/' \
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
&& chmod +x /tmp/hadolint
- name: Poetry check
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry check --lock
- name: Lint with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff check . --exclude contrib
- name: Check Format with ruff
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run ruff format --check . --exclude contrib
- name: Lint with pylint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn src/
- name: Bandit
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run bandit -q -lll -x '*_test.py,./contrib/' -r .
- name: Safety
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
# 76352, 76353, 77323 come from SDK, but they cannot upgrade it yet. It does not affect API
# TODO: Botocore needs urllib3 1.X so we need to ignore these vulnerabilities 77744,77745. Remove this once we upgrade to urllib3 2.X
run: |
poetry run safety check --ignore 70612,66963,74429,76352,76353,77323,77744,77745
- name: Vulture
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run vulture --exclude "contrib,tests,conftest.py" --min-confidence 100 .
- name: Hadolint
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
- name: Test with pytest
working-directory: ./api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest --cov=./src/backend --cov-report=xml src/backend
- name: Run tests with pytest
run: poetry run pytest --cov=./src/backend --cov-report=xml src/backend
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: api
test-container-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Lint Dockerfile with Hadolint
uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 # v3.3.0
with:
files: api/**
files_ignore: ${{ env.IGNORE_FILES }}
dockerfile: api/Dockerfile
ignore: DL3013
container-build-and-scan:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
security-events: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Docker Buildx
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build Container
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
- name: Build container
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.API_WORKING_DIR }}
push: false
tags: ${{ env.IMAGE_NAME }}:latest
outputs: type=docker
load: true
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Scan container with Trivy
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}
fail-on-critical: 'false'
severity: 'CRITICAL'
+22 -15
View File
@@ -1,28 +1,35 @@
name: Prowler - Automatic Backport
name: 'Tools: Backport'
on:
pull_request_target:
branches: ['master']
types: ['labeled', 'closed']
branches:
- 'master'
types:
- 'labeled'
- 'closed'
paths:
- '.github/workflows/backport.yml'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: false
env:
# The prefix of the label that triggers the backport must not contain the branch name
# so, for example, if the branch is 'master', the label should be 'backport-to-<branch>'
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_IGNORE: was-backported
jobs:
backport:
name: Backport PR
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport')) && !(contains(github.event.pull_request.labels.*.name, 'was-backported'))
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
id-token: write
pull-requests: write
contents: write
pull-requests: write
steps:
- name: Check labels
id: preview_label_check
id: label_check
uses: agilepathway/label-checker@c3d16ad512e7cea5961df85ff2486bb774caf3c5 # v1.6.65
with:
allow_failure: true
@@ -31,17 +38,17 @@ jobs:
none_of: ${{ env.BACKPORT_LABEL_IGNORE }}
repo_token: ${{ secrets.GITHUB_TOKEN }}
- name: Backport Action
if: steps.preview_label_check.outputs.label_check == 'success'
- name: Backport PR
if: steps.label_check.outputs.label_check == 'success'
uses: sorenlouv/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: ${{ env.BACKPORT_LABEL_PREFIX }}
- name: Info log
if: ${{ success() && steps.preview_label_check.outputs.label_check == 'success' }}
- name: Display backport info log
if: success() && steps.label_check.outputs.label_check == 'success'
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() && steps.preview_label_check.outputs.label_check == 'success' }}
- name: Display backport debug log
if: failure() && steps.label_check.outputs.label_check == 'success'
run: cat ~/.backport/backport.debug.log
+20 -13
View File
@@ -1,24 +1,31 @@
name: Prowler - Conventional Commit
name: 'Tools: Conventional Commit'
on:
pull_request:
types:
- "opened"
- "edited"
- "synchronize"
branches:
- "master"
- "v3"
- "v4.*"
- "v5.*"
- 'master'
- 'v3'
- 'v4.*'
- 'v5.*'
types:
- 'opened'
- 'edited'
- 'synchronize'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
jobs:
conventional-commit-check:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: read
steps:
- name: conventional-commit-check
id: conventional-commit-check
- name: Check PR title format
uses: agenthunt/conventional-commit-checker-action@9e552d650d0e205553ec7792d447929fc78e012b # v2.0.0
with:
pr-title-regex: '^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert)(\([^)]+\))?!?: .+'
pr-title-regex: '^(feat|fix|docs|style|refactor|perf|test|chore|build|ci|revert)(\([^)]+\))?!?: .+'
+49 -46
View File
@@ -1,67 +1,70 @@
name: Prowler - Create Backport Label
name: 'Tools: Backport Label'
on:
release:
types: [published]
types:
- 'published'
concurrency:
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
cancel-in-progress: false
env:
BACKPORT_LABEL_PREFIX: backport-to-
BACKPORT_LABEL_COLOR: B60205
jobs:
create_label:
create-label:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: write
contents: read
issues: write
steps:
- name: Create backport label
- name: Create backport label for minor releases
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
RELEASE_TAG: ${{ github.event.release.tag_name }}
OWNER_REPO: ${{ github.repository }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
VERSION_ONLY=${RELEASE_TAG#v} # Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
RELEASE_TAG="${{ github.event.release.tag_name }}"
if [ -z "$RELEASE_TAG" ]; then
echo "Error: No release tag provided"
exit 1
fi
echo "Processing release tag: $RELEASE_TAG"
# Remove 'v' prefix if present (e.g., v3.2.0 -> 3.2.0)
VERSION_ONLY="${RELEASE_TAG#v}"
# Check if it's a minor version (X.Y.0)
if [[ "$VERSION_ONLY" =~ ^[0-9]+\.[0-9]+\.0$ ]]; then
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is a minor version. Proceeding to create backport label."
if [[ "$VERSION_ONLY" =~ ^([0-9]+)\.([0-9]+)\.0$ ]]; then
echo "Release $RELEASE_TAG (version $VERSION_ONLY) is a minor version. Proceeding to create backport label."
TWO_DIGIT_VERSION=${VERSION_ONLY%.0} # Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
# Extract X.Y from X.Y.0 (e.g., 5.6 from 5.6.0)
MAJOR="${BASH_REMATCH[1]}"
MINOR="${BASH_REMATCH[2]}"
TWO_DIGIT_VERSION="${MAJOR}.${MINOR}"
FINAL_LABEL_NAME="backport-to-v${TWO_DIGIT_VERSION}"
FINAL_DESCRIPTION="Backport PR to the v${TWO_DIGIT_VERSION} branch"
LABEL_NAME="${BACKPORT_LABEL_PREFIX}v${TWO_DIGIT_VERSION}"
LABEL_DESC="Backport PR to the v${TWO_DIGIT_VERSION} branch"
LABEL_COLOR="$BACKPORT_LABEL_COLOR"
echo "Effective label name will be: ${FINAL_LABEL_NAME}"
echo "Effective description will be: ${FINAL_DESCRIPTION}"
echo "Label name: $LABEL_NAME"
echo "Label description: $LABEL_DESC"
# Check if the label already exists
STATUS_CODE=$(curl -s -o /dev/null -w "%{http_code}" -H "Authorization: token ${GITHUB_TOKEN}" "https://api.github.com/repos/${OWNER_REPO}/labels/${FINAL_LABEL_NAME}")
if [ "${STATUS_CODE}" -eq 200 ]; then
echo "Label '${FINAL_LABEL_NAME}' already exists."
elif [ "${STATUS_CODE}" -eq 404 ]; then
echo "Label '${FINAL_LABEL_NAME}' does not exist. Creating it..."
# Prepare JSON data payload
JSON_DATA=$(printf '{"name":"%s","description":"%s","color":"B60205"}' "${FINAL_LABEL_NAME}" "${FINAL_DESCRIPTION}")
CREATE_STATUS_CODE=$(curl -s -o /tmp/curl_create_response.json -w "%{http_code}" -X POST \
-H "Accept: application/vnd.github.v3+json" \
-H "Authorization: token ${GITHUB_TOKEN}" \
--data "${JSON_DATA}" \
"https://api.github.com/repos/${OWNER_REPO}/labels")
CREATE_RESPONSE_BODY=$(cat /tmp/curl_create_response.json)
rm -f /tmp/curl_create_response.json
if [ "$CREATE_STATUS_CODE" -eq 201 ]; then
echo "Label '${FINAL_LABEL_NAME}' created successfully."
else
echo "Error creating label '${FINAL_LABEL_NAME}'. Status: $CREATE_STATUS_CODE"
echo "Response: $CREATE_RESPONSE_BODY"
exit 1
fi
# Check if label already exists
if gh label list --repo ${{ github.repository }} --limit 1000 | grep -q "^${LABEL_NAME}[[:space:]]"; then
echo "Label '$LABEL_NAME' already exists."
else
echo "Error checking for label '${FINAL_LABEL_NAME}'. HTTP Status: ${STATUS_CODE}"
exit 1
echo "Label '$LABEL_NAME' does not exist. Creating it..."
gh label create "$LABEL_NAME" \
--description "$LABEL_DESC" \
--color "$LABEL_COLOR" \
--repo ${{ github.repository }}
echo "Label '$LABEL_NAME' created successfully."
fi
else
echo "Release ${RELEASE_TAG} (version ${VERSION_ONLY}) is not a minor version. Skipping backport label creation."
exit 0
echo "Release $RELEASE_TAG (version $VERSION_ONLY) is not a minor version. Skipping backport label creation."
fi
+1 -1
View File
@@ -1,4 +1,4 @@
name: Prowler - Find secrets
name: 'Tools: TruffleHog'
on: pull_request
+1
View File
@@ -14,6 +14,7 @@ All notable changes to the **Prowler API** are documented in this file.
- Support for `passed_findings` and `total_findings` fields in compliance requirement overview for accurate Prowler ThreatScore calculation [(#8582)](https://github.com/prowler-cloud/prowler/pull/8582)
- Database read replica support [(#8869)](https://github.com/prowler-cloud/prowler/pull/8869)
- Support Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
- Add `provider_id__in` filter support to findings and findings severity overview endpoints [(#8951)](https://github.com/prowler-cloud/prowler/pull/8951)
### Changed
- Now the MANAGE_ACCOUNT permission is required to modify or read user permissions instead of MANAGE_USERS [(#8281)](https://github.com/prowler-cloud/prowler/pull/8281)
+1
View File
@@ -765,6 +765,7 @@ class ComplianceOverviewFilter(FilterSet):
class ScanSummaryFilter(FilterSet):
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
provider_id__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
provider_type = ChoiceFilter(
field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
)
+30
View File
@@ -3611,6 +3611,16 @@ paths:
schema:
type: string
format: uuid
- in: query
name: filter[provider_id__in]
schema:
type: array
items:
type: string
format: uuid
description: Multiple values may be separated by commas.
explode: false
style: form
- in: query
name: filter[provider_type]
schema:
@@ -3778,6 +3788,16 @@ paths:
schema:
type: string
format: uuid
- in: query
name: filter[provider_id__in]
schema:
type: array
items:
type: string
format: uuid
description: Multiple values may be separated by commas.
explode: false
style: form
- in: query
name: filter[provider_type]
schema:
@@ -3980,6 +4000,16 @@ paths:
schema:
type: string
format: uuid
- in: query
name: filter[provider_id__in]
schema:
type: array
items:
type: string
format: uuid
description: Multiple values may be separated by commas.
explode: false
style: form
- in: query
name: filter[provider_type]
schema:
+166
View File
@@ -46,6 +46,7 @@ from api.models import (
SAMLConfiguration,
SAMLToken,
Scan,
ScanSummary,
StateChoices,
Task,
TenantAPIKey,
@@ -5766,6 +5767,171 @@ class TestOverviewViewSet:
assert service1_data["attributes"]["muted"] == 1
assert service2_data["attributes"]["muted"] == 0
def test_overview_findings_provider_id_in_filter(
self, authenticated_client, tenants_fixture, providers_fixture
):
tenant = tenants_fixture[0]
provider1, provider2, *_ = providers_fixture
scan1 = Scan.objects.create(
name="scan-one",
provider=provider1,
trigger=Scan.TriggerChoices.MANUAL,
state=StateChoices.COMPLETED,
tenant=tenant,
)
scan2 = Scan.objects.create(
name="scan-two",
provider=provider2,
trigger=Scan.TriggerChoices.MANUAL,
state=StateChoices.COMPLETED,
tenant=tenant,
)
ScanSummary.objects.create(
tenant=tenant,
scan=scan1,
check_id="check-provider-one",
service="service-a",
severity="high",
region="region-a",
_pass=5,
fail=1,
muted=2,
total=8,
new=5,
changed=2,
unchanged=1,
fail_new=1,
fail_changed=0,
pass_new=3,
pass_changed=2,
muted_new=1,
muted_changed=1,
)
ScanSummary.objects.create(
tenant=tenant,
scan=scan2,
check_id="check-provider-two",
service="service-b",
severity="medium",
region="region-b",
_pass=2,
fail=3,
muted=1,
total=6,
new=3,
changed=2,
unchanged=1,
fail_new=2,
fail_changed=1,
pass_new=1,
pass_changed=1,
muted_new=1,
muted_changed=0,
)
single_response = authenticated_client.get(
reverse("overview-findings"),
{"filter[provider_id__in]": str(provider1.id)},
)
assert single_response.status_code == status.HTTP_200_OK
single_attributes = single_response.json()["data"]["attributes"]
assert single_attributes["pass"] == 5
assert single_attributes["fail"] == 1
assert single_attributes["muted"] == 2
assert single_attributes["total"] == 8
combined_response = authenticated_client.get(
reverse("overview-findings"),
{"filter[provider_id__in]": f"{provider1.id},{provider2.id}"},
)
assert combined_response.status_code == status.HTTP_200_OK
combined_attributes = combined_response.json()["data"]["attributes"]
assert combined_attributes["pass"] == 7
assert combined_attributes["fail"] == 4
assert combined_attributes["muted"] == 3
assert combined_attributes["total"] == 14
def test_overview_findings_severity_provider_id_in_filter(
self, authenticated_client, tenants_fixture, providers_fixture
):
tenant = tenants_fixture[0]
provider1, provider2, *_ = providers_fixture
scan1 = Scan.objects.create(
name="severity-scan-one",
provider=provider1,
trigger=Scan.TriggerChoices.MANUAL,
state=StateChoices.COMPLETED,
tenant=tenant,
)
scan2 = Scan.objects.create(
name="severity-scan-two",
provider=provider2,
trigger=Scan.TriggerChoices.MANUAL,
state=StateChoices.COMPLETED,
tenant=tenant,
)
ScanSummary.objects.create(
tenant=tenant,
scan=scan1,
check_id="severity-check-one",
service="service-a",
severity="high",
region="region-a",
_pass=4,
fail=4,
muted=0,
total=8,
)
ScanSummary.objects.create(
tenant=tenant,
scan=scan1,
check_id="severity-check-two",
service="service-a",
severity="medium",
region="region-b",
_pass=2,
fail=2,
muted=0,
total=4,
)
ScanSummary.objects.create(
tenant=tenant,
scan=scan2,
check_id="severity-check-three",
service="service-b",
severity="critical",
region="region-c",
_pass=1,
fail=2,
muted=0,
total=3,
)
single_response = authenticated_client.get(
reverse("overview-findings_severity"),
{"filter[provider_id__in]": str(provider1.id)},
)
assert single_response.status_code == status.HTTP_200_OK
single_attributes = single_response.json()["data"]["attributes"]
assert single_attributes["high"] == 8
assert single_attributes["medium"] == 4
assert single_attributes["critical"] == 0
combined_response = authenticated_client.get(
reverse("overview-findings_severity"),
{"filter[provider_id__in]": f"{provider1.id},{provider2.id}"},
)
assert combined_response.status_code == status.HTTP_200_OK
combined_attributes = combined_response.json()["data"]["attributes"]
assert combined_attributes["high"] == 8
assert combined_attributes["medium"] == 4
assert combined_attributes["critical"] == 3
@pytest.mark.django_db
class TestScheduleViewSet:
+6 -9
View File
@@ -114,7 +114,8 @@
"group": "Tutorials",
"pages": [
"user-guide/tutorials/prowler-app-sso-entra",
"user-guide/tutorials/bulk-provider-provisioning"
"user-guide/tutorials/bulk-provider-provisioning",
"user-guide/tutorials/aws-organizations-bulk-provisioning"
]
}
]
@@ -404,14 +405,6 @@
"source": "/projects/prowler-open-source/en/latest/tutorials/gcp/getting-started-gcp",
"destination": "/user-guide/providers/gcp/getting-started-gcp"
},
{
"source": "/projects/prowler-open-source/en/latest/tutorials/prowler-app",
"destination": "/user-guide/tutorials/prowler-app#step-4-4%3A-kubernetes-credentials%3A"
},
{
"source": "/projects/prowler-open-source/en/latest/tutorials/prowler-app/#step-3-add-a-provider",
"destination": "/user-guide/tutorials/prowler-app#step-3-add-a-provider"
},
{
"source": "/projects/prowler-open-source/en/latest/tutorials/microsoft365/getting-started-m365",
"destination": "/user-guide/providers/microsoft365/getting-started-m365"
@@ -431,6 +424,10 @@
{
"source": "/projects/prowler-saas/en/latest/:slug*",
"destination": "https://docs.prowler.pro/en/latest/:slug*"
},
{
"source": "/projects/prowler-open-source/en/latest/tutorials/:slug*",
"destination": "/user-guide/tutorials/:slug*"
}
]
}
@@ -182,19 +182,19 @@ Configure the server using environment variables:
|----------|-------------|----------|---------|
| `PROWLER_APP_API_KEY` | Prowler API key | Only for STDIO mode | - |
| `PROWLER_API_BASE_URL` | Custom Prowler API endpoint | No | `https://api.prowler.com` |
| `PROWLER_MCP_MODE` | Default transport mode (overwritten by `--transport` argument) | No | `stdio` |
| `PROWLER_MCP_TRANSPORT_MODE` | Default transport mode (overwritten by `--transport` argument) | No | `stdio` |
<CodeGroup>
```bash macOS/Linux
export PROWLER_APP_API_KEY="pk_your_api_key_here"
export PROWLER_API_BASE_URL="https://api.prowler.com"
export PROWLER_MCP_MODE="http"
export PROWLER_MCP_TRANSPORT_MODE="http"
```
```bash Windows PowerShell
$env:PROWLER_APP_API_KEY="pk_your_api_key_here"
$env:PROWLER_API_BASE_URL="https://api.prowler.com"
$env:PROWLER_MCP_MODE="http"
$env:PROWLER_MCP_TRANSPORT_MODE="http"
```
</CodeGroup>
@@ -209,7 +209,7 @@ For convenience, create a `.env` file in the `mcp_server` directory:
```bash .env
PROWLER_APP_API_KEY=pk_your_api_key_here
PROWLER_API_BASE_URL=https://api.prowler.com
PROWLER_MCP_MODE=stdio
PROWLER_MCP_TRANSPORT_MODE=stdio
```
When using Docker, pass the environment file:
@@ -228,6 +228,35 @@ uvx /path/to/prowler/mcp_server/
This is particularly useful when configuring MCP clients that need to launch the server from a specific path.
## Production Deployment
For production deployments that require customization, it is recommended to use the ASGI application that can be found in `prowler_mcp_server.server`. This can be run with uvicorn:
```bash
uvicorn prowler_mcp_server.server:app --host 0.0.0.0 --port 8000
```
For more details on production deployment options, see the [FastMCP production deployment guide](https://gofastmcp.com/deployment/http#production-deployment) and [uvicorn settings](https://www.uvicorn.org/settings/).
### Entrypoint Script
The source tree includes `entrypoint.sh` to simplify switching between the
standard CLI runner and the ASGI app. The first argument selects the mode and
any additional flags are passed straight through:
```bash
# Default CLI experience (prowler-mcp console script)
./entrypoint.sh main --transport http --host 0.0.0.0
# ASGI app via uvicorn
./entrypoint.sh uvicorn --host 0.0.0.0 --port 9000
```
Omitting the mode defaults to `main`, matching the `prowler-mcp` console script.
When `uvicorn` mode is selected, the script exports `PROWLER_MCP_TRANSPORT_MODE=http` automatically.
This is the default entrypoint for the Docker container.
## Next Steps
Now that you have the Prowler MCP Server installed, proceed to configure your MCP client:
Binary file not shown.

After

Width:  |  Height:  |  Size: 412 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 285 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 346 KiB

+22 -44
View File
@@ -5,69 +5,47 @@
![](/images/products/overview.png)
<Columns cols={2}>
<Card
title="Prowler CLI"
icon="terminal"
href="/getting-started/products/prowler-cli"
>
<Card title="Prowler CLI" icon="terminal" href="/getting-started/products/prowler-cli">
Command Line Interface
</Card>
<Card
title="Prowler App"
icon="pen-to-square"
href="/getting-started/products/prowler-app"
>
<Card title="Prowler App" icon="pen-to-square" href="/getting-started/products/prowler-app">
Web Application
</Card>
<Card
title="Prowler Cloud"
icon="pen-to-square"
href="/getting-started/products/prowler-cloud"
>
<Card title="Prowler Cloud" icon="pen-to-square" href="/getting-started/products/prowler-cloud">
A managed service built on top of Prowler App.
</Card>
<Card
title="Prowler Hub"
icon="map"
href="/getting-started/products/prowler-hub"
>
<Card title="Prowler Hub" icon="map" href="/getting-started/products/prowler-hub">
A public library of versioned checks, cloud service artifacts, and compliance frameworks.
</Card>
</Columns>
## Supported Providers
The supported providers right now are:
| Provider | Support | Interface |
|----------|--------|----------|
| [AWS](/user-guide/providers/aws/getting-started-aws) | Official | UI, API, CLI |
| [Azure](/user-guide/providers/azure/getting-started-azure) | Official | UI, API, CLI |
| [Google Cloud](/user-guide/providers/gcp/getting-started-gcp) | Official | UI, API, CLI |
| [Kubernetes](/user-guide/providers/kubernetes/in-cluster) | Official | UI, API, CLI |
| [M365](/user-guide/providers/microsoft365/getting-started-m365) | Official | UI, API, CLI |
| [Github](/user-guide/providers/github/getting-started-github) | Official | UI, API, CLI |
| [Oracle Cloud](/user-guide/providers/oci/getting-started-oci) | Official | CLI |
| [Infra as Code](/user-guide/providers/iac/getting-started-iac) | Official | CLI |
| [MongoDB Atlas](/user-guide/providers/mongodbatlas/getting-started-mongodbatlas) | Official | CLI |
| [LLM](/user-guide/providers/llm/getting-started-llm) | Official | CLI |
| **NHN** | Unofficial | CLI |
| Provider | Support | Interface |
| -------------------------------------------------------------------------------- | ---------- | ------------ |
| [AWS](/user-guide/providers/aws/getting-started-aws) | Official | UI, API, CLI |
| [Azure](/user-guide/providers/azure/getting-started-azure) | Official | UI, API, CLI |
| [Google Cloud](/user-guide/providers/gcp/getting-started-gcp) | Official | UI, API, CLI |
| [Kubernetes](/user-guide/providers/kubernetes/in-cluster) | Official | UI, API, CLI |
| [M365](/user-guide/providers/microsoft365/getting-started-m365) | Official | UI, API, CLI |
| [Github](/user-guide/providers/github/getting-started-github) | Official | UI, API, CLI |
| [Oracle Cloud](/user-guide/providers/oci/getting-started-oci) | Official | CLI |
| [Infra as Code](/user-guide/providers/iac/getting-started-iac) | Official | CLI |
| [MongoDB Atlas](/user-guide/providers/mongodbatlas/getting-started-mongodbatlas) | Official | CLI |
| [LLM](/user-guide/providers/llm/getting-started-llm) | Official | CLI |
| **NHN** | Unofficial | CLI |
For more information about the checks and compliance of each provider visit [Prowler Hub](https://hub.prowler.com).
## Where to go next?
<Columns cols={2}>
<Card
title="User Guide"
icon="terminal"
href="/user-guide/tutorials/prowler-app"
>
<Card title="User Guide" icon="terminal" href="/user-guide/tutorials/prowler-app">
Detailed instructions on how to use Prowler.
</Card>
<Card
title="Development Guide"
icon="pen-to-square"
href="/developer-guide/introduction"
>
<Card title="Development Guide" icon="pen-to-square" href="/developer-guide/introduction">
Interested in contributing to Prowler?
</Card>
</Columns>
</Columns>
+1 -1
View File
@@ -16,7 +16,7 @@ We use encryption everywhere possible. The data and communications used by **Pro
Prowler Cloud is GDPR compliant in regards to personal data and the ["right to be forgotten"](https://gdpr.eu/right-to-be-forgotten/). When a user deletes their account their user information will be deleted from Prowler Cloud online and backup systems within 10 calendar days.
## Software Security
## Software Security
We follow a **security-by-design approach** throughout our software development lifecycle. All changes go through automated checks at every stage, from local development to production deployment.
@@ -144,10 +144,19 @@ GitHub Apps provide the recommended integration method for accessing multiple re
2. **Create New GitHub App**
- Click "New GitHub App"
- Complete the required fields:
- **GitHub App name**: Unique application name
- **Homepage URL**: Application homepage
- **Webhook URL**: Webhook payload URL (optional)
- **Permissions**: Application permission requirements
- **GitHub App name**: Choose a unique, descriptive name (e.g., "Prowler Security Scanner")
- **Homepage URL**: Enter your organization's website or the Prowler documentation URL (e.g., `https://prowler.com` or `https://docs.prowler.com`). This is just for reference and doesn't affect functionality.
- **Webhook URL**: Leave blank or uncheck "Active" under Webhook. Prowler doesn't require webhooks since it performs on-demand scans rather than responding to GitHub events.
- **Webhook secret**: Leave blank (not needed for Prowler)
- **Permissions**: Configure in the next step (see below)
<Note>
**About Homepage URL and Webhooks**
The Homepage URL is purely informational and can be any valid URL - it's just displayed to users who view the app. Use your company website, your GitHub organization URL, or even `https://docs.prowler.com`.
Webhooks are **not required** for Prowler. Since Prowler performs on-demand security scans when you run it (rather than automatically responding to GitHub events), you can safely disable webhooks or leave the URL blank.
</Note>
3. **Configure Permissions**
To enable Prowler functionality, configure these permissions:
@@ -1,24 +1,26 @@
---
title: 'Microsoft 365 Authentication in Prowler'
title: "Microsoft 365 Authentication in Prowler"
---
Prowler for Microsoft 365 supports multiple authentication types. Authentication methods vary between Prowler App and Prowler CLI:
**Prowler App:**
- [**Service Principal Application**](#service-principal-authentication-recommended) (**Recommended**)
- **Service Principal with User Credentials** (Deprecated)
- [**Application Certificate Authentication**](#certificate-based-authentication) (**Recommended**)
- [**Application Client Secret Authentication**](#client-secret-authentication)
**Prowler CLI:**
- [**Service Principal Application**](#service-principal-authentication-recommended) (**Recommended**)
- **Service Principal with User Credentials** (Deprecated)
- [**Interactive browser authentication**](#interactive-browser-authentication)
- [**Application Certificate Authentication**](#certificate-based-authentication) (**Recommended**)
- [**Application Client Secret Authentication**](#client-secret-authentication)
- [**Azure CLI Authentication**](#azure-cli-authentication)
- [**Interactive Browser Authentication**](#interactive-browser-authentication)
## Required Permissions
To run the full Prowler provider, including PowerShell checks, two types of permission scopes must be set in **Microsoft Entra ID**.
### Service Principal Authentication Permissions (Recommended)
### Application Permissions for App-Only Authentication
When using service principal authentication, add these **Application Permissions**:
@@ -35,11 +37,11 @@ When using service principal authentication, add these **Application Permissions
- `application_access` from external API `Skype and Teams Tenant Admin API`: Required for Teams PowerShell module app authentication.
<Note>
`Directory.Read.All` can be replaced with `Domain.Read.All` for more restrictive permissions, but Entra checks related to DirectoryRoles and GetUsers will not run. If using this option, you must also add the `Organization.Read.All` permission to the service principal application for authentication.
`Directory.Read.All` can be replaced with `Domain.Read.All` for more restrictive permissions, but Entra checks related to DirectoryRoles and GetUsers will not run. If using this option, you must also add the `Organization.Read.All` permission to the application registration for authentication.
</Note>
<Note>
This is the **recommended authentication method** because it allows running the full M365 provider including PowerShell checks, providing complete coverage of all available security checks.
These permissions enable application-based authentication methods (client secret and certificate). Using certificate-based authentication is the recommended way to run the full M365 provider, including PowerShell checks.
</Note>
### Browser Authentication Permissions
@@ -47,96 +49,189 @@ This is the **recommended authentication method** because it allows running the
When using browser authentication, permissions are delegated to the user, so the user must have the appropriate permissions rather than the application.
<Warning>
With browser authentication, you will only be able to run checks that work through MS Graph API. PowerShell module checks will not be executed.
Browser and Azure CLI authentication methods limit scanning capabilities to checks that operate through Microsoft Graph API. Checks requiring PowerShell modules will not execute, as they need application-level permissions that cannot be delegated through browser authentication.
</Warning>
### Step-by-Step Permission Assignment
#### Create Service Principal Application
#### Create Application Registration
1. Access **Microsoft Entra ID**
![Overview of Microsoft Entra ID](/images/providers/microsoft-entra-id.png)
![Overview of Microsoft Entra ID](/images/providers/microsoft-entra-id.png)
2. Navigate to "Applications" > "App registrations"
![App Registration nav](/images/providers/app-registration-menu.png)
![App Registration nav](/images/providers/app-registration-menu.png)
3. Click "+ New registration", complete the form, and click "Register"
![New Registration](/images/providers/new-registration.png)
![New Registration](/images/providers/new-registration.png)
4. Go to "Certificates & secrets" > "Client secrets" > "+ New client secret"
![Certificate & Secrets nav](/images/providers/certificates-and-secrets.png)
![Certificate & Secrets nav](/images/providers/certificates-and-secrets.png)
5. Fill in the required fields and click "Add", then copy the generated value (this will be `AZURE_CLIENT_SECRET`)
![New Client Secret](/images/providers/new-client-secret.png)
![New Client Secret](/images/providers/new-client-secret.png)
#### Grant Microsoft Graph API Permissions
1. Go to App Registration > Select your Prowler App > click on "API permissions"
![API Permission Page](/images/providers/api-permissions-page.png)
![API Permission Page](/images/providers/api-permissions-page.png)
2. Click "+ Add a permission" > "Microsoft Graph" > "Application permissions"
![Add API Permission](/images/providers/add-app-api-permission.png)
![Add API Permission](/images/providers/add-app-api-permission.png)
3. Search and select the required permissions:
- `AuditLog.Read.All`: Required for Entra service
- `Directory.Read.All`: Required for all services
- `Policy.Read.All`: Required for all services
- `SharePointTenantSettings.Read.All`: Required for SharePoint service
![Permission Screenshots](/images/providers/directory-permission.png)
- `AuditLog.Read.All`: Required for Entra service
- `Directory.Read.All`: Required for all services
- `Policy.Read.All`: Required for all services
- `SharePointTenantSettings.Read.All`: Required for SharePoint service
![Application Permissions](/images/providers/app-permissions.png)
![Permission Screenshots](/images/providers/directory-permission.png)
4. Click "Add permissions", then click "Grant admin consent for ``<your-tenant-name>``"
![Application Permissions](/images/providers/app-permissions.png)
#### Grant PowerShell Module Permissions (For Service Principal Authentication)
4. Click "Add permissions", then click "Grant admin consent for `<your-tenant-name>`"
#### Grant PowerShell Module Permissions
1. **Add Exchange API:**
- Search and select "Office 365 Exchange Online" API in **APIs my organization uses**
- Search and select "Office 365 Exchange Online" API in **APIs my organization uses**
![Office 365 Exchange Online API](/images/providers/search-exchange-api.png)
![Office 365 Exchange Online API](/images/providers/search-exchange-api.png)
- Select "Exchange.ManageAsApp" permission and click "Add permissions"
- Select "Exchange.ManageAsApp" permission and click "Add permissions"
![Exchange.ManageAsApp Permission](/images/providers/exchange-permission.png)
![Exchange.ManageAsApp Permission](/images/providers/exchange-permission.png)
- Assign `Global Reader` role to the app: Go to `Roles and administrators` > click `here` for directory level assignment
- Assign `Global Reader` role to the app: Go to `Roles and administrators` > click `here` for directory level assignment
![Roles and administrators](/images/providers/here.png)
![Roles and administrators](/images/providers/here.png)
- Search for `Global Reader` and assign it to your application
- Search for `Global Reader` and assign it to your application
![Global Reader Role](/images/providers/global-reader-role.png)
![Global Reader Role](/images/providers/global-reader-role.png)
2. **Add Teams API:**
- Search and select "Skype and Teams Tenant Admin API" in **APIs my organization uses**
- Search and select "Skype and Teams Tenant Admin API" in **APIs my organization uses**
![Skype and Teams Tenant Admin API](/images/providers/search-skype-teams-tenant-admin-api.png)
![Skype and Teams Tenant Admin API](/images/providers/search-skype-teams-tenant-admin-api.png)
- Select "application_access" permission and click "Add permissions"
- Select "application_access" permission and click "Add permissions"
![application_access Permission](/images/providers/teams-permission.png)
![application_access Permission](/images/providers/teams-permission.png)
3. Click "Grant admin consent for `<your-tenant-name>`" to grant admin consent
![Grant Admin Consent](/images/providers/grant-external-api-permissions.png)
![Grant Admin Consent](/images/providers/grant-external-api-permissions.png)
## Service Principal Authentication (Recommended)
Final permissions should look like this:
*Available for both Prowler App and Prowler CLI*
![Final Permissions](/images/providers/final-permissions.png)
<a id="client-secret-authentication"></a>
<a id="certificate-based-authentication"></a>
## Application Certificate Authentication (Recommended)
_Available for both Prowler App and Prowler CLI_
**Authentication flag for CLI:** `--certificate-auth`
Certificate-based authentication replaces the client secret with an X.509 certificate that signs Microsoft Entra ID tokens for the Prowler application registration.
This is the recommended approach for production environments because it avoids long-lived secrets, supports the full provider (including PowerShell checks), and simplifies unattended automation. Microsoft also recommends certificate credentials for app-only access, see [Manage certificates for applications](https://learn.microsoft.com/en-us/entra/identity-platform/certificate-credentials).
### Generate the Certificate
The service principal needs a certificate that contains the private key locally (for Prowler) and the public key uploaded to Microsoft Entra ID. The following commands show a secure baseline workflow on macOS or Linux using OpenSSL:
```console
# 1. Create a private key (keep this file private; do not upload it to the portal)
openssl genrsa -out prowlerm365.key 2048
# 2. Create a self-signed certificate valid for two years
openssl req -x509 -new -nodes -key prowlerm365.key -sha256 -days 730 -out prowlerm365.cer -subj "/CN=ProwlerM365Cert"
# 3. Package the key and certificate into a passwordless PFX bundle for Prowler
openssl pkcs12 -export \
-out prowlerm365.pfx \
-inkey prowlerm365.key \
-in prowlerm365.cer \
-passout pass:
```
<Warning>
Guard `prowlerm365.key` and `prowlerm365.pfx`. Only upload the `.cer` file to the Entra ID portal. Rotate or revoke the certificate before it expires or if there is any suspicion of exposure.
</Warning>
If your organization uses a certificate authority, you can replace step 2 with a CSR workflow and import the signed certificate instead.
### Upload the Certificate to Microsoft Entra ID
1. Open **Microsoft Entra ID** > **App registrations** > your application.
2. Go to **Certificates & secrets** > **Certificates**.
3. Select **Upload certificate** and choose `prowlerm365.cer`.
4. Confirm the certificate appears with the expected expiration date.
After the certificate is in place, encode the PFX file so it can be stored in an environment variable (macOS/Linux example):
```console
base64 -i prowlerm365.pfx -o prowlerm365.pfx.b64
cat prowlerm365.pfx.b64 | tr -d '\n'
```
Copy the resulting single-line Base64 string (or the contents of `prowlerm365.pfx.b64`)—you will use it in the next step.
### Provide the Certificate to Prowler
You can supply the private certificate to Prowler in two ways:
- **Environment variables (recommended for headless execution)**
```console
export AZURE_CLIENT_ID="00000000-0000-0000-0000-000000000000"
export AZURE_TENANT_ID="11111111-1111-1111-1111-111111111111"
export M365_CERTIFICATE_CONTENT="$(base64 < prowlerm365.pfx | tr -d '\n')"
```
The `M365_CERTIFICATE_CONTENT` variable must contain a single-line Base64 string. Remove any line breaks or spaces before exporting.
- **Local file path**
Store the PFX securely and reference it when you run the CLI:
```console
python3 prowler-cli.py m365 --certificate-auth --certificate-path /secure/path/prowlerm365.pfx
```
The CLI still needs `AZURE_CLIENT_ID` and `AZURE_TENANT_ID` in the environment when you use `--certificate-path`.
For the **Prowler App**, paste the Base64-encoded PFX in the `certificate_content` field when you configure the provider secrets. The platform persists the encrypted certificate and supplies it during scans.
<Note>
Do not mix certificate authentication with a client secret. Provide either a certificate **or** a secret to the application registration and Prowler configuration.
</Note>
<a id="client-secret-authentication"></a>
<a id="service-principal-authentication"></a>
<a id="service-principal-authentication-recommended"></a>
## Application Client Secret Authentication
_Available for both Prowler App and Prowler CLI_
**Authentication flag for CLI:** `--sp-env-auth`
Authenticate using the **Service Principal Application** by configuring the following environment variables:
Authenticate using a **Microsoft Entra application registration with a client secret** by configuring the following environment variables:
```console
export AZURE_CLIENT_ID="XXXXXXXXX"
@@ -150,13 +245,61 @@ Refer to the [Step-by-Step Permission Assignment](#step-by-step-permission-assig
If the external API permissions described in the section above are not added, only checks that work through MS Graph will be executed. This means that the full provider will not be executed.
This workflow is helpful for initial validation or temporary access. Plan to transition to certificate-based authentication to remove long-lived secrets and keep full provider coverage in unattended environments.
<Note>
In order to scan all the checks from M365 required permissions to the service principal application must be added. Refer to the [PowerShell Module Permissions](#grant-powershell-module-permissions-for-service-principal-authentication) section for more information.
To scan every M365 check, ensure the required permissions are added to the application registration. Refer to the [PowerShell Module Permissions](#grant-powershell-module-permissions-for-app-only-authentication) section for more information.
</Note>
### Run Prowler with Certificate Authentication
After the variables or path are in place, run the Microsoft 365 provider as usual:
```console
python3 prowler-cli.py m365 --certificate-auth --init-modules --log-level ERROR
```
The command above initializes PowerShell modules if needed. You can combine other standard flags (for example, `--region M365USGovernment` or custom outputs) with `--certificate-auth`.
Prowler prints the certificate thumbprint during execution so you can confirm the correct credential is in use.
<a id="azure-cli-authentication"></a>
## Azure CLI Authentication
_Available only for Prowler CLI_
**Authentication flag for CLI:** `--az-cli-auth`
Azure CLI authentication relies on the identity that is already signed in with the Azure CLI. Before running Prowler, make sure you have an active CLI session in the target tenant:
```console
az login --tenant <TENANT_ID>
# Optional: enforce the tenant when several are available
az account set --tenant <TENANT_ID>
```
If you prefer to reuse the same service principal that powers certificate-based authentication, authenticate it through Azure CLI instead of exporting environment variables. Azure CLI expects the certificate in PEM format; convert the PFX produced earlier and sign in:
```console
openssl pkcs12 -in prowlerm365.pfx -out prowlerm365.pem -nodes
az login --service-principal \
--username <AZURE_CLIENT_ID> \
--password /secure/path/prowlerm365.pem \
--tenant <AZURE_TENANT_ID>
```
After the CLI session is authenticated, launch Prowler with the Azure CLI flag:
```console
python3 prowler-cli.py m365 --az-cli-auth
```
The Azure CLI identity must hold the same Microsoft Graph and external API permissions required for the full provider. Signing in with a user account limits the scan to delegated Microsoft Graph endpoints and skips PowerShell-based checks. Use a service principal with the necessary application permissions to keep complete coverage.
## Interactive Browser Authentication
*Available only for Prowler CLI*
_Available only for Prowler CLI_
**Authentication flag:** `--browser-auth`
@@ -171,6 +314,7 @@ Since this is a **delegated permission** authentication method, necessary permis
PowerShell is required to run certain M365 checks.
**Supported versions:**
- **PowerShell 7.4 or higher** (7.5 is recommended)
#### Why Is PowerShell 7.4+ Required?
@@ -193,6 +337,7 @@ Installing PowerShell is different depending on your OS:
```console
winget install --id Microsoft.PowerShell --source winget
```
</Tab>
<Tab title="MacOS">
[MacOS](https://learn.microsoft.com/es-es/powershell/scripting/install/installing-powershell-on-macos?view=powershell-7.5#install-the-latest-stable-release-of-powershell): installing PowerShell on macOS requires [brew](https://brew.sh/). Once brew is installed, simply run the command shown above. Pwsh is only supported on macOS 15 (Sequoia) x64 and Arm64, macOS 14 (Sonoma) x64 and Arm64, and macOS 13 (Ventura) x64 and Arm64.
@@ -202,6 +347,7 @@ Installing PowerShell is different depending on your OS:
```
Once it's installed run `pwsh` on your terminal to verify it's working.
</Tab>
<Tab title="Linux (Ubuntu)">
[Ubuntu](https://learn.microsoft.com/es-es/powershell/scripting/install/install-ubuntu?view=powershell-7.5#installation-via-package-repository-the-package-repository): The supported versions for installing PowerShell 7.4+ on Ubuntu are Ubuntu 22.04 and Ubuntu 24.04.
@@ -241,6 +387,7 @@ Installing PowerShell is different depending on your OS:
# Start PowerShell
pwsh
```
</Tab>
<Tab title="Linux (Alpine)">
[Alpine](https://learn.microsoft.com/es-es/powershell/scripting/install/install-alpine?view=powershell-7.5#installation-steps): The only supported version for installing PowerShell 7.4+ on Alpine is Alpine 3.20. The only way to install it is by downloading the tar.gz package available on [PowerShell github](https://github.com/PowerShell/PowerShell/releases/download/v7.5.0/powershell-7.5.0-linux-musl-x64.tar.gz).
@@ -286,6 +433,7 @@ Installing PowerShell is different depending on your OS:
# Start PowerShell
pwsh
```
</Tab>
<Tab title="Linux (Debian)">
[Debian](https://learn.microsoft.com/es-es/powershell/scripting/install/install-debian?view=powershell-7.5#installation-on-debian-11-or-12-via-the-package-repository): The supported versions for installing PowerShell 7.4+ on Debian are Debian 11 and Debian 12. The recommended way to install it is by downloading the package available on PMC.
@@ -324,6 +472,7 @@ Installing PowerShell is different depending on your OS:
# Start PowerShell
pwsh
```
</Tab>
<Tab title="Linux (RHEL)">
[Rhel](https://learn.microsoft.com/es-es/powershell/scripting/install/install-rhel?view=powershell-7.5#installation-via-the-package-repository): The supported versions for installing PowerShell 7.4+ on Red Hat are RHEL 8 and RHEL 9. The recommended way to install it is by downloading the package available on PMC.
@@ -357,6 +506,7 @@ Installing PowerShell is different depending on your OS:
# Install PowerShell
sudo dnf install powershell -y
```
</Tab>
<Tab title="Docker">
[Docker](https://learn.microsoft.com/es-es/powershell/scripting/install/powershell-in-docker?view=powershell-7.5#use-powershell-in-a-container): The following command downloads the latest stable version of PowerShell:
@@ -370,6 +520,7 @@ Installing PowerShell is different depending on your OS:
```console
docker run -it mcr.microsoft.com/dotnet/sdk:9.0 pwsh
```
</Tab>
</Tabs>
### Required PowerShell Modules
@@ -386,6 +537,7 @@ Example command:
```console
python3 prowler-cli.py m365 --verbose --log-level ERROR --sp-env-auth --init-modules
```
If the modules are already installed, running this command will not cause issues—it will simply verify that the necessary modules are available.
<Note>
@@ -399,7 +551,6 @@ Install-Module -Name "ModuleName" -Scope AllUsers -Force
</Note>
#### Modules Version
- [MSAL.PS](https://www.powershellgallery.com/packages/MSAL.PS/4.32.0): Required for Exchange module via application authentication.
- [ExchangeOnlineManagement](https://www.powershellgallery.com/packages/ExchangeOnlineManagement/3.6.0) (Minimum version: 3.6.0) Required for checks across Exchange, Defender, and Purview.
- [MicrosoftTeams](https://www.powershellgallery.com/packages/MicrosoftTeams/6.6.0) (Minimum version: 6.6.0) Required for all Teams checks.
- [MSAL.PS](https://www.powershellgallery.com/packages/MSAL.PS/4.32.0): Required for Exchange module via application authentication.
- [MSAL.PS](https://www.powershellgallery.com/packages/MSAL.PS/4.32.0): Required for Exchange module via application authentication.
@@ -12,8 +12,9 @@ Government cloud accounts or tenants (Microsoft 365 Government) are currently un
Configure authentication for Microsoft 365 by following the [Microsoft 365 Authentication](/user-guide/providers/microsoft365/authentication) guide. This includes:
- Creating a Service Principal Application
- Granting required Microsoft Graph API permissions
- Registering an application in Microsoft Entra ID
- Granting all required Microsoft Graph and external API permissions
- Generating the application certificate (recommended) or client secret
- Setting up PowerShell module permissions (for full security coverage)
## Prowler App
@@ -47,25 +48,38 @@ Configure authentication for Microsoft 365 by following the [Microsoft 365 Authe
![Add Domain ID](/images/providers/add-domain-id.png)
### Step 3: Add Credentials to Prowler App
### Step 3: Select Authentication Method and Provide Credentials
1. Go to App Registration overview and copy the Client ID and Tenant ID
Prowler App now separates Microsoft 365 authentication into two app-only options. After adding the Domain ID, choose the method that matches your setup:
![App Overview](/images/providers/app-overview.png)
<img src="/images/providers/m365-auth-selection-form.png" alt="M365 authentication method selection" width="700" />
2. Go to Prowler App and paste:
#### Application Certificate Authentication (Recommended)
- Client ID
- Tenant ID
- `AZURE_CLIENT_SECRET` from the Service Principal setup
1. Copy the Application (client) ID and Tenant ID from the app registration overview page.
2. Paste both values into the Prowler App form.
3. Upload the PFX bundle or paste the Base64-encoded certificate (`M365_CERTIFICATE_CONTENT`), then click **Test Connection**.
![Prowler Cloud M365 Credentials](/images/providers/m365-credentials.png)
<img src="/images/providers/certificate-form.png" alt="M365 certificate authentication form" width="700" />
3. Click "Next"
Use this method whenever possible to avoid managing client secrets and to unlock every Microsoft 365 check, including those that require PowerShell modules.
#### Application Client Secret Authentication
1. From the app registration, copy the Application (client) ID and Tenant ID.
2. Paste both values plus the client secret into the Prowler App form.
3. Click **Test Connection** to validate the credentials.
<img src="/images/providers/secret-form.png" alt="M365 client secret authentication form" width="700" />
### Step 4: Launch the Scan
1. Review the summary, then click **Next**.
![Next Detail](/images/providers/click-next-m365.png)
4. Click "Launch Scan"
2. Click **Launch Scan** to start auditing Microsoft 365.
![Launch Scan M365](/images/providers/launch-scan.png)
@@ -83,7 +97,9 @@ PowerShell 7.4+ is required for comprehensive Microsoft 365 security coverage. I
Select an authentication method from the [Microsoft 365 Authentication](/user-guide/providers/microsoft365/authentication) guide:
- **Service Principal Application** (recommended): `--sp-env-auth`
- **Application Certificate Authentication** (recommended): `--certificate-auth`
- **Application Client Secret Authentication**: `--sp-env-auth`
- **Azure CLI Authentication**: `--az-cli-auth`
- **Interactive Browser Authentication**: `--browser-auth`
### Basic Usage
@@ -0,0 +1,491 @@
---
title: 'AWS Organizations Bulk Provisioning in Prowler'
---
Prowler offers an automated tool to discover and provision all AWS accounts within an AWS Organization. This streamlines onboarding for organizations managing multiple AWS accounts by automatically generating the configuration needed for bulk provisioning.
The tool, `aws_org_generator.py`, complements the [Bulk Provider Provisioning](./bulk-provider-provisioning) tool and is available in the Prowler repository at: [util/prowler-bulk-provisioning](https://github.com/prowler-cloud/prowler/tree/master/util/prowler-bulk-provisioning)
<Note>
Native support for bulk provisioning AWS Organizations and similar multi-account structures directly in the Prowler UI/API is on the official roadmap.
Track progress and vote for this feature at: [Bulk Provisioning in the UI/API for AWS Organizations](https://roadmap.prowler.com/p/builk-provisioning-in-the-uiapi-for-aws-organizations-and-alike)
</Note>
{/* TODO: Add screenshot of the tool in action */}
## Overview
The AWS Organizations Bulk Provisioning tool simplifies multi-account onboarding by:
* Automatically discovering all active accounts in an AWS Organization
* Generating YAML configuration files for bulk provisioning
* Supporting account filtering and custom role configurations
* Eliminating manual entry of account IDs and role ARNs
## Prerequisites
### Requirements
* Python 3.7 or higher
* AWS credentials with Organizations read access
* ProwlerRole (or custom role) deployed across all target accounts
* Prowler API key (from Prowler Cloud or self-hosted Prowler App)
* For self-hosted Prowler App, remember to [point to your API base URL](./bulk-provider-provisioning#custom-api-endpoints)
* Learn how to create API keys: [Prowler App API Keys](../providers/prowler-app-api-keys)
### Deploying ProwlerRole Across AWS Organizations
Before using the AWS Organizations generator, deploy the ProwlerRole across all accounts in the organization using CloudFormation StackSets.
<Note>
**Follow the official documentation:**
[Deploying Prowler IAM Roles Across AWS Organizations](../providers/aws/organizations#deploying-prowler-iam-roles-across-aws-organizations)
**Key points:**
* Use CloudFormation StackSets from the management account
* Deploy to all organizational units (OUs) or specific OUs
* Use an external ID for enhanced security
* Ensure the role has necessary permissions for Prowler scans
</Note>
### Installation
Clone the repository and install required dependencies:
```bash
git clone https://github.com/prowler-cloud/prowler.git
cd prowler/util/prowler-bulk-provisioning
pip install -r requirements-aws-org.txt
```
### AWS Credentials Setup
Configure AWS credentials with Organizations read access:
* **Management account credentials**, or
* **Delegated administrator account** with `organizations:ListAccounts` permission
Required IAM permissions:
```json
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"organizations:ListAccounts",
"organizations:DescribeOrganization"
],
"Resource": "*"
}
]
}
```
### Prowler API Key Setup
Configure your Prowler API key:
```bash
export PROWLER_API_KEY="pk_example-api-key"
```
To create an API key:
1. Log in to Prowler Cloud or Prowler App
2. Click **Profile** → **Account**
3. Click **Create API Key**
4. Provide a descriptive name and optionally set an expiration date
5. Copy the generated API key (it will only be shown once)
For detailed instructions, see: [Prowler App API Keys](../providers/prowler-app-api-keys)
## Basic Usage
### Generate Configuration for All Accounts
To generate a YAML configuration file for all active accounts in the organization:
```bash
python aws_org_generator.py -o aws-accounts.yaml --external-id prowler-ext-id-2024
```
This command:
1. Lists all ACTIVE accounts in the organization
2. Generates YAML entries for each account
3. Saves the configuration to `aws-accounts.yaml`
**Output:**
```
Fetching accounts from AWS Organizations...
Found 47 active accounts in organization
Generated configuration for 47 accounts
Configuration written to: aws-accounts.yaml
Next steps:
1. Review the generated file: cat aws-accounts.yaml | head -n 20
2. Run bulk provisioning: python prowler_bulk_provisioning.py aws-accounts.yaml
```
### Review Generated Configuration
Review the generated YAML configuration:
```bash
head -n 20 aws-accounts.yaml
```
**Example output:**
```yaml
- provider: aws
uid: '111111111111'
alias: Production-Account
auth_method: role
credentials:
role_arn: arn:aws:iam::111111111111:role/ProwlerRole
external_id: prowler-ext-id-2024
- provider: aws
uid: '222222222222'
alias: Development-Account
auth_method: role
credentials:
role_arn: arn:aws:iam::222222222222:role/ProwlerRole
external_id: prowler-ext-id-2024
```
### Dry Run Mode
Test the configuration without writing a file:
```bash
python aws_org_generator.py \
--external-id prowler-ext-id-2024 \
--dry-run
```
## Advanced Configuration
### Using a Specific AWS Profile
Specify an AWS profile when multiple profiles are configured:
```bash
python aws_org_generator.py \
-o aws-accounts.yaml \
--profile org-management-admin \
--external-id prowler-ext-id-2024
```
### Excluding Specific Accounts
Exclude the management account or other accounts from provisioning:
```bash
python aws_org_generator.py \
-o aws-accounts.yaml \
--external-id prowler-ext-id-2024 \
--exclude 123456789012,210987654321
```
Common exclusion scenarios:
* Management account (requires different permissions)
* Break-glass accounts (emergency access)
* Suspended or archived accounts
### Including Only Specific Accounts
Generate configuration for specific accounts only:
```bash
python aws_org_generator.py \
-o aws-accounts.yaml \
--external-id prowler-ext-id-2024 \
--include 111111111111,222222222222,333333333333
```
### Custom Role Name
Specify a custom role name if not using the default `ProwlerRole`:
```bash
python aws_org_generator.py \
-o aws-accounts.yaml \
--role-name ProwlerExecutionRole \
--external-id prowler-ext-id-2024
```
### Custom Alias Format
Customize account aliases using template variables:
```bash
# Use account name and ID
python aws_org_generator.py \
-o aws-accounts.yaml \
--alias-format "{name}-{id}" \
--external-id prowler-ext-id-2024
# Use email prefix
python aws_org_generator.py \
-o aws-accounts.yaml \
--alias-format "{email}" \
--external-id prowler-ext-id-2024
```
Available template variables:
* `{name}` - Account name
* `{id}` - Account ID
* `{email}` - Account email
### Additional Role Assumption Options
Configure optional role assumption parameters:
```bash
python aws_org_generator.py \
-o aws-accounts.yaml \
--role-name ProwlerRole \
--external-id prowler-ext-id-2024 \
--session-name prowler-scan-session \
--duration-seconds 3600
```
## Complete Workflow Example
<Steps>
<Step title="Deploy ProwlerRole Using StackSets">
1. Log in to the AWS management account
2. Open CloudFormation → StackSets
3. Create a new StackSet using the [Prowler role template](https://github.com/prowler-cloud/prowler/blob/master/permissions/templates/cloudformation/prowler-scan-role.yml)
4. Deploy to all organizational units
5. Use a unique external ID (e.g., `prowler-org-2024-abc123`)
{/* TODO: Add screenshot of CloudFormation StackSets deployment */}
</Step>
<Step title="Generate YAML Configuration">
Configure AWS credentials and generate the YAML file:
```bash
# Using management account credentials
export AWS_PROFILE=org-management
# Generate configuration
python aws_org_generator.py \
-o aws-org-accounts.yaml \
--external-id prowler-org-2024-abc123 \
--exclude 123456789012
```
**Output:**
```
Fetching accounts from AWS Organizations...
Using AWS profile: org-management
Found 47 active accounts in organization
Generated configuration for 46 accounts
Configuration written to: aws-org-accounts.yaml
Next steps:
1. Review the generated file: cat aws-org-accounts.yaml | head -n 20
2. Run bulk provisioning: python prowler_bulk_provisioning.py aws-org-accounts.yaml
```
</Step>
<Step title="Review Generated Configuration">
Verify the generated YAML configuration:
```bash
# View first 20 lines
head -n 20 aws-org-accounts.yaml
# Check for unexpected accounts
grep "uid:" aws-org-accounts.yaml
# Verify role ARNs
grep "role_arn:" aws-org-accounts.yaml | head -5
# Count accounts
grep "provider: aws" aws-org-accounts.yaml | wc -l
```
</Step>
<Step title="Run Bulk Provisioning">
Provision all accounts to Prowler Cloud or Prowler App:
```bash
# Set Prowler API key
export PROWLER_API_KEY="pk_example-api-key"
# Run bulk provisioning with connection testing
python prowler_bulk_provisioning.py aws-org-accounts.yaml
```
**With custom options:**
```bash
python prowler_bulk_provisioning.py aws-org-accounts.yaml \
--concurrency 10 \
--timeout 120
```
**Successful output:**
```
[1] ✅ Created provider (id=db9a8985-f9ec-4dd8-b5a0-e05ab3880bed)
[1] ✅ Created secret (id=466f76c6-5878-4602-a4bc-13f9522c1fd2)
[1] ✅ Connection test: Connected
[2] ✅ Created provider (id=7a99f789-0cf5-4329-8279-2d443a962676)
[2] ✅ Created secret (id=c5702180-f7c4-40fd-be0e-f6433479b126)
[2] ✅ Connection test: Connected
Done. Success: 47 Failures: 0
```
{/* TODO: Add screenshot of successful bulk provisioning output */}
</Step>
</Steps>
## Command Reference
### Full Command-Line Options
```bash
python aws_org_generator.py \
-o OUTPUT_FILE \
--role-name ROLE_NAME \
--external-id EXTERNAL_ID \
--session-name SESSION_NAME \
--duration-seconds SECONDS \
--alias-format FORMAT \
--exclude ACCOUNT_IDS \
--include ACCOUNT_IDS \
--profile AWS_PROFILE \
--region AWS_REGION \
--dry-run
```
## Troubleshooting
### Error: "No AWS credentials found"
**Solution:** Configure AWS credentials using one of these methods:
```bash
# Method 1: AWS CLI configure
aws configure
# Method 2: Environment variables
export AWS_ACCESS_KEY_ID=your-key-id
export AWS_SECRET_ACCESS_KEY=your-secret-key
# Method 3: Use AWS profile
export AWS_PROFILE=org-management
```
### Error: "Access denied to AWS Organizations API"
**Cause:** Current credentials don't have permission to list organization accounts.
**Solution:**
* Ensure management account credentials are used
* Verify IAM permissions include `organizations:ListAccounts`
* Check IAM policies for Organizations access
### Error: "AWS Organizations is not enabled"
**Cause:** The account is not part of an organization.
**Solution:** This tool requires an AWS Organization. Create one in the AWS Organizations console or use standard bulk provisioning for standalone accounts.
### No Accounts Generated After Filters
**Cause:** All accounts were filtered out by `--exclude` or `--include` options.
**Solution:** Review filter options and verify account IDs are correct:
```bash
# List all accounts in organization
aws organizations list-accounts --query "Accounts[?Status=='ACTIVE'].[Id,Name]" --output table
```
### Connection Test Failures During Bulk Provisioning
**Cause:** ProwlerRole may not be deployed correctly or credentials are invalid.
**Solution:**
* Verify StackSet deployment status in CloudFormation
* Check role trust policy includes correct external ID
* Test role assumption manually:
```bash
aws sts assume-role \
--role-arn arn:aws:iam::123456789012:role/ProwlerRole \
--role-session-name test \
--external-id prowler-ext-id-2024
```
## Security Best Practices
### Use External ID
Always use an external ID when assuming cross-account roles:
```bash
python aws_org_generator.py \
-o aws-accounts.yaml \
--external-id $(uuidgen | tr '[:upper:]' '[:lower:]')
```
The external ID must match the one configured in the ProwlerRole trust policy across all accounts.
### Exclude Sensitive Accounts
Exclude accounts that shouldn't be scanned or require special handling:
```bash
python aws_org_generator.py \
-o aws-accounts.yaml \
--external-id prowler-ext-id \
--exclude 123456789012,111111111111 # management, break-glass accounts
```
### Review Generated Configuration
Always review the generated YAML before provisioning:
```bash
# Check for unexpected accounts
grep "uid:" aws-org-accounts.yaml
# Verify role ARNs
grep "role_arn:" aws-org-accounts.yaml | head -5
# Count accounts
grep "provider: aws" aws-org-accounts.yaml | wc -l
```
## Next Steps
<Columns cols={2}>
<Card title="Bulk Provider Provisioning" icon="terminal" href="/user-guide/tutorials/bulk-provider-provisioning">
Learn how to bulk provision providers in Prowler.
</Card>
<Card title="Prowler App" icon="pen-to-square" href="/user-guide/tutorials/prowler-app">
Detailed instructions on how to use Prowler.
</Card>
</Columns>
@@ -17,14 +17,18 @@ The Bulk Provider Provisioning tool automates the creation of cloud providers in
* Testing connections to verify successful authentication
* Processing multiple providers concurrently for efficiency
<Tip>
**Using AWS Organizations?** For organizations with many AWS accounts, use the automated [AWS Organizations Bulk Provisioning](./aws-organizations-bulk-provisioning) tool to automatically discover and generate configuration for all accounts in your organization.
</Tip>
## Prerequisites
### Requirements
* Python 3.7 or higher
* Prowler API token (from Prowler Cloud or self-hosted Prowler App)
* Prowler API key (from Prowler Cloud or self-hosted Prowler App)
* For self-hosted Prowler App, remember to [point to your API base URL](#custom-api-endpoints)
* Learn how to create API keys: [Prowler App API Keys](../providers/prowler-app-api-keys)
* Authentication credentials for target cloud providers
### Installation
@@ -39,28 +43,21 @@ pip install -r requirements.txt
### Authentication Setup
Configure your Prowler API token:
Configure your Prowler API key:
```bash
export PROWLER_API_TOKEN="your-prowler-api-token"
export PROWLER_API_KEY="pk_example-api-key"
```
To obtain an API token programmatically:
To create an API key:
```bash
export PROWLER_API_TOKEN=$(curl --location 'https://api.prowler.com/api/v1/tokens' \
--header 'Content-Type: application/vnd.api+json' \
--header 'Accept: application/vnd.api+json' \
--data-raw '{
"data": {
"type": "tokens",
"attributes": {
"email": "your@email.com",
"password": "your-password"
}
}
}' | jq -r .data.attributes.access)
```
1. Log in to Prowler Cloud or Prowler App
2. Click **Profile** → **Account**
3. Click **Create API Key**
4. Provide a descriptive name and optionally set an expiration date
5. Copy the generated API key (it will only be shown once)
For detailed instructions, see: [Prowler App API Keys](../providers/prowler-app-api-keys)
## Configuration File Structure
@@ -340,11 +337,11 @@ Done. Success: 2 Failures: 0
## Troubleshooting
### Invalid API Token
### Invalid API Key
```
Error: 401 Unauthorized
Solution: Verify your PROWLER_API_TOKEN or --token parameter
Solution: Verify your PROWLER_API_KEY environment variable or --api-key parameter
```
### Network Timeouts
+18 -7
View File
@@ -207,17 +207,28 @@ If you are adding an **EKS**, **GKE**, **AKS** or external cluster, follow these
4. Now you can add the modified `kubeconfig` in Prowler Cloud. Then test the connection.
### **Step 4.5: M365 Credentials**
For M365, you must enter your Domain ID and choose the authentication method you want to use:
Enter your Microsoft Entra domain (primary tenant domain) and select how the provider should authenticate. Prowler App guides you through the process:
- Service Principal Authentication (Recommended)
<img src="/images/providers/m365-auth-selection-form.png" alt="M365 authentication method selection" width="700" />
<Warning>
User authentication with M365_USER and M365_PASSWORD is deprecated and will be removed.
- **Application Client Secret Authentication**: Client secret-based authentication.
- **Application Certificate Authentication (Recommended)**: Certificate-based authentication. Recommended by Microsoft.
</Warning>
For full setup instructions and requirements, check the [Microsoft 365 provider requirements](/user-guide/providers/microsoft365/getting-started-m365).
#### Step 4.5.1: Application Client Secret Authentication
1. **Enter your tenant ID**: This is the unique identifier for your Microsoft Entra ID directory.
2. **Enter your application (client) ID**: This is the unique identifier assigned to your app registration in Microsoft Entra ID.
3. **Enter your client secret**: This is the secret key used to authenticate your application.
<img src="/images/m365-credentials.png" alt="Prowler Cloud M365 Credentials" width="700" />
<img src="/images/providers/secret-form.png" alt="M365 client secret authentication form" width="700" />
For full setup instructions, certificate generation commands, and required permissions, review the [Microsoft 365 provider requirements](/user-guide/providers/microsoft365/getting-started-m365).
#### Step 4.5.2: Application Certificate Authentication (Recommended)
1. **Enter your tenant ID**: This is the unique identifier for your Microsoft Entra ID directory.
2. **Enter your application (client) ID**: This is the unique identifier assigned to your app registration in Microsoft Entra ID.
3. **Upload your certificate file content**: This is the **Base64** encoded certificate content used to authenticate your application.
<img src="/images/providers/certificate-form.png" alt="M365 certificate authentication form" width="700" />
### **Step 4.6: GitHub Credentials**
For GitHub, you must enter your Provider ID (username or organization name) and choose the authentication method you want to use:
+1 -1
View File
@@ -1,3 +1,3 @@
PROWLER_APP_API_KEY="pk_your_api_key_here"
PROWLER_API_BASE_URL="https://api.prowler.com"
PROWLER_MCP_MODE="stdio"
PROWLER_MCP_TRANSPORT_MODE="stdio"
+2 -1
View File
@@ -13,4 +13,5 @@ All notable changes to the **Prowler MCP Server** are documented in this file.
- Add new MCP Server for Prowler Documentation [(#8795)](https://github.com/prowler-cloud/prowler/pull/8795)
- API key support for STDIO mode and enhanced HTTP mode authentication [(#8823)](https://github.com/prowler-cloud/prowler/pull/8823)
- Add health check endpoint [(#8905)](https://github.com/prowler-cloud/prowler/pull/8905)
- Update Prowler Documentation MCP Server to use Mintlify API [(#8915)](https://github.com/prowler-cloud/prowler/pull/8915)
- Update Prowler Documentation MCP Server to use Mintlify API [(#8916)](https://github.com/prowler-cloud/prowler/pull/8916)
- Add custom production deployment using uvicorn [(#8958)](https://github.com/prowler-cloud/prowler/pull/8958)
+6 -7
View File
@@ -47,13 +47,12 @@ COPY --from=builder --chown=prowler /app/prowler_mcp_server /app/prowler_mcp_ser
# 3. Project metadata file (may be needed by some packages at runtime)
COPY --from=builder --chown=prowler /app/pyproject.toml /app/pyproject.toml
# 4. Entrypoint helper script for selecting runtime mode
COPY --from=builder --chown=prowler /app/entrypoint.sh /app/entrypoint.sh
# Add virtual environment to PATH so prowler-mcp command is available
ENV PATH="/app/.venv/bin:$PATH"
# Entry point for the MCP server
# Default to stdio mode, but allow overriding via command arguments
# Examples:
# docker run -p 8000:8000 prowler-mcp --transport http --host 0.0.0.0 --port 8000
# docker run prowler-mcp --transport stdio
ENTRYPOINT ["prowler-mcp"]
CMD ["--transport", "stdio"]
# Entrypoint wrapper defaults to CLI mode; override with `uvicorn` to run ASGI app
ENTRYPOINT ["/app/entrypoint.sh"]
CMD ["main"]
+17 -3
View File
@@ -144,11 +144,11 @@ uv run prowler-mcp --transport http
uv run prowler-mcp --transport http --host 0.0.0.0 --port 8080
```
For self-deployed MCP remote server, you can use also configure the server to use a custom API base URL with the environment variable `PROWLER_API_BASE_URL`; and the transport mode with the environment variable `PROWLER_MCP_MODE`.
For self-deployed MCP remote server, you can also configure the server to use a custom API base URL with the environment variable `PROWLER_API_BASE_URL`; and the transport mode with the environment variable `PROWLER_MCP_TRANSPORT_MODE`.
```bash
export PROWLER_API_BASE_URL="https://api.prowler.com"
export PROWLER_MCP_MODE="http"
export PROWLER_MCP_TRANSPORT_MODE="http"
```
### Using uv directly
@@ -190,6 +190,16 @@ docker run --rm --env-file ./.env -p 8000:8000 -it prowler-mcp --transport http
docker run --rm --env-file ./.env -p 8080:8080 -it prowler-mcp --transport http --host 0.0.0.0 --port 8080
```
## Production Deployment
For production deployments that require customization, it is recommended to use the ASGI application that can be found in `prowler_mcp_server.server`. This can be run with uvicorn:
```bash
uvicorn prowler_mcp_server.server:app --host 0.0.0.0 --port 8000
```
For more details on production deployment options, see the [FastMCP production deployment guide](https://gofastmcp.com/deployment/http#production-deployment) and [uvicorn settings](https://www.uvicorn.org/settings/).
## Command Line Arguments
The Prowler MCP server supports the following command line arguments:
@@ -482,6 +492,10 @@ If you want to have it globally available, add the example server to Cursor's co
If you want to have it only for the current project, add the example server to the project's root in a new `.cursor/mcp.json` file.
## Documentation
For detailed documentation about the Prowler MCP Server, including guides, tutorials, and use cases, visit the [official Prowler documentation](https://docs.prowler.com).
## License
This project follows the repositorys main license. See the [LICENSE](../LICENSE) file at the repository root.
This project follows the repository's main license. See the [LICENSE](../LICENSE) file at the repository root.
+50
View File
@@ -0,0 +1,50 @@
#!/bin/sh
# Container entrypoint: selects between running the prowler-mcp CLI ("main",
# the default) and serving the ASGI application with uvicorn ("uvicorn").
# All remaining arguments are forwarded unchanged to the selected command.
set -eu
# Print usage help. The heredoc delimiter is quoted ('EOF'), so the text is
# emitted verbatim with no parameter expansion.
usage() {
cat <<'EOF'
Usage: ./entrypoint.sh [main|uvicorn] [args...]
Modes:
main (default) Run prowler-mcp
uvicorn Run uvicorn prowler_mcp_server.server:app
All additional arguments are forwarded to the selected command.
EOF
}
# Default to CLI mode when invoked with no arguments.
mode="main"
if [ "$#" -gt 0 ]; then
case "$1" in
main|cli)
mode="main"
shift
;;
uvicorn|asgi)
mode="uvicorn"
shift
;;
-h|--help)
usage
exit 0
;;
*)
# Unrecognized first argument: fall back to CLI mode and deliberately do
# NOT shift, so the argument is still forwarded to prowler-mcp below.
mode="main"
;;
esac
fi
case "$mode" in
main)
# exec replaces this shell so the server receives signals directly.
exec prowler-mcp "$@"
;;
uvicorn)
# Force HTTP transport mode before handing off to the ASGI server.
export PROWLER_MCP_TRANSPORT_MODE="http"
exec uvicorn prowler_mcp_server.server:app "$@"
;;
*)
# Defensive: unreachable, since "$mode" is only ever "main" or "uvicorn".
usage
exit 1
;;
esac
+18 -7
View File
@@ -1,10 +1,8 @@
import argparse
import asyncio
import os
import sys
from prowler_mcp_server.lib.logger import logger
from prowler_mcp_server.server import setup_main_server
def parse_arguments():
@@ -13,7 +11,7 @@ def parse_arguments():
parser.add_argument(
"--transport",
choices=["stdio", "http"],
default=os.getenv("PROWLER_MCP_MODE", "stdio"),
default=None,
help="Transport method (default: stdio)",
)
parser.add_argument(
@@ -35,13 +33,26 @@ def main():
try:
args = parse_arguments()
# Set up server with configuration
prowler_mcp_server = asyncio.run(setup_main_server(transport=args.transport))
print(f"args.transport: {args.transport}")
if args.transport is None:
args.transport = os.getenv("PROWLER_MCP_TRANSPORT_MODE", "stdio")
else:
os.environ["PROWLER_MCP_TRANSPORT_MODE"] = args.transport
from prowler_mcp_server.server import prowler_mcp_server
if args.transport == "stdio":
prowler_mcp_server.run(transport="stdio")
prowler_mcp_server.run(transport=args.transport, show_banner=False)
elif args.transport == "http":
prowler_mcp_server.run(transport="http", host=args.host, port=args.port)
prowler_mcp_server.run(
transport=args.transport,
host=args.host,
port=args.port,
show_banner=False,
)
else:
logger.error(f"Invalid transport: {args.transport}")
except KeyboardInterrupt:
logger.info("Shutting down Prowler MCP server...")
@@ -14,7 +14,7 @@ class ProwlerAppAuth:
def __init__(
self,
mode: str = os.getenv("PROWLER_MCP_MODE", "stdio"),
mode: str = os.getenv("PROWLER_MCP_TRANSPORT_MODE", "stdio"),
base_url: str = os.getenv("PROWLER_API_BASE_URL", "https://api.prowler.com"),
):
self.base_url = base_url.rstrip("/")
@@ -33,7 +33,14 @@ class ProwlerAppAuth:
raise ValueError("Prowler App API key format is incorrect")
def _parse_jwt(self, token: str) -> Optional[Dict]:
"""Parse JWT token and return payload, similar to JS parseJwt function."""
"""Parse JWT token and return payload
Args:
token: JWT token to parse
Returns:
Parsed JWT payload, or None if parsing fails
"""
if not token:
return None
+26 -13
View File
@@ -1,16 +1,16 @@
import asyncio
import os
from fastmcp import FastMCP
from prowler_mcp_server import __version__
from prowler_mcp_server.lib.logger import logger
from starlette.responses import JSONResponse
prowler_mcp_server = FastMCP("prowler-mcp-server")
async def setup_main_server(transport: str) -> FastMCP:
async def setup_main_server():
"""Set up the main Prowler MCP server with all available integrations."""
# Initialize main Prowler MCP server
prowler_mcp_server = FastMCP("prowler-mcp-server")
# Import Prowler Hub tools with prowler_hub_ prefix
try:
logger.info("Importing Prowler Hub server...")
@@ -21,12 +21,10 @@ async def setup_main_server(transport: str) -> FastMCP:
except Exception as e:
logger.error(f"Failed to import Prowler Hub server: {e}")
# Import Prowler App tools with prowler_app_ prefix
try:
logger.info("Importing Prowler App server...")
if os.getenv("PROWLER_MCP_MODE", None) is None:
os.environ["PROWLER_MCP_MODE"] = transport
if not os.path.exists(
os.path.join(os.path.dirname(__file__), "prowler_app", "server.py")
):
@@ -44,6 +42,7 @@ async def setup_main_server(transport: str) -> FastMCP:
except Exception as e:
logger.error(f"Failed to import Prowler App server: {e}")
# Import Prowler Documentation tools with prowler_docs_ prefix
try:
logger.info("Importing Prowler Documentation server...")
from prowler_mcp_server.prowler_documentation.server import docs_mcp_server
@@ -53,9 +52,23 @@ async def setup_main_server(transport: str) -> FastMCP:
except Exception as e:
logger.error(f"Failed to import Prowler Documentation server: {e}")
# Add health check endpoint
@prowler_mcp_server.custom_route("/health", methods=["GET"])
async def health_check(request):
return JSONResponse({"status": "healthy", "service": "prowler-mcp-server"})
return prowler_mcp_server
# Add health check endpoint
@prowler_mcp_server.custom_route("/health", methods=["GET"])
async def health_check(request) -> JSONResponse:
"""Health check endpoint."""
return JSONResponse(
{"status": "healthy", "service": "prowler-mcp-server", "version": __version__}
)
# Get or create the event loop
try:
loop = asyncio.get_running_loop()
# If we have a running loop, schedule the setup as a task
loop.create_task(setup_main_server())
except RuntimeError:
# No running loop, use asyncio.run (for standalone execution)
asyncio.run(setup_main_server())
app = prowler_mcp_server.http_app()
Generated
+35 -324
View File
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.2.0 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -834,21 +834,6 @@ typing-extensions = ">=4.6.0"
[package.extras]
aio = ["azure-core[aio] (>=1.30.0)"]
[[package]]
name = "babel"
version = "2.17.0"
description = "Internationalization utilities"
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"},
{file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"},
]
[package.extras]
dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""]
[[package]]
name = "bandit"
version = "1.8.3"
@@ -994,7 +979,7 @@ version = "2025.7.14"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.7"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2"},
{file = "certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995"},
@@ -1126,7 +1111,7 @@ version = "3.4.2"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"},
{file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"},
@@ -1240,7 +1225,7 @@ version = "8.1.8"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
markers = "python_version < \"3.10\""
files = [
{file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
@@ -1256,7 +1241,7 @@ version = "8.2.1"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.10"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
markers = "python_version >= \"3.10\""
files = [
{file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"},
@@ -1290,7 +1275,7 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@@ -1921,59 +1906,6 @@ files = [
{file = "frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f"},
]
[[package]]
name = "ghp-import"
version = "2.1.0"
description = "Copy your docs directly to the gh-pages branch."
optional = false
python-versions = "*"
groups = ["docs"]
files = [
{file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"},
{file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"},
]
[package.dependencies]
python-dateutil = ">=2.8.1"
[package.extras]
dev = ["flake8", "markdown", "twine", "wheel"]
[[package]]
name = "gitdb"
version = "4.0.12"
description = "Git Object Database"
optional = false
python-versions = ">=3.7"
groups = ["docs"]
files = [
{file = "gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf"},
{file = "gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571"},
]
[package.dependencies]
smmap = ">=3.0.1,<6"
[[package]]
name = "gitpython"
version = "3.1.45"
description = "GitPython is a Python library used to interact with Git repositories"
optional = false
python-versions = ">=3.7"
groups = ["docs"]
files = [
{file = "gitpython-3.1.45-py3-none-any.whl", hash = "sha256:8908cb2e02fb3b93b7eb0f2827125cb699869470432cc885f019b8fd0fccff77"},
{file = "gitpython-3.1.45.tar.gz", hash = "sha256:85b0ee964ceddf211c41b9f27a49086010a190fd8132a24e21f362a4b36a791c"},
]
[package.dependencies]
gitdb = ">=4.0.1,<5"
typing-extensions = {version = ">=3.10.0.2", markers = "python_version < \"3.10\""}
[package.extras]
doc = ["sphinx (>=7.1.2,<7.2)", "sphinx-autodoc-typehints", "sphinx_rtd_theme"]
test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock ; python_version < \"3.8\"", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions ; python_version < \"3.11\""]
[[package]]
name = "google-api-core"
version = "2.25.1"
@@ -2258,7 +2190,7 @@ version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
@@ -2273,12 +2205,12 @@ version = "8.7.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"},
{file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"},
]
markers = {dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""}
markers = {dev = "python_version < \"3.10\""}
[package.dependencies]
zipp = ">=3.20"
@@ -2350,7 +2282,7 @@ version = "3.1.6"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"},
{file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"},
@@ -2416,6 +2348,8 @@ python-versions = "*"
groups = ["dev"]
files = [
{file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"},
{file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"},
{file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"},
]
[package.dependencies]
@@ -2546,7 +2480,7 @@ version = "3.9"
description = "Python implementation of John Gruber's Markdown."
optional = false
python-versions = ">=3.9"
groups = ["main", "docs"]
groups = ["main"]
files = [
{file = "markdown-3.9-py3-none-any.whl", hash = "sha256:9f4d91ed810864ea88a6f32c07ba8bee1346c0cc1f6b1f9f6c822f2a9667d280"},
{file = "markdown-3.9.tar.gz", hash = "sha256:d2900fe1782bd33bdbbd56859defef70c2e78fc46668f8eb9df3128138f2cb6a"},
@@ -2590,7 +2524,7 @@ version = "3.0.2"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.9"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
@@ -2699,18 +2633,6 @@ files = [
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
[[package]]
name = "mergedeep"
version = "1.3.4"
description = "A deep merge function for 🐍."
optional = false
python-versions = ">=3.6"
groups = ["docs"]
files = [
{file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"},
{file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"},
]
[[package]]
name = "microsoft-kiota-abstractions"
version = "1.9.2"
@@ -2825,116 +2747,6 @@ files = [
[package.dependencies]
microsoft-kiota-abstractions = ">=1.9.2,<1.10.0"
[[package]]
name = "mkdocs"
version = "1.6.1"
description = "Project documentation with Markdown."
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"},
{file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"},
]
[package.dependencies]
click = ">=7.0"
colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""}
ghp-import = ">=1.0"
importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
jinja2 = ">=2.11.1"
markdown = ">=3.3.6"
markupsafe = ">=2.0.1"
mergedeep = ">=1.3.4"
mkdocs-get-deps = ">=0.2.0"
packaging = ">=20.5"
pathspec = ">=0.11.1"
pyyaml = ">=5.1"
pyyaml-env-tag = ">=0.1"
watchdog = ">=2.0"
[package.extras]
i18n = ["babel (>=2.9.0)"]
min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4) ; platform_system == \"Windows\"", "ghp-import (==1.0)", "importlib-metadata (==4.4) ; python_version < \"3.10\"", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"]
[[package]]
name = "mkdocs-get-deps"
version = "0.2.0"
description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file"
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"},
{file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"},
]
[package.dependencies]
importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""}
mergedeep = ">=1.3.4"
platformdirs = ">=2.2.0"
pyyaml = ">=5.1"
[[package]]
name = "mkdocs-git-revision-date-localized-plugin"
version = "1.4.1"
description = "Mkdocs plugin that enables displaying the localized date of the last git modification of a markdown file."
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "mkdocs_git_revision_date_localized_plugin-1.4.1-py3-none-any.whl", hash = "sha256:bb1eca7f156e0c8a587167662923d76efed7f7e0c06b84471aa5ae72a744a434"},
{file = "mkdocs_git_revision_date_localized_plugin-1.4.1.tar.gz", hash = "sha256:364d7c4c45c4f333c750e34bc298ac685a7a8bf9b7b52890d52b2f90f1812c4b"},
]
[package.dependencies]
babel = ">=2.7.0"
gitpython = ">=3.1.44"
mkdocs = ">=1.0"
pytz = ">=2025.1"
[[package]]
name = "mkdocs-material"
version = "9.6.5"
description = "Documentation that simply works"
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "mkdocs_material-9.6.5-py3-none-any.whl", hash = "sha256:aad3e6fb860c20870f75fb2a69ef901f1be727891e41adb60b753efcae19453b"},
{file = "mkdocs_material-9.6.5.tar.gz", hash = "sha256:b714679a8c91b0ffe2188e11ed58c44d2523e9c2ae26a29cc652fa7478faa21f"},
]
[package.dependencies]
babel = ">=2.10,<3.0"
colorama = ">=0.4,<1.0"
jinja2 = ">=3.0,<4.0"
markdown = ">=3.2,<4.0"
mkdocs = ">=1.6,<2.0"
mkdocs-material-extensions = ">=1.3,<2.0"
paginate = ">=0.5,<1.0"
pygments = ">=2.16,<3.0"
pymdown-extensions = ">=10.2,<11.0"
regex = ">=2022.4"
requests = ">=2.26,<3.0"
[package.extras]
git = ["mkdocs-git-committers-plugin-2 (>=1.1,<3)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"]
imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"]
recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"]
[[package]]
name = "mkdocs-material-extensions"
version = "1.3.1"
description = "Extension pack for Python Markdown and MkDocs Material."
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"},
{file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"},
]
[[package]]
name = "mock"
version = "5.2.0"
@@ -3582,28 +3394,12 @@ version = "25.0"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
{file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
]
[[package]]
name = "paginate"
version = "0.5.7"
description = "Divides large result sets into pages for easier browsing"
optional = false
python-versions = "*"
groups = ["docs"]
files = [
{file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"},
{file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"},
]
[package.extras]
dev = ["pytest", "tox"]
lint = ["black"]
[[package]]
name = "pandas"
version = "2.2.3"
@@ -3709,7 +3505,7 @@ version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
groups = ["dev", "docs"]
groups = ["dev"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
@@ -3736,7 +3532,7 @@ version = "4.3.8"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.9"
groups = ["dev", "docs"]
groups = ["dev"]
files = [
{file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"},
{file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"},
@@ -4264,7 +4060,7 @@ version = "2.19.2"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.8"
groups = ["dev", "docs"]
groups = ["dev"]
files = [
{file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
{file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
@@ -4325,25 +4121,6 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""
spelling = ["pyenchant (>=3.2,<4.0)"]
testutils = ["gitpython (>3)"]
[[package]]
name = "pymdown-extensions"
version = "10.16"
description = "Extension pack for Python Markdown."
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "pymdown_extensions-10.16-py3-none-any.whl", hash = "sha256:f5dd064a4db588cb2d95229fc4ee63a1b16cc8b4d0e6145c0899ed8723da1df2"},
{file = "pymdown_extensions-10.16.tar.gz", hash = "sha256:71dac4fca63fabeffd3eb9038b756161a33ec6e8d230853d3cecf562155ab3de"},
]
[package.dependencies]
markdown = ">=3.6"
pyyaml = "*"
[package.extras]
extra = ["pygments (>=2.19.1)"]
[[package]]
name = "pynacl"
version = "1.5.0"
@@ -4509,7 +4286,7 @@ version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
@@ -4524,7 +4301,7 @@ version = "2025.1"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
groups = ["main", "docs"]
groups = ["main"]
files = [
{file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"},
{file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"},
@@ -4567,7 +4344,7 @@ version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
@@ -4624,21 +4401,6 @@ files = [
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]
[[package]]
name = "pyyaml-env-tag"
version = "1.1"
description = "A custom YAML tag for referencing environment variables in YAML files."
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04"},
{file = "pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff"},
]
[package.dependencies]
pyyaml = "*"
[[package]]
name = "referencing"
version = "0.36.2"
@@ -4662,7 +4424,7 @@ version = "2024.11.6"
description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.8"
groups = ["dev", "docs"]
groups = ["dev"]
files = [
{file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"},
{file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"},
@@ -4766,7 +4528,7 @@ version = "2.32.4"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"},
{file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"},
@@ -5085,6 +4847,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -5093,6 +4856,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -5101,6 +4865,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -5109,6 +4874,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -5117,6 +4883,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -5268,7 +5035,7 @@ version = "1.17.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
@@ -5289,18 +5056,6 @@ files = [
[package.extras]
optional = ["SQLAlchemy (>=1.4,<3)", "aiodns (>1.0)", "aiohttp (>=3.7.3,<4)", "boto3 (<=2)", "websocket-client (>=1,<2)", "websockets (>=9.1,<15)"]
[[package]]
name = "smmap"
version = "5.0.2"
description = "A pure Python implementation of a sliding window memory map manager"
optional = false
python-versions = ">=3.7"
groups = ["docs"]
files = [
{file = "smmap-5.0.2-py3-none-any.whl", hash = "sha256:b30115f0def7d7531d22a0fb6502488d879e75b260a9db4d0819cfb25403af5e"},
{file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"},
]
[[package]]
name = "sniffio"
version = "1.3.1"
@@ -5474,12 +5229,11 @@ version = "4.14.1"
description = "Backported and Experimental Type Hints for Python 3.9+"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"},
{file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"},
]
markers = {docs = "python_version < \"3.10\""}
[[package]]
name = "typing-inspection"
@@ -5544,7 +5298,7 @@ version = "1.26.20"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
markers = "python_version < \"3.10\""
files = [
{file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"},
@@ -5562,7 +5316,7 @@ version = "2.5.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.9"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
markers = "python_version >= \"3.10\""
files = [
{file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"},
@@ -5611,49 +5365,6 @@ files = [
[package.dependencies]
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
[[package]]
name = "watchdog"
version = "6.0.0"
description = "Filesystem events monitoring"
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"},
{file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"},
{file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"},
{file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"},
{file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"},
{file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"},
{file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"},
{file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"},
{file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"},
{file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"},
{file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"},
{file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"},
{file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"},
{file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"},
{file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"},
{file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"},
{file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"},
{file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"},
{file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"},
{file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"},
{file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"},
{file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"},
{file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"},
{file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"},
{file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"},
{file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"},
{file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"},
{file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"},
{file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"},
{file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"},
]
[package.extras]
watchmedo = ["PyYAML (>=3.10)"]
[[package]]
name = "websocket-client"
version = "1.8.0"
@@ -5927,12 +5638,12 @@ version = "3.23.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev", "docs"]
groups = ["main", "dev"]
files = [
{file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"},
{file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"},
]
markers = {dev = "python_version < \"3.10\"", docs = "python_version < \"3.10\""}
markers = {dev = "python_version < \"3.10\""}
[package.extras]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
@@ -5945,4 +5656,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">3.9.1,<3.13"
content-hash = "c2fb8567f1a6be319ae73f8c3a30e7b5be6f6fc65deee567d2a9e09eadd984c9"
content-hash = "ea79d82b4e255ec4604f440a507da6dac38b57af93356761ac793678aa615cf5"
+4
View File
@@ -32,10 +32,14 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Update AWS AppStream service metadata to new format [(#8789)](https://github.com/prowler-cloud/prowler/pull/8789)
- Update AWS API Gateway service metadata to new format [(#8788)](https://github.com/prowler-cloud/prowler/pull/8788)
- Update AWS Athena service metadata to new format [(#8790)](https://github.com/prowler-cloud/prowler/pull/8790)
- Update AWS CloudTrail service metadata to new format [(#8831)](https://github.com/prowler-cloud/prowler/pull/8831)
- Update AWS Auto Scaling service metadata to new format [(#8824)](https://github.com/prowler-cloud/prowler/pull/8824)
- Update AWS Backup service metadata to new format [(#8826)](https://github.com/prowler-cloud/prowler/pull/8826)
- Update AWS CloudFormation service metadata to new format [(#8828)](https://github.com/prowler-cloud/prowler/pull/8828)
- Update AWS Lambda service metadata to new format [(#8825)](https://github.com/prowler-cloud/prowler/pull/8825)
- Update AWS DLM service metadata to new format [(#8860)](https://github.com/prowler-cloud/prowler/pull/8860)
- Update AWS DMS service metadata to new format [(#8861)](https://github.com/prowler-cloud/prowler/pull/8861)
- Update AWS Directory Service service metadata to new format [(#8859)](https://github.com/prowler-cloud/prowler/pull/8859)
- Update AWS CloudFront service metadata to new format [(#8829)](https://github.com/prowler-cloud/prowler/pull/8829)
- Deprecate user authentication for M365 provider [(#8865)](https://github.com/prowler-cloud/prowler/pull/8865)
- Update AWS EFS service metadata to new format [(#8889)](https://github.com/prowler-cloud/prowler/pull/8889)
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
@@ -819,18 +819,6 @@
"aws-us-gov": []
}
},
"apptest": {
"regions": {
"aws": [
"ap-southeast-2",
"eu-central-1",
"sa-east-1",
"us-east-1"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"aps": {
"regions": {
"aws": [
@@ -8723,6 +8711,7 @@
"ap-southeast-5",
"ca-central-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
"eu-south-2",
"eu-west-1",
@@ -9207,11 +9196,13 @@
"ap-east-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-5",
"ap-southeast-7",
"ca-central-1",
"eu-central-1",
@@ -12436,7 +12427,12 @@
"workspaces-instances": {
"regions": {
"aws": [
"ap-northeast-2"
"ap-east-1",
"ap-northeast-2",
"ap-southeast-5",
"eu-south-2",
"me-central-1",
"us-east-2"
],
"aws-cn": [],
"aws-us-gov": []
@@ -1,33 +1,38 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_bucket_requires_mfa_delete",
"CheckTitle": "Ensure the S3 bucket CloudTrail bucket requires MFA delete",
"CheckTitle": "CloudTrail trail S3 bucket has MFA delete enabled",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure the S3 bucket CloudTrail bucket requires MFA",
"Risk": "If the S3 bucket CloudTrail bucket does not require MFA, it can be deleted by an attacker.",
"Description": "**CloudTrail log buckets** for actively logging trails are evaluated for **MFA Delete** on the associated S3 bucket. The assessment determines whether `MFA Delete` is configured on the in-account log bucket; *if the bucket resides in another account, its configuration should be verified separately*.",
"Risk": "Without **MFA Delete**, stolen or over-privileged credentials can permanently delete log versions or change versioning, compromising log **integrity** and **availability**. This enables attacker cover-ups, hinders **forensics**, and weakens evidence for investigations.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiFactorAuthenticationDelete.html",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/CloudTrail/cloudtrail-bucket-mfa-delete-enabled.html"
],
"Remediation": {
"Code": {
"CLI": "aws s3api put-bucket-versioning --bucket DOC-EXAMPLE-BUCKET1 --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa \"SERIAL 123456\"",
"CLI": "aws s3api put-bucket-versioning --bucket <CLOUDTRAIL_BUCKET_NAME> --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa \"<MFA_SERIAL> <MFA_CODE>\"",
"NativeIaC": "",
"Other": "",
"Other": "1. Sign in to the AWS Management Console as the root user with MFA enabled\n2. Open AWS CloudShell (from the top navigation bar)\n3. Run:\n ```bash\n aws s3api put-bucket-versioning --bucket <CLOUDTRAIL_BUCKET_NAME> --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa \"<MFA_SERIAL> <MFA_CODE>\"\n ```",
"Terraform": ""
},
"Recommendation": {
"Text": "Configure MFA Delete for the S3 bucket CloudTrail bucket",
"Url": "https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiFactorAuthenticationDelete.html"
"Text": "Enable `MFA Delete` on the CloudTrail log bucket with versioning enabled. Enforce **least privilege** so only tightly controlled identities can delete or alter logs, and require MFA for such actions. Apply **defense in depth** using a dedicated logging account and log file integrity validation.",
"Url": "https://hub.prowler.com/check/cloudtrail_bucket_requires_mfa_delete"
}
},
"Categories": [
"identity-access",
"forensics-ready"
],
"DependsOn": [],
@@ -1,35 +1,39 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_cloudwatch_logging_enabled",
"CheckTitle": "Ensure CloudTrail trails are integrated with CloudWatch Logs",
"CheckTitle": "CloudTrail trail has delivered logs to CloudWatch Logs in the last 24 hours",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "low",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail trails are integrated with CloudWatch Logs",
"Risk": "Sending CloudTrail logs to CloudWatch Logs will facilitate real-time and historic activity logging based on user, API, resource, and IP address, and provides opportunity to establish alarms and notifications for anomalous or sensitivity account activity.",
"Description": "**CloudTrail trails** are configured to send events to **CloudWatch Logs**, and show recent delivery within the last `24h`. Trails without integration or without recent CloudWatch delivery are identified, across single-Region and multi-Region trails.",
"Risk": "Missing or stale CloudWatch delivery weakens visibility and delays detection, impacting confidentiality and integrity. Adversaries can:\n- Hide **privilege escalation**\n- Perform unauthorized **resource changes**\n- Exfiltrate data via API misuse",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.prowler.com/checks/aws/logging-policies/logging_4#aws-console",
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/send-cloudtrail-events-to-cloudwatch-logs.html"
],
"Remediation": {
"Code": {
"CLI": "aws cloudtrail update-trail --name <trail_name> --cloudwatch-logs-log-group- arn <cloudtrail_log_group_arn> --cloudwatch-logs-role-arn <cloudtrail_cloudwatchLogs_role_arn>",
"NativeIaC": "",
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_4#aws-console",
"Terraform": ""
"CLI": "aws cloudtrail update-trail --name <trail_name> --cloud-watch-logs-log-group-arn <cloudwatch_log_group_arn> --cloud-watch-logs-role-arn <cloudwatch_logs_role_arn>",
"NativeIaC": "```yaml\n# CloudFormation: enable CloudTrail delivery to CloudWatch Logs\nResources:\n <example_resource_name>:\n Type: AWS::CloudTrail::Trail\n Properties:\n S3BucketName: \"<example_resource_name>\"\n CloudWatchLogsLogGroupArn: \"<cloudwatch_log_group_arn>\" # CRITICAL: sends CloudTrail events to CloudWatch Logs\n CloudWatchLogsRoleArn: \"<cloudwatch_logs_role_arn>\" # CRITICAL: role CloudTrail assumes to deliver events\n```",
"Other": "1. In AWS Console, go to CloudTrail > Trails and select the trail\n2. In the CloudWatch Logs section, click Edit\n3. Set CloudWatch Logs to Enabled\n4. Choose an existing Log group (or create new) and select an IAM role with permissions for CreateLogStream/PutLogEvents\n5. Click Save changes\n6. After a few minutes, verify events appear in the chosen CloudWatch Logs log group",
"Terraform": "```hcl\n# Terraform: enable CloudTrail delivery to CloudWatch Logs\nresource \"aws_cloudtrail\" \"<example_resource_name>\" {\n name = \"<example_resource_name>\"\n s3_bucket_name = \"<example_resource_name>\"\n cloud_watch_logs_group_arn = \"<cloudwatch_log_group_arn>\" # CRITICAL: sends CloudTrail events to CloudWatch Logs\n cloud_watch_logs_role_arn = \"<cloudwatch_logs_role_arn>\" # CRITICAL: role CloudTrail assumes to deliver events\n}\n```"
},
"Recommendation": {
"Text": "Validate that the trails in CloudTrail have an arn set in the CloudWatchLogsLogGroupArn property.",
"Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/send-cloudtrail-events-to-cloudwatch-logs.html"
"Text": "Integrate every trail with **CloudWatch Logs** and maintain continuous, near-real-time delivery. Enforce **least privilege** on the delivery role, prefer **multi-Region** coverage, and implement **metric filters and alerts** for sensitive actions. Centralize retention to support **defense in depth**.",
"Url": "https://hub.prowler.com/check/cloudtrail_cloudwatch_logging_enabled"
}
},
"Categories": [
"forensics-ready",
"logging"
"logging",
"forensics-ready"
],
"DependsOn": [],
"RelatedTo": [],
@@ -1,34 +1,39 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_insights_exist",
"CheckTitle": "Ensure CloudTrail Insight is enabled",
"CheckTitle": "CloudTrail trail has Insights enabled",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
"Software and Configuration Checks/AWS Security Best Practices/Runtime Behavior Analysis",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "low",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail Insight is enabled",
"Risk": "CloudTrail Insights provides a powerful way to search and analyze CloudTrail log data using pre-built queries and machine learning algorithms. This can help you to identify potential security threats and suspicious activity in near real-time, such as unauthorized access attempts, policy changes, or resource modifications.",
"Description": "**CloudTrail trails** that are logging are evaluated for **Insights** via `insight selectors`, which enable anomaly detection on management-event patterns (API call and error rates). The finding pinpoints logging trails where these selectors are missing.",
"Risk": "Without **Insights**, abnormal API call or error rates can go unnoticed, delaying detection of credential abuse, privilege escalation, or runaway automation. Attackers may rapidly alter policies, delete resources, or exfiltrate data before response, impacting confidentiality and availability.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-insights-events-with-cloudtrail.html",
"https://awscli.amazonaws.com/v2/documentation/api/2.18.18/reference/cloudtrail/put-insight-selectors.html",
"https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Terraform": ""
"CLI": "aws cloudtrail put-insight-selectors --trail-name <TRAIL_NAME> --insight-selectors '[{\"InsightType\":\"ApiCallRateInsight\"}]'",
"NativeIaC": "```yaml\nResources:\n <example_resource_name>:\n Type: AWS::CloudTrail::Trail\n Properties:\n TrailName: <example_resource_name>\n S3BucketName: <example_resource_name>\n IsLogging: true\n InsightSelectors:\n - InsightType: ApiCallRateInsight # Critical fix: enables CloudTrail Insights on the trail\n```",
"Other": "1. In the AWS Console, go to CloudTrail > Trails\n2. Select the trail that is logging\n3. Click Edit on the CloudTrail Insights section\n4. Enable Insights and select API call rate (or Error rate)\n5. Save changes",
"Terraform": "```hcl\nresource \"aws_cloudtrail\" \"<example_resource_name>\" {\n name = \"<example_resource_name>\"\n s3_bucket_name = \"<example_resource_name>\"\n enable_logging = true\n\n insight_selector {\n insight_type = \"ApiCallRateInsight\" # Critical fix: enables CloudTrail Insights on the trail\n }\n}\n```"
},
"Recommendation": {
"Text": "Enable CloudTrail Insight",
"Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-insights-events-with-cloudtrail.html"
"Text": "Enable **CloudTrail Insights** on all logging trails (ideally all-Region or organization trails). Activate both `ApiCallRateInsight` and `ApiErrorRateInsight`. Integrate alerts with monitoring and review anomalies regularly. Apply **defense in depth** and least privilege to reduce potential blast radius.",
"Url": "https://hub.prowler.com/check/cloudtrail_insights_exist"
}
},
"Categories": [
"forensics-ready"
"threat-detection"
],
"DependsOn": [],
"RelatedTo": [],
@@ -1,34 +1,39 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_kms_encryption_enabled",
"CheckTitle": "Ensure CloudTrail logs are encrypted at rest using KMS CMKs",
"CheckTitle": "CloudTrail trail logs are encrypted at rest with a KMS key",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail logs are encrypted at rest using KMS CMKs",
"Risk": "By default, the log files delivered by CloudTrail to your bucket are encrypted by Amazon server-side encryption with Amazon S3-managed encryption keys (SSE-S3). To provide a security layer that is directly manageable, you can instead use server-side encryption with AWS KMSmanaged keys (SSE-KMS) for your CloudTrail log files.",
"Description": "**AWS CloudTrail trails** are evaluated for use of **SSE-KMS** with a customer-managed KMS key to encrypt delivered log files at rest in S3. Trails without a configured KMS key are identified. *Applies to single-Region and multi-Region trails.*",
"Risk": "Absent a **customer-managed KMS key**, log protection relies only on storage permissions. Bucket misconfigurations or stolen credentials can expose audit data, aiding evasion and lateral movement. Missing key-level controls, rotation, and usage audit weaken **confidentiality** and **forensic integrity**.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html",
"https://trendmicro.com/cloudoneconformity/knowledge-base/aws/CloudTrail/cloudtrail-logs-encrypted.html",
"https://www.stream.security/rules/ensure-cloudtrail-logs-are-encrypted-at-rest",
"https://www.clouddefense.ai/compliance-rules/cis-v130/logging/cis-v130-3-7"
],
"Remediation": {
"Code": {
"CLI": "aws cloudtrail update-trail --name <trail_name> --kms-id <cloudtrail_kms_key> aws kms put-key-policy --key-id <cloudtrail_kms_key> --policy <cloudtrail_kms_key_policy>",
"NativeIaC": "https://docs.prowler.com/checks/aws/logging-policies/logging_7#fix---buildtime",
"Other": "",
"Terraform": ""
"CLI": "aws cloudtrail update-trail --name <trail_name> --kms-key-id <kms_key_arn_or_id>",
"NativeIaC": "```yaml\n# CloudFormation: enable KMS encryption for an existing/new CloudTrail\nResources:\n <example_resource_name>:\n Type: AWS::CloudTrail::Trail\n Properties:\n S3BucketName: <example_resource_name>\n KmsKeyId: <example_resource_id> # Critical: sets the KMS key to encrypt CloudTrail logs at rest\n```",
"Other": "1. In the AWS Console, go to CloudTrail > Trails\n2. Select the trail <trail_name>, click Edit\n3. Under Log file encryption, choose Use a KMS key and select <cloudtrail_kms_key>\n4. Click Save changes",
"Terraform": "```hcl\n# Enable KMS encryption for CloudTrail\nresource \"aws_cloudtrail\" \"<example_resource_name>\" {\n name = \"<example_resource_name>\"\n s3_bucket_name = \"<example_resource_name>\"\n kms_key_id = \"<example_resource_id>\" # Critical: uses this KMS key to encrypt CloudTrail logs\n}\n```"
},
"Recommendation": {
"Text": "This approach has the following advantages: You can create and manage the CMK encryption keys yourself. You can use a single CMK to encrypt and decrypt log files for multiple accounts across all regions. You have control over who can use your key for encrypting and decrypting CloudTrail log files. You can assign permissions for the key to the users. You have enhanced security.",
"Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html"
"Text": "Enable **SSE-KMS** on every trail using a **customer-managed KMS key**. Apply **least privilege** so only authorized roles can `Decrypt`, and enforce **separation of duties** between key admins and log readers. Rotate keys and monitor key usage to provide **defense in depth** for CloudTrail data.",
"Url": "https://hub.prowler.com/check/cloudtrail_kms_encryption_enabled"
}
},
"Categories": [
"forensics-ready",
"encryption"
],
"DependsOn": [],
@@ -1,33 +1,40 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_log_file_validation_enabled",
"CheckTitle": "Ensure CloudTrail log file validation is enabled",
"CheckTitle": "CloudTrail trail has log file validation enabled",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail log file validation is enabled",
"Risk": "Enabling log file validation will provide additional integrity checking of CloudTrail logs. ",
"Description": "**AWS CloudTrail trails** are evaluated for **log file integrity validation** being enabled (`LogFileValidationEnabled`).\n\nWhen enabled, CloudTrail generates signed digest files to verify that S3-delivered log files remain unchanged.",
"Risk": "Without validation, adversaries can alter, forge, or delete audit entries without detection, compromising log **integrity** and non-repudiation.\n\nThis impairs investigations, enables alert evasion, and obscures unauthorized changes across regions or accounts.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-log-file-validation-intro.html",
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-log-file-validation-enabling.html",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/CloudTrail/cloudtrail-log-file-integrity-validation.html",
"https://deepwiki.com/acantril/learn-cantrill-io-labs/7.1-cloudtrail-log-file-integrity"
],
"Remediation": {
"Code": {
"CLI": "aws cloudtrail update-trail --name <trail_name> --enable-log-file-validation",
"NativeIaC": "https://docs.prowler.com/checks/aws/logging-policies/logging_2#cloudformation",
"Other": "",
"Terraform": "https://docs.prowler.com/checks/aws/logging-policies/logging_2#terraform"
"CLI": "aws cloudtrail update-trail --name <trail_name> --enable-log-file-validation",
"NativeIaC": "```yaml\n# CloudFormation: Enable log file validation on a CloudTrail trail\nResources:\n <example_resource_name>:\n Type: AWS::CloudTrail::Trail\n Properties:\n S3BucketName: <example_resource_name>\n EnableLogFileValidation: true # Critical: enables integrity validation for delivered log files\n```",
"Other": "1. Open the AWS Console and go to CloudTrail\n2. Click Trails and select <trail_name>\n3. Click Edit\n4. In Additional/Advanced settings, check Enable log file validation\n5. Click Save changes",
"Terraform": "```hcl\n# Enable log file validation on a CloudTrail trail\nresource \"aws_cloudtrail\" \"<example_resource_name>\" {\n name = \"<example_resource_name>\"\n s3_bucket_name = \"<example_resource_name>\"\n enable_log_file_validation = true # Critical: ensures CloudTrail writes signed digests to detect tampering\n}\n```"
},
"Recommendation": {
"Text": "Ensure LogFileValidationEnabled is set to true for each trail.",
"Url": "http://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-log-filevalidation-enabling.html"
"Text": "Enable **log file integrity validation** on all trails (`LogFileValidationEnabled=true`).\n\nEnforce **least privilege** on the logs bucket, retain and protect digest files (e.g., S3 Object Lock/MFA Delete), and monitor validation results to support **defense in depth**.",
"Url": "https://hub.prowler.com/check/cloudtrail_log_file_validation_enabled"
}
},
"Categories": [
"logging",
"forensics-ready"
],
"DependsOn": [],
@@ -1,33 +1,38 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_logs_s3_bucket_access_logging_enabled",
"CheckTitle": "Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket",
"CheckTitle": "CloudTrail trail destination S3 bucket has access logging enabled",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure S3 bucket access logging is enabled on the CloudTrail S3 bucket",
"Risk": "Server access logs can assist you in security and access audits, help you learn about your customer base, and understand your Amazon S3 bill.",
"Description": "CloudTrail trails deliver logs to an S3 bucket; this evaluates whether that bucket has **S3 server access logging** enabled to record requests against it.\n\n*If the destination bucket is outside the account or audit scope, a manual review is indicated.*",
"Risk": "Without access logging on the CloudTrail logs bucket, access and changes to log files lack an independent audit trail. Attackers could read, delete, or replace logs without attribution, undermining **log confidentiality** and **integrity**, and slowing **incident response**.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/securityhub/latest/userguide/cloudtrail-controls.html",
"https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_6#aws-console",
"Terraform": ""
"CLI": "aws s3api put-bucket-logging --bucket <CLOUDTRAIL_BUCKET_NAME> --bucket-logging-status \"{\\\"LoggingEnabled\\\":{\\\"TargetBucket\\\":\\\"<TARGET_BUCKET_NAME>\\\"}}\"",
"NativeIaC": "```yaml\n# CloudFormation: enable S3 access logging on the CloudTrail destination bucket\nResources:\n <example_log_bucket_name>:\n Type: AWS::S3::Bucket\n\n <example_cloudtrail_bucket>:\n Type: AWS::S3::Bucket\n Properties:\n LoggingConfiguration:\n DestinationBucketName: !Ref <example_log_bucket_name> # Critical: turns on server access logging to this destination bucket\n # This enables access logging so the check passes\n```",
"Other": "1. In the AWS Console, go to S3 and open the bucket used by your CloudTrail trail\n2. Select the Properties tab\n3. In Server access logging, click Edit\n4. Enable logging and choose a different destination S3 bucket for the logs\n5. Click Save changes",
"Terraform": "```hcl\n# Enable access logging on the CloudTrail S3 bucket\nresource \"aws_s3_bucket\" \"<example_log_bucket_name>\" {\n bucket = \"<example_log_bucket_name>\"\n}\n\nresource \"aws_s3_bucket\" \"<example_bucket_name>\" {\n bucket = \"<example_bucket_name>\"\n}\n\nresource \"aws_s3_bucket_logging\" \"<example_resource_name>\" {\n bucket = aws_s3_bucket.<example_bucket_name>.id\n target_bucket = aws_s3_bucket.<example_log_bucket_name>.id # Critical: enables server access logging to the target bucket\n}\n```"
},
"Recommendation": {
"Text": "Ensure that S3 buckets have Logging enabled. CloudTrail data events can be used in place of S3 bucket logging. If that is the case, this finding can be considered a false positive.",
"Url": "https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html"
"Text": "Enable **S3 server access logging** on the CloudTrail logs bucket and write logs to a separate, tightly controlled bucket. Apply **least privilege**, enable **versioning**, and consider **Object Lock** to deter tampering. Centralize monitoring to support defense-in-depth and rapid investigation.",
"Url": "https://hub.prowler.com/check/cloudtrail_logs_s3_bucket_access_logging_enabled"
}
},
"Categories": [
"logging",
"forensics-ready"
],
"DependsOn": [],
@@ -1,37 +1,45 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_logs_s3_bucket_is_not_publicly_accessible",
"CheckTitle": "Ensure the S3 bucket CloudTrail logs is not publicly accessible",
"CheckTitle": "CloudTrail trail S3 bucket is not publicly accessible",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
"Software and Configuration Checks/AWS Security Best Practices/Network Reachability",
"Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Industry and Regulatory Standards/CIS AWS Foundations Benchmark",
"Effects/Data Exposure"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "critical",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure the S3 bucket CloudTrail logs to is not publicly accessible",
"Risk": "Allowing public access to CloudTrail log content may aid an adversary in identifying weaknesses in the affected accounts use or configuration.",
"ResourceType": "AwsS3Bucket",
"Description": "CloudTrail log destination **S3 buckets** are inspected for ACL grants that expose data to the public `AllUsers` group.\n\nBuckets hosted in other accounts are flagged for out-of-scope review.",
"Risk": "Exposed CloudTrail logs erode **confidentiality** and **integrity**.\n\nAdversaries can harvest API activity to map accounts, roles, and keys, enabling **reconnaissance** and evasion. If write is allowed, logs can be **poisoned** or deleted, thwarting investigations and compromising incident timelines.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/CloudTrail/cloudtrail-bucket-publicly-accessible.html",
"https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html",
"https://docs.aws.amazon.com/config/latest/developerguide/cloudtrail-s3-bucket-public-access-prohibited.html",
"https://docs.panther.com/alerts/alert-runbooks/built-in-policies/aws-cloudtrail-logs-s3-bucket-not-publicly-accessible"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_3#aws-console",
"Terraform": ""
"CLI": "aws s3api put-bucket-acl --bucket <example_resource_name> --acl private",
"NativeIaC": "```yaml\n# CloudFormation: ensure the CloudTrail S3 bucket ACL is not public\nResources:\n CloudTrailLogsBucket:\n Type: AWS::S3::Bucket\n Properties:\n BucketName: <example_resource_name>\n AccessControl: Private # CRITICAL: sets bucket ACL to private, removing any AllUsers (public) grants\n```",
"Other": "1. Open the AWS S3 Console\n2. Select the bucket used by CloudTrail\n3. Go to Permissions > Access control list (ACL)\n4. Click Edit under Public access, remove any grants to \"Everyone (public access)\" (uncheck Read/Write)\n5. Save changes",
"Terraform": "```hcl\n# Ensure the CloudTrail S3 bucket ACL is private\nresource \"aws_s3_bucket_acl\" \"fix_cloudtrail_logs_bucket\" {\n bucket = \"<example_resource_name>\"\n acl = \"private\" # CRITICAL: removes any public (AllUsers) ACL grants\n}\n```"
},
"Recommendation": {
"Text": "Analyze Bucket policy to validate appropriate permissions. Ensure the AllUsers principal is not granted privileges. Ensure the AuthenticatedUsers principal is not granted privileges.",
"Url": "https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_elements_principal.html"
"Text": "Apply **least privilege** to the log bucket:\n- Enable S3 `Block Public Access` (account and bucket)\n- Remove `AllUsers`/`AuthenticatedUsers` ACLs; avoid wildcard principals\n- Permit only CloudTrail and constrain with `aws:SourceArn`\n\nUse a dedicated private bucket and monitor for permission changes.",
"Url": "https://hub.prowler.com/check/cloudtrail_logs_s3_bucket_is_not_publicly_accessible"
}
},
"Categories": [
"forensics-ready",
"internet-exposed"
],
"DependsOn": [],
"DependsOn": [
"s3_bucket_public_access"
],
"RelatedTo": [],
"Notes": ""
}
@@ -1,33 +1,37 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_multi_region_enabled",
"CheckTitle": "Ensure CloudTrail is enabled in all regions",
"CheckTitle": "Region has at least one CloudTrail trail logging",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "high",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail is enabled in all regions",
"Risk": "AWS CloudTrail is a web service that records AWS API calls for your account and delivers log files to you. The recorded information includes the identity of the API caller, the time of the API call, the source IP address of the API caller, the request parameters, and the response elements returned by the AWS service.",
"Description": "**AWS CloudTrail** has at least one trail with `logging` enabled in every region. A **multi-region trail** or a regional trail counts for coverage in that region.",
"Risk": "Missing coverage in any region creates **visibility gaps**.\n\nAttackers can use lesser-monitored regions to run API actions, hide **unauthorized changes**, and exfiltrate data without audit trails, weakening **detective controls**, hindering **forensics**, and delaying response (confidentiality and integrity).",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrailconcepts.html#cloudtrail-concepts-management-events"
],
"Remediation": {
"Code": {
"CLI": "aws cloudtrail create-trail --name <trail_name> --bucket-name <s3_bucket_for_cloudtrail> --is-multi-region-trail aws cloudtrail update-trail --name <trail_name> --is-multi-region-trail ",
"NativeIaC": "https://docs.prowler.com/checks/aws/logging-policies/logging_1#cloudformation",
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_1#aws-console",
"Terraform": "https://docs.prowler.com/checks/aws/logging-policies/logging_1#terraform"
"CLI": "",
"NativeIaC": "```yaml\n# CloudFormation: Create a multi-region CloudTrail and start logging\nResources:\n <example_resource_name>:\n Type: AWS::CloudTrail::Trail\n Properties:\n TrailName: <example_resource_name>\n S3BucketName: <example_resource_name>\n IsMultiRegionTrail: true # Critical: applies the trail to all regions\n IsLogging: true # Critical: ensures the trail is logging\n```",
"Other": "1. In the AWS Console, go to CloudTrail > Trails\n2. If no trail exists: Click Create trail, enter a name, choose an S3 bucket, set Apply trail to all regions = Yes, then Create (logging starts)\n3. If a trail exists: Select it, click Edit, set Apply trail to all regions = Yes, Save\n4. If Status shows Not logging, click Start logging",
"Terraform": "```hcl\n# Terraform: Multi-region CloudTrail with logging enabled\nresource \"aws_cloudtrail\" \"<example_resource_name>\" {\n name = \"<example_resource_name>\"\n s3_bucket_name = \"<example_resource_name>\"\n\n is_multi_region_trail = true # Critical: applies the trail to all regions\n enable_logging = true # Critical: ensures the trail is logging\n}\n```"
},
"Recommendation": {
"Text": "Ensure Logging is set to ON on all regions (even if they are not being used at the moment.",
"Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrailconcepts.html#cloudtrail-concepts-management-events"
"Text": "Use a **multi-region CloudTrail trail** or per-region trails so `logging` is active in every region, including unused ones.\n\nCentralize logs, enforce **least privilege** to log stores, and add **defense-in-depth** with encryption, integrity validation, and retention. Continuously monitor trail health to catch gaps.",
"Url": "https://hub.prowler.com/check/cloudtrail_multi_region_enabled"
}
},
"Categories": [
"logging",
"forensics-ready"
],
"DependsOn": [],
@@ -1,31 +1,38 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_multi_region_enabled_logging_management_events",
"CheckTitle": "Ensure CloudTrail logging management events in All Regions",
"CheckTitle": "CloudTrail trail logs management events for read and write operations",
"CheckType": [
"CIS AWS Foundations Benchmark"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "low",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail logging management events in All Regions",
"Risk": "AWS CloudTrail enables governance, compliance, operational auditing, and risk auditing of your AWS account. To meet FTR requirements, you must have management events enabled for all AWS accounts and in all regions and aggregate these logs into an Amazon Simple Storage Service (Amazon S3) bucket owned by a separate AWS account.",
"RelatedUrl": "https://docs.prowler.com/checks/aws/logging-policies/logging_14",
"Description": "**CloudTrail trails** record **management events** (`read` and `write`) in every AWS region and are actively logging, using a multi-region trail or per-region coverage.",
"Risk": "Without region-wide management event logging, changes to identities, networking, and audit settings can go untracked.\n\nAdversaries can operate in overlooked regions to create resources, modify permissions, or disable logging, undermining **integrity**, **confidentiality**, and incident response.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.prowler.com/checks/aws/logging-policies/logging_14#terraform",
"https://docs.prowler.com/checks/aws/logging-policies/logging_14"
],
"Remediation": {
"Code": {
"CLI": "aws cloudtrail update-trail --name <trail_name> --is-multi-region-trail",
"NativeIaC": "",
"Other": "https://docs.prowler.com/checks/aws/logging-policies/logging_14",
"Terraform": "https://docs.prowler.com/checks/aws/logging-policies/logging_14#terraform"
"CLI": "",
"NativeIaC": "```yaml\n# CloudFormation: enable multi-region and log management events (read & write)\nResources:\n <example_resource_name>:\n Type: AWS::CloudTrail::Trail\n Properties:\n S3BucketName: <example_resource_name>\n IsMultiRegionTrail: true # CRITICAL: apply the trail to all regions\n EventSelectors:\n - IncludeManagementEvents: true # CRITICAL: log management events\n ReadWriteType: All # CRITICAL: log both read and write\n```",
"Other": "1. In the AWS Console, go to CloudTrail > Trails and select your trail\n2. Click Edit\n3. Set Apply trail to all regions to Yes\n4. Under Management events, set Read/write events to All\n5. Click Save changes\n6. If Logging is Off, click Start logging",
"Terraform": "```hcl\n# Terraform: enable multi-region and log management events (read & write)\nresource \"aws_cloudtrail\" \"<example_resource_name>\" {\n name = \"<example_resource_name>\"\n s3_bucket_name = \"<example_resource_name>\"\n\n is_multi_region_trail = true # CRITICAL: apply the trail to all regions\n\n event_selector {\n include_management_events = true # CRITICAL: log management events\n read_write_type = \"All\" # CRITICAL: log both read & write\n }\n}\n```"
},
"Recommendation": {
"Text": "Enable CloudTrail logging management events in All Regions",
"Url": "https://docs.prowler.com/checks/aws/logging-policies/logging_14"
"Text": "Enable a **multi-region CloudTrail** that logs **management events** for `read` and `write` in all regions.\n\nCentralize logs in a separate, locked-down account; apply **least privilege**, encryption, retention, and integrity validation; and protect trails and storage with tamper-evident, deny-delete controls for **defense-in-depth**.",
"Url": "https://hub.prowler.com/check/cloudtrail_multi_region_enabled_logging_management_events"
}
},
"Categories": [
"logging",
"forensics-ready"
],
"DependsOn": [],
@@ -1,31 +1,41 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_s3_dataevents_read_enabled",
"CheckTitle": "Check if S3 buckets have Object-level logging for read events is enabled in CloudTrail.",
"CheckTitle": "CloudTrail trail records S3 object-level read events for all S3 buckets",
"CheckType": [
"Logging and Monitoring"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "low",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure that all your AWS CloudTrail trails are configured to log Data events in order to record S3 object-level API operations, such as GetObject, DeleteObject and PutObject, for individual S3 buckets or for all current and future S3 buckets provisioned in your AWS account.",
"Risk": "If logs are not enabled, monitoring of service use and threat analysis is not possible.",
"Description": "**CloudTrail trails** log **S3 object-level read data events** for all buckets, capturing object access (for example `GetObject`) via selectors targeting `AWS::S3::Object`",
"Risk": "Without **object-level read logging**, S3 access is opaque. Attackers or insiders can exfiltrate data via `GetObject` without audit trails, eroding **confidentiality** and hindering **forensics**, anomaly detection, and incident response.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://awswala.medium.com/enable-cloudtrail-data-events-logging-for-objects-in-an-s3-bucket-33cade51ae2b",
"https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-23",
"https://docs.aws.amazon.com/AmazonS3/latest/userguide/enable-cloudtrail-logging-for-s3.html",
"https://www.plerion.com/cloud-knowledge-base/ensure-object-level-logging-for-read-events-enabled-for-s3-bucket"
],
"Remediation": {
"Code": {
"CLI": "aws cloudtrail put-event-selectors --trail-name <YOUR_TRAIL_NAME_HERE> --event-selectors '[{ 'ReadWriteType': 'ReadOnly', 'IncludeManagementEvents':true, 'DataResources': [{ 'Type': 'AWS::S3::Object', 'Values': ['arn:aws:s3'] }] }]'",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-23",
"Terraform": ""
"CLI": "aws cloudtrail put-event-selectors --trail-name <example_resource_name> --event-selectors '[{\"ReadWriteType\":\"ReadOnly\",\"DataResources\":[{\"Type\":\"AWS::S3::Object\",\"Values\":[\"arn:aws:s3\"]}]}]'",
"NativeIaC": "```yaml\n# CloudFormation: enable S3 object-level READ data events for all buckets on a trail\nResources:\n <example_resource_name>:\n Type: AWS::CloudTrail::Trail\n Properties:\n S3BucketName: <example_resource_name>\n EventSelectors:\n - ReadWriteType: ReadOnly # CRITICAL: log read-only data events\n DataResources:\n - Type: AWS::S3::Object # CRITICAL: target S3 object-level events\n Values:\n - arn:aws:s3 # CRITICAL: applies to all S3 buckets/objects\n```",
"Other": "1. In the AWS Console, open CloudTrail and select Trails\n2. Open your trail and go to the Data events section\n3. Add data event for S3 and choose All current and future S3 buckets\n4. Select only Read events (or All if Read-only is unavailable)\n5. Save changes",
"Terraform": "```hcl\n# Terraform: enable S3 object-level READ data events for all buckets on a trail\nresource \"aws_cloudtrail\" \"<example_resource_name>\" {\n name = \"<example_resource_name>\"\n s3_bucket_name = \"<example_resource_name>\"\n\n event_selector {\n read_write_type = \"ReadOnly\" # CRITICAL: log read-only data events\n data_resource {\n type = \"AWS::S3::Object\" # CRITICAL: target S3 object-level events\n values = [\"arn:aws:s3\"] # CRITICAL: apply to all S3 buckets/objects\n }\n }\n}\n```"
},
"Recommendation": {
"Text": "Enable logs. Create an S3 lifecycle policy. Define use cases, metrics and automated responses where applicable.",
"Url": "https://docs.aws.amazon.com/AmazonS3/latest/userguide/enable-cloudtrail-logging-for-s3.html"
"Text": "Enable CloudTrail **data events** for S3 objects with `ReadOnly` (or `All`) across all current and future buckets. Use a multi-Region trail, centralize logs in an encrypted bucket with lifecycle retention, and integrate monitoring/alerts to support **defense in depth** and accountable access.",
"Url": "https://hub.prowler.com/check/cloudtrail_s3_dataevents_read_enabled"
}
},
"Categories": [],
"Categories": [
"logging",
"forensics-ready"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,31 +1,41 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_s3_dataevents_write_enabled",
"CheckTitle": "Check if S3 buckets have Object-level logging for write events is enabled in CloudTrail.",
"CheckTitle": "CloudTrail trail records all S3 object-level API operations for all buckets",
"CheckType": [
"Logging and Monitoring"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "low",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure that all your AWS CloudTrail trails are configured to log Data events in order to record S3 object-level API operations, such as GetObject, DeleteObject and PutObject, for individual S3 buckets or for all current and future S3 buckets provisioned in your AWS account.",
"Risk": "If logs are not enabled, monitoring of service use and threat analysis is not possible.",
"Description": "**CloudTrail trails** include **S3 object-level data events** for **write (or all) operations** across **all current and future buckets**, via classic or advanced selectors. This records actions like `PutObject`, `DeleteObject`, and multipart uploads at the object level.",
"Risk": "Without object-level write logging, unauthorized or accidental changes and deletions can go unobserved, undermining data **integrity** and **availability**. Forensics lose visibility into who modified or removed objects, hindering detection of ransomware, rogue automation, or insider tampering.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-data-events-with-cloudtrail.html",
"https://docs.aws.amazon.com/AmazonS3/latest/userguide/enable-cloudtrail-logging-for-s3.html",
"https://www.go2share.net/article/s3-bucket-logging",
"https://docs.amazonaws.cn/en_us/AmazonS3/latest/userguide/cloudtrail-logging-s3-info.html",
"https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-22"
],
"Remediation": {
"Code": {
"CLI": "aws cloudtrail put-event-selectors --trail-name <YOUR_TRAIL_NAME_HERE> --event-selectors '[{ 'ReadWriteType': 'WriteOnly', 'IncludeManagementEvents':true, 'DataResources': [{ 'Type': 'AWS::S3::Object', 'Values': ['arn:aws:s3'] }] }]'",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/s3-controls.html#s3-22",
"Terraform": ""
"CLI": "aws cloudtrail put-event-selectors --trail-name <example_resource_name> --event-selectors '[{\"ReadWriteType\":\"WriteOnly\",\"DataResources\":[{\"Type\":\"AWS::S3::Object\",\"Values\":[\"arn:aws:s3\"]}]}]'",
"NativeIaC": "```yaml\nResources:\n <example_resource_name>:\n Type: AWS::CloudTrail::Trail\n Properties:\n TrailName: <example_resource_name>\n S3BucketName: <example_resource_name>\n EventSelectors:\n - ReadWriteType: WriteOnly\n DataResources:\n - Type: AWS::S3::Object\n Values:\n - arn:aws:s3 # Critical: enables S3 object-level write data events for all buckets, fixing the check\n```",
"Other": "1. In the AWS Console, open CloudTrail and go to Trails\n2. Select <your trail> and click Edit under Data events\n3. For Data event source, choose S3\n4. Select All current and future S3 buckets\n5. Check Write events (or All events)\n6. Click Save changes",
"Terraform": "```hcl\nresource \"aws_cloudtrail\" \"<example_resource_name>\" {\n name = \"<example_resource_name>\"\n s3_bucket_name = \"<example_resource_name>\"\n\n event_selector {\n read_write_type = \"WriteOnly\"\n data_resource {\n type = \"AWS::S3::Object\"\n values = [\"arn:aws:s3\"] # Critical: logs S3 object-level write events for all buckets to pass the check\n }\n }\n}\n```"
},
"Recommendation": {
"Text": "Enable logs. Create an S3 lifecycle policy. Define use cases, metrics and automated responses where applicable.",
"Url": "https://docs.aws.amazon.com/AmazonS3/latest/userguide/enable-cloudtrail-logging-for-s3.html"
"Text": "Enable **CloudTrail S3 data events** for object-level **write** (and *optionally* read) across all buckets on a multi-Region trail. Apply **least privilege** to log storage, set **lifecycle** retention, and integrate alerts. Use **advanced selectors** to target sensitive buckets/operations for cost control and **defense in depth**.",
"Url": "https://hub.prowler.com/check/cloudtrail_s3_dataevents_write_enabled"
}
},
"Categories": [
"logging",
"forensics-ready"
],
"DependsOn": [],
@@ -1,26 +1,37 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_threat_detection_enumeration",
"CheckTitle": "Ensure there are no potential enumeration threats in CloudTrail",
"CheckType": [],
"CheckTitle": "CloudTrail logs show no potential enumeration activity",
"CheckType": [
"TTPs/Discovery",
"Software and Configuration Checks/AWS Security Best Practices/Runtime Behavior Analysis",
"Unusual Behaviors/User"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "critical",
"ResourceType": "AwsCloudTrailTrail",
"Description": "This check ensures that there are no potential enumeration threats in CloudTrail.",
"Risk": "Potential enumeration threats in CloudTrail can lead to unauthorized access to resources.",
"Description": "**CloudTrail activity** is analyzed for AWS identities executing a broad mix of discovery APIs like `List*`, `Describe*`, and `Get*` within a recent time window.\n\nAn identity exceeding a configurable ratio of these actions indicates potential enumeration behavior by that principal.",
"Risk": "Concentrated discovery activity signals **reconnaissance** with valid credentials. Adversaries can map assets and policies to enable **privilege escalation**, target data stores for **exfiltration** (confidentiality), and identify services to disrupt (availability), supporting stealthy lateral movement.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://medium.com/falconforce/falconfriday-detecting-enumeration-in-aws-0xff25-orangecon-25-edition-4aee83651088",
"https://www.elastic.co/guide/en/security/8.19/aws-discovery-api-calls-via-cli-from-a-single-resource.html",
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-concepts.html#cloudtrail-concepts-logging-data-events",
"https://aws.plainenglish.io/aws-cloudtrail-event-cheatsheet-a-detection-engineers-guide-to-critical-api-calls-part-1-04fb1588556f",
"https://support.icompaas.com/support/solutions/articles/62000233455-ensure-there-are-no-potential-enumeration-threats-in-cloudtrail-"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Terraform": ""
"CLI": "aws iam update-access-key --user-name <USER_NAME> --access-key-id <ACCESS_KEY_ID> --status Inactive",
"NativeIaC": "```yaml\n# CloudFormation: deny common enumeration APIs for a specific IAM user\nResources:\n DenyEnumerationPolicy:\n Type: AWS::IAM::Policy\n Properties:\n PolicyName: deny-enumeration\n PolicyDocument:\n Version: \"2012-10-17\"\n Statement:\n - Effect: Deny # CRITICAL: blocks typical enumeration calls\n Action:\n - ec2:Describe* # CRITICAL: deny EC2 describe APIs\n - iam:List* # CRITICAL: deny IAM list APIs\n - s3:List* # CRITICAL: deny S3 list APIs\n - s3:Get* # CRITICAL: deny S3 get APIs (e.g., GetBucketAcl)\n Resource: \"*\"\n Users:\n - \"<example_resource_name>\" # CRITICAL: target the enumerating user\n```",
"Other": "1. In AWS Console, go to IAM > Users and open the user shown in the alert (ARN in the finding)\n2. Select the Security credentials tab\n3. For each active Access key, click Deactivate to set status to Inactive\n4. If the activity came from an EC2 instance role: go to EC2 > Instances > select the instance > Security > IAM role > Detach IAM role\n5. Re-run the check to confirm no new enumeration events occur",
"Terraform": "```hcl\n# Deny common enumeration APIs for a specific IAM user\nresource \"aws_iam_user_policy\" \"<example_resource_name>\" {\n name = \"deny-enumeration\"\n user = \"<example_user_name>\"\n\n policy = jsonencode({\n Version = \"2012-10-17\",\n Statement = [{\n Effect = \"Deny\", # CRITICAL: blocks typical enumeration calls\n Action = [\n \"ec2:Describe*\", # CRITICAL\n \"iam:List*\", # CRITICAL\n \"s3:List*\", # CRITICAL\n \"s3:Get*\" # CRITICAL\n ],\n Resource = \"*\"\n }]\n })\n}\n```"
},
"Recommendation": {
"Text": "To remediate this issue, ensure that there are no potential enumeration threats in CloudTrail.",
"Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-concepts.html#cloudtrail-concepts-logging-data-events"
"Text": "Apply **least privilege** to limit `List*`/`Describe*`/`Get*` to necessary resources and roles; use **separation of duties**.\n- Enforce MFA and short-lived sessions\n- Use **SCPs** to curb unnecessary discovery\n- Baseline expected reads and alert on spikes as **defense in depth**",
"Url": "https://hub.prowler.com/check/cloudtrail_threat_detection_enumeration"
}
},
"Categories": [
@@ -1,30 +1,43 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_threat_detection_llm_jacking",
"CheckTitle": "Ensure there are no potential LLM Jacking threats in CloudTrail.",
"CheckType": [],
"CheckTitle": "No potential LLM jacking activity detected in CloudTrail",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices/Runtime Behavior Analysis",
"TTPs/Discovery",
"TTPs/Execution",
"TTPs/Defense Evasion",
"Effects/Resource Consumption",
"Unusual Behaviors/User"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "critical",
"ResourceType": "AwsCloudTrailTrail",
"Description": "This check ensures that there are no potential LLM Jacking threats in CloudTrail. LLM Jacking attacks involve unauthorized access to cloud-hosted large language model (LLM) services, such as AWS Bedrock, by exploiting exposed credentials or vulnerabilities. These attacks can lead to resource hijacking, unauthorized model invocations, and high operational costs for the victim organization.",
"Risk": "Potential LLM Jacking threats in CloudTrail can lead to unauthorized access to sensitive AI models, stolen credentials, resource hijacking, or running costly workloads. Attackers may use reverse proxies or malicious credentials to sell access to models, exfiltrate sensitive data, or disrupt business operations.",
"RelatedUrl": "https://sysdig.com/blog/llmjacking-stolen-cloud-credentials-used-in-new-ai-attack/",
"Description": "**CloudTrail Bedrock activity** is analyzed per identity for a high diversity of LLM-related API calls (e.g., `InvokeModel`, `InvokeModelWithResponseStream`, `GetFoundationModelAvailability`). *If an identity's share of these actions exceeds a configured threshold over a recent window*, it is surfaced as potential **LLM-jacking** behavior.",
"Risk": "Such patterns suggest **stolen credential** abuse to drive LLM usage.\n- Availability: cost exhaustion and service disruption\n- Confidentiality: leakage of prompts/outputs and model settings\n- Integrity: misuse of permissions for broader access\nAttackers may use reverse proxies to resell access and obfuscate sources.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://furkangungor.medium.com/automating-anomaly-detection-in-aws-cloudtrail-logs-4efb2ad9b958",
"https://help.sumologic.com/docs/integrations/amazon-aws/amazon-bedrock/",
"https://dzone.com/articles/ai-powered-aws-cloudtrail-analysis-strands-agent-bedrock"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Terraform": ""
"NativeIaC": "```yaml\n# CloudFormation SCP that blocks all Amazon Bedrock actions to stop LLM jacking\nResources:\n <example_resource_name>:\n Type: AWS::Organizations::Policy\n Properties:\n Name: <example_resource_name>\n Type: SERVICE_CONTROL_POLICY\n TargetIds:\n - \"<example_resource_id>\" # CRITICAL: Attach SCP to the root/OU/account to enforce the deny\n Content:\n Version: \"2012-10-17\"\n Statement:\n - Sid: DenyBedrock\n Effect: Deny\n Action: \"bedrock:*\" # CRITICAL: Denies all Bedrock APIs (Invoke/Converse/list/entitlements/etc.)\n Resource: \"*\" # CRITICAL: Apply deny to all resources\n```",
"Other": "1. In the AWS Console, go to Organizations > Policies > Service control policies\n2. Click Create policy\n3. Set Name to <example_resource_name>\n4. In Policy, paste a deny for Bedrock:\n {\n \"Version\": \"2012-10-17\",\n \"Statement\": [{\"Sid\":\"DenyBedrock\",\"Effect\":\"Deny\",\"Action\":\"bedrock:*\",\"Resource\":\"*\"}]\n }\n5. Save the policy and click Attach\n6. Select the target (Root, OU, or the affected account ID <example_resource_id>) and attach the policy\n7. Wait for propagation; no further Bedrock calls will occur, and the finding will clear after the detection window elapses",
"Terraform": "```hcl\n# SCP denying all Amazon Bedrock actions; attach it to the root/OU/account to halt LLM jacking\nresource \"aws_organizations_policy\" \"main\" {\n name = \"<example_resource_name>\"\n type = \"SERVICE_CONTROL_POLICY\"\n\n content = jsonencode({\n Version = \"2012-10-17\"\n Statement = [{\n Sid = \"DenyBedrock\"\n Effect = \"Deny\"\n Action = \"bedrock:*\" // CRITICAL: blocks all Bedrock APIs (prevents further suspicious activity)\n Resource = \"*\" // CRITICAL: deny across all resources\n }]\n })\n}\n\nresource \"aws_organizations_policy_attachment\" \"attach\" {\n policy_id = aws_organizations_policy.main.id\n target_id = \"<example_resource_id>\" // CRITICAL: attach to the affected account/OU/root to enforce the deny\n}\n```"
},
"Recommendation": {
"Text": "To remediate this issue, enable detailed CloudTrail logging for Bedrock API calls, monitor suspicious activities, and secure sensitive credentials. Enable logging of model invocation inputs and outputs, and restrict access using IAM policies. Review CloudTrail logs regularly for suspicious `InvokeModel` actions or unauthorized access to models.",
"Url": "https://permiso.io/blog/exploiting-hosted-models"
"Text": "Apply **least privilege** to Bedrock; restrict `Invoke*` only to required roles and deny broadly via **SCPs** where unused. Enforce **MFA** and short-lived creds; rotate/remove exposed keys. Enable **model invocation logging** and budgets/quotas. Continuously monitor for Bedrock enumeration plus invoke bursts. Use **defense in depth** across identities and networks.",
"Url": "https://hub.prowler.com/check/cloudtrail_threat_detection_llm_jacking"
}
},
"Categories": [
"threat-detection"
"threat-detection",
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [],
@@ -1,26 +1,34 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_threat_detection_privilege_escalation",
"CheckTitle": "Ensure there are no potential privilege escalation threats in CloudTrail",
"CheckType": [],
"CheckTitle": "No potential privilege escalation activity detected in CloudTrail",
"CheckType": [
"TTPs/Privilege Escalation",
"Software and Configuration Checks/AWS Security Best Practices/Runtime Behavior Analysis"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"ResourceIdTemplate": "",
"Severity": "critical",
"ResourceType": "AwsCloudTrailTrail",
"Description": "This check ensures that there are no potential privilege escalation threats in CloudTrail.",
"Risk": "Potential privilege escalation threats in CloudTrail can lead to unauthorized access to resources.",
"Description": "**CloudTrail** activity is analyzed for **identities** executing high-risk actions linked to **privilege escalation** (e.g., `Attach*Policy`, `PassRole`, `AssumeRole`, `CreateAccessKey`). Identities exceeding a configurable share of such events within a *recent time window* are highlighted for investigation.",
"Risk": "Escalation patterns can grant elevated entitlements, enabling:\n- Confidentiality loss via unauthorized data/secret access\n- Integrity compromise by changing IAM policies/roles\n- Availability impact by tampering with logging or resources\nThis also facilitates lateral movement and persistence.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://rhinosecuritylabs.com/aws/aws-privilege-escalation-methods-mitigation/",
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-concepts.html#cloudtrail-concepts-logging-data-events",
"https://signmycode.com/blog/what-is-privilege-escalation-in-aws-recommendations-to-prevent-it"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Terraform": ""
"NativeIaC": "```yaml\n# CloudFormation: Organization SCP to block common IAM privilege-escalation actions\nResources:\n <example_resource_name>:\n Type: AWS::Organizations::Policy\n Properties:\n Name: deny-iam-privesc\n Type: SERVICE_CONTROL_POLICY\n # Critical: This SCP denies risky IAM actions often used for privilege escalation\n # Explanation: Denying these actions organization-wide prevents future privesc activity detected by CloudTrail\n Content: |\n {\n \"Version\": \"2012-10-17\",\n \"Statement\": [\n {\n \"Effect\": \"Deny\",\n \"Action\": [\n \"iam:AttachUserPolicy\",\n \"iam:AttachRolePolicy\",\n \"iam:PutUserPolicy\",\n \"iam:PutRolePolicy\",\n \"iam:PutGroupPolicy\",\n \"iam:AddUserToGroup\",\n \"iam:CreateAccessKey\",\n \"iam:CreateLoginProfile\",\n \"iam:UpdateLoginProfile\",\n \"iam:UpdateAssumeRolePolicy\",\n \"iam:CreatePolicyVersion\",\n \"iam:SetDefaultPolicyVersion\",\n \"iam:PassRole\"\n ],\n \"Resource\": \"*\"\n }\n ]\n }\n <example_resource_name>Attachment:\n Type: AWS::Organizations::PolicyAttachment\n Properties:\n # Critical: Attach the SCP so it is enforced\n PolicyId: !Ref <example_resource_name>\n TargetId: <example_resource_id> # OU, Root, or Account ID\n```",
"Other": "1. In AWS Console, open IAM and identify the AWS identity shown in the Prowler finding (user or role ARN)\n2. If it is an IAM user:\n - Go to Security credentials > Access keys, set active keys to Inactive\n - Go to Permissions, detach all managed policies and delete inline policies\n - Go to Groups, remove the user from privileged groups\n - Go to Console password, delete the login profile\n3. If it is an IAM role:\n - Go to Permissions, detach managed policies and delete inline policies\n - Go to Trust relationships, remove principals that should not assume the role and save\n4. Re-run the scan after the detection window elapses to confirm no further privilege-escalation activity is detected",
"Terraform": "```hcl\n# SCP to block common IAM privilege-escalation actions\nresource \"aws_organizations_policy\" \"<example_resource_name>\" {\n name = \"deny-iam-privesc\"\n type = \"SERVICE_CONTROL_POLICY\"\n\n # Critical: Deny risky IAM actions to prevent future privesc\n # Explanation: Blocks escalation techniques commonly seen in CloudTrail\n content = jsonencode({\n Version = \"2012-10-17\",\n Statement = [\n {\n Effect = \"Deny\",\n Action = [\n \"iam:AttachUserPolicy\",\n \"iam:AttachRolePolicy\",\n \"iam:PutUserPolicy\",\n \"iam:PutRolePolicy\",\n \"iam:PutGroupPolicy\",\n \"iam:AddUserToGroup\",\n \"iam:CreateAccessKey\",\n \"iam:CreateLoginProfile\",\n \"iam:UpdateLoginProfile\",\n \"iam:UpdateAssumeRolePolicy\",\n \"iam:CreatePolicyVersion\",\n \"iam:SetDefaultPolicyVersion\",\n \"iam:PassRole\"\n ],\n Resource = \"*\"\n }\n ]\n })\n}\n\nresource \"aws_organizations_policy_attachment\" \"<example_resource_name>_attach\" {\n # Critical: Attach the SCP so it takes effect\n policy_id = aws_organizations_policy.<example_resource_name>.id\n target_id = \"<example_resource_id>\" # OU, Root, or Account ID\n}\n```"
},
"Recommendation": {
"Text": "To remediate this issue, ensure that there are no potential privilege escalation threats in CloudTrail.",
"Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-concepts.html#cloudtrail-concepts-logging-data-events"
"Text": "Apply **least privilege** and **defense in depth**:\n- Restrict `PassRole`, `Attach*Policy`, `UpdateAssumeRolePolicy`, `CreateAccessKey`\n- Enforce permission boundaries and SCPs\n- Require MFA and change approvals\n- Use multi-Region CloudTrail, immutable retention, and alerting on anomalous sequences",
"Url": "https://hub.prowler.com/check/cloudtrail_threat_detection_privilege_escalation"
}
},
"Categories": [
@@ -1,26 +1,35 @@
{
"Provider": "aws",
"CheckID": "directoryservice_directory_log_forwarding_enabled",
"CheckTitle": "Directory Service monitoring with CloudWatch logs.",
"CheckType": [],
"CheckTitle": "Directory Service directory has log forwarding to CloudWatch Logs enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices"
],
"ServiceName": "directoryservice",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:codeartifact:region:account-id:directory/directory-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"Description": "Directory Service monitoring with CloudWatch logs.",
"Risk": "As a best practice, monitor your organization to ensure that changes are logged. This helps you to ensure that any unexpected change can be investigated and unwanted changes can be rolled back.",
"RelatedUrl": "https://docs.aws.amazon.com/directoryservice/latest/admin-guide/incident-response.html",
"Description": "**AWS Directory Service directories** are configured to forward domain controller security event logs to **CloudWatch Logs** using log subscriptions.\n\nEvaluation identifies directories with or without this forwarding in place.",
"Risk": "Without forwarding, visibility into AD security events is lost, delaying detection of suspicious authentications, policy changes, or privilege grants. Attackers can escalate and persist unnoticed, risking unauthorized access (confidentiality) and identity/policy manipulation (integrity), while hampering forensics and response.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.amazonaws.cn/en_us/directoryservice/latest/admin-guide/ms_ad_enable_log_forwarding.html",
"https://docs.aws.amazon.com/directoryservice/latest/admin-guide/incident-response.html",
"https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_enable_log_forwarding.html",
"https://support.icompaas.com/support/solutions/articles/62000233528--ensure-directory-service-monitoring-with-cloudwatch-logs"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Terraform": ""
"NativeIaC": "```yaml\n# CloudFormation: enable Directory Service log forwarding to CloudWatch Logs\nResources:\n LogGroup:\n Type: AWS::Logs::LogGroup\n Properties:\n LogGroupName: /aws/directoryservice/<example_resource_id>\n\n LogsResourcePolicy:\n Type: AWS::Logs::ResourcePolicy\n Properties:\n PolicyName: DSLogSubscription\n PolicyDocument: |\n {\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"Service\":\"ds.amazonaws.com\"},\"Action\":[\"logs:CreateLogStream\",\"logs:PutLogEvents\",\"logs:DescribeLogStreams\"],\"Resource\":\"arn:aws:logs:*:*:log-group:/aws/directoryservice/*\"}]}\n\n DirectoryLogSubscription:\n Type: AWS::DirectoryService::LogSubscription\n Properties:\n DirectoryId: <example_resource_id> # CRITICAL: target Directory Service ID to enable log forwarding\n LogGroupName: /aws/directoryservice/<example_resource_id> # CRITICAL: CloudWatch Logs destination\n```",
"Other": "1. In the AWS Console, go to Directory Service > Directories and open your directory\n2. On the Directory details page, select the Networking & security tab\n3. In Log forwarding, click Enable\n4. Choose Create a new CloudWatch log group (or select an existing one)\n5. Click Enable to start forwarding logs",
"Terraform": "```hcl\n# Enable Directory Service log forwarding to CloudWatch Logs\nresource \"aws_cloudwatch_log_group\" \"ds\" {\n name = \"/aws/directoryservice/<example_resource_id>\"\n}\n\nresource \"aws_cloudwatch_log_resource_policy\" \"ds\" {\n policy_name = \"DSLogSubscription\"\n policy_document = jsonencode({\n Version = \"2012-10-17\",\n Statement = [{\n Effect = \"Allow\",\n Principal = { Service = \"ds.amazonaws.com\" },\n Action = [\"logs:CreateLogStream\", \"logs:PutLogEvents\", \"logs:DescribeLogStreams\"],\n Resource = \"arn:aws:logs:*:*:log-group:/aws/directoryservice/*\"\n }]\n })\n}\n\nresource \"aws_directory_service_log_subscription\" \"enable\" {\n directory_id = \"<example_resource_id>\" # CRITICAL: enables log forwarding for this directory\n log_group_name = aws_cloudwatch_log_group.ds.name # CRITICAL: CloudWatch Logs destination\n}\n```"
},
"Recommendation": {
"Text": "It is recommended that that the export of logs is enabled.",
"Url": "https://docs.aws.amazon.com/directoryservice/latest/admin-guide/incident-response.html"
"Text": "Enable and maintain **log forwarding** to CloudWatch Logs.\n\n- Centralize logs in a protected group with strict access and retention\n- Apply least privilege for delivery roles and readers; prevent tampering (immutability)\n- Alert on high-risk events and aggregate across Regions/accounts for defense in depth",
"Url": "https://hub.prowler.com/check/directoryservice_directory_log_forwarding_enabled"
}
},
"Categories": [
@@ -1,29 +1,37 @@
{
"Provider": "aws",
"CheckID": "directoryservice_directory_monitor_notifications",
"CheckTitle": "Directory Service has SNS Notifications enabled.",
"CheckType": [],
"CheckTitle": "Directory Service directory has SNS notifications enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices"
],
"ServiceName": "directoryservice",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:codeartifact:region:account-id:directory/directory-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"Description": "Directory Service has SNS Notifications enabled.",
"Risk": "As a best practice, monitor status of Directory Service. This helps to avoid late actions to fix Directory Service issues.",
"RelatedUrl": "https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_enable_notifications.html",
"Description": "**AWS Directory Service** directories are associated with **Amazon SNS topics** to send status change notifications (e.g., `Active` `Impaired`).\n\nThe evaluation looks for directories that have SNS event topics configured for monitoring alerts.",
"Risk": "Missing directory notifications reduces visibility into health changes, causing delayed response to `Impaired` states. This threatens availability of authentication, Kerberos/LDAP lookups, and domain joins; increases MTTR; and can enable silent replication or trust failures that impact integrity across dependent workloads.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_enable_notifications.html",
"https://support.icompaas.com/support/solutions/articles/62000233533-ensure-directory-service-has-sns-notifications-enabled"
],
"Remediation": {
"Code": {
"CLI": "",
"CLI": "aws ds register-event-topic --directory-id <DIRECTORY_ID> --topic-name <SNS_TOPIC_NAME>",
"NativeIaC": "",
"Other": "",
"Other": "1. Open AWS Console > Directory Service > Directories and select your directory\n2. Go to the Maintenance or Monitoring/Notifications section\n3. Click Actions > Create notification (or Set up notifications)\n4. Select an existing SNS topic (or create one) and Save",
"Terraform": ""
},
"Recommendation": {
"Text": "It is recommended set up SNS messaging to send email or text messages when the status of your directory changes.",
"Url": "https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_enable_notifications.html"
"Text": "Configure **AWS Directory Service** to publish directory status changes to an **SNS topic**, and subscribe your operations channels for timely alerts.\n\nApply **least privilege** on topic permissions, integrate alerts with incident response, and use **defense in depth** by pairing notifications with logs and dashboards.",
"Url": "https://hub.prowler.com/check/directoryservice_directory_monitor_notifications"
}
},
"Categories": [],
"Categories": [
"logging"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,29 +1,38 @@
{
"Provider": "aws",
"CheckID": "directoryservice_directory_snapshots_limit",
"CheckTitle": "Directory Service Manual Snapshots limit reached.",
"CheckType": [],
"CheckTitle": "Directory Service directory has adequate remaining manual snapshot quota",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Resource Consumption"
],
"ServiceName": "directoryservice",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:codeartifact:region:account-id:directory/directory-id",
"ResourceIdTemplate": "",
"Severity": "low",
"ResourceType": "Other",
"Description": "Directory Service Manual Snapshots limit reached.",
"Risk": "A limit reached can bring unwanted results. The maximum number of manual snapshots is a hard limit.",
"RelatedUrl": "https://docs.aws.amazon.com/general/latest/gr/ds_region.html",
"Description": "**AWS Directory Service** directories with **manual snapshot capacity** fully consumed or nearly exhausted, based on current snapshot count relative to the directory's maximum allowed.",
"Risk": "With no remaining snapshot capacity, you cannot create new recovery points:\n- Reduced availability during outages or ransomware\n- Higher RPO from failed scheduled backups\n- Greater change risk (schema/OS updates) without a safe rollback",
"RelatedUrl": "",
"AdditionalURLs": [
"https://support.icompaas.com/support/solutions/articles/62000233531--ensure-directory-service-manual-snapshots-limit-reached",
"https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_limits.html"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Other": "1. In the AWS Console, go to Directory Service > Directories and open <example_resource_id>\n2. Click Snapshots\n3. Select older snapshots with Type = Manual and click Delete snapshot, confirm\n4. Repeat until the number of manual snapshots is less than (manual limit - 2). For the default limit of 5, keep at most 2 manual snapshots\n5. Verify Remaining manual snapshots > 2 on the Snapshots page",
"Terraform": ""
},
"Recommendation": {
"Text": "Monitor manual snapshots limit to ensure capacity when you need it.",
"Url": "https://docs.aws.amazon.com/general/latest/gr/ds_region.html"
"Text": "Adopt a **snapshot lifecycle policy**: rotate/expire old manual snapshots after verifying restores, and alert on low headroom. Prefer **automated backups** for cadence and retention. Enforce **least privilege** for snapshot creation. Design operations within the *hard per-directory cap* to prevent capacity exhaustion.",
"Url": "https://hub.prowler.com/check/directoryservice_directory_snapshots_limit"
}
},
"Categories": [],
"Categories": [
"resilience"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,29 +1,38 @@
{
"Provider": "aws",
"CheckID": "directoryservice_ldap_certificate_expiration",
"CheckTitle": "Directory Service LDAP Certificates expiration.",
"CheckType": [],
"CheckTitle": "Directory Service LDAP certificate expires in more than 90 days",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices"
],
"ServiceName": "directoryservice",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:codeartifact:region:account-id:directory/directory-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"Description": "Directory Service Manual Snapshots limit reached.",
"Risk": "Expired certificates can impact service availability.",
"RelatedUrl": "https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_ldap.html",
"Description": "**AWS Directory Service** Secure LDAP (LDAPS) certificates are assessed for upcoming expiration by comparing each directory's certificate expiration to the current time and identifying those with `<= 90` days remaining.",
"Risk": "Expired LDAPS certificates cause TLS handshakes to fail, blocking directory binds and queries and disrupting authentication and app integrations (availability). If clients fall back to plain LDAP, credentials and directory data can be intercepted or altered (confidentiality and integrity).",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_ldap.html",
"https://support.icompaas.com/support/solutions/articles/62000229587-ensure-to-monitor-directory-service-ldap-certificates-expiration"
],
"Remediation": {
"Code": {
"CLI": "",
"CLI": "aws ds register-certificate --directory-id <DIRECTORY_ID> --certificate-data file://certificate.pem",
"NativeIaC": "",
"Other": "",
"Other": "1. In the AWS Console, open Directory Service and select your AWS Managed Microsoft AD (<example_resource_id>)\n2. Go to Networking & security > Secure LDAP\n3. Click Edit (Manage certificate)\n4. Choose Replace certificate (or Upload certificate)\n5. Upload a new LDAPS server certificate with private key from a trusted CA (valid for >90 days); enter the password if using a .pfx\n6. Save and wait until the certificate status is Active",
"Terraform": ""
},
"Recommendation": {
"Text": "Monitor certificate expiration and take automated action to alarm responsible team for taking care of the replacement or remove.",
"Url": "https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_ldap.html"
"Text": "Adopt certificate lifecycle management: inventory LDAPS certificates, alert well before expiry, and automate renewal with staged rollout and overlap. Enforce TLS-only LDAP and disable plaintext fallback. Apply **least privilege** and **separation of duties** to certificate issuance and deployment.",
"Url": "https://hub.prowler.com/check/directoryservice_ldap_certificate_expiration"
}
},
"Categories": [],
"Categories": [
"encryption"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,29 +1,40 @@
{
"Provider": "aws",
"CheckID": "directoryservice_radius_server_security_protocol",
"CheckTitle": "Ensure Radius server in DS is using the recommended security protocol.",
"CheckType": [],
"CheckTitle": "Directory Service directory RADIUS server uses MS-CHAPv2",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"TTPs/Credential Access"
],
"ServiceName": "directoryservice",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:codeartifact:region:account-id:directory/directory-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"Description": "Ensure Radius server in DS is using the recommended security protocol.",
"Risk": "As a best practice, you might need to configure the authentication protocol between the Microsoft AD DCs and the RADIUS/MFA server. Supported protocols are PAP, CHAP MS-CHAPv1, and MS-CHAPv2. MS-CHAPv2 is recommended because it provides the strongest security of the three options.",
"RelatedUrl": "https://aws.amazon.com/blogs/security/how-to-enable-multi-factor-authentication-for-amazon-workspaces-and-amazon-quicksight-by-using-microsoft-ad-and-on-premises-credentials/",
"Description": "AWS Directory Service RADIUS configuration uses the **authentication protocol** defined for MFA integration. The finding evaluates whether directories with RADIUS enabled are set to `MS-CHAPv2` instead of weaker options like `PAP`, `CHAP`, or `MS-CHAPv1`.",
"Risk": "Using `PAP`, `CHAP`, or `MS-CHAPv1` weakens RADIUS-based MFA.\n\n`PAP` exposes cleartext credentials, while legacy CHAP variants permit offline cracking and replay, enabling unauthorized access to AD-integrated services and lateral movement, degrading confidentiality and integrity.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.secureauth.com/0903/en/ms-chapv2-and-radius--sp-initiated--for-cisco-and-netscaler-configuration-guide.html",
"https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_mfa.html",
"https://www.freeradius.org/documentation/freeradius-server/4.0~alpha1/raddb/mods-available/mschap.html"
],
"Remediation": {
"Code": {
"CLI": "",
"CLI": "aws ds update-radius --directory-id <example_resource_id> --radius-settings AuthenticationProtocol=MS-CHAPv2",
"NativeIaC": "",
"Other": "",
"Terraform": ""
"Other": "1. In the AWS Console, open Directory Service and select your directory\n2. Open the Networking & security tab (Multi-factor authentication section)\n3. Click Actions > Edit (or Enable)\n4. Set Protocol to MS-CHAPv2\n5. Click Save (or Enable) to apply",
"Terraform": "```hcl\nresource \"aws_directory_service_radius_settings\" \"<example_resource_name>\" {\n directory_id = \"<example_resource_id>\"\n radius_servers = [\"<RADIUS_SERVER_IP>\"]\n shared_secret = \"<SHARED_SECRET>\"\n\n authentication_protocol = \"MS-CHAPv2\" # Critical: sets the RADIUS auth protocol to MS-CHAPv2 to pass the check\n}\n```"
},
"Recommendation": {
"Text": "MS-CHAPv2 provides the strongest security of the options supported, and is therefore recommended.",
"Url": "https://aws.amazon.com/blogs/security/how-to-enable-multi-factor-authentication-for-amazon-workspaces-and-amazon-quicksight-by-using-microsoft-ad-and-on-premises-credentials/"
"Text": "Standardize on `MS-CHAPv2` for RADIUS authentication to MFA providers. Disable `PAP`, `CHAP`, and `MS-CHAPv1` to prevent downgrades. Apply least privilege and defense in depth: use strong shared secrets, restrict network access to RADIUS endpoints, and monitor authentication logs for anomalies.",
"Url": "https://hub.prowler.com/check/directoryservice_radius_server_security_protocol"
}
},
"Categories": [],
"Categories": [
"identity-access"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,29 +1,40 @@
{
"Provider": "aws",
"CheckID": "directoryservice_supported_mfa_radius_enabled",
"CheckTitle": "Ensure Multi-Factor Authentication (MFA) using Radius Server is enabled in DS.",
"CheckType": [],
"CheckTitle": "AWS Directory Service directory has RADIUS-based MFA enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"TTPs/Initial Access",
"TTPs/Credential Access"
],
"ServiceName": "directoryservice",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:codeartifact:region:account-id:directory/directory-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"Description": "Ensure Multi-Factor Authentication (MFA) using Radius Server is enabled in DS.",
"Risk": "Multi-Factor Authentication (MFA) adds an extra layer of authentication assurance beyond traditional username and password.",
"RelatedUrl": "https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_mfa.html",
"Description": "**AWS Directory Service directories** are evaluated for **RADIUS-backed multi-factor authentication**, confirming that MFA is configured and the RADIUS integration is active.",
"Risk": "Without **RADIUS MFA**, directory-based sign-ins to AWS-integrated services rely on a single factor, enabling credential stuffing and phishing to succeed. Compromised passwords can grant unauthorized access, drive data exfiltration, and enable privilege escalation, undermining confidentiality and integrity.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_mfa.html",
"https://support.icompaas.com/support/solutions/articles/62000233537-ensure-multi-factor-authentication-mfa-using-a-radius-server-is-enabled-in-directory-service"
],
"Remediation": {
"Code": {
"CLI": "",
"CLI": "aws ds enable-radius --directory-id <example_resource_id> --radius-settings '{\"RadiusServers\":[\"<RADIUS_IP_OR_DNS>\"],\"SharedSecret\":\"<SHARED_SECRET>\"}'",
"NativeIaC": "",
"Other": "",
"Terraform": ""
"Other": "1. Sign in to the AWS Console and open Directory Service\n2. Select your directory and open it\n3. Go to the Networking & security tab\n4. In Multi-factor authentication, click Actions > Enable\n5. Enter RADIUS server IP(s) and the Shared secret, then click Enable\n6. Wait until the RADIUS status shows Completed",
"Terraform": "```hcl\nresource \"aws_directory_service_radius_settings\" \"<example_resource_name>\" {\n directory_id = \"<example_resource_id>\" # Directory to enable RADIUS MFA on\n radius_servers = [\"<RADIUS_IP_OR_DNS>\"] # Critical: RADIUS server endpoint(s)\n shared_secret = \"<SHARED_SECRET>\" # Critical: Shared secret for RADIUS\n}\n```"
},
"Recommendation": {
"Text": "Enabling MFA provides increased security to a user name and password as it requires the user to possess a solution that displays a time-sensitive authentication code.",
"Url": "https://docs.aws.amazon.com/directoryservice/latest/admin-guide/ms_ad_mfa.html"
"Text": "Enable and enforce **RADIUS-based MFA** for all Directory Service authentications. Apply **least privilege**, harden and monitor the RADIUS infrastructure, rotate shared secrets, and restrict network access (e.g., `UDP/1812`). Use **defense in depth** with segmentation and session controls to limit lateral movement and reduce blast radius.",
"Url": "https://hub.prowler.com/check/directoryservice_supported_mfa_radius_enabled"
}
},
"Categories": [],
"Categories": [
"identity-access"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,28 +1,34 @@
{
"Provider": "aws",
"CheckID": "dlm_ebs_snapshot_lifecycle_policy_exists",
"CheckTitle": "Ensure EBS Snapshot lifecycle policies are defined.",
"CheckTitle": "Region with EBS snapshots has at least one EBS snapshot lifecycle policy defined",
"CheckType": [
"Data Protection"
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices"
],
"ServiceName": "dlm",
"SubServiceName": "ebs",
"ResourceIdTemplate": "arn:aws:iam::account-id:resource-id",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"Description": "Ensure EBS Snapshot lifecycle policies are defined.",
"Risk": "With AWS DLM service, you can manage the lifecycle of your EBS volume snapshots. By automating the EBS volume backup management using lifecycle policies, you can protect your EBS data by enforcing a regular backup schedule, retain backups as required by auditors or internal compliance.",
"RelatedUrl": "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/snapshot-lifecycle.html#dlm-elements",
"Description": "**EBS snapshots** are expected to be governed by **Data Lifecycle Manager (DLM) policies** in each Region where snapshots exist.\n\nThe evaluation looks for lifecycle policies that automate snapshot creation, retention, and cleanup for those snapshots.",
"Risk": "Without **automated lifecycle policies**, backups become inconsistent and error-prone, reducing availability and weakening recovery objectives. Missing retention rules cause premature deletion or snapshot sprawl, increasing cost and exposing stale data. Lack of cross-Region/account copies limits resilience to regional outages and malicious deletion.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/DLM/ebs-snapshot-automation.html",
"https://repost.aws/articles/ARmYgZmA8MRQi89pWd9D7eFw/how-to-create-a-automate-backup-aws-data-lifecycle-management-using-snapshots",
"https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/snapshot-lifecycle.html#dlm-elements"
],
"Remediation": {
"Code": {
"CLI": "aws dlm create-lifecycle-policy --region <region> --execution-role-arn <execution-role-arn> --description <description> --state ENABLED --policy-details file://lifecycle-policy-config.json",
"NativeIaC": "",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/DLM/ebs-snapshot-automation.html",
"Terraform": ""
"CLI": "aws dlm create-lifecycle-policy --region <region> --execution-role-arn <execution-role-arn> --description \"<description>\" --state ENABLED --policy-details '{\"PolicyType\":\"EBS_SNAPSHOT_MANAGEMENT\",\"ResourceTypes\":[\"VOLUME\"],\"TargetTags\":[{\"Key\":\"<tag_key>\",\"Value\":\"<tag_value>\"}],\"Schedules\":[{\"CreateRule\":{\"Interval\":24,\"IntervalUnit\":\"HOURS\"},\"RetainRule\":{\"Count\":1}}]}'",
"NativeIaC": "```yaml\n# CloudFormation: minimal EBS snapshot lifecycle policy\nResources:\n <example_resource_name>:\n Type: AWS::DLM::LifecyclePolicy\n Properties:\n Description: \"<description>\"\n ExecutionRoleArn: \"<example_resource_arn>\"\n State: ENABLED # Critical: enables the policy so it is counted by the check\n PolicyDetails:\n PolicyType: EBS_SNAPSHOT_MANAGEMENT # Critical: creates an EBS snapshot lifecycle policy\n ResourceTypes: [VOLUME]\n TargetTags:\n - Key: \"<tag_key>\" # Critical: selects target volumes by tag\n Value: \"<tag_value>\"\n Schedules:\n - CreateRule:\n Interval: 24\n IntervalUnit: HOURS\n RetainRule:\n Count: 1\n```",
"Other": "1. In the AWS console, switch to the Region that has EBS snapshots\n2. Open EC2 > Lifecycle Manager (DLM) > Create lifecycle policy\n3. Select EBS snapshot policy; Target resource: Volumes\n4. Add Target tags: Key = <tag_key>, Value = <tag_value>\n5. Set Schedule: Create every 24 hours; Retain 1 snapshot\n6. Ensure State is Enabled and click Create policy",
"Terraform": "```hcl\n# Terraform: minimal EBS snapshot lifecycle policy\nresource \"aws_dlm_lifecycle_policy\" \"<example_resource_name>\" {\n description = \"<description>\"\n execution_role_arn = \"<example_resource_arn>\"\n state = \"ENABLED\" # Critical: enables the policy so it is counted by the check\n\n policy_details {\n policy_type = \"EBS_SNAPSHOT_MANAGEMENT\" # Critical: creates an EBS snapshot lifecycle policy\n resource_types = [\"VOLUME\"]\n target_tags = {\n \"<tag_key>\" = \"<tag_value>\" # Critical: selects target volumes by tag\n }\n schedule {\n create_rule {\n interval = 24\n interval_unit = \"HOURS\"\n }\n retain_rule {\n count = 1\n }\n }\n }\n}\n```"
},
"Recommendation": {
"Text": "To use Amazon Data Lifecycle Manager (DLM) service to manage the lifecycle of your EBS volume snapshots, you have to tag your AWS EBS volumes and create data lifecycle policies via Amazon DLM.",
"Url": "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/snapshot-lifecycle.html#dlm-elements"
"Text": "Implement **DLM lifecycle policies** for all volumes that require backup.\n\n- Schedule creations to meet RPO/RTO\n- Define retention to prevent sprawl and enforce least data exposure\n- Use **least privilege** roles and separation of duties\n- Copy snapshots to another Region/account for **defense in depth**\n- Monitor policy health and coverage with tags",
"Url": "https://hub.prowler.com/check/dlm_ebs_snapshot_lifecycle_policy_exists"
}
},
"Categories": [
@@ -1,31 +1,38 @@
{
"Provider": "aws",
"CheckID": "dms_endpoint_mongodb_authentication_enabled",
"CheckTitle": "Check if DMS endpoints for MongoDB have an authentication mechanism enabled.",
"CheckTitle": "DMS MongoDB endpoint has an authentication mechanism enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices"
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure"
],
"ServiceName": "dms",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:dms:region:account-id:endpoint/endpoint-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsDmsEndpoint",
"Description": "This control checks whether an AWS DMS endpoint for MongoDB is configured with an authentication mechanism. The control fails if an authentication type isn't set for the endpoint.",
"Risk": "Without an authentication mechanism enabled, unauthorized users may gain access to sensitive data during migration, increasing the risk of data breaches and security incidents.",
"RelatedUrl": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.MongoDB.html",
"Description": "**AWS DMS MongoDB endpoints** use an authentication mechanism. Configuration expects `AuthType` not `no` (e.g., `password`) with an `authMechanism` such as `scram_sha_1` or `mongodb_cr`.",
"Risk": "Without authentication, unauthenticated connections can access the source, degrading **confidentiality** and **integrity**. Adversaries could read or modify migrated documents, hijack CDC, inject data, or exfiltrate records during replication.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.MongoDB.html",
"https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-11"
],
"Remediation": {
"Code": {
"CLI": "aws dms modify-endpoint --endpoint-arn <endpoint-arn> --username <username> --password <password> --authentication-type <authentication-type>",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-11",
"Terraform": ""
"CLI": "aws dms modify-endpoint --endpoint-arn <endpoint-arn> --mongodb-settings '{\"AuthType\":\"password\"}' --username <username> --password <password>",
"NativeIaC": "```yaml\n# CloudFormation: enable authentication on a MongoDB DMS endpoint\nResources:\n <example_resource_name>:\n Type: AWS::DMS::Endpoint\n Properties:\n EndpointIdentifier: <example_resource_name>\n EndpointType: source\n EngineName: mongodb\n MongoDbSettings:\n AuthType: password # CRITICAL: sets authentication mode to 'password' so auth is enabled\n```",
"Other": "1. In the AWS Console, go to Database Migration Service > Endpoints\n2. Select the MongoDB endpoint and click Modify\n3. Under MongoDB settings, set Authentication mode to Password\n4. Enter Username and Password\n5. Click Save changes",
"Terraform": "```hcl\n# Terraform: enable authentication on a MongoDB DMS endpoint\nresource \"aws_dms_endpoint\" \"<example_resource_name>\" {\n endpoint_id = \"<example_resource_name>\"\n endpoint_type = \"source\"\n engine_name = \"mongodb\"\n\n mongodb_settings {\n auth_type = \"password\" # CRITICAL: enables authentication for the MongoDB endpoint\n }\n}\n```"
},
"Recommendation": {
"Text": "Enable an authentication mechanism on DMS endpoints for MongoDB to ensure secure access control during migration.",
"Url": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.MongoDB.html"
"Text": "Enforce **strong authentication** on MongoDB endpoints: set `AuthType` to `password` and use `authMechanism` like `scram_sha_1`. Apply **least privilege** database accounts, store secrets in **Secrets Manager**, and pair with **TLS** for defense in depth.",
"Url": "https://hub.prowler.com/check/dms_endpoint_mongodb_authentication_enabled"
}
},
"Categories": [],
"Categories": [
"identity-access"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,31 +1,38 @@
{
"Provider": "aws",
"CheckID": "dms_endpoint_neptune_iam_authorization_enabled",
"CheckTitle": "Check if DMS endpoints for Neptune databases have IAM authorization enabled.",
"CheckTitle": "DMS endpoint for Neptune has IAM authorization enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices"
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure"
],
"ServiceName": "dms",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:dms:region:account-id:endpoint/endpoint-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsDmsEndpoint",
"Description": "This control checks whether an AWS DMS endpoint for an Amazon Neptune database is configured with IAM authorization. The control fails if the DMS endpoint doesn't have IAM authorization enabled.",
"Risk": "Without IAM authorization, DMS endpoints for Neptune databases may lack granular access control, increasing the risk of unauthorized access to sensitive data.",
"RelatedUrl": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Neptune.html",
"Description": "**DMS Neptune endpoints** have **IAM authorization** enabled via the endpoint setting `IamAuthEnabled`.",
"Risk": "Without **IAM authorization**, migration components can interact with Neptune using broad trust, enabling unauthorized data loads, reads, or alterations.\n\nThis degrades **confidentiality** and **integrity** and increases the chance of privilege abuse and data exfiltration.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Neptune.html",
"https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-10"
],
"Remediation": {
"Code": {
"CLI": "aws dms modify-endpoint --endpoint-arn <endpoint-arn> --service-access-role-arn <iam-role-arn>",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-10",
"CLI": "aws dms modify-endpoint --endpoint-arn <endpoint-arn> --neptune-settings '{\"IamAuthEnabled\":true}'",
"NativeIaC": "```yaml\n# CloudFormation: Enable IAM authorization on a DMS Neptune endpoint\nResources:\n <example_resource_name>:\n Type: AWS::DMS::Endpoint\n Properties:\n EndpointType: target\n EngineName: neptune\n NeptuneSettings:\n ServiceAccessRoleArn: <example_resource_arn>\n S3BucketName: <example_resource_name>\n S3BucketFolder: <example_resource_name>\n IamAuthEnabled: true # Critical: enables IAM authorization for the Neptune endpoint\n```",
"Other": "1. In the AWS Console, go to Database Migration Service > Endpoints\n2. Select the Neptune endpoint and click Modify\n3. Expand Endpoint settings (Neptune settings) and set IAM authorization to Enabled\n4. Ensure Service access role ARN is set, then click Save",
"Terraform": ""
},
"Recommendation": {
"Text": "Enable IAM authorization on DMS endpoints for Neptune databases by specifying a service role in the ServiceAccessRoleARN parameter.",
"Url": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Neptune.html"
"Text": "Enable **IAM authorization** on Neptune endpoints (`IamAuthEnabled=true`) and use a **least privilege** service role limited to minimal Neptune and S3 permissions.\n\nApply **defense in depth**: restrict network paths, separate duties for migration roles, and monitor access with logs and alerts.",
"Url": "https://hub.prowler.com/check/dms_endpoint_neptune_iam_authorization_enabled"
}
},
"Categories": [],
"Categories": [
"identity-access"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,31 +1,41 @@
{
"Provider": "aws",
"CheckID": "dms_endpoint_redis_in_transit_encryption_enabled",
"CheckTitle": "Check if DMS endpoints for Redis OSS are encrypted in transit.",
"CheckTitle": "DMS endpoint for Redis OSS is encrypted in transit",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices"
"Software and Configuration Checks/AWS Security Best Practices/Encryption in Transit",
"Software and Configuration Checks/Industry and Regulatory Standards/NIST 800-53 Controls (USA)",
"Software and Configuration Checks/Industry and Regulatory Standards/PCI-DSS",
"Software and Configuration Checks/Industry and Regulatory Standards/ISO 27001 Controls"
],
"ServiceName": "dms",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:dms:region:account-id:endpoint/endpoint-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsDmsEndpoint",
"Description": "This control checks whether an AWS DMS endpoint for Redis OSS is configured with a TLS connection. The control fails if the endpoint doesn't have TLS enabled.",
"Risk": "Without TLS, data transmitted between databases may be vulnerable to interception or eavesdropping, increasing the risk of data breaches and other security incidents.",
"RelatedUrl": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Source.Redis.html",
"Description": "**DMS Redis OSS endpoints** are assessed for the presence of **TLS** in their endpoint settings, such as `ssl-encryption`, indicating encrypted connections between the DMS replication instance and Redis.",
"Risk": "Without **TLS**, traffic between DMS and Redis can be intercepted or altered, compromising **confidentiality** and **integrity**.\n\nAttackers can perform **man-in-the-middle** interception, steal auth tokens, and inject or corrupt migrated data.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-12",
"https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Redis.html#CHAP_Target.Redis.EndpointSettings",
"https://support.icompaas.com/support/solutions/articles/62000233450-ensure-encryption-in-transit-for-dms-endpoints-for-redis-oss"
],
"Remediation": {
"Code": {
"CLI": "aws dms modify-endpoint --endpoint-arn <endpoint-arn> --redis-settings '{'SslSecurityProtocol': 'ssl-encryption'}'",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-12",
"Terraform": ""
"CLI": "",
"NativeIaC": "```yaml\n# CloudFormation: Enable TLS for Redis OSS DMS endpoint\nResources:\n <example_resource_name>:\n Type: AWS::DMS::Endpoint\n Properties:\n EndpointIdentifier: <example_resource_name>\n EndpointType: target\n EngineName: redis\n RedisSettings:\n ServerName: <example_resource_name>\n Port: 6379\n AuthType: none\n SslSecurityProtocol: ssl-encryption # Critical: enables TLS for in-transit encryption\n```",
"Other": "1. In the AWS Console, go to Database Migration Service > Endpoints\n2. Select the Redis OSS endpoint and click Modify\n3. Set SSL security protocol (Encryption in transit) to \"SSL encryption\"\n4. Save changes",
"Terraform": "```hcl\n# Enable TLS for Redis OSS DMS endpoint\nresource \"aws_dms_endpoint\" \"<example_resource_name>\" {\n endpoint_id = \"<example_resource_id>\"\n endpoint_type = \"target\"\n engine_name = \"redis\"\n\n redis_settings {\n server_name = \"<example_resource_name>\"\n port = 6379\n auth_type = \"none\"\n ssl_security_protocol = \"ssl-encryption\" # Critical: enables TLS for in-transit encryption\n }\n}\n```"
},
"Recommendation": {
"Text": "Enable TLS for DMS endpoints for Redis OSS to ensure encrypted communication during data migration.",
"Url": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Target.Redis.html#CHAP_Target.Redis.EndpointSettings"
"Text": "Enable **TLS** on Redis OSS endpoints (e.g., `ssl-encryption`) and require server certificate validation. Prohibit plaintext connections, prefer private networking, and enforce **least privilege** for DMS roles to strengthen **defense in depth**.",
"Url": "https://hub.prowler.com/check/dms_endpoint_redis_in_transit_encryption_enabled"
}
},
"Categories": [],
"Categories": [
"encryption"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,32 +1,40 @@
{
"Provider": "aws",
"CheckID": "dms_endpoint_ssl_enabled",
"CheckTitle": "Ensure SSL mode is enabled in DMS endpoint",
"CheckType": ["Effects", "Data Exposure"],
"CheckTitle": "DMS endpoint has SSL enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure"
],
"ServiceName": "dms",
"SubServiceName": "endpoint",
"ResourceIdTemplate": "arn:partition:dms:region:account-id:endpoint:resource-id",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "high",
"ResourceType": "AwsDmsEndpoint",
"Description": "This check ensures that SSL mode is enabled for all AWS Database Migration Service (DMS) endpoints. Enabling SSL provides encryption in transit for data transferred through these endpoints.",
"Risk": "Without SSL enabled, data transferred through DMS endpoints is not encrypted, potentially exposing sensitive information to unauthorized access or interception during transit.",
"RelatedUrl": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Security.SSL.html",
"Description": "**AWS DMS endpoints** have their SSL/TLS mode inspected; any value other than `none` denotes encrypted connections between the replication instance and databases.\n\nSupported modes include `require`, `verify-ca`, and `verify-full`.",
"Risk": "Without TLS, data in transit can be read or altered, affecting:\n- **Confidentiality** via packet sniffing and credential leakage\n- **Integrity** through **MITM** tampering of migration streams\n- **Availability** from session hijack or task disruption",
"RelatedUrl": "",
"AdditionalURLs": [
"https://aws.amazon.com/blogs/database/configuring-ssl-encryption-on-oracle-and-postgresql-endpoints-in-aws-dms/",
"https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Security.SSL.html",
"https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-9"
],
"Remediation": {
"Code": {
"CLI": "aws dms modify-endpoint --endpoint-arn <endpoint_arn> --ssl-mode require",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-9",
"Terraform": ""
},
"Recommendation": {
"Text": "Enable SSL mode for all DMS endpoints. Use 'require' as the minimum SSL mode, and consider using 'verify-ca' or 'verify-full' for higher security.",
"Url": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Security.SSL.html"
}
"Code": {
"CLI": "aws dms modify-endpoint --endpoint-arn <endpoint-arn> --ssl-mode require",
"NativeIaC": "```yaml\n# CloudFormation: Set SSL on a DMS endpoint\nResources:\n <example_resource_name>:\n Type: AWS::DMS::Endpoint\n Properties:\n EndpointIdentifier: <example_resource_name>\n EndpointType: source\n EngineName: sqlserver\n ServerName: <server_name>\n Port: 1433\n Username: <username>\n Password: <password>\n SslMode: require # CRITICAL: enables SSL (not \"none\"), fixing the finding\n```",
"Other": "1. In the AWS DMS console, go to Endpoints\n2. Select the non-compliant endpoint and choose Modify\n3. Set SSL mode to Require (or Verify-ca/Verify-full if required by your engine and certificate is available)\n4. If Verify-ca/Verify-full is selected, choose the appropriate CA certificate\n5. Save changes, then Test connection to confirm",
"Terraform": "```hcl\n# Terraform: Set SSL on a DMS endpoint\nresource \"aws_dms_endpoint\" \"<example_resource_name>\" {\n endpoint_id = \"<example_resource_name>\"\n endpoint_type = \"source\"\n engine_name = \"sqlserver\"\n server_name = \"<server_name>\"\n port = 1433\n username = \"<username>\"\n password = \"<password>\"\n\n ssl_mode = \"require\" # CRITICAL: enables SSL (not \"none\"), fixing the finding\n}\n```"
},
"Recommendation": {
"Text": "Configure endpoints to use SSL/TLS at least `require`; prefer `verify-ca` or `verify-full` where supported. Manage trusted CA material and rotate regularly. Apply **defense in depth** with private connectivity and strict IAM, and enforce this posture via policy-as-code and continuous validation.",
"Url": "https://hub.prowler.com/check/dms_endpoint_ssl_enabled"
}
},
"Categories": [
"encryption"
"encryption"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}
}
@@ -1,29 +1,39 @@
{
"Provider": "aws",
"CheckID": "dms_instance_minor_version_upgrade_enabled",
"CheckTitle": "Ensure DMS instances have auto minor version upgrade enabled.",
"CheckType": [],
"CheckTitle": "DMS replication instance has auto minor version upgrade enabled",
"CheckType": [
"Software and Configuration Checks/Patch Management",
"Software and Configuration Checks/AWS Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices"
],
"ServiceName": "dms",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:rdmsds:region:account-id:rep",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsDmsReplicationInstance",
"Description": "Ensure DMS instances have auto minor version upgrade enabled.",
"Risk": "Ensure that your Amazon Database Migration Service (DMS) replication instances have the Auto Minor Version Upgrade feature enabled in order to receive automatically minor engine upgrades.",
"RelatedUrl": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-6",
"Description": "**AWS DMS replication instances** are evaluated for the `auto_minor_version_upgrade` setting to confirm **automatic minor engine updates** are enabled during the maintenance window.",
"Risk": "Without **automatic minor upgrades**, DMS engines can miss security patches and fixes, enabling exploitation of known flaws and instability.\n- Confidentiality: exposure via unpatched components\n- Integrity: replication errors or data drift\n- Availability: outages during migration or CDC",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-6",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/DMS/auto-minor-version-upgrade.html"
],
"Remediation": {
"Code": {
"CLI": "aws dms modify-replication-instance --region <REGION> --replication-instance-arn arn:aws:dms:<REGION>:<ACCOUNT_ID>:rep:<REPLICATION_ID> --auto-minor-version-upgrade --apply-immediately",
"NativeIaC": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/auto-minor-version-upgrade.html#",
"Other": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/auto-minor-version-upgrade.html#",
"Terraform": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/auto-minor-version-upgrade.html#"
"NativeIaC": "```yaml\n# CloudFormation: Enable auto minor version upgrade on a DMS replication instance\nResources:\n <example_resource_name>:\n Type: AWS::DMS::ReplicationInstance\n Properties:\n ReplicationInstanceIdentifier: <example_resource_id>\n ReplicationInstanceClass: dms.t3.micro\n AutoMinorVersionUpgrade: true # CRITICAL: turns on automatic minor version upgrades\n```",
"Other": "1. Open the AWS Console and go to Database Migration Service (DMS)\n2. Click Replication instances and select your instance\n3. Choose Actions > Modify\n4. Check Auto minor version upgrade\n5. Select Apply immediately\n6. Click Modify to save",
"Terraform": "```hcl\n# Terraform: Enable auto minor version upgrade on a DMS replication instance\nresource \"aws_dms_replication_instance\" \"<example_resource_name>\" {\n replication_instance_id = \"<example_resource_id>\"\n replication_instance_class = \"dms.t3.micro\"\n auto_minor_version_upgrade = true # CRITICAL: turns on automatic minor version upgrades\n}\n```"
},
"Recommendation": {
"Text": "Enable auto minor version upgrade for all DMS replication instances.",
"Url": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-6"
"Text": "Enable `auto_minor_version_upgrade` on all replication instances to maintain **continuous patching**.\n- Set a maintenance window and validate in non-prod\n- Monitor release notes and health metrics\n- Enforce **least privilege** for change control\n- Keep **backups** for rollback",
"Url": "https://hub.prowler.com/check/dms_instance_minor_version_upgrade_enabled"
}
},
"Categories": [],
"Categories": [
"vulnerabilities"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,30 +1,37 @@
{
"Provider": "aws",
"CheckID": "dms_instance_multi_az_enabled",
"CheckTitle": "Ensure DMS instances have multi az enabled.",
"CheckType": [],
"CheckTitle": "DMS replication instance has Multi-AZ enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Denial of Service"
],
"ServiceName": "dms",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:rdmsds:region:account-id:rep",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsDmsReplicationInstance",
"Description": "Ensure DMS instances have multi az enabled.",
"Risk": "Ensure that your Amazon Database Migration Service (DMS) replication instances are using Multi-AZ deployment configurations to provide High Availability (HA) through automatic failover to standby replicas in the event of a failure such as an Availability Zone (AZ) outage, an internal hardware or network outage, a software failure or in case of a planned maintenance session.",
"RelatedUrl": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/multi-az.html#",
"Description": "**AWS DMS replication instances** are evaluated for **Multi-AZ** configuration. Instances with `multi_az` enabled are treated as having a cross-AZ standby; those without it are identified as single-AZ.",
"Risk": "Without **Multi-AZ**, a single-AZ failure or maintenance event can halt migrations, causing extended downtime (**availability**) and replication gaps or rollbacks (**integrity**). Tasks may stall, increase cutover risk, and require manual recovery when the replication instance is unavailable.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/dms/latest/userguide/CHAP_ReplicationInstance.html",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/DMS/multi-az.html"
],
"Remediation": {
"Code": {
"CLI": "aws dms modify-replication-instance --region <REGION> --replication-instance-arn arn:aws:dms:<REGION>:<ACCOUNT_ID>:rep:<REPLICATION_ID> --multi-az --apply-immediately",
"NativeIaC": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/multi-az.html#",
"Other": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/multi-az.html#",
"Terraform": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/multi-az.html#"
"CLI": "aws dms modify-replication-instance --replication-instance-arn arn:aws:dms:<REGION>:<ACCOUNT_ID>:rep:<REPLICATION_ID> --multi-az --apply-immediately",
"NativeIaC": "```yaml\n# CloudFormation: enable Multi-AZ on a DMS replication instance\nResources:\n <example_resource_name>:\n Type: AWS::DMS::ReplicationInstance\n Properties:\n ReplicationInstanceClass: dms.t3.micro\n MultiAZ: true # Critical: enables Multi-AZ to pass the check\n```",
"Other": "1. Open the AWS DMS console\n2. Go to Replication instances and select your instance\n3. Click Modify\n4. Check Multi-AZ\n5. Check Apply immediately\n6. Click Modify to save",
"Terraform": "```hcl\n# Enable Multi-AZ on a DMS replication instance\nresource \"aws_dms_replication_instance\" \"<example_resource_name>\" {\n replication_instance_id = \"<example_resource_name>\"\n replication_instance_class = \"dms.t3.micro\"\n multi_az = true # Critical: enables Multi-AZ to pass the check\n}\n```"
},
"Recommendation": {
"Text": "Enable multi az for all DMS replication instances.",
"Url": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/multi-az.html#"
"Text": "Enable **Multi-AZ** (set `multi_az` to `true`) on DMS replication instances that handle production or time-sensitive migrations to ensure redundancy and automatic failover.\n\nApply HA principles: distribute across AZs, test failover, monitor health, and plan maintenance to minimize impact.",
"Url": "https://hub.prowler.com/check/dms_instance_multi_az_enabled"
}
},
"Categories": [
"redundancy"
"resilience"
],
"DependsOn": [],
"RelatedTo": [],
@@ -1,26 +1,37 @@
{
"Provider": "aws",
"CheckID": "dms_instance_no_public_access",
"CheckTitle": "Ensure DMS instances are not publicly accessible.",
"CheckType": [],
"CheckTitle": "DMS replication instance is not publicly exposed to the Internet",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices/Network Reachability",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark",
"TTPs/Initial Access"
],
"ServiceName": "dms",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:rdmsds:region:account-id:rep",
"ResourceIdTemplate": "",
"Severity": "critical",
"ResourceType": "AwsDmsReplicationInstance",
"Description": "Ensure DMS instances are not publicly accessible.",
"Risk": "Ensure that your Amazon Database Migration Service (DMS) are not publicly accessible from the Internet in order to avoid exposing private data and minimize security risks. A DMS replication instance should have a private IP address and the Publicly Accessible feature disabled when both the source and the target databases are in the same network that is connected to the instance's VPC through a VPN, VPC peering connection, or using an AWS Direct Connect dedicated connection.",
"RelatedUrl": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-1",
"Description": "**AWS DMS replication instances** are evaluated for **public exposure**. Exposure is identified when `PubliclyAccessible` is enabled and an attached security group allows inbound traffic from any address. Private or allowlisted instances are not considered exposed.",
"Risk": "Publicly reachable replication instances threaten:\n- Confidentiality: migration data and credentials can be intercepted or exfiltrated.\n- Integrity: attackers may alter tasks or inject records.\n- Availability: abuse or DDoS can stall replication and delay cutovers.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-1",
"https://docs.aws.amazon.com/amazonq/detector-library/terraform/restrict-public-access-dms-terraform/",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/DMS/publicly-accessible.html",
"https://support.icompaas.com/support/solutions/articles/62000233448-ensure-dms-instances-are-not-publicly-accessible"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/publicly-accessible.html#",
"Other": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/publicly-accessible.html#",
"Terraform": "https://www.trendmicro.com/cloudoneconformity-staging/knowledge-base/aws/DMS/publicly-accessible.html#"
"NativeIaC": "```yaml\n# CloudFormation: DMS instance not publicly accessible\nResources:\n <example_resource_name>:\n Type: AWS::DMS::ReplicationInstance\n Properties:\n ReplicationInstanceClass: dms.t3.micro\n PubliclyAccessible: false # Critical: disables public access to prevent Internet exposure\n```",
"Other": "1. In the AWS Console, open Database Migration Service > Replication instances and select the instance\n2. In Details > Networking, click each attached Security Group ID to open it in the EC2 console\n3. In Inbound rules, delete any rule with Source 0.0.0.0/0 or ::/0\n4. Save rules for each security group",
"Terraform": "```hcl\n# DMS instance not publicly accessible\nresource \"aws_dms_replication_instance\" \"<example_resource_name>\" {\n replication_instance_id = \"<example_resource_id>\"\n replication_instance_class = \"dms.t3.micro\"\n publicly_accessible = false # Critical: disables public access to prevent Internet exposure\n}\n```"
},
"Recommendation": {
"Text": "Restrict DMS Replication instances security groups to only required IPs, or re-create these instances that is only accessible privately.",
"Url": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-1"
"Text": "Adopt a **private-only** design:\n- Disable `PubliclyAccessible`; place instances in private subnets.\n- Enforce **least privilege** security groups (no `0.0.0.0/0`); allow only required sources/ports.\n- Provide access via **VPN**, peering, or Direct Connect.\n- Layer controls (ACLs, monitoring) and restrict IAM to necessary actions.",
"Url": "https://hub.prowler.com/check/dms_instance_no_public_access"
}
},
"Categories": [
@@ -1,31 +1,39 @@
{
"Provider": "aws",
"CheckID": "dms_replication_task_source_logging_enabled",
"CheckTitle": "Check if DMS replication tasks for the source database have logging enabled.",
"CheckTitle": "DMS replication task has logging enabled and SOURCE_CAPTURE and SOURCE_UNLOAD components set to at least Default severity",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices"
"Software and Configuration Checks/AWS Security Best Practices",
"TTPs/Defense Evasion"
],
"ServiceName": "dms",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:dms:region:account-id:task/task-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsDmsReplicationTask",
"Description": "This control checks whether logging is enabled with the minimum severity level of LOGGER_SEVERITY_DEFAULT for DMS replication tasks SOURCE_CAPTURE and SOURCE_UNLOAD. The control fails if logging isn't enabled for these tasks or if the minimum severity level is less than LOGGER_SEVERITY_DEFAULT.",
"Risk": "Without logging enabled, issues in data migration may go undetected, affecting the integrity and compliance of replicated data.",
"RelatedUrl": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Monitoring.html#CHAP_Monitoring.ManagingLogs",
"Description": "**AWS DMS replication tasks** have **logging enabled** and configure `SOURCE_CAPTURE` and `SOURCE_UNLOAD` with severity at least `LOGGER_SEVERITY_DEFAULT` (or higher: `LOGGER_SEVERITY_DEBUG`, `LOGGER_SEVERITY_DETAILED_DEBUG`).",
"Risk": "Missing or low-severity source logs hinder visibility into **CDC** and full-load activity, risking undetected errors, stalls, or tampering. This can cause silent **data drift**, broken lineage, and failed recoveries, undermining **integrity** and **availability** and weakening auditability during investigations.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Monitoring.html",
"https://repost.aws/knowledge-center/dms-debug-logging",
"https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-8"
],
"Remediation": {
"Code": {
"CLI": "aws dms modify-replication-task --replication-task-arn <task-arn> --task-settings '{\"Logging\":{\"EnableLogging\":true,\"LogComponents\":[{\"Id\":\"SOURCE_CAPTURE\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"},{\"Id\":\"SOURCE_UNLOAD\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"}]}}'",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-8",
"Terraform": ""
"CLI": "aws dms modify-replication-task --replication-task-arn <example_resource_arn> --replication-task-settings '{\"Logging\":{\"EnableLogging\":true,\"LogComponents\":[{\"Id\":\"SOURCE_CAPTURE\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"},{\"Id\":\"SOURCE_UNLOAD\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"}]}}'",
"NativeIaC": "```yaml\n# CloudFormation: enable DMS source logging at minimum DEFAULT severity\nResources:\n <example_resource_name>:\n Type: AWS::DMS::ReplicationTask\n Properties:\n ReplicationInstanceArn: <example_resource_arn>\n SourceEndpointArn: <example_resource_arn>\n TargetEndpointArn: <example_resource_arn>\n MigrationType: full-load\n TableMappings: '{\"rules\":[]}'\n # Critical: Enables logging and sets SOURCE components to at least DEFAULT\n ReplicationTaskSettings: |\n {\n \"Logging\": {\n \"EnableLogging\": true,\n \"LogComponents\": [\n {\"Id\": \"SOURCE_CAPTURE\", \"Severity\": \"LOGGER_SEVERITY_DEFAULT\"},\n {\"Id\": \"SOURCE_UNLOAD\", \"Severity\": \"LOGGER_SEVERITY_DEFAULT\"}\n ]\n }\n }\n```",
"Other": "1. In the AWS console, go to Database Migration Service > Database migration tasks\n2. Select the task and choose Modify\n3. Click Modify task logging\n4. Turn on Enable logging\n5. For SOURCE_CAPTURE and SOURCE_UNLOAD, set Severity to Default (or higher)\n6. Save/Modify to apply",
"Terraform": "```hcl\n# Enable DMS source logging at minimum DEFAULT severity\nresource \"aws_dms_replication_task\" \"<example_resource_name>\" {\n replication_instance_arn = \"<example_resource_arn>\"\n source_endpoint_arn = \"<example_resource_arn>\"\n target_endpoint_arn = \"<example_resource_arn>\"\n migration_type = \"full-load\"\n table_mappings = \"{\\\"rules\\\":[]}\"\n\n # Critical: Enables logging and sets SOURCE components to at least DEFAULT\n replication_task_settings = <<JSON\n{\n \"Logging\": {\n \"EnableLogging\": true,\n \"LogComponents\": [\n {\"Id\": \"SOURCE_CAPTURE\", \"Severity\": \"LOGGER_SEVERITY_DEFAULT\"},\n {\"Id\": \"SOURCE_UNLOAD\", \"Severity\": \"LOGGER_SEVERITY_DEFAULT\"}\n ]\n }\n}\nJSON\n}\n```"
},
"Recommendation": {
"Text": "Enable logging for source database DMS replication tasks with a minimum severity level of LOGGER_SEVERITY_DEFAULT.",
"Url": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TaskSettings.Logging.html"
"Text": "Enable and standardize **task logging** for `SOURCE_CAPTURE` and `SOURCE_UNLOAD` at `LOGGER_SEVERITY_DEFAULT` or higher.\n- Centralize logs and alert on anomalies\n- Enforce **least privilege** for log access\n- Set retention to support audits\n- Avoid prolonged `DEBUG` levels, *except during troubleshooting*, to balance visibility and cost",
"Url": "https://hub.prowler.com/check/dms_replication_task_source_logging_enabled"
}
},
"Categories": [],
"Categories": [
"logging"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
@@ -1,31 +1,40 @@
{
"Provider": "aws",
"CheckID": "dms_replication_task_target_logging_enabled",
"CheckTitle": "Check if DMS replication tasks for the target database have logging enabled.",
"CheckTitle": "DMS replication task has TARGET_APPLY and TARGET_LOAD logging enabled with at least default severity",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices"
"Software and Configuration Checks/AWS Security Best Practices",
"TTPs/Defense Evasion"
],
"ServiceName": "dms",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:dms:region:account-id:task/task-id",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsDmsReplicationTask",
"Description": "This control checks whether logging is enabled with the minimum severity level of LOGGER_SEVERITY_DEFAULT for DMS replication tasks TARGET_APPLY and TARGET_LOAD. The control fails if logging isn't enabled for these tasks or if the minimum severity level is less than LOGGER_SEVERITY_DEFAULT.",
"Risk": "Without logging enabled, issues in data migration may go undetected, affecting the integrity and compliance of replicated data.",
"RelatedUrl": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Monitoring.html#CHAP_Monitoring.ManagingLogs",
"Description": "**AWS DMS replication tasks** have target logging enabled, including `TARGET_APPLY` and `TARGET_LOAD`, each set to at least `LOGGER_SEVERITY_DEFAULT`.",
"Risk": "Insufficient target logging limits visibility into load/apply activity, masking failures and anomalies. This risks **data integrity** (silent drift, partial loads) and **availability** (longer incident resolution), and reduces **auditability** of migration events.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://repost.aws/knowledge-center/dms-debug-logging",
"https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TaskSettings.Logging.html",
"https://stackoverflow.com/questions/46913913/aws-dms-with-cloudformation-enabling-logging-needs-a-log-group",
"https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-7"
],
"Remediation": {
"Code": {
"CLI": "aws dms modify-replication-task --replication-task-arn <task-arn> --task-settings '{\"Logging\":{\"EnableLogging\":true,\"LogComponents\":[{\"Id\":\"TARGET_APPLY\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"},{\"Id\":\"TARGET_LOAD\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"}]}}'",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/dms-controls.html#dms-7",
"Terraform": ""
"CLI": "aws dms modify-replication-task --replication-task-arn <task-arn> --replication-task-settings '{\"Logging\":{\"EnableLogging\":true,\"LogComponents\":[{\"Id\":\"TARGET_APPLY\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"},{\"Id\":\"TARGET_LOAD\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"}]}}'",
"NativeIaC": "```yaml\n# CloudFormation: enable DMS task logging for target components\nResources:\n <example_resource_name>:\n Type: AWS::DMS::ReplicationTask\n Properties:\n ReplicationInstanceArn: <example_resource_arn>\n SourceEndpointArn: <example_resource_arn>\n TargetEndpointArn: <example_resource_arn>\n MigrationType: full-load\n TableMappings: |\n {\"rules\":[{\"rule-type\":\"selection\",\"rule-id\":\"1\",\"rule-name\":\"1\",\"object-locator\":{\"schema-name\":\"%\",\"table-name\":\"%\"},\"rule-action\":\"include\"}]}\n ReplicationTaskSettings: |\n {\"Logging\":{\"EnableLogging\":true, \"LogComponents\":[\n {\"Id\":\"TARGET_APPLY\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"}, # Critical: ensure TARGET_APPLY logging at default\n {\"Id\":\"TARGET_LOAD\",\"Severity\":\"LOGGER_SEVERITY_DEFAULT\"} # Critical: ensure TARGET_LOAD logging at default\n ]}}\n```",
"Other": "1. Open the AWS DMS console and go to Database migration tasks\n2. Select the replication task and choose Modify\n3. Expand Task settings (JSON) or Logging\n4. Enable CloudWatch logs (EnableLogging = true)\n5. Set log components:\n - TARGET_APPLY severity: DEFAULT\n - TARGET_LOAD severity: DEFAULT\n6. Save changes (Modify task), then rerun the task if required",
"Terraform": "```hcl\n# Enable DMS task logging for target components\nresource \"aws_dms_replication_task\" \"<example_resource_name>\" {\n replication_task_id = \"<example_resource_id>\"\n replication_instance_arn = \"<example_resource_arn>\"\n source_endpoint_arn = \"<example_resource_arn>\"\n target_endpoint_arn = \"<example_resource_arn>\"\n migration_type = \"full-load\"\n table_mappings = jsonencode({ rules = [{\n \"rule-type\" : \"selection\", \"rule-id\" : \"1\", \"rule-name\" : \"1\",\n \"object-locator\" : { \"schema-name\" : \"%\", \"table-name\" : \"%\" },\n \"rule-action\" : \"include\"\n }]} )\n\n # Critical: enables logging and sets TARGET_APPLY and TARGET_LOAD to minimum required severity\n replication_task_settings = jsonencode({\n Logging = {\n EnableLogging = true\n LogComponents = [\n { Id = \"TARGET_APPLY\", Severity = \"LOGGER_SEVERITY_DEFAULT\" },\n { Id = \"TARGET_LOAD\", Severity = \"LOGGER_SEVERITY_DEFAULT\" }\n ]\n }\n })\n}\n```"
},
"Recommendation": {
"Text": "Enable logging for target database DMS replication tasks with a minimum severity level of LOGGER_SEVERITY_DEFAULT.",
"Url": "https://docs.aws.amazon.com/dms/latest/userguide/CHAP_Tasks.CustomizingTasks.TaskSettings.Logging.html"
"Text": "Enable and maintain **CloudWatch logging** at `LOGGER_SEVERITY_DEFAULT` or higher for target components:\n- Configure `TARGET_APPLY` and `TARGET_LOAD`\n- Enforce least-privilege log access\n- Monitor logs/alerts for anomalies\n- Standardize task settings and validate data for **defense in depth**",
"Url": "https://hub.prowler.com/check/dms_replication_task_target_logging_enabled"
}
},
"Categories": [],
"Categories": [
"logging"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
-9
View File
@@ -116,15 +116,6 @@ pytest-xdist = "3.6.1"
safety = "3.2.9"
vulture = "2.14"
[tool.poetry.group.docs]
optional = true
[tool.poetry.group.docs.dependencies]
mkdocs = "1.6.1"
mkdocs-git-revision-date-localized-plugin = "1.4.1"
mkdocs-material = "9.6.5"
mkdocs-material-extensions = "1.3.1"
[tool.poetry-version-plugin]
source = "init"
+2 -1
View File
@@ -33,4 +33,5 @@ yarn-error.log*
# typescript
*.tsbuildinfo
next-env.d.ts
next-env.d.ts
playwright/.auth
+2
View File
@@ -16,6 +16,7 @@ All notable changes to the **Prowler UI** are documented in this file.
- API key management in user profile [(#8308)](https://github.com/prowler-cloud/prowler/pull/8308)
- Refresh access token error handling [(#8864)](https://github.com/prowler-cloud/prowler/pull/8864)
- Support Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
- New M365 credentials certificate authentication method [(#8929)](https://github.com/prowler-cloud/prowler/pull/8929)
### 🔄 Changed
@@ -36,6 +37,7 @@ All notable changes to the **Prowler UI** are documented in this file.
- ThreatScore for each pillar in Prowler ThreatScore specific view [(#8582)](https://github.com/prowler-cloud/prowler/pull/8582)
- Remove maxTokens model param for GPT-5 models [(#8843)](https://github.com/prowler-cloud/prowler/pull/8843)
- MITRE ATTACK compliance view now shows all requirements in charts [(#8886)](https://github.com/prowler-cloud/prowler/pull/8886)
- Mutelist menu item now doesn't blink [(#8932)](https://github.com/prowler-cloud/prowler/pull/8932)
---
@@ -10,6 +10,7 @@ import {
SelectViaGCP,
} from "@/components/providers/workflow/forms/select-credentials-type/gcp";
import { SelectViaGitHub } from "@/components/providers/workflow/forms/select-credentials-type/github";
import { SelectViaM365 } from "@/components/providers/workflow/forms/select-credentials-type/m365";
import { getProviderFormType } from "@/lib/provider-helpers";
import { ProviderType } from "@/types/providers";
@@ -28,6 +29,7 @@ export default async function AddCredentialsPage({ searchParams }: Props) {
if (providerType === "gcp") return <SelectViaGCP initialVia={via} />;
if (providerType === "github")
return <SelectViaGitHub initialVia={via} />;
if (providerType === "m365") return <SelectViaM365 initialVia={via} />;
return null;
case "credentials":
+25 -27
View File
@@ -28,31 +28,36 @@ export default async function Providers({
<ContentLayout title="Cloud Providers" icon="lucide:cloud-cog">
<FilterControls search customFilters={filterProviders || []} />
<Spacer y={8} />
<Suspense
key={searchParamsKey}
fallback={
<>
<div className="flex items-center gap-4 md:justify-end">
<ManageGroupsButton />
<MutedFindingsConfigButton />
<AddProviderButton />
</div>
<Spacer y={8} />
<div className="grid grid-cols-12 gap-4">
<div className="col-span-12">
<SkeletonTableProviders />
</div>
</div>
</>
}
>
<ProvidersContent searchParams={resolvedSearchParams} />
<ProvidersActions />
<Spacer y={8} />
<Suspense key={searchParamsKey} fallback={<ProvidersTableFallback />}>
<ProvidersTable searchParams={resolvedSearchParams} />
</Suspense>
</ContentLayout>
);
}
const ProvidersContent = async ({
const ProvidersActions = () => {
return (
<div className="flex items-center gap-4 md:justify-end">
<ManageGroupsButton />
<MutedFindingsConfigButton />
<AddProviderButton />
</div>
);
};
const ProvidersTableFallback = () => {
return (
<div className="grid grid-cols-12 gap-4">
<div className="col-span-12">
<SkeletonTableProviders />
</div>
</div>
);
};
const ProvidersTable = async ({
searchParams,
}: {
searchParams: SearchParamsProps;
@@ -97,13 +102,6 @@ const ProvidersContent = async ({
return (
<>
<div className="flex items-center gap-4 md:justify-end">
<ManageGroupsButton />
<MutedFindingsConfigButton />
<AddProviderButton />
</div>
<Spacer y={8} />
<div className="grid grid-cols-12 gap-4">
<div className="col-span-12">
<DataTable
+21
View File
@@ -0,0 +1,21 @@
{
"$schema": "https://ui.shadcn.com/schema.json",
"style": "default",
"rsc": true,
"tsx": true,
"tailwind": {
"config": "",
"css": "styles/globals.css",
"baseColor": "neutral",
"cssVariables": true,
"prefix": ""
},
"aliases": {
"components": "@/components",
"utils": "@/lib/utils",
"ui": "@/components/shadcn",
"lib": "@/lib",
"hooks": "@/hooks"
},
"iconLibrary": "lucide"
}
@@ -3,6 +3,7 @@
import { SelectViaAWS } from "@/components/providers/workflow/forms/select-credentials-type/aws";
import { SelectViaGCP } from "@/components/providers/workflow/forms/select-credentials-type/gcp";
import { SelectViaGitHub } from "@/components/providers/workflow/forms/select-credentials-type/github";
import { SelectViaM365 } from "@/components/providers/workflow/forms/select-credentials-type/m365";
import { ProviderType } from "@/types/providers";
interface UpdateCredentialsInfoProps {
@@ -24,6 +25,9 @@ export const CredentialsUpdateInfo = ({
if (providerType === "github") {
return <SelectViaGitHub initialVia={initialVia} />;
}
if (providerType === "m365") {
return <SelectViaM365 initialVia={initialVia} />;
}
return null;
};
@@ -1,6 +1,8 @@
"use client";
import { SettingsIcon } from "lucide-react";
import { usePathname } from "next/navigation";
import { useEffect } from "react";
import { CustomAlertModal, CustomButton } from "@/components/ui/custom";
import { useUIStore } from "@/store/ui/store";
@@ -8,13 +10,38 @@ import { useUIStore } from "@/store/ui/store";
import { MutedFindingsConfigForm } from "./forms";
export const MutedFindingsConfigButton = () => {
const pathname = usePathname();
const {
isMutelistModalOpen,
openMutelistModal,
closeMutelistModal,
hasProviders,
shouldAutoOpenMutelist,
resetMutelistModalRequest,
} = useUIStore();
useEffect(() => {
if (!shouldAutoOpenMutelist) {
return;
}
if (pathname !== "/providers") {
return;
}
if (hasProviders) {
openMutelistModal();
}
resetMutelistModalRequest();
}, [
hasProviders,
openMutelistModal,
pathname,
resetMutelistModalRequest,
shouldAutoOpenMutelist,
]);
const handleOpenModal = () => {
if (hasProviders) {
openMutelistModal();
@@ -17,7 +17,8 @@ import {
GCPDefaultCredentials,
GCPServiceAccountKey,
KubernetesCredentials,
M365Credentials,
M365CertificateCredentials,
M365ClientSecretCredentials,
ProviderType,
} from "@/types";
@@ -26,10 +27,13 @@ import { AWSStaticCredentialsForm } from "./select-credentials-type/aws/credenti
import { AWSRoleCredentialsForm } from "./select-credentials-type/aws/credentials-type/aws-role-credentials-form";
import { GCPDefaultCredentialsForm } from "./select-credentials-type/gcp/credentials-type";
import { GCPServiceAccountKeyForm } from "./select-credentials-type/gcp/credentials-type/gcp-service-account-key-form";
import {
M365CertificateCredentialsForm,
M365ClientSecretCredentialsForm,
} from "./select-credentials-type/m365";
import { AzureCredentialsForm } from "./via-credentials/azure-credentials-form";
import { GitHubCredentialsForm } from "./via-credentials/github-credentials-form";
import { KubernetesCredentialsForm } from "./via-credentials/k8s-credentials-form";
import { M365CredentialsForm } from "./via-credentials/m365-credentials-form";
type BaseCredentialsFormProps = {
providerType: ProviderType;
@@ -103,11 +107,22 @@ export const BaseCredentialsForm = ({
control={form.control as unknown as Control<AzureCredentials>}
/>
)}
{providerType === "m365" && (
<M365CredentialsForm
control={form.control as unknown as Control<M365Credentials>}
/>
)}
{providerType === "m365" &&
searchParamsObj.get("via") === "app_client_secret" && (
<M365ClientSecretCredentialsForm
control={
form.control as unknown as Control<M365ClientSecretCredentials>
}
/>
)}
{providerType === "m365" &&
searchParamsObj.get("via") === "app_certificate" && (
<M365CertificateCredentialsForm
control={
form.control as unknown as Control<M365CertificateCredentials>
}
/>
)}
{providerType === "gcp" &&
searchParamsObj.get("via") === "service-account" && (
<GCPServiceAccountKeyForm
@@ -159,6 +174,15 @@ export const BaseCredentialsForm = ({
size="lg"
isLoading={isLoading}
endContent={!isLoading && <ChevronRightIcon size={24} />}
onPress={(e) => {
const formElement = e.target as HTMLElement;
const form = formElement.closest("form");
if (form) {
form.dispatchEvent(
new Event("submit", { bubbles: true, cancelable: true }),
);
}
}}
>
{isLoading ? <>Loading</> : <span>{submitButtonText}</span>}
</CustomButton>
@@ -0,0 +1,2 @@
export { M365CertificateCredentialsForm } from "./m365-certificate-credentials-form";
export { M365ClientSecretCredentialsForm } from "./m365-client-secret-credentials-form";
@@ -0,0 +1,72 @@
"use client";
import { Control } from "react-hook-form";
import { CustomInput, CustomTextarea } from "@/components/ui/custom";
import { CustomLink } from "@/components/ui/custom/custom-link";
import { M365CertificateCredentials } from "@/types";
export const M365CertificateCredentialsForm = ({
control,
}: {
control: Control<M365CertificateCredentials>;
}) => {
return (
<>
<div className="flex flex-col">
<div className="text-md text-default-foreground leading-9 font-bold">
App Certificate Credentials
</div>
<div className="text-default-500 text-sm">
Please provide your Microsoft 365 application credentials with
certificate authentication.
</div>
</div>
<CustomInput
control={control}
name="tenant_id"
type="text"
label="Tenant ID"
labelPlacement="inside"
placeholder="Enter the Tenant ID"
variant="bordered"
isRequired
isInvalid={!!control._formState.errors.tenant_id}
/>
<CustomInput
control={control}
name="client_id"
type="text"
label="Client ID"
labelPlacement="inside"
placeholder="Enter the Client ID"
variant="bordered"
isRequired
isInvalid={!!control._formState.errors.client_id}
/>
<CustomTextarea
control={control}
name="certificate_content"
label="Certificate Content"
labelPlacement="inside"
placeholder="Enter the base64 encoded certificate content"
variant="bordered"
isRequired
isInvalid={!!control._formState.errors.certificate_content}
minRows={4}
/>
<p className="text-default-500 text-sm">
The certificate content must be base64 encoded from an unsigned
certificate. For detailed instructions on how to generate and encode
your certificate, please refer to the{" "}
<CustomLink
href="https://docs.prowler.com/user-guide/providers/microsoft365/authentication#generate-the-certificate"
size="sm"
>
certificate generation guide
</CustomLink>
.
</p>
</>
);
};
@@ -0,0 +1,58 @@
"use client";
import { Control } from "react-hook-form";
import { CustomInput } from "@/components/ui/custom";
import { M365ClientSecretCredentials } from "@/types";
export const M365ClientSecretCredentialsForm = ({
control,
}: {
control: Control<M365ClientSecretCredentials>;
}) => {
return (
<>
<div className="flex flex-col">
<div className="text-md text-default-foreground leading-9 font-bold">
App Client Secret Credentials
</div>
<div className="text-default-500 text-sm">
Please provide your Microsoft 365 application credentials.
</div>
</div>
<CustomInput
control={control}
name="tenant_id"
type="text"
label="Tenant ID"
labelPlacement="inside"
placeholder="Enter the Tenant ID"
variant="bordered"
isRequired
isInvalid={!!control._formState.errors.tenant_id}
/>
<CustomInput
control={control}
name="client_id"
type="text"
label="Client ID"
labelPlacement="inside"
placeholder="Enter the Client ID"
variant="bordered"
isRequired
isInvalid={!!control._formState.errors.client_id}
/>
<CustomInput
control={control}
name="client_secret"
type="password"
label="Client Secret"
labelPlacement="inside"
placeholder="Enter the Client Secret"
variant="bordered"
isRequired
isInvalid={!!control._formState.errors.client_secret}
/>
</>
);
};
@@ -0,0 +1,5 @@
export {
M365CertificateCredentialsForm,
M365ClientSecretCredentialsForm,
} from "./credentials-type";
export { SelectViaM365 } from "./select-via-m365";
@@ -0,0 +1,72 @@
"use client";
import { RadioGroup } from "@heroui/radio";
import React from "react";
import { Control, Controller } from "react-hook-form";
import { CustomRadio } from "@/components/ui/custom";
import { FormMessage } from "@/components/ui/form";
type RadioGroupM365ViaCredentialsFormProps = {
control: Control<any>;
isInvalid: boolean;
errorMessage?: string;
onChange?: (value: string) => void;
};
export const RadioGroupM365ViaCredentialsTypeForm = ({
control,
isInvalid,
errorMessage,
onChange,
}: RadioGroupM365ViaCredentialsFormProps) => {
return (
<Controller
name="m365CredentialsType"
control={control}
render={({ field }) => (
<>
<RadioGroup
className="flex flex-wrap"
isInvalid={isInvalid}
{...field}
value={field.value || ""}
onValueChange={(value) => {
field.onChange(value);
if (onChange) {
onChange(value);
}
}}
>
<div className="flex flex-col gap-4">
<span className="text-default-500 text-sm">
Select Authentication Method
</span>
<CustomRadio
description="Connect using Application Client Secret"
value="app_client_secret"
>
<div className="flex items-center">
<span className="ml-2">App Client Secret Credentials</span>
</div>
</CustomRadio>
<CustomRadio
description="Connect using Application Certificate"
value="app_certificate"
>
<div className="flex items-center">
<span className="ml-2">App Certificate Credentials</span>
</div>
</CustomRadio>
</div>
</RadioGroup>
{errorMessage && (
<FormMessage className="text-system-error dark:text-system-error">
{errorMessage}
</FormMessage>
)}
</>
)}
/>
);
};
@@ -0,0 +1,38 @@
"use client";
import { useRouter } from "next/navigation";
import { useForm } from "react-hook-form";
import { Form } from "@/components/ui/form";
import { RadioGroupM365ViaCredentialsTypeForm } from "./radio-group-m365-via-credentials-type-form";
interface SelectViaM365Props {
initialVia?: string;
}
export const SelectViaM365 = ({ initialVia }: SelectViaM365Props) => {
const router = useRouter();
const form = useForm({
defaultValues: {
m365CredentialsType: initialVia || "",
},
});
const handleSelectionChange = (value: string) => {
const url = new URL(window.location.href);
url.searchParams.set("via", value);
router.push(url.toString());
};
return (
<Form {...form}>
<RadioGroupM365ViaCredentialsTypeForm
control={form.control}
isInvalid={!!form.formState.errors.m365CredentialsType}
errorMessage={form.formState.errors.m365CredentialsType?.message}
onChange={handleSelectionChange}
/>
</Form>
);
};
@@ -1,4 +1,3 @@
export * from "./azure-credentials-form";
export * from "./github-credentials-form";
export * from "./k8s-credentials-form";
export * from "./m365-credentials-form";
@@ -1,109 +0,0 @@
import { Control } from "react-hook-form";

import { InfoIcon } from "@/components/icons";
import { CustomInput } from "@/components/ui/custom";
import { CustomLink } from "@/components/ui/custom/custom-link";
import { M365Credentials } from "@/types";

/**
 * Legacy Microsoft 365 credentials form: required app credentials
 * (client ID, client secret, tenant ID) plus optional user/password
 * fields. The in-form notices below explain that user/password
 * authentication is being deprecated because of Microsoft's MFA
 * enforcement.
 */
export const M365CredentialsForm = ({
  control,
}: {
  control: Control<M365Credentials>;
}) => {
  return (
    <>
      {/* Section heading and short description */}
      <div className="flex flex-col">
        <div className="text-md text-default-foreground leading-9 font-bold">
          Connect via Credentials
        </div>
        <div className="text-default-500 text-sm">
          Please provide the information for your Microsoft 365 credentials.
        </div>
      </div>
      {/* Required application credentials */}
      <CustomInput
        control={control}
        name="client_id"
        type="text"
        label="Client ID"
        labelPlacement="inside"
        placeholder="Enter the Client ID"
        variant="bordered"
        isRequired
        isInvalid={!!control._formState.errors.client_id}
      />
      <CustomInput
        control={control}
        name="client_secret"
        type="password"
        label="Client Secret"
        labelPlacement="inside"
        placeholder="Enter the Client Secret"
        variant="bordered"
        isRequired
        isInvalid={!!control._formState.errors.client_secret}
      />
      <CustomInput
        control={control}
        name="tenant_id"
        type="text"
        label="Tenant ID"
        labelPlacement="inside"
        placeholder="Enter the Tenant ID"
        variant="bordered"
        isRequired
        isInvalid={!!control._formState.errors.tenant_id}
      />
      {/* Deprecation notice for user/password authentication */}
      <p className="text-default-500 text-sm">
        {" "}
        User and password authentication is being deprecated due to
        Microsoft&apos;s on-going MFA enforcement across all tenants (see{" "}
        <CustomLink
          href="https://azure.microsoft.com/en-us/blog/announcing-mandatory-multi-factor-authentication-for-azure-sign-in/"
          size="sm"
        >
          Microsoft docs
        </CustomLink>
        ).
      </p>
      <div className="border-system-warning bg-system-warning-medium dark:text-default-300 flex items-center rounded-lg border p-2 text-sm">
        <InfoIcon className="mr-2 inline h-4 w-4 shrink-0" />
        <p className="text-xs font-extrabold">
          By October 2025, MFA will be mandatory.
        </p>
      </div>
      <p className="text-default-500 text-sm">
        Due to that change, you must only{" "}
        <CustomLink
          href="https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/microsoft365/getting-started-m365/#step-3-configure-your-m365-account"
          size="sm"
        >
          use application authentication
        </CustomLink>{" "}
        to maintain all Prowler M365 scan capabilities.
      </p>
      {/* Optional legacy user/password fields (kept non-required) */}
      <CustomInput
        control={control}
        name="user"
        type="text"
        label="User"
        labelPlacement="inside"
        placeholder="Enter the User"
        variant="bordered"
        isRequired={false}
        isInvalid={!!control._formState.errors.user}
      />
      <CustomInput
        control={control}
        name="password"
        type="password"
        label="Password"
        labelPlacement="inside"
        placeholder="Enter the Password"
        variant="bordered"
        isRequired={false}
        isInvalid={!!control._formState.errors.password}
      />
    </>
  );
};
+57
View File
@@ -0,0 +1,57 @@
# shadcn Components
This directory contains all shadcn/ui-based components for the Prowler application.
## Directory Structure
```
shadcn/
├── card.tsx # shadcn Card component
├── resource-stats-card/ # Custom ResourceStatsCard built on shadcn
│ ├── resource-stats-card.tsx
│ ├── resource-stats-card.example.tsx
│ └── index.ts
├── index.ts # Barrel exports
└── README.md
```
## Usage
All shadcn components can be imported from `@/components/shadcn`:
```tsx
import { Card, CardHeader, CardContent } from "@/components/shadcn";
import { ResourceStatsCard } from "@/components/shadcn";
```
## Adding New shadcn Components
When adding new shadcn components using the CLI:
```bash
npx shadcn@latest add [component-name]
```
The component will be automatically added to this directory due to the configuration in `components.json`:
```json
{
"aliases": {
"ui": "@/components/shadcn"
}
}
```
## Component Guidelines
1. **shadcn base components** - Use as-is from shadcn/ui (e.g., `card.tsx`)
2. **Custom components built on shadcn** - Create in subdirectories (e.g., `resource-stats-card/`)
3. **CVA variants** - Use Class Variance Authority for type-safe variants
4. **Theme support** - Include `dark:` classes for dark/light theme compatibility
5. **TypeScript** - Always export types and use proper typing
## Resources
- [shadcn/ui Documentation](https://ui.shadcn.com)
- [CVA Documentation](https://cva.style/docs)
- [Tailwind CSS Documentation](https://tailwindcss.com/docs)
+92
View File
@@ -0,0 +1,92 @@
import * as React from "react";
import { cn } from "@/lib/utils";
function Card({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="card"
className={cn(
"bg-card text-card-foreground flex flex-col gap-6 rounded-xl border py-6 shadow-sm",
className,
)}
{...props}
/>
);
}
function CardHeader({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="card-header"
className={cn(
"@container/card-header grid auto-rows-min grid-rows-[auto_auto] items-start gap-2 px-6 has-data-[slot=card-action]:grid-cols-[1fr_auto] [.border-b]:pb-6",
className,
)}
{...props}
/>
);
}
function CardTitle({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="card-title"
className={cn("leading-none font-semibold", className)}
{...props}
/>
);
}
function CardDescription({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="card-description"
className={cn("text-muted-foreground text-sm", className)}
{...props}
/>
);
}
function CardAction({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="card-action"
className={cn(
"col-start-2 row-span-2 row-start-1 self-start justify-self-end",
className,
)}
{...props}
/>
);
}
function CardContent({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="card-content"
className={cn("px-6", className)}
{...props}
/>
);
}
function CardFooter({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="card-footer"
className={cn("flex items-center px-6 [.border-t]:pt-6", className)}
{...props}
/>
);
}
export {
Card,
CardAction,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
};
+21
View File
@@ -0,0 +1,21 @@
// Barrel exports for @/components/shadcn.
// Fix: card.tsx exports CardAction, but it was missing here, so consumers
// importing from "@/components/shadcn" (as documented in the README) could
// not reach it. Adding it is backward-compatible.
export {
  Card,
  CardAction,
  CardContent,
  CardDescription,
  CardFooter,
  CardHeader,
  CardTitle,
} from "./card";
export {
  ResourceStatsCard,
  ResourceStatsCardContainer,
  type ResourceStatsCardContainerProps,
  ResourceStatsCardContent,
  type ResourceStatsCardContentProps,
  ResourceStatsCardDivider,
  type ResourceStatsCardDividerProps,
  ResourceStatsCardHeader,
  type ResourceStatsCardHeaderProps,
  type ResourceStatsCardProps,
  type StatItem,
} from "./resource-stats-card";
@@ -0,0 +1,13 @@
// Barrel for the resource-stats-card folder. Each component is paired with
// its props type via a separate `export type` statement (type-only, erased
// at compile time).
export type { ResourceStatsCardProps } from "./resource-stats-card";
export { ResourceStatsCard } from "./resource-stats-card";
export type { ResourceStatsCardContainerProps } from "./resource-stats-card-container";
export { ResourceStatsCardContainer } from "./resource-stats-card-container";
export type {
  ResourceStatsCardContentProps,
  StatItem,
} from "./resource-stats-card-content";
export { ResourceStatsCardContent } from "./resource-stats-card-content";
export type { ResourceStatsCardDividerProps } from "./resource-stats-card-divider";
export { ResourceStatsCardDivider } from "./resource-stats-card-divider";
export type { ResourceStatsCardHeaderProps } from "./resource-stats-card-header";
export { ResourceStatsCardHeader } from "./resource-stats-card-header";
@@ -0,0 +1,55 @@
import { cva, type VariantProps } from "class-variance-authority";
import { cn } from "@/lib/utils";
// Visual shell for the stats card: rounded, translucent, blurred panel.
// The same rgba colors are repeated under `dark:` so the look is identical
// in both themes (the card is intentionally dark either way).
const containerVariants = cva(
  [
    "flex",
    "rounded-[12px]",
    "border",
    "backdrop-blur-[46px]",
    "border-[rgba(38,38,38,0.70)]",
    "bg-[rgba(23,23,23,0.50)]",
    "dark:border-[rgba(38,38,38,0.70)]",
    "dark:bg-[rgba(23,23,23,0.50)]",
  ],
  {
    variants: {
      // Inner padding presets; the odd `md` pixel values presumably come
      // from a design spec — TODO confirm against the mockup.
      padding: {
        sm: "px-3 py-2",
        md: "px-[19px] py-[9px]",
        lg: "px-6 py-3",
        none: "p-0",
      },
    },
    defaultVariants: {
      padding: "md",
    },
  },
);
/**
 * Props for the outer container: standard div attributes plus the cva
 * `padding` variant, with an optional `ref` prop (React 19-style ref
 * forwarding — TODO confirm the project targets React 19).
 */
export interface ResourceStatsCardContainerProps
  extends React.HTMLAttributes<HTMLDivElement>,
    VariantProps<typeof containerVariants> {
  ref?: React.Ref<HTMLDivElement>;
}

/** Translucent, blurred wrapper that the resource-stats pieces sit inside. */
export const ResourceStatsCardContainer = (
  allProps: ResourceStatsCardContainerProps,
) => {
  const { className, children, padding, ref, ...rest } = allProps;
  const shellClasses = cn(containerVariants({ padding }), className);
  return (
    <div ref={ref} className={shellClasses} {...rest}>
      {children}
    </div>
  );
};
ResourceStatsCardContainer.displayName = "ResourceStatsCardContainer";
@@ -0,0 +1,204 @@
import { cva } from "class-variance-authority";
import { LucideIcon } from "lucide-react";
import { cn } from "@/lib/utils";
// One icon + label row shown in the stats list of the card content.
export interface StatItem {
  icon: LucideIcon;
  label: string;
}
// Const-object "enum" pattern: a runtime lookup object plus a union type of
// its values, avoiding TypeScript `enum`.
export const CardVariant = {
  default: "default",
  fail: "fail",
  pass: "pass",
  warning: "warning",
  info: "info",
} as const;
export type CardVariant = (typeof CardVariant)[keyof typeof CardVariant];
// Accent/foreground hex color per variant; used for the badge icon + count
// and as the default color of the vertical accent line.
const variantColors = {
  default: "#868994",
  fail: "#f54280",
  pass: "#4ade80",
  warning: "#fbbf24",
  info: "#60a5fa",
} as const;
type BadgeVariant = keyof typeof variantColors;
// Pill badge behind the icon + count: background tint per variant, text size
// scales with `size`. Foreground colors come from variantColors via inline
// style, not from these classes.
const badgeVariants = cva(
  ["flex", "items-center", "justify-center", "gap-0.5", "rounded-full"],
  {
    variants: {
      variant: {
        [CardVariant.default]: "bg-[#535359]",
        [CardVariant.fail]: "bg-[#432232]",
        [CardVariant.pass]: "bg-[#204237]",
        [CardVariant.warning]: "bg-[#3d3520]",
        [CardVariant.info]: "bg-[#1e3a5f]",
      },
      size: {
        sm: "px-1 text-xs",
        md: "px-1.5 text-sm",
        lg: "px-2 text-base",
      },
    },
    defaultVariants: {
      variant: CardVariant.fail,
      size: "md",
    },
  },
);
// Square sizing for the icon inside the badge (no base classes).
const badgeIconVariants = cva("", {
  variants: {
    size: {
      sm: "h-2.5 w-2.5",
      md: "h-3 w-3",
      lg: "h-4 w-4",
    },
  },
  defaultVariants: {
    size: "md",
  },
});
// Text next to the badge; same zinc-300 color in both themes.
const labelTextVariants = cva(
  "leading-6 font-semibold text-zinc-300 dark:text-zinc-300",
  {
    variants: {
      size: {
        sm: "text-xs",
        md: "text-sm",
        lg: "text-base",
      },
    },
    defaultVariants: {
      size: "md",
    },
  },
);
// Icon sizing/coloring for each row in the stats list.
const statIconVariants = cva("text-zinc-300 dark:text-zinc-300", {
  variants: {
    size: {
      sm: "h-2.5 w-2.5",
      md: "h-3 w-3",
      lg: "h-3.5 w-3.5",
    },
  },
  defaultVariants: {
    size: "md",
  },
});
// Label text for each row in the stats list.
const statLabelVariants = cva(
  "leading-5 font-medium text-zinc-300 dark:text-zinc-300",
  {
    variants: {
      size: {
        sm: "text-xs",
        md: "text-sm",
        lg: "text-base",
      },
    },
    defaultVariants: {
      size: "md",
    },
  },
);
/**
 * Props for the main content area of a resource-stats card: a tinted count
 * badge with a label, plus an optional list of stat rows.
 */
export interface ResourceStatsCardContentProps
  extends React.HTMLAttributes<HTMLDivElement> {
  // Badge pill: icon + count, tinted by `variant` (falls back to "fail").
  badge: {
    icon: LucideIcon;
    count: number | string;
    variant?: CardVariant;
  };
  // Text shown to the right of the badge.
  label: string;
  // Optional icon+label rows rendered below, next to a vertical accent line.
  stats?: StatItem[];
  // Overrides the accent-line color; otherwise derived from badge.variant.
  accentColor?: string;
  size?: "sm" | "md" | "lg";
  // React 19-style ref prop — TODO confirm the project targets React 19.
  ref?: React.Ref<HTMLDivElement>;
}
/**
 * Content block of a resource-stats card: a tinted count badge followed by a
 * label, and optionally a vertical accent line with a list of stat rows.
 */
export const ResourceStatsCardContent = ({
  badge,
  label,
  stats = [],
  accentColor,
  size = "md",
  className,
  ref,
  ...props
}: ResourceStatsCardContentProps) => {
  const BadgeIcon = badge.icon;
  const variant: BadgeVariant = badge.variant || "fail";
  const accent = variantColors[variant];
  // Accent line color: explicit override wins, then the variant color,
  // then a neutral fallback.
  const lineColor = accentColor || accent || "#d4d4d8";

  return (
    <div
      ref={ref}
      className={cn("flex flex-col gap-[5px]", className)}
      {...props}
    >
      {/* Badge + label row */}
      <div className="flex w-full items-center gap-1">
        <div className={cn(badgeVariants({ variant, size }))}>
          <BadgeIcon
            className={badgeIconVariants({ size })}
            strokeWidth={2.5}
            style={{ color: accent }}
          />
          <span className="leading-6 font-bold" style={{ color: accent }}>
            {badge.count}
          </span>
        </div>
        <span className={labelTextVariants({ size })}>{label}</span>
      </div>
      {/* Optional stats list, preceded by the vertical accent line */}
      {stats.length > 0 && (
        <div className="flex w-full items-stretch gap-0">
          <div className="flex items-stretch px-3 py-1">
            <div
              className="w-px rounded-full"
              style={{ backgroundColor: lineColor }}
            />
          </div>
          <div className="flex flex-1 flex-col gap-0.5">
            {stats.map((stat, index) => {
              const StatIcon = stat.icon;
              // Index keys are acceptable here only if the stats list is
              // static per render — NOTE(review): confirm with callers.
              return (
                <div key={index} className="flex items-center gap-1">
                  <StatIcon
                    className={statIconVariants({ size })}
                    strokeWidth={2}
                  />
                  <span className={statLabelVariants({ size })}>
                    {stat.label}
                  </span>
                </div>
              );
            })}
          </div>
        </div>
      )}
    </div>
  );
};
ResourceStatsCardContent.displayName = "ResourceStatsCardContent";
@@ -0,0 +1,59 @@
import { cva, type VariantProps } from "class-variance-authority";
import { cn } from "@/lib/utils";
// Wrapper that centers the divider line and applies inline padding around it.
const dividerVariants = cva("flex items-center justify-center", {
  variants: {
    // Horizontal padding on either side of the line; the odd `md` pixel
    // value presumably matches a design spec — TODO confirm.
    spacing: {
      sm: "px-2",
      md: "px-[23px]",
      lg: "px-8",
    },
    orientation: {
      vertical: "h-full",
      horizontal: "w-full",
    },
  },
  defaultVariants: {
    spacing: "md",
    orientation: "vertical",
  },
});
// The 1px line itself; fixed dark color (not theme-aware).
const lineVariants = cva("bg-[rgba(39,39,42,1)]", {
  variants: {
    orientation: {
      vertical: "h-full w-px",
      horizontal: "w-full h-px",
    },
  },
  defaultVariants: {
    orientation: "vertical",
  },
});
/**
 * Props for the divider: div attributes plus the cva `spacing`/`orientation`
 * variants, with an optional `ref` prop (React 19-style — TODO confirm).
 */
export interface ResourceStatsCardDividerProps
  extends React.HTMLAttributes<HTMLDivElement>,
    VariantProps<typeof dividerVariants> {
  ref?: React.Ref<HTMLDivElement>;
}

/** Thin separator between card sections; vertical by default. */
export const ResourceStatsCardDivider = (
  allProps: ResourceStatsCardDividerProps,
) => {
  const { className, spacing, orientation, ref, ...rest } = allProps;
  const wrapperClasses = cn(
    dividerVariants({ spacing, orientation }),
    className,
  );
  return (
    <div ref={ref} className={wrapperClasses} {...rest}>
      <div className={lineVariants({ orientation })} />
    </div>
  );
};
ResourceStatsCardDivider.displayName = "ResourceStatsCardDivider";
@@ -0,0 +1,103 @@
import { cva, type VariantProps } from "class-variance-authority";
import { LucideIcon } from "lucide-react";
import { cn } from "@/lib/utils";
// Header row layout. NOTE(review): every `size` value is an empty string, so
// this variant currently adds no classes — it only lets `size` flow through
// VariantProps; confirm whether per-size classes were intended.
const headerVariants = cva("flex w-full items-center gap-1", {
  variants: {
    size: {
      sm: "",
      md: "",
      lg: "",
    },
  },
  defaultVariants: {
    size: "md",
  },
});
// Leading icon sizing/coloring; same zinc-300 color in both themes.
const iconVariants = cva("text-zinc-300 dark:text-zinc-300", {
  variants: {
    size: {
      sm: "h-3.5 w-3.5",
      md: "h-4 w-4",
      lg: "h-5 w-5",
    },
  },
  defaultVariants: {
    size: "md",
  },
});
// Title text next to the icon.
const titleVariants = cva(
  "leading-7 font-semibold text-zinc-300 dark:text-zinc-300",
  {
    variants: {
      size: {
        sm: "text-sm",
        md: "text-base",
        lg: "text-lg",
      },
    },
    defaultVariants: {
      size: "md",
    },
  },
);
// Small right-aligned resource-count text.
const countVariants = cva(
  "leading-4 font-normal text-zinc-300 dark:text-zinc-300",
  {
    variants: {
      size: {
        sm: "text-[9px]",
        md: "text-[10px]",
        lg: "text-xs",
      },
    },
    defaultVariants: {
      size: "md",
    },
  },
);
/**
 * Props for the card header: a leading icon + title, with an optional
 * resource count aligned to the right.
 */
export interface ResourceStatsCardHeaderProps
  extends React.HTMLAttributes<HTMLDivElement>,
    VariantProps<typeof headerVariants> {
  /** Icon component rendered before the title (lucide-react). */
  icon: LucideIcon;
  title: string;
  /** A number (formatted as "N Resource(s)") or a preformatted string. */
  resourceCount?: number | string;
  // React 19-style ref prop — TODO confirm the project targets React 19.
  ref?: React.Ref<HTMLDivElement>;
}
export const ResourceStatsCardHeader = ({
  icon: Icon,
  title,
  resourceCount,
  size = "md",
  className,
  ref,
  ...props
}: ResourceStatsCardHeaderProps) => {
  // Fix: pluralize the count label — a numeric count of 1 previously
  // rendered as "1 Resources". String counts are passed through verbatim.
  const countLabel =
    typeof resourceCount === "number"
      ? `${resourceCount} ${resourceCount === 1 ? "Resource" : "Resources"}`
      : resourceCount;
  return (
    <div
      ref={ref}
      className={cn(headerVariants({ size }), className)}
      {...props}
    >
      <div className="flex flex-1 items-center gap-1">
        <Icon className={iconVariants({ size })} strokeWidth={2} />
        <span className={titleVariants({ size })}>{title}</span>
      </div>
      {/* Render the count only when explicitly provided (0 is still shown). */}
      {resourceCount !== undefined && (
        <span className={countVariants({ size })}>{countLabel}</span>
      )}
    </div>
  );
};
ResourceStatsCardHeader.displayName = "ResourceStatsCardHeader";

Some files were not shown because too many files have changed in this diff Show More