mirror of https://github.com/prowler-cloud/prowler.git
synced 2026-01-25 02:08:11 +00:00

Compare commits: trigger-pr...feat/cloud (15 commits)
| Author | SHA1 | Date |
|---|---|---|
| | a534e50df4 | |
| | 4b160257b9 | |
| | 6184de52d9 | |
| | fdf45ea777 | |
| | b7ce9ae5f3 | |
| | 2039a5005c | |
| | 52ed92ac6a | |
| | f5cccecac6 | |
| | a47f6444f8 | |
| | f8c8dee2b3 | |
| | 6656629391 | |
| | 9f372902ad | |
| | b4ff1dcc75 | |
| | f596907223 | |
| | fe768c0a3e | |

.github/workflows/find-secrets.yml (vendored, 32 lines changed)

```diff
@@ -1,19 +1,33 @@
 name: 'Tools: TruffleHog'

-on: pull_request
+on:
+  push:
+    branches:
+      - 'master'
+      - 'v5.*'
+  pull_request:
+    branches:
+      - 'master'
+      - 'v5.*'
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true

 jobs:
-  trufflehog:
+  scan-secrets:
     runs-on: ubuntu-latest
+    timeout-minutes: 15
+    permissions:
+      contents: read

     steps:
-      - name: Checkout
+      - name: Checkout repository
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           fetch-depth: 0
-      - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@466da5b0bb161144f6afca9afe5d57975828c410 # v3.90.8
+
+      - name: Scan for secrets with TruffleHog
+        uses: trufflesecurity/trufflehog@ad6fc8fb446b8fafbf7ea8193d2d6bfd42f45690 # v3.90.11
         with:
           path: ./
           base: ${{ github.event.repository.default_branch }}
           head: HEAD
-          extra_args: --only-verified
+          extra_args: '--results=verified,unknown'
```
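
For local runs, roughly the same scan as the updated step can be reproduced with the TruffleHog CLI. A minimal sketch, assuming TruffleHog v3 is installed and invoked from the repository root (verify flag names against your installed version):

```bash
# Scan commits between the default branch and HEAD, reporting
# verified and unknown results, and exit non-zero on findings.
trufflehog git file://. \
  --since-commit master \
  --branch HEAD \
  --results=verified,unknown \
  --fail
```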

.github/workflows/labeler.yml (vendored, 28 lines changed)

```diff
@@ -1,17 +1,29 @@
-name: Prowler - PR Labeler
+name: 'Tools: PR Labeler'

 on:
-  pull_request_target:
-    branches:
-      - "master"
-      - "v3"
-      - "v4.*"
+  pull_request_target:
+    branches:
+      - 'master'
+      - 'v5.*'
+    types:
+      - 'opened'
+      - 'reopened'
+      - 'synchronize'
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
+  cancel-in-progress: true

 jobs:
   labeler:
+    runs-on: ubuntu-latest
+    timeout-minutes: 15
     permissions:
       contents: read
       pull-requests: write
-    runs-on: ubuntu-latest

     steps:
-      - uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
+      - name: Apply labels to PR
+        uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
         with:
           sync-labels: true
```

.github/workflows/mcp-container-build-push.yml (vendored, 31 lines changed)

```diff
@@ -3,21 +3,13 @@ name: 'MCP: Container Build and Push'
 on:
   push:
     branches:
-      - "master"
+      - 'master'
     paths:
-      - "mcp_server/**"
-      - ".github/workflows/mcp-container-build-push.yml"
-
-  # Uncomment to test this workflow on PRs
-  # pull_request:
-  #   branches:
-  #     - "master"
-  #   paths:
-  #     - "mcp_server/**"
-  #     - ".github/workflows/mcp-container-build-push.yml"
-
+      - 'mcp_server/**'
+      - '.github/workflows/mcp-container-build-push.yml'
   release:
-    types: [published]
+    types:
+      - 'published'

 permissions:
   contents: read
@@ -41,6 +33,7 @@ jobs:
   setup:
     if: github.repository == 'prowler-cloud/prowler'
     runs-on: ubuntu-latest
+    timeout-minutes: 5
     outputs:
       short-sha: ${{ steps.set-short-sha.outputs.short-sha }}
     steps:
@@ -51,8 +44,12 @@
   container-build-push:
     needs: setup
     runs-on: ubuntu-latest
+    timeout-minutes: 30
+    permissions:
+      contents: read
+      packages: write
     steps:
-      - name: Checkout
+      - name: Checkout repository
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Login to DockerHub
@@ -64,7 +61,7 @@
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

-      - name: Build and push container (latest)
+      - name: Build and push MCP container (latest)
         if: github.event_name == 'push'
         uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
@@ -83,7 +80,7 @@
           cache-from: type=gha
           cache-to: type=gha,mode=max

-      - name: Build and push container (release)
+      - name: Build and push MCP container (release)
         if: github.event_name == 'release'
         uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
         with:
@@ -103,7 +100,7 @@
           cache-from: type=gha
           cache-to: type=gha,mode=max

-      - name: Trigger deployment
+      - name: Trigger MCP deployment
         if: github.event_name == 'push'
         uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
         with:
```
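
The deployment trigger goes through GitHub's repository_dispatch API (via peter-evans/repository-dispatch). As a hedged illustration, a comparable event can be fired manually with the gh CLI; OWNER/REPO, the event name, and the payload key below are placeholders, not values taken from this workflow:

```bash
# Hypothetical manual equivalent of the repository-dispatch step.
# Requires a token with write access to the target repository.
gh api repos/OWNER/REPO/dispatches \
  -f event_type=mcp-deploy \
  -f 'client_payload[short_sha]=abc1234'
```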

.github/workflows/pr-check-changelog.yml (vendored, new file, 103 lines)

```diff
@@ -0,0 +1,103 @@
+name: 'Tools: Check Changelog'
+
+on:
+  pull_request:
+    types:
+      - 'opened'
+      - 'synchronize'
+      - 'reopened'
+      - 'labeled'
+      - 'unlabeled'
+    branches:
+      - 'master'
+      - 'v5.*'
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
+  cancel-in-progress: true
+
+jobs:
+  check-changelog:
+    if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
+    runs-on: ubuntu-latest
+    timeout-minutes: 15
+    permissions:
+      contents: read
+      pull-requests: write
+    env:
+      MONITORED_FOLDERS: 'api ui prowler mcp_server'
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+        with:
+          fetch-depth: 0
+
+      - name: Get changed files
+        id: changed-files
+        uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
+        with:
+          files: |
+            api/**
+            ui/**
+            prowler/**
+            mcp_server/**
+
+      - name: Check for folder changes and changelog presence
+        id: check-folders
+        run: |
+          missing_changelogs=""
+
+          if [[ "${{ steps.changed-files.outputs.any_changed }}" == "true" ]]; then
+            for folder in $MONITORED_FOLDERS; do
+              # Get files changed in this folder
+              changed_in_folder=$(echo "${{ steps.changed-files.outputs.all_changed_files }}" | tr ' ' '\n' | grep "^${folder}/" || true)
+
+              if [ -n "$changed_in_folder" ]; then
+                echo "Detected changes in ${folder}/"
+
+                # Check if CHANGELOG.md was updated
+                if ! echo "$changed_in_folder" | grep -q "^${folder}/CHANGELOG.md$"; then
+                  echo "No changelog update found for ${folder}/"
+                  missing_changelogs="${missing_changelogs}- \`${folder}\`"$'\n'
+                fi
+              fi
+            done
+          fi
+
+          {
+            echo "missing_changelogs<<EOF"
+            echo -e "${missing_changelogs}"
+            echo "EOF"
+          } >> $GITHUB_OUTPUT
+
+      - name: Find existing changelog comment
+        if: github.event.pull_request.head.repo.full_name == github.repository
+        id: find-comment
+        uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
+        with:
+          issue-number: ${{ github.event.pull_request.number }}
+          comment-author: 'github-actions[bot]'
+          body-includes: '<!-- changelog-check -->'
+
+      - name: Update PR comment with changelog status
+        if: github.event.pull_request.head.repo.full_name == github.repository
+        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
+        with:
+          issue-number: ${{ github.event.pull_request.number }}
+          comment-id: ${{ steps.find-comment.outputs.comment-id }}
+          edit-mode: replace
+          body: |
+            <!-- changelog-check -->
+            ${{ steps.check-folders.outputs.missing_changelogs != '' && format('⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**

+            {0}

+            Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.', steps.check-folders.outputs.missing_changelogs) || '✅ All necessary `CHANGELOG.md` files have been updated.' }}
+
+      - name: Fail if changelog is missing
+        if: steps.check-folders.outputs.missing_changelogs != ''
+        run: |
+          echo "::error::Missing changelog updates in some folders"
+          exit 1
```
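
The same check can be approximated locally before pushing. A minimal sketch, assuming an up-to-date origin/master and execution from the repository root; it mirrors the workflow's loop using plain git diff, much like the previous version of this workflow did:

```bash
#!/usr/bin/env bash
# Local approximation: list files changed against the base branch and
# flag monitored folders that lack a CHANGELOG.md update.
set -euo pipefail

BASE="origin/master"                      # assumption: the PR targets master
MONITORED_FOLDERS="api ui prowler mcp_server"

changed=$(git diff --name-only "${BASE}...HEAD")

for folder in $MONITORED_FOLDERS; do
  if grep -q "^${folder}/" <<<"$changed" && \
     ! grep -q "^${folder}/CHANGELOG.md$" <<<"$changed"; then
    echo "Missing changelog update for ${folder}/"
  fi
done
```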

.github/workflows/pr-conflict-checker.yml (vendored, 156 lines changed)

````diff
@@ -1,42 +1,40 @@
-name: Prowler - PR Conflict Checker
+name: 'Tools: PR Conflict Checker'

 on:
-  pull_request:
+  pull_request_target:
     types:
-      - opened
-      - synchronize
-      - reopened
+      - 'opened'
+      - 'synchronize'
+      - 'reopened'
     branches:
-      - "master"
-      - "v5.*"
-  # Leaving this commented until we find a way to run it for forks but in Prowler's context
-  # pull_request_target:
-  #   types:
-  #     - opened
-  #     - synchronize
-  #     - reopened
-  #   branches:
-  #     - "master"
-  #     - "v5.*"
+      - 'master'
+      - 'v5.*'

 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
   cancel-in-progress: true

 jobs:
-  conflict-checker:
+  check-conflicts:
     runs-on: ubuntu-latest
     timeout-minutes: 15
     permissions:
       contents: read
       pull-requests: write
+      issues: write

     steps:
-      - name: Checkout repository
+      - name: Checkout PR head
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           ref: ${{ github.event.pull_request.head.sha }}
           fetch-depth: 0

       - name: Get changed files
         id: changed-files
         uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
         with:
-          files: |
-            **
+          files: '**'

       - name: Check for conflict markers
         id: conflict-check
@@ -51,10 +49,10 @@
           if [ -f "$file" ]; then
             echo "Checking file: $file"

-            # Look for conflict markers
-            if grep -l "^<<<<<<<\|^=======\|^>>>>>>>" "$file" 2>/dev/null; then
+            # Look for conflict markers (more precise regex)
+            if grep -qE '^(<<<<<<<|=======|>>>>>>>)' "$file" 2>/dev/null; then
               echo "Conflict markers found in: $file"
-              CONFLICT_FILES="$CONFLICT_FILES$file "
+              CONFLICT_FILES="${CONFLICT_FILES}- \`${file}\`"$'\n'
               HAS_CONFLICTS=true
             fi
           fi
@@ -62,114 +60,64 @@

           if [ "$HAS_CONFLICTS" = true ]; then
             echo "has_conflicts=true" >> $GITHUB_OUTPUT
-            echo "conflict_files=$CONFLICT_FILES" >> $GITHUB_OUTPUT
-            echo "Conflict markers detected in files: $CONFLICT_FILES"
+            {
+              echo "conflict_files<<EOF"
+              echo "$CONFLICT_FILES"
+              echo "EOF"
+            } >> $GITHUB_OUTPUT
+            echo "Conflict markers detected"
           else
             echo "has_conflicts=false" >> $GITHUB_OUTPUT
             echo "No conflict markers found in changed files"
           fi

-      - name: Add conflict label
-        if: steps.conflict-check.outputs.has_conflicts == 'true'
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
-        with:
-          github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-          script: |
-            const { data: labels } = await github.rest.issues.listLabelsOnIssue({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              issue_number: context.issue.number,
-            });
-
-            const hasConflictLabel = labels.some(label => label.name === 'has-conflicts');
-
-            if (!hasConflictLabel) {
-              await github.rest.issues.addLabels({
-                owner: context.repo.owner,
-                repo: context.repo.repo,
-                issue_number: context.issue.number,
-                labels: ['has-conflicts']
-              });
-              console.log('Added has-conflicts label');
-            } else {
-              console.log('has-conflicts label already exists');
-            }
-
-      - name: Remove conflict label
-        if: steps.conflict-check.outputs.has_conflicts == 'false'
-        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
-        with:
-          github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
-        script: |
-            try {
-              await github.rest.issues.removeLabel({
-                owner: context.repo.owner,
-                repo: context.repo.repo,
-                issue_number: context.issue.number,
-                name: 'has-conflicts'
-              });
-              console.log('Removed has-conflicts label');
-            } catch (error) {
-              if (error.status === 404) {
-                console.log('has-conflicts label was not present');
-              } else {
-                throw error;
-              }
-            }
+      - name: Manage conflict label
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          PR_NUMBER: ${{ github.event.pull_request.number }}
+          HAS_CONFLICTS: ${{ steps.conflict-check.outputs.has_conflicts }}
+        run: |
+          LABEL_NAME="has-conflicts"
+
+          # Add or remove label based on conflict status
+          if [ "$HAS_CONFLICTS" = "true" ]; then
+            echo "Adding conflict label to PR #${PR_NUMBER}..."
+            gh pr edit "$PR_NUMBER" --add-label "$LABEL_NAME" --repo ${{ github.repository }} || true
+          else
+            echo "Removing conflict label from PR #${PR_NUMBER}..."
+            gh pr edit "$PR_NUMBER" --remove-label "$LABEL_NAME" --repo ${{ github.repository }} || true
+          fi

-      - name: Find existing conflict comment
-        if: steps.conflict-check.outputs.has_conflicts == 'true'
+      - name: Find existing comment
         uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
         id: find-comment
         with:
           issue-number: ${{ github.event.pull_request.number }}
           comment-author: 'github-actions[bot]'
-          body-regex: '(⚠️ \*\*Conflict Markers Detected\*\*|✅ \*\*Conflict Markers Resolved\*\*)'
+          body-includes: '<!-- conflict-checker-comment -->'

-      - name: Create or update conflict comment
-        if: steps.conflict-check.outputs.has_conflicts == 'true'
+      - name: Create or update comment
         uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
         with:
           comment-id: ${{ steps.find-comment.outputs.comment-id }}
           issue-number: ${{ github.event.pull_request.number }}
           edit-mode: replace
           body: |
-            ⚠️ **Conflict Markers Detected**
+            <!-- conflict-checker-comment -->
+            ${{ steps.conflict-check.outputs.has_conflicts == 'true' && '⚠️ **Conflict Markers Detected**' || '✅ **Conflict Markers Resolved**' }}

-            This pull request contains unresolved conflict markers in the following files:
-            ```
-            ${{ steps.conflict-check.outputs.conflict_files }}
-            ```
+            ${{ steps.conflict-check.outputs.has_conflicts == 'true' && format('This pull request contains unresolved conflict markers in the following files:
+
+            {0}

             Please resolve these conflicts by:
             1. Locating the conflict markers: `<<<<<<<`, `=======`, and `>>>>>>>`
             2. Manually editing the files to resolve the conflicts
             3. Removing all conflict markers
-            4. Committing and pushing the changes
-
-      - name: Find existing conflict comment when resolved
-        if: steps.conflict-check.outputs.has_conflicts == 'false'
-        uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
-        id: find-resolved-comment
-        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          comment-author: 'github-actions[bot]'
-          body-regex: '(⚠️ \*\*Conflict Markers Detected\*\*|✅ \*\*Conflict Markers Resolved\*\*)'
-
-      - name: Update comment when conflicts resolved
-        if: steps.conflict-check.outputs.has_conflicts == 'false' && steps.find-resolved-comment.outputs.comment-id != ''
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
-        with:
-          comment-id: ${{ steps.find-resolved-comment.outputs.comment-id }}
-          issue-number: ${{ github.event.pull_request.number }}
-          edit-mode: replace
-          body: |
-            ✅ **Conflict Markers Resolved**
-
-            All conflict markers have been successfully resolved in this pull request.
+            4. Committing and pushing the changes', steps.conflict-check.outputs.conflict_files) || 'All conflict markers have been successfully resolved in this pull request.' }}

       - name: Fail workflow if conflicts detected
         if: steps.conflict-check.outputs.has_conflicts == 'true'
         run: |
-          echo "::error::Workflow failed due to conflict markers in files: ${{ steps.conflict-check.outputs.conflict_files }}"
+          echo "::error::Workflow failed due to conflict markers detected in the PR"
           exit 1
````
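
The tightened pattern anchors each marker at the start of a line and uses a quiet extended-regex match instead of grep -l. A standalone sanity check:

```bash
# Create a file with fake conflict markers and one without, then run
# the workflow's exact regex against both.
printf '<<<<<<< HEAD\nours\n=======\ntheirs\n>>>>>>> branch\n' > /tmp/conflicted.txt
printf 'no markers here\n' > /tmp/clean.txt

for f in /tmp/conflicted.txt /tmp/clean.txt; do
  if grep -qE '^(<<<<<<<|=======|>>>>>>>)' "$f"; then
    echo "conflict markers found in: $f"
  else
    echo "clean: $f"
  fi
done
```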

```diff
@@ -1,27 +1,31 @@
-name: Prowler - Merged Pull Request
+name: 'Tools: PR Merged'

 on:
   pull_request_target:
-    branches: ['master']
-    types: ['closed']
+    branches:
+      - 'master'
+    types:
+      - 'closed'
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
+  cancel-in-progress: false

 jobs:
   trigger-cloud-pull-request:
     name: Trigger Cloud Pull Request
     if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
     runs-on: ubuntu-latest
     timeout-minutes: 10
     permissions:
       contents: read
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           ref: ${{ github.event.pull_request.merge_commit_sha }}

-      - name: Set short git commit SHA
+      - name: Calculate short commit SHA
         id: vars
         run: |
-          shortSha=$(git rev-parse --short ${{ github.event.pull_request.merge_commit_sha }})
-          echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
+          SHORT_SHA="${{ github.event.pull_request.merge_commit_sha }}"
+          echo "SHORT_SHA=${SHORT_SHA::7}" >> $GITHUB_ENV

-      - name: Trigger pull request
+      - name: Trigger Cloud repository pull request
         uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
         with:
           token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
@@ -31,8 +35,12 @@
           {
             "PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
             "PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
             "PROWLER_PR_NUMBER": "${{ github.event.pull_request.number }}",
             "PROWLER_PR_TITLE": ${{ toJson(github.event.pull_request.title) }},
+            "PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
             "PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
-            "PROWLER_PR_URL": ${{ toJson(github.event.pull_request.html_url) }}
+            "PROWLER_PR_URL": ${{ toJson(github.event.pull_request.html_url) }},
+            "PROWLER_PR_MERGED_BY": "${{ github.event.pull_request.merged_by.login }}",
+            "PROWLER_PR_BASE_BRANCH": "${{ github.event.pull_request.base.ref }}",
+            "PROWLER_PR_HEAD_BRANCH": "${{ github.event.pull_request.head.ref }}"
           }
```
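
The rewritten step replaces a git subprocess with bash parameter expansion: ${SHORT_SHA::7} takes the first seven characters of the full merge commit SHA. A quick comparison, using a synthetic hash:

```bash
# Substring expansion needs no repository at all; git rev-parse needs
# a clone containing the commit and may pad the result for uniqueness.
FULL_SHA="0123456789abcdef0123456789abcdef01234567"   # synthetic example
echo "${FULL_SHA::7}"                  # -> 0123456
# git rev-parse --short=7 "$FULL_SHA"  # equivalent inside a clone
```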

```diff
@@ -1,6 +1,6 @@
-name: Prowler - Release Preparation
+name: 'Tools: Prepare Release'

-run-name: Prowler Release Preparation for ${{ inputs.prowler_version }}
+run-name: 'Prepare Release for Prowler ${{ inputs.prowler_version }}'

 on:
   workflow_dispatch:
@@ -10,18 +10,23 @@ on:
       required: true
       type: string

+concurrency:
+  group: ${{ github.workflow }}-${{ inputs.prowler_version }}
+  cancel-in-progress: false
+
 env:
-  PROWLER_VERSION: ${{ github.event.inputs.prowler_version }}
+  PROWLER_VERSION: ${{ inputs.prowler_version }}

 jobs:
   prepare-release:
-    if: github.repository == 'prowler-cloud/prowler'
+    if: github.event_name == 'workflow_dispatch' && github.repository == 'prowler-cloud/prowler'
     runs-on: ubuntu-latest
+    timeout-minutes: 30
     permissions:
       contents: write
       pull-requests: write
     steps:
-      - name: Checkout code
+      - name: Checkout repository
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           fetch-depth: 0
@@ -34,15 +39,15 @@
       - name: Install Poetry
         run: |
-          python3 -m pip install --user poetry
+          python3 -m pip install --user poetry==2.1.1
           echo "$HOME/.local/bin" >> $GITHUB_PATH

       - name: Configure Git
         run: |
-          git config --global user.name "prowler-bot"
-          git config --global user.email "179230569+prowler-bot@users.noreply.github.com"
+          git config --global user.name 'prowler-bot'
+          git config --global user.email '179230569+prowler-bot@users.noreply.github.com'

-      - name: Parse version and determine branch
+      - name: Parse version and read changelogs
         run: |
           # Validate version format (reusing pattern from sdk-bump-version.yml)
           if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
@@ -119,7 +124,7 @@
             exit 1
           fi

-      - name: Extract changelog entries
+      - name: Extract and combine changelog entries
         run: |
           set -e

@@ -245,7 +250,7 @@
           echo "Combined changelog preview:"
           cat combined_changelog.md

-      - name: Checkout existing branch for patch release
+      - name: Checkout release branch for patch release
         if: ${{ env.PATCH_VERSION != '0' }}
         run: |
           echo "Patch release detected, checking out existing branch $BRANCH_NAME..."
@@ -260,7 +265,7 @@
             exit 1
           fi

-      - name: Verify version in pyproject.toml
+      - name: Verify SDK version in pyproject.toml
         run: |
           CURRENT_VERSION=$(grep '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
           PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
@@ -270,7 +275,7 @@
           fi
           echo "✓ pyproject.toml version: $CURRENT_VERSION"

-      - name: Verify version in prowler/config/config.py
+      - name: Verify SDK version in prowler/config/config.py
         run: |
           CURRENT_VERSION=$(grep '^prowler_version = ' prowler/config/config.py | sed -E 's/prowler_version = "([^"]+)"/\1/' | tr -d '[:space:]')
           PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
@@ -280,7 +285,7 @@
           fi
           echo "✓ prowler/config/config.py version: $CURRENT_VERSION"

-      - name: Verify version in api/pyproject.toml
+      - name: Verify API version in api/pyproject.toml
         if: ${{ env.HAS_API_CHANGES == 'true' }}
         run: |
           CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
@@ -291,7 +296,7 @@
           fi
           echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"

-      - name: Verify prowler dependency in api/pyproject.toml
+      - name: Verify API prowler dependency in api/pyproject.toml
         if: ${{ env.PATCH_VERSION != '0' && env.HAS_API_CHANGES == 'true' }}
         run: |
           CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
@@ -302,7 +307,7 @@
           fi
           echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"

-      - name: Verify version in api/src/backend/api/v1/views.py
+      - name: Verify API version in api/src/backend/api/v1/views.py
         if: ${{ env.HAS_API_CHANGES == 'true' }}
         run: |
           CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
@@ -313,7 +318,7 @@
           fi
           echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"

-      - name: Checkout existing release branch for minor release
+      - name: Checkout release branch for minor release
         if: ${{ env.PATCH_VERSION == '0' }}
         run: |
           echo "Minor release detected (patch = 0), checking out existing branch $BRANCH_NAME..."
@@ -325,7 +330,7 @@
             exit 1
           fi

-      - name: Prepare prowler dependency update for minor release
+      - name: Update API prowler dependency for minor release
         if: ${{ env.PATCH_VERSION == '0' }}
         run: |
           CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
@@ -362,7 +367,7 @@

           echo "✓ Prepared prowler dependency update to: $UPDATED_PROWLER_REF"

-      - name: Create Pull Request against release branch
+      - name: Create PR for API dependency update
         if: ${{ env.PATCH_VERSION == '0' }}
         uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
         with:
```
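
The version gate in "Parse version and read changelogs" relies on bash's =~ operator populating BASH_REMATCH. A standalone sketch of that parse; deriving BRANCH_NAME as v&lt;major&gt;.&lt;minor&gt; is an assumption inferred from the v5.* branch patterns used elsewhere in these workflows:

```bash
PROWLER_VERSION="5.14.1"   # example input

if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
  MAJOR="${BASH_REMATCH[1]}"
  MINOR="${BASH_REMATCH[2]}"
  PATCH="${BASH_REMATCH[3]}"          # PATCH != 0 selects the patch-release path
  BRANCH_NAME="v${MAJOR}.${MINOR}"    # assumption: release branches are v<major>.<minor>
  echo "major=$MAJOR minor=$MINOR patch=$PATCH branch=$BRANCH_NAME"
else
  echo "Invalid version format: $PROWLER_VERSION" >&2
  exit 1
fi
```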

```diff
@@ -1,77 +0,0 @@
-name: Prowler - Check Changelog
-
-on:
-  pull_request:
-    types: [opened, synchronize, reopened, labeled, unlabeled]
-
-jobs:
-  check-changelog:
-    if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
-    runs-on: ubuntu-latest
-    permissions:
-      id-token: write
-      contents: read
-      pull-requests: write
-    env:
-      MONITORED_FOLDERS: "api ui prowler mcp_server"
-
-    steps:
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-        with:
-          fetch-depth: 0
-
-      - name: Get list of changed files
-        id: changed_files
-        run: |
-          git fetch origin ${{ github.base_ref }}
-          git diff --name-only origin/${{ github.base_ref }}...HEAD > changed_files.txt
-          cat changed_files.txt
-
-      - name: Check for folder changes and changelog presence
-        id: check_folders
-        run: |
-          missing_changelogs=""
-
-          for folder in $MONITORED_FOLDERS; do
-            if grep -q "^${folder}/" changed_files.txt; then
-              echo "Detected changes in ${folder}/"
-              if ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
-                echo "No changelog update found for ${folder}/"
-                missing_changelogs="${missing_changelogs}- \`${folder}\`\n"
-              fi
-            fi
-          done
-
-          echo "missing_changelogs<<EOF" >> $GITHUB_OUTPUT
-          echo -e "${missing_changelogs}" >> $GITHUB_OUTPUT
-          echo "EOF" >> $GITHUB_OUTPUT
-
-      - name: Find existing changelog comment
-        if: github.event.pull_request.head.repo.full_name == github.repository
-        id: find_comment
-        uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
-        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          comment-author: 'github-actions[bot]'
-          body-includes: '<!-- changelog-check -->'
-
-      - name: Update PR comment with changelog status
-        if: github.event.pull_request.head.repo.full_name == github.repository
-        uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
-        with:
-          issue-number: ${{ github.event.pull_request.number }}
-          comment-id: ${{ steps.find_comment.outputs.comment-id }}
-          edit-mode: replace
-          body: |
-            <!-- changelog-check -->
-            ${{ steps.check_folders.outputs.missing_changelogs != '' && format('⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**

-            {0}

-            Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.', steps.check_folders.outputs.missing_changelogs) || '✅ All necessary `CHANGELOG.md` files have been updated. Great job! 🎉' }}
-
-      - name: Fail if changelog is missing
-        if: steps.check_folders.outputs.missing_changelogs != ''
-        run: |
-          echo "ERROR: Missing changelog updates in some folders."
-          exit 1
```

.gitignore (vendored, 3 lines changed)

```diff
@@ -83,3 +83,6 @@ CLAUDE.md
 # MCP Server
 mcp_server/prowler_mcp_server/prowler_app/server.py
 mcp_server/prowler_mcp_server/prowler_app/utils/schema.yaml
+
+# Compliance report
+*.pdf
```

Makefile (10 lines changed)

```diff
@@ -46,6 +46,14 @@ help: ## Show this help.
 	@echo "Prowler Makefile"
 	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

+##@ Build no cache
+build-no-cache-dev:
+	docker compose -f docker-compose-dev.yml build --no-cache api-dev worker-dev worker-beat
+
 ##@ Development Environment
 run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, and workers
-	docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat --build
+	docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat
+
+##@ Development Environment
+build-and-run-api-dev: build-no-cache-dev run-api-dev
```
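
The new build-and-run-api-dev target simply chains the other two, so running it is equivalent to:

```bash
# What `make build-and-run-api-dev` expands to, per the targets above.
docker compose -f docker-compose-dev.yml build --no-cache api-dev worker-dev worker-beat
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat
```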

```diff
@@ -12,6 +12,7 @@ All notable changes to the **Prowler API** are documented in this file.
 - API Key support [(#8805)](https://github.com/prowler-cloud/prowler/pull/8805)
 - SAML role mapping protection for single-admin tenants to prevent accidental lockout [(#8882)](https://github.com/prowler-cloud/prowler/pull/8882)
 - Support for `passed_findings` and `total_findings` fields in compliance requirement overview for accurate Prowler ThreatScore calculation [(#8582)](https://github.com/prowler-cloud/prowler/pull/8582)
 - PDF reporting for Prowler ThreatScore [(#8867)](https://github.com/prowler-cloud/prowler/pull/8867)
 - Database read replica support [(#8869)](https://github.com/prowler-cloud/prowler/pull/8869)
 - Support Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
+- Add `provider_id__in` filter support to findings and findings severity overview endpoints [(#8951)](https://github.com/prowler-cloud/prowler/pull/8951)
```

api/poetry.lock (generated, 529 lines changed)
@@ -1256,6 +1256,98 @@ files = [
|
||||
{file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "contourpy"
|
||||
version = "1.3.3"
|
||||
description = "Python library for calculating contours of 2D quadrilateral grids"
|
||||
optional = false
|
||||
python-versions = ">=3.11"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42"},
|
||||
{file = "contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b"},
|
||||
{file = "contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8"},
|
||||
{file = "contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc"},
|
||||
{file = "contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b"},
|
||||
{file = "contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497"},
|
||||
{file = "contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8"},
|
||||
{file = "contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e"},
|
||||
{file = "contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989"},
|
||||
{file = "contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77"},
|
||||
{file = "contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
numpy = ">=1.25"
|
||||
|
||||
[package.extras]
|
||||
bokeh = ["bokeh", "selenium"]
|
||||
docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"]
|
||||
mypy = ["bokeh", "contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.17.0)", "types-Pillow"]
|
||||
test = ["Pillow", "contourpy[test-no-images]", "matplotlib"]
|
||||
test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.5.4"
|
||||
@@ -1390,6 +1482,22 @@ ssh = ["bcrypt (>=3.1.5)"]
|
||||
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
|
||||
test-randomorder = ["pytest-randomly"]
|
||||
|
||||
[[package]]
|
||||
name = "cycler"
|
||||
version = "0.12.1"
|
||||
description = "Composable style cycles"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
|
||||
{file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["ipython", "matplotlib", "numpydoc", "sphinx"]
|
||||
tests = ["pytest", "pytest-cov", "pytest-xdist"]
|
||||
|
||||
[[package]]
|
||||
name = "dash"
|
||||
version = "3.1.1"
|
||||
@@ -2120,6 +2228,87 @@ werkzeug = ">=3.1.0"
|
||||
async = ["asgiref (>=3.2)"]
|
||||
dotenv = ["python-dotenv"]
|
||||
|
||||
[[package]]
|
||||
name = "fonttools"
|
||||
version = "4.60.1"
|
||||
description = "Tools to manipulate font files"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "fonttools-4.60.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9a52f254ce051e196b8fe2af4634c2d2f02c981756c6464dc192f1b6050b4e28"},
|
||||
{file = "fonttools-4.60.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7420a2696a44650120cdd269a5d2e56a477e2bfa9d95e86229059beb1c19e15"},
|
||||
{file = "fonttools-4.60.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee0c0b3b35b34f782afc673d503167157094a16f442ace7c6c5e0ca80b08f50c"},
|
||||
{file = "fonttools-4.60.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:282dafa55f9659e8999110bd8ed422ebe1c8aecd0dc396550b038e6c9a08b8ea"},
|
||||
{file = "fonttools-4.60.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4ba4bd646e86de16160f0fb72e31c3b9b7d0721c3e5b26b9fa2fc931dfdb2652"},
|
||||
{file = "fonttools-4.60.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0b0835ed15dd5b40d726bb61c846a688f5b4ce2208ec68779bc81860adb5851a"},
|
||||
{file = "fonttools-4.60.1-cp310-cp310-win32.whl", hash = "sha256:1525796c3ffe27bb6268ed2a1bb0dcf214d561dfaf04728abf01489eb5339dce"},
|
||||
{file = "fonttools-4.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:268ecda8ca6cb5c4f044b1fb9b3b376e8cd1b361cef275082429dc4174907038"},
|
||||
{file = "fonttools-4.60.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7b4c32e232a71f63a5d00259ca3d88345ce2a43295bb049d21061f338124246f"},
|
||||
{file = "fonttools-4.60.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3630e86c484263eaac71d117085d509cbcf7b18f677906824e4bace598fb70d2"},
|
||||
{file = "fonttools-4.60.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c1015318e4fec75dd4943ad5f6a206d9727adf97410d58b7e32ab644a807914"},
|
||||
{file = "fonttools-4.60.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e6c58beb17380f7c2ea181ea11e7db8c0ceb474c9dd45f48e71e2cb577d146a1"},
|
||||
{file = "fonttools-4.60.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec3681a0cb34c255d76dd9d865a55f260164adb9fa02628415cdc2d43ee2c05d"},
|
||||
{file = "fonttools-4.60.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4b5c37a5f40e4d733d3bbaaef082149bee5a5ea3156a785ff64d949bd1353fa"},
|
||||
{file = "fonttools-4.60.1-cp311-cp311-win32.whl", hash = "sha256:398447f3d8c0c786cbf1209711e79080a40761eb44b27cdafffb48f52bcec258"},
|
||||
{file = "fonttools-4.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:d066ea419f719ed87bc2c99a4a4bfd77c2e5949cb724588b9dd58f3fd90b92bf"},
|
||||
{file = "fonttools-4.60.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7b0c6d57ab00dae9529f3faf187f2254ea0aa1e04215cf2f1a8ec277c96661bc"},
|
||||
{file = "fonttools-4.60.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:839565cbf14645952d933853e8ade66a463684ed6ed6c9345d0faf1f0e868877"},
|
||||
{file = "fonttools-4.60.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8177ec9676ea6e1793c8a084a90b65a9f778771998eb919d05db6d4b1c0b114c"},
|
||||
{file = "fonttools-4.60.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:996a4d1834524adbb423385d5a629b868ef9d774670856c63c9a0408a3063401"},
|
||||
{file = "fonttools-4.60.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a46b2f450bc79e06ef3b6394f0c68660529ed51692606ad7f953fc2e448bc903"},
|
||||
{file = "fonttools-4.60.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ec722ee589e89a89f5b7574f5c45604030aa6ae24cb2c751e2707193b466fed"},
|
||||
{file = "fonttools-4.60.1-cp312-cp312-win32.whl", hash = "sha256:b2cf105cee600d2de04ca3cfa1f74f1127f8455b71dbad02b9da6ec266e116d6"},
|
||||
{file = "fonttools-4.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:992775c9fbe2cf794786fa0ffca7f09f564ba3499b8fe9f2f80bd7197db60383"},
|
||||
{file = "fonttools-4.60.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f68576bb4bbf6060c7ab047b1574a1ebe5c50a17de62830079967b211059ebb"},
|
||||
{file = "fonttools-4.60.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:eedacb5c5d22b7097482fa834bda0dafa3d914a4e829ec83cdea2a01f8c813c4"},
|
||||
{file = "fonttools-4.60.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b33a7884fabd72bdf5f910d0cf46be50dce86a0362a65cfc746a4168c67eb96c"},
|
||||
{file = "fonttools-4.60.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2409d5fb7b55fd70f715e6d34e7a6e4f7511b8ad29a49d6df225ee76da76dd77"},
|
||||
{file = "fonttools-4.60.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c8651e0d4b3bdeda6602b85fdc2abbefc1b41e573ecb37b6779c4ca50753a199"},
|
||||
{file = "fonttools-4.60.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:145daa14bf24824b677b9357c5e44fd8895c2a8f53596e1b9ea3496081dc692c"},
|
||||
{file = "fonttools-4.60.1-cp313-cp313-win32.whl", hash = "sha256:2299df884c11162617a66b7c316957d74a18e3758c0274762d2cc87df7bc0272"},
|
||||
{file = "fonttools-4.60.1-cp313-cp313-win_amd64.whl", hash = "sha256:a3db56f153bd4c5c2b619ab02c5db5192e222150ce5a1bc10f16164714bc39ac"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:a884aef09d45ba1206712c7dbda5829562d3fea7726935d3289d343232ecb0d3"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8a44788d9d91df72d1a5eac49b31aeb887a5f4aab761b4cffc4196c74907ea85"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e852d9dda9f93ad3651ae1e3bb770eac544ec93c3807888798eccddf84596537"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:154cb6ee417e417bf5f7c42fe25858c9140c26f647c7347c06f0cc2d47eff003"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5664fd1a9ea7f244487ac8f10340c4e37664675e8667d6fee420766e0fb3cf08"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:583b7f8e3c49486e4d489ad1deacfb8d5be54a8ef34d6df824f6a171f8511d99"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314-win32.whl", hash = "sha256:66929e2ea2810c6533a5184f938502cfdaea4bc3efb7130d8cc02e1c1b4108d6"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314-win_amd64.whl", hash = "sha256:f3d5be054c461d6a2268831f04091dc82753176f6ea06dc6047a5e168265a987"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:b6379e7546ba4ae4b18f8ae2b9bc5960936007a1c0e30b342f662577e8bc3299"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9d0ced62b59e0430b3690dbc5373df1c2aa7585e9a8ce38eff87f0fd993c5b01"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:875cb7764708b3132637f6c5fb385b16eeba0f7ac9fa45a69d35e09b47045801"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a184b2ea57b13680ab6d5fbde99ccef152c95c06746cb7718c583abd8f945ccc"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:026290e4ec76583881763fac284aca67365e0be9f13a7fb137257096114cb3bc"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f0e8817c7d1a0c2eedebf57ef9a9896f3ea23324769a9a2061a80fe8852705ed"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314t-win32.whl", hash = "sha256:1410155d0e764a4615774e5c2c6fc516259fe3eca5882f034eb9bfdbee056259"},
|
||||
{file = "fonttools-4.60.1-cp314-cp314t-win_amd64.whl", hash = "sha256:022beaea4b73a70295b688f817ddc24ed3e3418b5036ffcd5658141184ef0d0c"},
|
||||
{file = "fonttools-4.60.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:122e1a8ada290423c493491d002f622b1992b1ab0b488c68e31c413390dc7eb2"},
|
||||
{file = "fonttools-4.60.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a140761c4ff63d0cb9256ac752f230460ee225ccef4ad8f68affc723c88e2036"},
|
||||
{file = "fonttools-4.60.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0eae96373e4b7c9e45d099d7a523444e3554360927225c1cdae221a58a45b856"},
|
||||
{file = "fonttools-4.60.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:596ecaca36367027d525b3b426d8a8208169d09edcf8c7506aceb3a38bfb55c7"},
|
||||
{file = "fonttools-4.60.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ee06fc57512144d8b0445194c2da9f190f61ad51e230f14836286470c99f854"},
|
||||
{file = "fonttools-4.60.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b42d86938e8dda1cd9a1a87a6d82f1818eaf933348429653559a458d027446da"},
|
||||
{file = "fonttools-4.60.1-cp39-cp39-win32.whl", hash = "sha256:8b4eb332f9501cb1cd3d4d099374a1e1306783ff95489a1026bde9eb02ccc34a"},
|
||||
{file = "fonttools-4.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:7473a8ed9ed09aeaa191301244a5a9dbe46fe0bf54f9d6cd21d83044c3321217"},
|
||||
{file = "fonttools-4.60.1-py3-none-any.whl", hash = "sha256:906306ac7afe2156fcf0042173d6ebbb05416af70f6b370967b47f8f00103bbb"},
|
||||
{file = "fonttools-4.60.1.tar.gz", hash = "sha256:ef00af0439ebfee806b25f24c8f92109157ff3fac5731dc7867957812e87b8d9"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"]
|
||||
graphite = ["lz4 (>=1.7.4.2)"]
|
||||
interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""]
|
||||
lxml = ["lxml (>=4.0)"]
|
||||
pathops = ["skia-pathops (>=0.5.0)"]
|
||||
plot = ["matplotlib"]
|
||||
repacker = ["uharfbuzz (>=0.23.0)"]
|
||||
symfont = ["sympy"]
|
||||
type1 = ["xattr ; sys_platform == \"darwin\""]
|
||||
unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""]
|
||||
woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"]
|
||||
|
||||
[[package]]
|
||||
name = "freezegun"
|
||||
version = "1.5.1"
|
||||
@@ -2787,6 +2976,117 @@ files = [

[package.dependencies]
referencing = ">=0.31.0"

[[package]]
name = "kiwisolver"
version = "1.4.9"
description = "A fast implementation of the Cassowary constraint solver"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
    {file = "kiwisolver-1.4.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b4b4d74bda2b8ebf4da5bd42af11d02d04428b2c32846e4c2c93219df8a7987b"},
    {file = "kiwisolver-1.4.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fb3b8132019ea572f4611d770991000d7f58127560c4889729248eb5852a102f"},
    {file = "kiwisolver-1.4.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84fd60810829c27ae375114cd379da1fa65e6918e1da405f356a775d49a62bcf"},
    {file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b78efa4c6e804ecdf727e580dbb9cba85624d2e1c6b5cb059c66290063bd99a9"},
    {file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4efec7bcf21671db6a3294ff301d2fc861c31faa3c8740d1a94689234d1b415"},
    {file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:90f47e70293fc3688b71271100a1a5453aa9944a81d27ff779c108372cf5567b"},
    {file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fdca1def57a2e88ef339de1737a1449d6dbf5fab184c54a1fca01d541317154"},
    {file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cf554f21be770f5111a1690d42313e140355e687e05cf82cb23d0a721a64a48"},
    {file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fc1795ac5cd0510207482c3d1d3ed781143383b8cfd36f5c645f3897ce066220"},
    {file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ccd09f20ccdbbd341b21a67ab50a119b64a403b09288c27481575105283c1586"},
    {file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:540c7c72324d864406a009d72f5d6856f49693db95d1fbb46cf86febef873634"},
    {file = "kiwisolver-1.4.9-cp310-cp310-win_amd64.whl", hash = "sha256:ede8c6d533bc6601a47ad4046080d36b8fc99f81e6f1c17b0ac3c2dc91ac7611"},
    {file = "kiwisolver-1.4.9-cp310-cp310-win_arm64.whl", hash = "sha256:7b4da0d01ac866a57dd61ac258c5607b4cd677f63abaec7b148354d2b2cdd536"},
    {file = "kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16"},
    {file = "kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089"},
    {file = "kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543"},
    {file = "kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61"},
    {file = "kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1"},
    {file = "kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872"},
    {file = "kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26"},
    {file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028"},
    {file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771"},
    {file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a"},
    {file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464"},
    {file = "kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2"},
    {file = "kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7"},
    {file = "kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999"},
    {file = "kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2"},
    {file = "kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14"},
    {file = "kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04"},
    {file = "kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752"},
    {file = "kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77"},
    {file = "kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198"},
    {file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d"},
    {file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab"},
    {file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2"},
    {file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145"},
    {file = "kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54"},
    {file = "kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60"},
    {file = "kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8"},
    {file = "kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2"},
    {file = "kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f"},
    {file = "kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098"},
    {file = "kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed"},
    {file = "kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525"},
    {file = "kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78"},
    {file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b"},
    {file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799"},
    {file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3"},
    {file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c"},
    {file = "kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d"},
    {file = "kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07"},
    {file = "kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c"},
    {file = "kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386"},
    {file = "kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552"},
    {file = "kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3"},
    {file = "kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58"},
    {file = "kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4"},
    {file = "kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df"},
    {file = "kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6"},
    {file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5"},
    {file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf"},
    {file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5"},
    {file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce"},
    {file = "kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7"},
    {file = "kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891"},
    {file = "kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4d1d9e582ad4d63062d34077a9a1e9f3c34088a2ec5135b1f7190c07cf366527"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:deed0c7258ceb4c44ad5ec7d9918f9f14fd05b2be86378d86cf50e63d1e7b771"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a590506f303f512dff6b7f75fd2fd18e16943efee932008fe7140e5fa91d80e"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e09c2279a4d01f099f52d5c4b3d9e208e91edcbd1a175c9662a8b16e000fece9"},
    {file = "kiwisolver-1.4.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c9e7cdf45d594ee04d5be1b24dd9d49f3d1590959b2271fb30b5ca2b262c00fb"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f"},
    {file = "kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1"},
    {file = "kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d"},
]

[[package]]
name = "kombu"
version = "5.5.4"
@@ -3137,6 +3437,85 @@ dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
tests = ["pytest", "simplejson"]

[[package]]
name = "matplotlib"
version = "3.10.6"
description = "Python plotting package"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
    {file = "matplotlib-3.10.6-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bc7316c306d97463a9866b89d5cc217824e799fa0de346c8f68f4f3d27c8693d"},
    {file = "matplotlib-3.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d00932b0d160ef03f59f9c0e16d1e3ac89646f7785165ce6ad40c842db16cc2e"},
    {file = "matplotlib-3.10.6-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8fa4c43d6bfdbfec09c733bca8667de11bfa4970e8324c471f3a3632a0301c15"},
    {file = "matplotlib-3.10.6-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea117a9c1627acaa04dbf36265691921b999cbf515a015298e54e1a12c3af837"},
    {file = "matplotlib-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:08fc803293b4e1694ee325896030de97f74c141ccff0be886bb5915269247676"},
    {file = "matplotlib-3.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:2adf92d9b7527fbfb8818e050260f0ebaa460f79d61546374ce73506c9421d09"},
    {file = "matplotlib-3.10.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:905b60d1cb0ee604ce65b297b61cf8be9f4e6cfecf95a3fe1c388b5266bc8f4f"},
    {file = "matplotlib-3.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bac38d816637343e53d7185d0c66677ff30ffb131044a81898b5792c956ba76"},
    {file = "matplotlib-3.10.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:942a8de2b5bfff1de31d95722f702e2966b8a7e31f4e68f7cd963c7cd8861cf6"},
    {file = "matplotlib-3.10.6-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3276c85370bc0dfca051ec65c5817d1e0f8f5ce1b7787528ec8ed2d524bbc2f"},
    {file = "matplotlib-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9df5851b219225731f564e4b9e7f2ac1e13c9e6481f941b5631a0f8e2d9387ce"},
    {file = "matplotlib-3.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:abb5d9478625dd9c9eb51a06d39aae71eda749ae9b3138afb23eb38824026c7e"},
    {file = "matplotlib-3.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:886f989ccfae63659183173bb3fced7fd65e9eb793c3cc21c273add368536951"},
    {file = "matplotlib-3.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31ca662df6a80bd426f871105fdd69db7543e28e73a9f2afe80de7e531eb2347"},
    {file = "matplotlib-3.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1678bb61d897bb4ac4757b5ecfb02bfb3fddf7f808000fb81e09c510712fda75"},
    {file = "matplotlib-3.10.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:56cd2d20842f58c03d2d6e6c1f1cf5548ad6f66b91e1e48f814e4fb5abd1cb95"},
    {file = "matplotlib-3.10.6-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:662df55604a2f9a45435566d6e2660e41efe83cd94f4288dfbf1e6d1eae4b0bb"},
    {file = "matplotlib-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08f141d55148cd1fc870c3387d70ca4df16dee10e909b3b038782bd4bda6ea07"},
    {file = "matplotlib-3.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:590f5925c2d650b5c9d813c5b3b5fc53f2929c3f8ef463e4ecfa7e052044fb2b"},
    {file = "matplotlib-3.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:f44c8d264a71609c79a78d50349e724f5d5fc3684ead7c2a473665ee63d868aa"},
    {file = "matplotlib-3.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:819e409653c1106c8deaf62e6de6b8611449c2cd9939acb0d7d4e57a3d95cc7a"},
    {file = "matplotlib-3.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:59c8ac8382fefb9cb71308dde16a7c487432f5255d8f1fd32473523abecfecdf"},
    {file = "matplotlib-3.10.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:84e82d9e0fd70c70bc55739defbd8055c54300750cbacf4740c9673a24d6933a"},
    {file = "matplotlib-3.10.6-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25f7a3eb42d6c1c56e89eacd495661fc815ffc08d9da750bca766771c0fd9110"},
    {file = "matplotlib-3.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f9c862d91ec0b7842920a4cfdaaec29662195301914ea54c33e01f1a28d014b2"},
    {file = "matplotlib-3.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:1b53bd6337eba483e2e7d29c5ab10eee644bc3a2491ec67cc55f7b44583ffb18"},
    {file = "matplotlib-3.10.6-cp313-cp313-win_arm64.whl", hash = "sha256:cbd5eb50b7058b2892ce45c2f4e92557f395c9991f5c886d1bb74a1582e70fd6"},
    {file = "matplotlib-3.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:acc86dd6e0e695c095001a7fccff158c49e45e0758fdf5dcdbb0103318b59c9f"},
    {file = "matplotlib-3.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e228cd2ffb8f88b7d0b29e37f68ca9aaf83e33821f24a5ccc4f082dd8396bc27"},
    {file = "matplotlib-3.10.6-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:658bc91894adeab669cf4bb4a186d049948262987e80f0857216387d7435d833"},
    {file = "matplotlib-3.10.6-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8913b7474f6dd83ac444c9459c91f7f0f2859e839f41d642691b104e0af056aa"},
    {file = "matplotlib-3.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:091cea22e059b89f6d7d1a18e2c33a7376c26eee60e401d92a4d6726c4e12706"},
    {file = "matplotlib-3.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:491e25e02a23d7207629d942c666924a6b61e007a48177fdd231a0097b7f507e"},
    {file = "matplotlib-3.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3d80d60d4e54cda462e2cd9a086d85cd9f20943ead92f575ce86885a43a565d5"},
    {file = "matplotlib-3.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:70aaf890ce1d0efd482df969b28a5b30ea0b891224bb315810a3940f67182899"},
    {file = "matplotlib-3.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1565aae810ab79cb72e402b22facfa6501365e73ebab70a0fdfb98488d2c3c0c"},
    {file = "matplotlib-3.10.6-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3b23315a01981689aa4e1a179dbf6ef9fbd17143c3eea77548c2ecfb0499438"},
    {file = "matplotlib-3.10.6-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:30fdd37edf41a4e6785f9b37969de57aea770696cb637d9946eb37470c94a453"},
    {file = "matplotlib-3.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bc31e693da1c08012c764b053e702c1855378e04102238e6a5ee6a7117c53a47"},
    {file = "matplotlib-3.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:05be9bdaa8b242bc6ff96330d18c52f1fc59c6fb3a4dd411d953d67e7e1baf98"},
    {file = "matplotlib-3.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:f56a0d1ab05d34c628592435781d185cd99630bdfd76822cd686fb5a0aecd43a"},
    {file = "matplotlib-3.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:94f0b4cacb23763b64b5dace50d5b7bfe98710fed5f0cef5c08135a03399d98b"},
    {file = "matplotlib-3.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cc332891306b9fb39462673d8225d1b824c89783fee82840a709f96714f17a5c"},
    {file = "matplotlib-3.10.6-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee1d607b3fb1590deb04b69f02ea1d53ed0b0bf75b2b1a5745f269afcbd3cdd3"},
    {file = "matplotlib-3.10.6-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:376a624a218116461696b27b2bbf7a8945053e6d799f6502fc03226d077807bf"},
    {file = "matplotlib-3.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:83847b47f6524c34b4f2d3ce726bb0541c48c8e7692729865c3df75bfa0f495a"},
    {file = "matplotlib-3.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c7e0518e0d223683532a07f4b512e2e0729b62674f1b3a1a69869f98e6b1c7e3"},
    {file = "matplotlib-3.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:4dd83e029f5b4801eeb87c64efd80e732452781c16a9cf7415b7b63ec8f374d7"},
    {file = "matplotlib-3.10.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:13fcd07ccf17e354398358e0307a1f53f5325dca22982556ddb9c52837b5af41"},
    {file = "matplotlib-3.10.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:470fc846d59d1406e34fa4c32ba371039cd12c2fe86801159a965956f2575bd1"},
    {file = "matplotlib-3.10.6-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f7173f8551b88f4ef810a94adae3128c2530e0d07529f7141be7f8d8c365f051"},
    {file = "matplotlib-3.10.6-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f2d684c3204fa62421bbf770ddfebc6b50130f9cad65531eeba19236d73bb488"},
    {file = "matplotlib-3.10.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6f4a69196e663a41d12a728fab8751177215357906436804217d6d9cf0d4d6cf"},
    {file = "matplotlib-3.10.6-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d6ca6ef03dfd269f4ead566ec6f3fb9becf8dab146fb999022ed85ee9f6b3eb"},
    {file = "matplotlib-3.10.6.tar.gz", hash = "sha256:ec01b645840dd1996df21ee37f208cd8ba57644779fa20464010638013d3203c"},
]

[package.dependencies]
contourpy = ">=1.0.1"
cycler = ">=0.10"
fonttools = ">=4.22.0"
kiwisolver = ">=1.3.1"
numpy = ">=1.23"
packaging = ">=20.0"
pillow = ">=8"
pyparsing = ">=2.3.1"
python-dateutil = ">=2.7"

[package.extras]
dev = ["meson-python (>=0.13.1,<0.17.0)", "pybind11 (>=2.13.2,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"]

[[package]]
name = "mccabe"
version = "0.7.0"
@@ -3857,6 +4236,131 @@ files = [

[package.dependencies]
setuptools = "*"

[[package]]
name = "pillow"
version = "11.3.0"
description = "Python Imaging Library (Fork)"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"},
    {file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"},
    {file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"},
    {file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"},
    {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"},
    {file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"},
    {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"},
    {file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"},
    {file = "pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"},
    {file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"},
    {file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"},
    {file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"},
    {file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"},
    {file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"},
    {file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"},
    {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"},
    {file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"},
    {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"},
    {file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"},
    {file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"},
    {file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"},
    {file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"},
    {file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"},
    {file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"},
    {file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"},
    {file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"},
    {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"},
    {file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"},
    {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"},
    {file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"},
    {file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"},
    {file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"},
    {file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"},
    {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"},
    {file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"},
    {file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"},
    {file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"},
    {file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"},
    {file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"},
    {file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"},
    {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"},
    {file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"},
    {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"},
    {file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"},
    {file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"},
    {file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"},
    {file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"},
    {file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"},
    {file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"},
    {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"},
    {file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"},
    {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"},
    {file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"},
    {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"},
    {file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"},
    {file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"},
    {file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"},
    {file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"},
    {file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"},
    {file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"},
    {file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"},
    {file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"},
    {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"},
    {file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"},
    {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"},
    {file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"},
    {file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"},
    {file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"},
    {file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"},
    {file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"},
    {file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"},
    {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"},
    {file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"},
    {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"},
    {file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"},
    {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"},
    {file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"},
    {file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"},
    {file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"},
    {file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"},
    {file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"},
    {file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"},
    {file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"},
    {file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"},
    {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"},
    {file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"},
    {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"},
    {file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"},
    {file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"},
    {file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"},
    {file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"},
    {file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"},
    {file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"},
    {file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"},
]

[package.extras]
docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"]
fpx = ["olefile"]
mic = ["olefile"]
test-arrow = ["pyarrow"]
tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"]
typing = ["typing-extensions ; python_version < \"3.10\""]
xmp = ["defusedxml"]

[[package]]
name = "platformdirs"
version = "4.3.8"
@@ -5016,6 +5520,29 @@ attrs = ">=22.2.0"
rpds-py = ">=0.7.0"
typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""}

[[package]]
name = "reportlab"
version = "4.4.4"
description = "The Reportlab Toolkit"
optional = false
python-versions = "<4,>=3.9"
groups = ["main"]
files = [
    {file = "reportlab-4.4.4-py3-none-any.whl", hash = "sha256:299b3b0534e7202bb94ed2ddcd7179b818dcda7de9d8518a57c85a58a1ebaadb"},
    {file = "reportlab-4.4.4.tar.gz", hash = "sha256:cb2f658b7f4a15be2cc68f7203aa67faef67213edd4f2d4bdd3eb20dab75a80d"},
]

[package.dependencies]
charset-normalizer = "*"
pillow = ">=9.0.0"

[package.extras]
accel = ["rl_accel (>=0.9.0,<1.1)"]
bidi = ["rlbidi"]
pycairo = ["freetype-py (>=2.3.0,<2.4)", "rlPyCairo (>=0.2.0,<1)"]
renderpm = ["rl_renderPM (>=4.0.3,<4.1)"]
shaping = ["uharfbuzz"]

[[package]]
name = "requests"
version = "2.32.5"
@@ -6259,4 +6786,4 @@ type = ["pytest-mypy"]

[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "03442fd4673006c5a74374f90f53621fd1c9d117279fe6cc0355ef833eb7f9bb"
content-hash = "3c9164d668d37d6373eb5200bbe768232ead934d9312b9c68046b1df922789f3"
@@ -33,7 +33,9 @@ dependencies = [
    "xmlsec==1.3.14",
    "h2 (==4.3.0)",
    "markdown (>=3.9,<4.0)",
    "drf-simple-apikey (==2.2.1)"
    "drf-simple-apikey (==2.2.1)",
    "matplotlib (>=3.10.6,<4.0.0)",
    "reportlab (>=4.4.4,<5.0.0)"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
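The two new dependencies carry the PDF feature: matplotlib renders the charts and reportlab assembles the PDF document. A minimal sketch of how the two libraries can be combined; the function name, chart data, and layout here are illustrative assumptions, not the actual report implementation.

import io

import matplotlib
matplotlib.use("Agg")  # render off-screen, no display needed
import matplotlib.pyplot as plt
from reportlab.lib.pagesizes import A4
from reportlab.lib.units import cm
from reportlab.lib.utils import ImageReader
from reportlab.pdfgen import canvas

def build_pdf(path: str) -> None:
    # Render a chart into an in-memory PNG with matplotlib.
    fig, ax = plt.subplots(figsize=(4, 3))
    ax.bar(["pass", "fail"], [42, 7], color=["#2e7d32", "#c62828"])
    ax.set_title("Findings by status")
    buf = io.BytesIO()
    fig.savefig(buf, format="png", dpi=150)
    plt.close(fig)
    buf.seek(0)

    # Draw the chart onto a reportlab canvas and save the PDF.
    pdf = canvas.Canvas(path, pagesize=A4)
    pdf.drawString(2 * cm, 27 * cm, "ThreatScore report (sketch)")
    pdf.drawImage(ImageReader(buf), 2 * cm, 18 * cm, width=12 * cm, height=9 * cm)
    pdf.showPage()
    pdf.save()

build_pdf("/tmp/threatscore_sketch.pdf")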
@@ -1593,6 +1593,25 @@ class ProviderViewSet(BaseRLSViewSet):
        },
        request=None,
    ),
    threatscore=extend_schema(
        tags=["Scan"],
        summary="Retrieve threatscore report",
        description="Download a specific threatscore report (e.g., 'prowler_threatscore_aws') as a PDF file.",
        request=None,
        responses={
            200: OpenApiResponse(
                description="PDF file containing the threatscore report"
            ),
            202: OpenApiResponse(description="The task is in progress"),
            401: OpenApiResponse(
                description="API key missing or user not Authenticated"
            ),
            403: OpenApiResponse(description="There is a problem with credentials"),
            404: OpenApiResponse(
                description="The scan has no threatscore reports, or the threatscore report generation task has not started yet"
            ),
        },
    ),
)
@method_decorator(CACHE_DECORATOR, name="list")
@method_decorator(CACHE_DECORATOR, name="retrieve")
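The schema above implies a simple client protocol: 200 returns the PDF bytes, 202 means the report task is still running. A polling sketch from the consumer side; the endpoint path and bearer auth are assumptions inferred from url_name "threatscore" and the documented responses, not confirmed routes.

import time

import requests

def download_threatscore(base_url: str, token: str, scan_id: str, dest: str) -> bool:
    url = f"{base_url}/api/v1/scans/{scan_id}/threatscore"  # assumed route
    headers = {"Authorization": f"Bearer {token}"}
    for _ in range(10):  # poll while the report task is in progress (202)
        resp = requests.get(url, headers=headers, timeout=30)
        if resp.status_code == 200:
            with open(dest, "wb") as f:
                f.write(resp.content)  # PDF bytes
            return True
        if resp.status_code != 202:
            resp.raise_for_status()
        time.sleep(5)
    return False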
@@ -1649,6 +1668,9 @@ class ScanViewSet(BaseRLSViewSet):
            if hasattr(self, "response_serializer_class"):
                return self.response_serializer_class
            return ScanComplianceReportSerializer
        elif self.action == "threatscore":
            if hasattr(self, "response_serializer_class"):
                return self.response_serializer_class
        return super().get_serializer_class()

    def partial_update(self, request, *args, **kwargs):
@@ -1880,6 +1902,45 @@ class ScanViewSet(BaseRLSViewSet):
        content, filename = loader
        return self._serve_file(content, filename, "text/csv")

    @action(
        detail=True,
        methods=["get"],
        url_name="threatscore",
    )
    def threatscore(self, request, pk=None):
        scan = self.get_object()
        running_resp = self._get_task_status(scan)
        if running_resp:
            return running_resp

        if not scan.output_location:
            return Response(
                {
                    "detail": "The scan has no reports, or the threatscore report generation task has not started yet."
                },
                status=status.HTTP_404_NOT_FOUND,
            )

        if scan.output_location.startswith("s3://"):
            bucket = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET", "")
            key_prefix = scan.output_location.removeprefix(f"s3://{bucket}/")
            prefix = os.path.join(
                os.path.dirname(key_prefix),
                "threatscore",
                "*_threatscore_report.pdf",
            )
            loader = self._load_file(prefix, s3=True, bucket=bucket, list_objects=True)
        else:
            base = os.path.dirname(scan.output_location)
            pattern = os.path.join(base, "threatscore", "*_threatscore_report.pdf")
            loader = self._load_file(pattern, s3=False)

        if isinstance(loader, Response):
            return loader

        content, filename = loader
        return self._serve_file(content, filename, "application/pdf")

    def create(self, request, *args, **kwargs):
        input_serializer = self.get_serializer(data=request.data)
        input_serializer.is_valid(raise_exception=True)
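In both branches the action builds a wildcard pattern pointing at the threatscore/ subdirectory next to the scan outputs. How _load_file resolves that pattern is not shown in this diff, so the following is only a sketch of the general idea for the local branch, with a hypothetical output_location value.

import glob
import os

output_location = "/tmp/outputs/tenant-1/scan-1/prowler-output-aws-20230615103045.zip"

base = os.path.dirname(output_location)
pattern = os.path.join(base, "threatscore", "*_threatscore_report.pdf")
matches = sorted(glob.glob(pattern))
if matches:
    path = matches[-1]  # timestamped names sort lexicographically, so this is the newest
    with open(path, "rb") as f:
        content = f.read()
    print(os.path.basename(path), len(content), "bytes")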
BIN
api/src/backend/tasks/assets/fonts/FiraCode-Regular.ttf
Normal file
Binary file not shown.
BIN
api/src/backend/tasks/assets/fonts/PlusJakartaSans-Regular.ttf
Normal file
Binary file not shown.
BIN
api/src/backend/tasks/assets/img/prowler_logo.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 24 KiB
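The new binary assets (two TrueType fonts and a logo) are presumably embedded into the generated PDF. A sketch of how reportlab registers such fonts; the registration code and coordinates are assumptions about how report.py consumes these files, only the asset paths come from the diff.

from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.pdfgen import canvas

pdfmetrics.registerFont(
    TTFont("PlusJakartaSans", "api/src/backend/tasks/assets/fonts/PlusJakartaSans-Regular.ttf")
)
pdfmetrics.registerFont(
    TTFont("FiraCode", "api/src/backend/tasks/assets/fonts/FiraCode-Regular.ttf")
)

pdf = canvas.Canvas("/tmp/fonts_sketch.pdf")
pdf.drawImage("api/src/backend/tasks/assets/img/prowler_logo.png", 72, 740, width=120, height=30)
pdf.setFont("PlusJakartaSans", 12)
pdf.drawString(72, 720, "Body text in Plus Jakarta Sans")
pdf.setFont("FiraCode", 10)
pdf.drawString(72, 700, "resource_id: arn:aws:s3:::example")  # monospace for identifiers
pdf.save()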
@@ -229,7 +229,7 @@ def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str | None:

def _generate_output_directory(
    output_directory, prowler_provider: object, tenant_id: str, scan_id: str
) -> tuple[str, str]:
) -> tuple[str, str, str]:
    """
    Generate a file system path for the output directory of a prowler scan.

@@ -256,6 +256,7 @@
    >>> _generate_output_directory("/tmp", "aws", "tenant-1234", "scan-5678")
    '/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56',
    '/tmp/tenant-1234/aws/scan-5678/compliance/prowler-output-2023-02-15T12:34:56'
    '/tmp/tenant-1234/aws/scan-5678/threatscore/prowler-output-2023-02-15T12:34:56'
    """
    # Sanitize the prowler provider name to ensure it is a valid directory name
    prowler_provider_sanitized = re.sub(r"[^\w\-]", "-", prowler_provider)
@@ -276,4 +277,10 @@
    )
    os.makedirs("/".join(compliance_path.split("/")[:-1]), exist_ok=True)

    return path, compliance_path
    threatscore_path = (
        f"{output_directory}/{tenant_id}/{scan_id}/threatscore/prowler-output-"
        f"{prowler_provider_sanitized}-{timestamp}"
    )
    os.makedirs("/".join(threatscore_path.split("/")[:-1]), exist_ok=True)

    return path, compliance_path, threatscore_path
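The signature change means every caller now unpacks three paths instead of two. A usage sketch under the assumptions visible in this diff (the import path matches the patches in the tests below; the argument values are placeholders):

from tasks.jobs.export import _generate_output_directory

out_path, compliance_path, threatscore_path = _generate_output_directory(
    "/tmp", "aws", "tenant-1234", "scan-5678"
)
# Each parent directory now exists on disk; callers that need only some of
# the paths can discard the rest, as generate_outputs_task does with `_`.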
1332
api/src/backend/tasks/jobs/report.py
Normal file
File diff suppressed because it is too large
@@ -26,6 +26,7 @@ from tasks.jobs.integrations import (
    upload_s3_integration,
    upload_security_hub_integration,
)
from tasks.jobs.report import generate_threatscore_report_job
from tasks.jobs.scan import (
    aggregate_findings,
    create_compliance_requirements,
@@ -64,10 +65,15 @@ def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str)
        generate_outputs_task.si(
            scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
        ),
        check_integrations_task.si(
            tenant_id=tenant_id,
            provider_id=provider_id,
            scan_id=scan_id,
        group(
            generate_threatscore_report_task.si(
                tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
            ),
            check_integrations_task.si(
                tenant_id=tenant_id,
                provider_id=provider_id,
                scan_id=scan_id,
            ),
        ),
    ).apply_async()
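The hunk above turns the tail of the post-scan chain into a group, so the threatscore report and the integration checks run in parallel once the outputs exist. A reduced, self-contained sketch of that chain-into-group pattern with standard Celery primitives; the app, broker, and dummy task bodies are placeholders, not Prowler's configuration.

from celery import Celery, chain, group

app = Celery("sketch", broker="memory://")

@app.task
def generate_outputs(scan_id):
    return scan_id

@app.task
def generate_threatscore_report(scan_id):
    return f"report:{scan_id}"

@app.task
def check_integrations(scan_id):
    return f"integrations:{scan_id}"

# .si() makes each signature immutable, so the group members ignore the
# return value of the first task and only depend on its completion.
workflow = chain(
    generate_outputs.si("scan-1"),
    group(
        generate_threatscore_report.si("scan-1"),
        check_integrations.si("scan-1"),
    ),
)
workflow.apply_async()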
@@ -304,7 +310,7 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):

    frameworks_bulk = Compliance.get_bulk(provider_type)
    frameworks_avail = get_compliance_frameworks(provider_type)
    out_dir, comp_dir = _generate_output_directory(
    out_dir, comp_dir, _ = _generate_output_directory(
        DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
    )

@@ -617,3 +623,21 @@ def jira_integration_task(
    return send_findings_to_jira(
        tenant_id, integration_id, project_key, issue_type, finding_ids
    )


@shared_task(
    base=RLSTask,
    name="scan-threatscore-report",
    queue="scan-reports",
)
def generate_threatscore_report_task(tenant_id: str, scan_id: str, provider_id: str):
    """
    Task to generate a threatscore report for a given scan.
    Args:
        tenant_id (str): The tenant identifier.
        scan_id (str): The scan identifier.
        provider_id (str): The provider identifier.
    """
    return generate_threatscore_report_job(
        tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
    )
@@ -150,15 +150,17 @@ class TestOutputs:
        provider = "aws"
        expected_timestamp = "20230615103045"

        path, compliance = _generate_output_directory(
        path, compliance, threatscore = _generate_output_directory(
            base_dir, provider, tenant_id, scan_id
        )

        assert os.path.isdir(os.path.dirname(path))
        assert os.path.isdir(os.path.dirname(compliance))
        assert os.path.isdir(os.path.dirname(threatscore))

        assert path.endswith(f"{provider}-{expected_timestamp}")
        assert compliance.endswith(f"{provider}-{expected_timestamp}")
        assert threatscore.endswith(f"{provider}-{expected_timestamp}")

    @patch("tasks.jobs.export.rls_transaction")
    @patch("tasks.jobs.export.Scan")
@@ -181,12 +183,14 @@ class TestOutputs:
        provider = "aws/test@check"
        expected_timestamp = "20230615103045"

        path, compliance = _generate_output_directory(
        path, compliance, threatscore = _generate_output_directory(
            base_dir, provider, tenant_id, scan_id
        )

        assert os.path.isdir(os.path.dirname(path))
        assert os.path.isdir(os.path.dirname(compliance))
        assert os.path.isdir(os.path.dirname(threatscore))

        assert path.endswith(f"aws-test-check-{expected_timestamp}")
        assert compliance.endswith(f"aws-test-check-{expected_timestamp}")
        assert threatscore.endswith(f"aws-test-check-{expected_timestamp}")
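The hard-coded expected_timestamp in these tests only works if the clock is pinned during the call; freezegun (which appears in the lock file above) is the usual way to do that. A sketch of the assumed mechanism:

from datetime import datetime

from freezegun import freeze_time

with freeze_time("2023-06-15 10:30:45"):
    # Any timestamp formatted inside this block is deterministic.
    assert datetime.now().strftime("%Y%m%d%H%M%S") == "20230615103045"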
957
api/src/backend/tasks/tests/test_report.py
Normal file
@@ -0,0 +1,957 @@
import uuid
from pathlib import Path
from unittest.mock import MagicMock, patch

import matplotlib
import pytest
from tasks.jobs.report import (
    _aggregate_requirement_statistics_from_database,
    _calculate_requirements_data_from_statistics,
    _load_findings_for_requirement_checks,
    generate_threatscore_report,
    generate_threatscore_report_job,
)
from tasks.tasks import generate_threatscore_report_task

from api.models import Finding, StatusChoices
from prowler.lib.check.models import Severity

matplotlib.use("Agg")  # Use non-interactive backend for tests


@pytest.mark.django_db
class TestGenerateThreatscoreReport:
    def setup_method(self):
        self.scan_id = str(uuid.uuid4())
        self.provider_id = str(uuid.uuid4())
        self.tenant_id = str(uuid.uuid4())

    def test_no_findings_returns_early(self):
        with patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter:
            mock_filter.return_value.exists.return_value = False

            result = generate_threatscore_report_job(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                provider_id=self.provider_id,
            )

            assert result == {"upload": False}
            mock_filter.assert_called_once_with(scan_id=self.scan_id)

    @patch("tasks.jobs.report.rmtree")
    @patch("tasks.jobs.report._upload_to_s3")
    @patch("tasks.jobs.report.generate_threatscore_report")
    @patch("tasks.jobs.report._generate_output_directory")
    @patch("tasks.jobs.report.Provider.objects.get")
    @patch("tasks.jobs.report.ScanSummary.objects.filter")
    def test_generate_threatscore_report_happy_path(
        self,
        mock_scan_summary_filter,
        mock_provider_get,
        mock_generate_output_directory,
        mock_generate_report,
        mock_upload,
        mock_rmtree,
    ):
        mock_scan_summary_filter.return_value.exists.return_value = True

        mock_provider = MagicMock()
        mock_provider.uid = "provider-uid"
        mock_provider.provider = "aws"
        mock_provider_get.return_value = mock_provider

        mock_generate_output_directory.return_value = (
            "/tmp/output",
            "/tmp/compressed",
            "/tmp/threatscore_path",
        )

        mock_upload.return_value = "s3://bucket/threatscore_report.pdf"

        result = generate_threatscore_report_job(
            tenant_id=self.tenant_id,
            scan_id=self.scan_id,
            provider_id=self.provider_id,
        )

        assert result == {"upload": True}
        mock_generate_report.assert_called_once_with(
            tenant_id=self.tenant_id,
            scan_id=self.scan_id,
            compliance_id="prowler_threatscore_aws",
            output_path="/tmp/threatscore_path_threatscore_report.pdf",
            provider_id=self.provider_id,
            only_failed=True,
            min_risk_level=4,
        )
        mock_rmtree.assert_called_once_with(
            Path("/tmp/threatscore_path_threatscore_report.pdf").parent,
            ignore_errors=True,
        )

    def test_generate_threatscore_report_fails_upload(self):
        with (
            patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
            patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
            patch("tasks.jobs.report.generate_threatscore_report"),
            patch("tasks.jobs.report._upload_to_s3", return_value=None),
        ):
            mock_filter.return_value.exists.return_value = True

            # Mock provider
            mock_provider = MagicMock()
            mock_provider.uid = "aws-provider-uid"
            mock_provider.provider = "aws"
            mock_provider_get.return_value = mock_provider

            mock_gen_dir.return_value = (
                "/tmp/output",
                "/tmp/compressed",
                "/tmp/threatscore_path",
            )

            result = generate_threatscore_report_job(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                provider_id=self.provider_id,
            )

            assert result == {"upload": False}

    def test_generate_threatscore_report_logs_rmtree_exception(self, caplog):
        with (
            patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
            patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
            patch("tasks.jobs.report.generate_threatscore_report"),
            patch(
                "tasks.jobs.report._upload_to_s3", return_value="s3://bucket/report.pdf"
            ),
            patch(
                "tasks.jobs.report.rmtree", side_effect=Exception("Test deletion error")
            ),
        ):
            mock_filter.return_value.exists.return_value = True

            # Mock provider
            mock_provider = MagicMock()
            mock_provider.uid = "aws-provider-uid"
            mock_provider.provider = "aws"
            mock_provider_get.return_value = mock_provider

            mock_gen_dir.return_value = (
                "/tmp/output",
                "/tmp/compressed",
                "/tmp/threatscore_path",
            )

            with caplog.at_level("ERROR"):
                generate_threatscore_report_job(
                    tenant_id=self.tenant_id,
                    scan_id=self.scan_id,
                    provider_id=self.provider_id,
                )
            assert "Error deleting output files" in caplog.text

    def test_generate_threatscore_report_azure_provider(self):
        with (
            patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
            patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
            patch("tasks.jobs.report.generate_threatscore_report") as mock_generate,
            patch(
                "tasks.jobs.report._upload_to_s3", return_value="s3://bucket/report.pdf"
            ),
            patch("tasks.jobs.report.rmtree"),
        ):
            mock_filter.return_value.exists.return_value = True

            mock_provider = MagicMock()
            mock_provider.uid = "azure-provider-uid"
            mock_provider.provider = "azure"
            mock_provider_get.return_value = mock_provider

            mock_gen_dir.return_value = (
                "/tmp/output",
                "/tmp/compressed",
                "/tmp/threatscore_path",
            )

            generate_threatscore_report_job(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                provider_id=self.provider_id,
            )

            mock_generate.assert_called_once_with(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                compliance_id="prowler_threatscore_azure",
                output_path="/tmp/threatscore_path_threatscore_report.pdf",
                provider_id=self.provider_id,
                only_failed=True,
                min_risk_level=4,
            )

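Taken together, the assertions in this class pin down two naming conventions. A minimal, self-contained illustration (the helper names below are invented for the example, not part of the codebase):

```python
# The compliance framework id is derived from the provider type, and the PDF
# path is the threatscore directory prefix plus a fixed suffix, as asserted
# in the AWS and Azure tests above.
def threatscore_compliance_id(provider_type: str) -> str:
    return f"prowler_threatscore_{provider_type}"


def threatscore_pdf_path(threatscore_dir: str) -> str:
    return f"{threatscore_dir}_threatscore_report.pdf"


assert threatscore_compliance_id("aws") == "prowler_threatscore_aws"
assert threatscore_compliance_id("azure") == "prowler_threatscore_azure"
assert threatscore_pdf_path("/tmp/threatscore_path") == (
    "/tmp/threatscore_path_threatscore_report.pdf"
)
```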
@pytest.mark.django_db
class TestAggregateRequirementStatistics:
    """Test suite for _aggregate_requirement_statistics_from_database function."""

    def test_aggregates_findings_correctly(self, tenants_fixture, scans_fixture):
        """Verify correct pass/total counts per check are aggregated from database."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        # Create findings with different check_ids and statuses
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-1",
            check_id="check_1",
            status=StatusChoices.PASS,
            severity=Severity.high,
            impact=Severity.high,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-2",
            check_id="check_1",
            status=StatusChoices.FAIL,
            severity=Severity.high,
            impact=Severity.high,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-3",
            check_id="check_2",
            status=StatusChoices.PASS,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        assert result == {
            "check_1": {"passed": 1, "total": 2},
            "check_2": {"passed": 1, "total": 1},
        }

    def test_handles_empty_scan(self, tenants_fixture, scans_fixture):
        """Return empty dict when no findings exist for the scan."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        assert result == {}

    def test_multiple_findings_same_check(self, tenants_fixture, scans_fixture):
        """Aggregate multiple findings for same check_id correctly."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        # Create 5 findings for same check, 3 passed
        for i in range(3):
            Finding.objects.create(
                tenant_id=tenant.id,
                scan=scan,
                uid=f"finding-pass-{i}",
                check_id="check_same",
                status=StatusChoices.PASS,
                severity=Severity.medium,
                impact=Severity.medium,
                check_metadata={},
                raw_result={},
            )

        for i in range(2):
            Finding.objects.create(
                tenant_id=tenant.id,
                scan=scan,
                uid=f"finding-fail-{i}",
                check_id="check_same",
                status=StatusChoices.FAIL,
                severity=Severity.medium,
                impact=Severity.medium,
                check_metadata={},
                raw_result={},
            )

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        assert result == {"check_same": {"passed": 3, "total": 5}}

    def test_only_failed_findings(self, tenants_fixture, scans_fixture):
        """Correctly count when all findings are FAIL status."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-fail-1",
            check_id="check_fail",
            status=StatusChoices.FAIL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-fail-2",
            check_id="check_fail",
            status=StatusChoices.FAIL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        assert result == {"check_fail": {"passed": 0, "total": 2}}

    def test_mixed_statuses(self, tenants_fixture, scans_fixture):
        """Test with PASS, FAIL, and MANUAL statuses mixed."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-pass",
            check_id="check_mixed",
            status=StatusChoices.PASS,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-fail",
            check_id="check_mixed",
            status=StatusChoices.FAIL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-manual",
            check_id="check_mixed",
            status=StatusChoices.MANUAL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        result = _aggregate_requirement_statistics_from_database(
            str(tenant.id), str(scan.id)
        )

        # Only PASS status is counted as passed
        assert result == {"check_mixed": {"passed": 1, "total": 3}}

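A pure-Python sketch of the aggregation contract these tests verify; the real helper presumably performs the same counting with a Django ORM aggregate over `Finding` rows rather than in Python, so treat this as illustrative only:

```python
from collections import defaultdict


def aggregate_requirement_statistics(findings):
    """findings: iterable of (check_id, status) pairs with status "PASS"/"FAIL"/"MANUAL"."""
    stats = defaultdict(lambda: {"passed": 0, "total": 0})
    for check_id, status in findings:
        stats[check_id]["total"] += 1
        if status == "PASS":  # only PASS counts as passed; FAIL and MANUAL do not
            stats[check_id]["passed"] += 1
    return dict(stats)


assert aggregate_requirement_statistics(
    [("check_1", "PASS"), ("check_1", "FAIL"), ("check_2", "PASS")]
) == {"check_1": {"passed": 1, "total": 2}, "check_2": {"passed": 1, "total": 1}}
```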
@pytest.mark.django_db
class TestLoadFindingsForChecks:
    """Test suite for _load_findings_for_requirement_checks function."""

    def test_loads_only_requested_checks(
        self, tenants_fixture, scans_fixture, providers_fixture
    ):
        """Verify only findings for specified check_ids are loaded."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        providers_fixture[0]

        # Create findings with different check_ids
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-1",
            check_id="check_requested",
            status=StatusChoices.PASS,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )
        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-2",
            check_id="check_not_requested",
            status=StatusChoices.FAIL,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        mock_provider = MagicMock()

        with patch(
            "tasks.jobs.report.FindingOutput.transform_api_finding"
        ) as mock_transform:
            mock_finding_output = MagicMock()
            mock_finding_output.check_id = "check_requested"
            mock_transform.return_value = mock_finding_output

            result = _load_findings_for_requirement_checks(
                str(tenant.id), str(scan.id), ["check_requested"], mock_provider
            )

            # Only one finding should be loaded
            assert "check_requested" in result
            assert "check_not_requested" not in result
            assert len(result["check_requested"]) == 1
            assert mock_transform.call_count == 1

    def test_empty_check_ids_returns_empty(
        self, tenants_fixture, scans_fixture, providers_fixture
    ):
        """Return empty dict when check_ids list is empty."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        mock_provider = MagicMock()

        result = _load_findings_for_requirement_checks(
            str(tenant.id), str(scan.id), [], mock_provider
        )

        assert result == {}

    def test_groups_by_check_id(
        self, tenants_fixture, scans_fixture, providers_fixture
    ):
        """Multiple findings for same check are grouped correctly."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        # Create multiple findings for same check
        for i in range(3):
            Finding.objects.create(
                tenant_id=tenant.id,
                scan=scan,
                uid=f"finding-{i}",
                check_id="check_group",
                status=StatusChoices.PASS,
                severity=Severity.medium,
                impact=Severity.medium,
                check_metadata={},
                raw_result={},
            )

        mock_provider = MagicMock()

        with patch(
            "tasks.jobs.report.FindingOutput.transform_api_finding"
        ) as mock_transform:
            mock_finding_output = MagicMock()
            mock_finding_output.check_id = "check_group"
            mock_transform.return_value = mock_finding_output

            result = _load_findings_for_requirement_checks(
                str(tenant.id), str(scan.id), ["check_group"], mock_provider
            )

            assert len(result["check_group"]) == 3

    def test_transforms_to_finding_output(
        self, tenants_fixture, scans_fixture, providers_fixture
    ):
        """Findings are transformed using FindingOutput.transform_api_finding."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        Finding.objects.create(
            tenant_id=tenant.id,
            scan=scan,
            uid="finding-transform",
            check_id="check_transform",
            status=StatusChoices.PASS,
            severity=Severity.medium,
            impact=Severity.medium,
            check_metadata={},
            raw_result={},
        )

        mock_provider = MagicMock()

        with patch(
            "tasks.jobs.report.FindingOutput.transform_api_finding"
        ) as mock_transform:
            mock_finding_output = MagicMock()
            mock_finding_output.check_id = "check_transform"
            mock_transform.return_value = mock_finding_output

            result = _load_findings_for_requirement_checks(
                str(tenant.id), str(scan.id), ["check_transform"], mock_provider
            )

            # Verify transform was called
            mock_transform.assert_called_once()
            # Verify the transformed output is in the result
            assert result["check_transform"][0] == mock_finding_output

    def test_batched_iteration(self, tenants_fixture, scans_fixture, providers_fixture):
        """Works correctly with multiple batches of findings."""
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]

        # Create enough findings to ensure batching (assuming batch size > 1)
        for i in range(10):
            Finding.objects.create(
                tenant_id=tenant.id,
                scan=scan,
                uid=f"finding-batch-{i}",
                check_id="check_batch",
                status=StatusChoices.PASS,
                severity=Severity.medium,
                impact=Severity.medium,
                check_metadata={},
                raw_result={},
            )

        mock_provider = MagicMock()

        with patch(
            "tasks.jobs.report.FindingOutput.transform_api_finding"
        ) as mock_transform:
            mock_finding_output = MagicMock()
            mock_finding_output.check_id = "check_batch"
            mock_transform.return_value = mock_finding_output

            result = _load_findings_for_requirement_checks(
                str(tenant.id), str(scan.id), ["check_batch"], mock_provider
            )

            # All 10 findings should be loaded regardless of batching
            assert len(result["check_batch"]) == 10
            assert mock_transform.call_count == 10

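The grouping contract these tests verify can be sketched in a few self-contained lines; the real helper iterates the queryset in batches and calls `FindingOutput.transform_api_finding`, so `transform` below is a stand-in and the whole function is illustrative:

```python
from collections import defaultdict


def load_findings_for_checks(findings, check_ids, transform):
    # Empty check_ids short-circuits to an empty result, as the test asserts.
    if not check_ids:
        return {}
    wanted = set(check_ids)
    grouped = defaultdict(list)
    for finding in findings:
        if finding["check_id"] in wanted:
            grouped[finding["check_id"]].append(transform(finding))
    return dict(grouped)


rows = [{"check_id": "check_requested"}, {"check_id": "check_not_requested"}]
result = load_findings_for_checks(rows, ["check_requested"], lambda f: f)
assert list(result) == ["check_requested"]
assert len(result["check_requested"]) == 1
```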
@pytest.mark.django_db
class TestCalculateRequirementsData:
    """Test suite for _calculate_requirements_data_from_statistics function."""

    def test_requirement_status_all_pass(self):
        """Status is PASS when all findings for requirement checks pass."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_requirement = MagicMock()
        mock_requirement.Id = "req_1"
        mock_requirement.Description = "Test requirement"
        mock_requirement.Checks = ["check_1", "check_2"]
        mock_requirement.Attributes = [MagicMock()]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {
            "check_1": {"passed": 5, "total": 5},
            "check_2": {"passed": 3, "total": 3},
        }

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        assert len(requirements_list) == 1
        assert requirements_list[0]["attributes"]["status"] == StatusChoices.PASS
        assert requirements_list[0]["attributes"]["passed_findings"] == 8
        assert requirements_list[0]["attributes"]["total_findings"] == 8

    def test_requirement_status_some_fail(self):
        """Status is FAIL when some findings fail."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_requirement = MagicMock()
        mock_requirement.Id = "req_2"
        mock_requirement.Description = "Test requirement with failures"
        mock_requirement.Checks = ["check_3"]
        mock_requirement.Attributes = [MagicMock()]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {
            "check_3": {"passed": 2, "total": 5},
        }

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        assert len(requirements_list) == 1
        assert requirements_list[0]["attributes"]["status"] == StatusChoices.FAIL
        assert requirements_list[0]["attributes"]["passed_findings"] == 2
        assert requirements_list[0]["attributes"]["total_findings"] == 5

    def test_requirement_status_no_findings(self):
        """Status is MANUAL when no findings exist for requirement."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_requirement = MagicMock()
        mock_requirement.Id = "req_3"
        mock_requirement.Description = "Manual requirement"
        mock_requirement.Checks = ["check_nonexistent"]
        mock_requirement.Attributes = [MagicMock()]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {}

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        assert len(requirements_list) == 1
        assert requirements_list[0]["attributes"]["status"] == StatusChoices.MANUAL
        assert requirements_list[0]["attributes"]["passed_findings"] == 0
        assert requirements_list[0]["attributes"]["total_findings"] == 0

    def test_aggregates_multiple_checks(self):
        """Correctly sum stats across multiple checks in requirement."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_requirement = MagicMock()
        mock_requirement.Id = "req_4"
        mock_requirement.Description = "Multi-check requirement"
        mock_requirement.Checks = ["check_a", "check_b", "check_c"]
        mock_requirement.Attributes = [MagicMock()]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {
            "check_a": {"passed": 10, "total": 15},
            "check_b": {"passed": 5, "total": 10},
            "check_c": {"passed": 0, "total": 5},
        }

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        assert len(requirements_list) == 1
        # 10 + 5 + 0 = 15 passed
        assert requirements_list[0]["attributes"]["passed_findings"] == 15
        # 15 + 10 + 5 = 30 total
        assert requirements_list[0]["attributes"]["total_findings"] == 30
        # Not all passed, so should be FAIL
        assert requirements_list[0]["attributes"]["status"] == StatusChoices.FAIL

    def test_returns_correct_structure(self):
        """Verify tuple structure and dict keys are correct."""
        mock_compliance = MagicMock()
        mock_compliance.Framework = "TestFramework"
        mock_compliance.Version = "1.0"

        mock_attribute = MagicMock()
        mock_requirement = MagicMock()
        mock_requirement.Id = "req_5"
        mock_requirement.Description = "Structure test"
        mock_requirement.Checks = ["check_struct"]
        mock_requirement.Attributes = [mock_attribute]

        mock_compliance.Requirements = [mock_requirement]

        requirement_statistics = {"check_struct": {"passed": 1, "total": 1}}

        attributes_by_id, requirements_list = (
            _calculate_requirements_data_from_statistics(
                mock_compliance, requirement_statistics
            )
        )

        # Verify attributes_by_id structure
        assert "req_5" in attributes_by_id
        assert "attributes" in attributes_by_id["req_5"]
        assert "description" in attributes_by_id["req_5"]
        assert "req_attributes" in attributes_by_id["req_5"]["attributes"]
        assert "checks" in attributes_by_id["req_5"]["attributes"]

        # Verify requirements_list structure
        assert len(requirements_list) == 1
        req = requirements_list[0]
        assert "id" in req
        assert "attributes" in req
        assert "framework" in req["attributes"]
        assert "version" in req["attributes"]
        assert "status" in req["attributes"]
        assert "description" in req["attributes"]
        assert "passed_findings" in req["attributes"]
        assert "total_findings" in req["attributes"]

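The status rules these tests encode reduce to a small function. A sketch (the real helper additionally builds the `attributes_by_id` and `requirements_list` structures verified in the last test):

```python
# MANUAL when a requirement has no findings, PASS only when every finding
# passed, FAIL otherwise -- exactly the three cases asserted above.
def requirement_status(passed: int, total: int) -> str:
    if total == 0:
        return "MANUAL"
    return "PASS" if passed == total else "FAIL"


assert requirement_status(8, 8) == "PASS"
assert requirement_status(2, 5) == "FAIL"
assert requirement_status(0, 0) == "MANUAL"
```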
@pytest.mark.django_db
class TestGenerateThreatscoreReportFunction:
    def setup_method(self):
        self.scan_id = str(uuid.uuid4())
        self.provider_id = str(uuid.uuid4())
        self.tenant_id = str(uuid.uuid4())
        self.compliance_id = "prowler_threatscore_aws"
        self.output_path = "/tmp/test_threatscore_report.pdf"

    @patch("tasks.jobs.report.initialize_prowler_provider")
    @patch("tasks.jobs.report.Provider.objects.get")
    @patch("tasks.jobs.report.Compliance.get_bulk")
    @patch("tasks.jobs.report._aggregate_requirement_statistics_from_database")
    @patch("tasks.jobs.report._calculate_requirements_data_from_statistics")
    @patch("tasks.jobs.report._load_findings_for_requirement_checks")
    @patch("tasks.jobs.report.SimpleDocTemplate")
    @patch("tasks.jobs.report.Image")
    @patch("tasks.jobs.report.Spacer")
    @patch("tasks.jobs.report.Paragraph")
    @patch("tasks.jobs.report.PageBreak")
    @patch("tasks.jobs.report.Table")
    @patch("tasks.jobs.report.TableStyle")
    @patch("tasks.jobs.report.plt.subplots")
    @patch("tasks.jobs.report.plt.savefig")
    @patch("tasks.jobs.report.io.BytesIO")
    def test_generate_threatscore_report_success(
        self,
        mock_bytesio,
        mock_savefig,
        mock_subplots,
        mock_table_style,
        mock_table,
        mock_page_break,
        mock_paragraph,
        mock_spacer,
        mock_image,
        mock_doc_template,
        mock_load_findings,
        mock_calculate_requirements,
        mock_aggregate_statistics,
        mock_compliance_get_bulk,
        mock_provider_get,
        mock_initialize_provider,
    ):
        """Test the updated generate_threatscore_report using the new memory-efficient architecture."""
        mock_provider = MagicMock()
        mock_provider.provider = "aws"
        mock_provider_get.return_value = mock_provider

        prowler_provider = MagicMock()
        mock_initialize_provider.return_value = prowler_provider

        # Mock compliance object with requirements
        mock_compliance_obj = MagicMock()
        mock_compliance_obj.Framework = "ProwlerThreatScore"
        mock_compliance_obj.Version = "1.0"
        mock_compliance_obj.Description = "Test Description"

        # Configure requirement with properly set numeric attributes for chart generation
        mock_requirement = MagicMock()
        mock_requirement.Id = "req_1"
        mock_requirement.Description = "Test requirement"
        mock_requirement.Checks = ["check_1"]

        # Create a properly configured attribute mock with numeric values
        mock_requirement_attr = MagicMock()
        mock_requirement_attr.Section = "1. IAM"
        mock_requirement_attr.SubSection = "1.1 Identity"
        mock_requirement_attr.Title = "Test Requirement Title"
        mock_requirement_attr.LevelOfRisk = 3
        mock_requirement_attr.Weight = 100
        mock_requirement_attr.AttributeDescription = "Test requirement description"
        mock_requirement_attr.AdditionalInformation = "Additional test information"

        mock_requirement.Attributes = [mock_requirement_attr]
        mock_compliance_obj.Requirements = [mock_requirement]

        mock_compliance_get_bulk.return_value = {
            self.compliance_id: mock_compliance_obj
        }

        # Mock the aggregated statistics from database
        mock_aggregate_statistics.return_value = {"check_1": {"passed": 5, "total": 10}}

        # Mock the calculated requirements data with properly configured attributes
        mock_attributes_by_id = {
            "req_1": {
                "attributes": {
                    "req_attributes": [mock_requirement_attr],
                    "checks": ["check_1"],
                },
                "description": "Test requirement",
            }
        }
        mock_requirements_list = [
            {
                "id": "req_1",
                "attributes": {
                    "framework": "ProwlerThreatScore",
                    "version": "1.0",
                    "status": StatusChoices.FAIL,
                    "description": "Test requirement",
                    "passed_findings": 5,
                    "total_findings": 10,
                },
            }
        ]
        mock_calculate_requirements.return_value = (
            mock_attributes_by_id,
            mock_requirements_list,
        )

        # Mock the on-demand loaded findings
        mock_finding_output = MagicMock()
        mock_finding_output.check_id = "check_1"
        mock_finding_output.status = "FAIL"
        mock_finding_output.metadata = MagicMock()
        mock_finding_output.metadata.CheckTitle = "Test Check"
        mock_finding_output.metadata.Severity = "HIGH"
        mock_finding_output.resource_name = "test-resource"
        mock_finding_output.region = "us-east-1"

        mock_load_findings.return_value = {"check_1": [mock_finding_output]}

        # Mock PDF generation components
        mock_doc = MagicMock()
        mock_doc_template.return_value = mock_doc

        mock_fig, mock_ax = MagicMock(), MagicMock()
        mock_subplots.return_value = (mock_fig, mock_ax)
        mock_buffer = MagicMock()
        mock_bytesio.return_value = mock_buffer

        mock_image.return_value = MagicMock()
        mock_spacer.return_value = MagicMock()
        mock_paragraph.return_value = MagicMock()
        mock_page_break.return_value = MagicMock()
        mock_table.return_value = MagicMock()
        mock_table_style.return_value = MagicMock()

        # Execute the function
        generate_threatscore_report(
            tenant_id=self.tenant_id,
            scan_id=self.scan_id,
            compliance_id=self.compliance_id,
            output_path=self.output_path,
            provider_id=self.provider_id,
            only_failed=True,
            min_risk_level=4,
        )

        # Verify the new workflow was followed
        mock_provider_get.assert_called_once_with(id=self.provider_id)
        mock_initialize_provider.assert_called_once_with(mock_provider)
        mock_compliance_get_bulk.assert_called_once_with("aws")

        # Verify the new functions were called in correct order with correct parameters
        mock_aggregate_statistics.assert_called_once_with(self.tenant_id, self.scan_id)
        mock_calculate_requirements.assert_called_once_with(
            mock_compliance_obj, {"check_1": {"passed": 5, "total": 10}}
        )
        mock_load_findings.assert_called_once_with(
            self.tenant_id, self.scan_id, ["check_1"], prowler_provider
        )

        # Verify PDF was built
        mock_doc_template.assert_called_once()
        mock_doc.build.assert_called_once()

    @patch("tasks.jobs.report.initialize_prowler_provider")
    @patch("tasks.jobs.report.Provider.objects.get")
    @patch("tasks.jobs.report.Compliance.get_bulk")
    @patch("tasks.jobs.report.Finding.all_objects.filter")
    def test_generate_threatscore_report_exception_handling(
        self,
        mock_finding_filter,
        mock_compliance_get_bulk,
        mock_provider_get,
        mock_initialize_provider,
    ):
        mock_provider_get.side_effect = Exception("Provider not found")

        with pytest.raises(Exception, match="Provider not found"):
            generate_threatscore_report(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                compliance_id=self.compliance_id,
                output_path=self.output_path,
                provider_id=self.provider_id,
                only_failed=True,
                min_risk_level=4,
            )

@pytest.mark.django_db
class TestGenerateThreatscoreReportTask:
    def setup_method(self):
        self.scan_id = str(uuid.uuid4())
        self.provider_id = str(uuid.uuid4())
        self.tenant_id = str(uuid.uuid4())

    @patch("tasks.tasks.generate_threatscore_report_job")
    def test_generate_threatscore_report_task_calls_job(self, mock_generate_job):
        mock_generate_job.return_value = {"upload": True}

        result = generate_threatscore_report_task(
            tenant_id=self.tenant_id,
            scan_id=self.scan_id,
            provider_id=self.provider_id,
        )

        assert result == {"upload": True}
        mock_generate_job.assert_called_once_with(
            tenant_id=self.tenant_id,
            scan_id=self.scan_id,
            provider_id=self.provider_id,
        )

    @patch("tasks.tasks.generate_threatscore_report_job")
    def test_generate_threatscore_report_task_handles_job_exception(
        self, mock_generate_job
    ):
        mock_generate_job.side_effect = Exception("Job failed")

        with pytest.raises(Exception, match="Job failed"):
            generate_threatscore_report_task(
                tenant_id=self.tenant_id,
                scan_id=self.scan_id,
                provider_id=self.provider_id,
            )
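Both tests assert that the Celery task is a thin pass-through to the job: kwargs are forwarded unchanged and exceptions propagate. A minimal sketch of such a wrapper, assuming `shared_task` is used; the real decorator options (name, queue, retries) are not shown in this diff:

```python
from celery import shared_task

from tasks.jobs.report import generate_threatscore_report_job


@shared_task  # decorator options (name, queue, retries) are assumptions
def threatscore_report_task_sketch(tenant_id: str, scan_id: str, provider_id: str):
    # Forward kwargs unchanged; any exception raised by the job propagates,
    # which is what test_generate_threatscore_report_task_handles_job_exception checks.
    return generate_threatscore_report_job(
        tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
    )
```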
@@ -98,7 +98,11 @@ class TestGenerateOutputs:
            ),
            patch(
                "tasks.tasks._generate_output_directory",
                return_value=("out-dir", "comp-dir"),
                return_value=(
                    "/tmp/test/out-dir",
                    "/tmp/test/comp-dir",
                    "/tmp/test/threat-dir",
                ),
            ),
            patch("tasks.tasks.Scan.all_objects.filter") as mock_scan_update,
            patch("tasks.tasks.rmtree"),
@@ -126,7 +130,8 @@ class TestGenerateOutputs:
            patch("tasks.tasks.get_compliance_frameworks"),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
                "tasks.tasks._generate_output_directory", return_value=("out", "comp")
                "tasks.tasks._generate_output_directory",
                return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
            ),
            patch("tasks.tasks.FindingOutput._transform_findings_stats"),
            patch("tasks.tasks.FindingOutput.transform_api_finding"),
@@ -168,15 +173,35 @@ class TestGenerateOutputs:
        mock_finding_output = MagicMock()
        mock_finding_output.compliance = {"cis": ["requirement-1", "requirement-2"]}

        html_writer_mock = MagicMock()
        html_writer_mock._data = []
        html_writer_mock.close_file = False
        html_writer_mock.transform = MagicMock()
        html_writer_mock.batch_write_data_to_file = MagicMock()

        compliance_writer_mock = MagicMock()
        compliance_writer_mock._data = []
        compliance_writer_mock.close_file = False
        compliance_writer_mock.transform = MagicMock()
        compliance_writer_mock.batch_write_data_to_file = MagicMock()

        # Create a mock class that returns our mock instance when called
        mock_compliance_class = MagicMock(return_value=compliance_writer_mock)

        mock_provider = MagicMock()
        mock_provider.provider = "aws"
        mock_provider.uid = "test-provider-uid"

        with (
            patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.tasks.Provider.objects.get"),
            patch("tasks.tasks.Provider.objects.get", return_value=mock_provider),
            patch("tasks.tasks.initialize_prowler_provider"),
            patch("tasks.tasks.Compliance.get_bulk", return_value={"cis": MagicMock()}),
            patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
                "tasks.tasks._generate_output_directory", return_value=("out", "comp")
                "tasks.tasks._generate_output_directory",
                return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
            ),
            patch(
                "tasks.tasks.FindingOutput._transform_findings_stats",
@@ -190,6 +215,20 @@ class TestGenerateOutputs:
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/f.zip"),
            patch("tasks.tasks.Scan.all_objects.filter"),
            patch("tasks.tasks.rmtree"),
            patch(
                "tasks.tasks.OUTPUT_FORMATS_MAPPING",
                {
                    "html": {
                        "class": lambda *args, **kwargs: html_writer_mock,
                        "suffix": ".html",
                        "kwargs": {},
                    }
                },
            ),
            patch(
                "tasks.tasks.COMPLIANCE_CLASS_MAP",
                {"aws": [(lambda x: True, mock_compliance_class)]},
            ),
        ):
            mock_filter.return_value.exists.return_value = True
            mock_findings.return_value.order_by.return_value.iterator.return_value = [
@@ -197,29 +236,12 @@ class TestGenerateOutputs:
                True,
            ]

            html_writer_mock = MagicMock()
            with (
                patch(
                    "tasks.tasks.OUTPUT_FORMATS_MAPPING",
                    {
                        "html": {
                            "class": lambda *args, **kwargs: html_writer_mock,
                            "suffix": ".html",
                            "kwargs": {},
                        }
                    },
                ),
                patch(
                    "tasks.tasks.COMPLIANCE_CLASS_MAP",
                    {"aws": [(lambda x: True, MagicMock())]},
                ),
            ):
                generate_outputs_task(
                    scan_id=self.scan_id,
                    provider_id=self.provider_id,
                    tenant_id=self.tenant_id,
                )
                html_writer_mock.batch_write_data_to_file.assert_called_once()
            generate_outputs_task(
                scan_id=self.scan_id,
                provider_id=self.provider_id,
                tenant_id=self.tenant_id,
            )
            html_writer_mock.batch_write_data_to_file.assert_called_once()

    def test_transform_called_only_on_second_batch(self):
        raw1 = MagicMock()
@@ -256,7 +278,11 @@ class TestGenerateOutputs:
            ),
            patch(
                "tasks.tasks._generate_output_directory",
                return_value=("outdir", "compdir"),
                return_value=(
                    "/tmp/test/outdir",
                    "/tmp/test/compdir",
                    "/tmp/test/threatdir",
                ),
            ),
            patch("tasks.tasks._compress_output_files", return_value="outdir.zip"),
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/outdir.zip"),
@@ -303,12 +329,14 @@ class TestGenerateOutputs:
            def __init__(self, *args, **kwargs):
                self.transform_calls = []
                self._data = []
                self.close_file = False
                writer_instances.append(self)

            def transform(self, fos, comp_obj, name):
                self.transform_calls.append((fos, comp_obj, name))

            def batch_write_data_to_file(self):
                # Mock implementation - do nothing
                pass

        two_batches = [
@@ -329,7 +357,11 @@ class TestGenerateOutputs:
            patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
            patch(
                "tasks.tasks._generate_output_directory",
                return_value=("outdir", "compdir"),
                return_value=(
                    "/tmp/test/outdir",
                    "/tmp/test/compdir",
                    "/tmp/test/threatdir",
                ),
            ),
            patch("tasks.tasks.FindingOutput._transform_findings_stats"),
            patch(
@@ -368,15 +400,35 @@ class TestGenerateOutputs:
        mock_finding_output = MagicMock()
        mock_finding_output.compliance = {"cis": ["requirement-1", "requirement-2"]}

        json_writer_mock = MagicMock()
        json_writer_mock._data = []
        json_writer_mock.close_file = False
        json_writer_mock.transform = MagicMock()
        json_writer_mock.batch_write_data_to_file = MagicMock()

        compliance_writer_mock = MagicMock()
        compliance_writer_mock._data = []
        compliance_writer_mock.close_file = False
        compliance_writer_mock.transform = MagicMock()
        compliance_writer_mock.batch_write_data_to_file = MagicMock()

        # Create a mock class that returns our mock instance when called
        mock_compliance_class = MagicMock(return_value=compliance_writer_mock)

        mock_provider = MagicMock()
        mock_provider.provider = "aws"
        mock_provider.uid = "test-provider-uid"

        with (
            patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter,
            patch("tasks.tasks.Provider.objects.get"),
            patch("tasks.tasks.Provider.objects.get", return_value=mock_provider),
            patch("tasks.tasks.initialize_prowler_provider"),
            patch("tasks.tasks.Compliance.get_bulk", return_value={"cis": MagicMock()}),
            patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
                "tasks.tasks._generate_output_directory", return_value=("out", "comp")
                "tasks.tasks._generate_output_directory",
                return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
            ),
            patch(
                "tasks.tasks.FindingOutput._transform_findings_stats",
@@ -390,6 +442,20 @@ class TestGenerateOutputs:
            patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/file.zip"),
            patch("tasks.tasks.Scan.all_objects.filter"),
            patch("tasks.tasks.rmtree", side_effect=Exception("Test deletion error")),
            patch(
                "tasks.tasks.OUTPUT_FORMATS_MAPPING",
                {
                    "json": {
                        "class": lambda *args, **kwargs: json_writer_mock,
                        "suffix": ".json",
                        "kwargs": {},
                    }
                },
            ),
            patch(
                "tasks.tasks.COMPLIANCE_CLASS_MAP",
                {"aws": [(lambda x: True, mock_compliance_class)]},
            ),
        ):
            mock_filter.return_value.exists.return_value = True
            mock_findings.return_value.order_by.return_value.iterator.return_value = [
@@ -397,29 +463,13 @@ class TestGenerateOutputs:
                True,
            ]

            with (
                patch(
                    "tasks.tasks.OUTPUT_FORMATS_MAPPING",
                    {
                        "json": {
                            "class": lambda *args, **kwargs: MagicMock(),
                            "suffix": ".json",
                            "kwargs": {},
                        }
                    },
                ),
                patch(
                    "tasks.tasks.COMPLIANCE_CLASS_MAP",
                    {"aws": [(lambda x: True, MagicMock())]},
                ),
            ):
                with caplog.at_level("ERROR"):
                    generate_outputs_task(
                        scan_id=self.scan_id,
                        provider_id=self.provider_id,
                        tenant_id=self.tenant_id,
                    )
                assert "Error deleting output files" in caplog.text
            with caplog.at_level("ERROR"):
                generate_outputs_task(
                    scan_id=self.scan_id,
                    provider_id=self.provider_id,
                    tenant_id=self.tenant_id,
                )
            assert "Error deleting output files" in caplog.text

    @patch("tasks.tasks.rls_transaction")
    @patch("tasks.tasks.Integration.objects.filter")
@@ -435,7 +485,8 @@ class TestGenerateOutputs:
            patch("tasks.tasks.get_compliance_frameworks", return_value=[]),
            patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
            patch(
                "tasks.tasks._generate_output_directory", return_value=("out", "comp")
                "tasks.tasks._generate_output_directory",
                return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
            ),
            patch("tasks.tasks.FindingOutput._transform_findings_stats"),
            patch("tasks.tasks.FindingOutput.transform_api_finding"),
@@ -476,8 +527,15 @@ class TestScanCompleteTasks:
    @patch("tasks.tasks.create_compliance_requirements_task.apply_async")
    @patch("tasks.tasks.perform_scan_summary_task.si")
    @patch("tasks.tasks.generate_outputs_task.si")
    @patch("tasks.tasks.generate_threatscore_report_task.si")
    @patch("tasks.tasks.check_integrations_task.si")
    def test_scan_complete_tasks(
        self, mock_outputs_task, mock_scan_summary_task, mock_compliance_tasks
        self,
        mock_check_integrations_task,
        mock_threatscore_task,
        mock_outputs_task,
        mock_scan_summary_task,
        mock_compliance_tasks,
    ):
        _perform_scan_complete_tasks("tenant-id", "scan-id", "provider-id")
        mock_compliance_tasks.assert_called_once_with(
@@ -492,6 +550,16 @@ class TestScanCompleteTasks:
            provider_id="provider-id",
            tenant_id="tenant-id",
        )
        mock_threatscore_task.assert_called_once_with(
            tenant_id="tenant-id",
            scan_id="scan-id",
            provider_id="provider-id",
        )
        mock_check_integrations_task.assert_called_once_with(
            tenant_id="tenant-id",
            provider_id="provider-id",
            scan_id="scan-id",
        )

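Only two of the `_perform_scan_complete_tasks` calls are fully visible in these hunks; the sketch below is limited to what the assertions verify, and how the resulting signatures are then composed (e.g. into a chain or group) is not asserted here, so that part is left out:

```python
# Illustrative only: builds the two immutable signatures the test checks
# verbatim, with the exact keyword arguments it asserts.
from tasks.tasks import check_integrations_task, generate_threatscore_report_task


def scan_complete_signatures_sketch(tenant_id, scan_id, provider_id):
    threatscore_sig = generate_threatscore_report_task.si(
        tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
    )
    integrations_sig = check_integrations_task.si(
        tenant_id=tenant_id, provider_id=provider_id, scan_id=scan_id
    )
    return [threatscore_sig, integrations_sig]
```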
@pytest.mark.django_db
@@ -662,7 +730,7 @@ class TestCheckIntegrationsTask:
        mock_initialize_provider.return_value = MagicMock()
        mock_compliance_bulk.return_value = {}
        mock_get_frameworks.return_value = []
        mock_generate_dir.return_value = ("out-dir", "comp-dir")
        mock_generate_dir.return_value = ("out-dir", "comp-dir", "threat-dir")
        mock_transform_stats.return_value = {"stats": "data"}

        # Mock findings
@@ -787,7 +855,7 @@ class TestCheckIntegrationsTask:
        mock_initialize_provider.return_value = MagicMock()
        mock_compliance_bulk.return_value = {}
        mock_get_frameworks.return_value = []
        mock_generate_dir.return_value = ("out-dir", "comp-dir")
        mock_generate_dir.return_value = ("out-dir", "comp-dir", "threat-dir")
        mock_transform_stats.return_value = {"stats": "data"}

        # Mock findings
@@ -903,7 +971,7 @@ class TestCheckIntegrationsTask:
        mock_initialize_provider.return_value = MagicMock()
        mock_compliance_bulk.return_value = {}
        mock_get_frameworks.return_value = []
        mock_generate_dir.return_value = ("out-dir", "comp-dir")
        mock_generate_dir.return_value = ("out-dir", "comp-dir", "threat-dir")
        mock_transform_stats.return_value = {"stats": "data"}

        # Mock findings
@@ -25,6 +25,9 @@ Prowler configuration is based in `.env` files. Every version of Prowler can hav
curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/.env
docker compose up -d
```

> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.

</Tab>
<Tab title="GitHub">
_Requirements_:
@@ -17,6 +17,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Oracle Cloud provider with CIS 3.0 benchmark [(#8893)](https://github.com/prowler-cloud/prowler/pull/8893)
- Support for Atlassian Document Format (ADF) in Jira integration [(#8878)](https://github.com/prowler-cloud/prowler/pull/8878)
- Add Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
- `cloudstorage_bucket_lifecycle_management_enabled` check for GCP provider [(#8936)](https://github.com/prowler-cloud/prowler/pull/8936)

### Changed

@@ -51,12 +52,6 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Add missing attributes for Mitre Attack AWS, Azure and GCP [(#8907)](https://github.com/prowler-cloud/prowler/pull/8907)
- Fix KeyError in CloudSQL and Monitoring services in GCP provider [(#8909)](https://github.com/prowler-cloud/prowler/pull/8909)
- Fix ResourceName in GCP provider [(#8928)](https://github.com/prowler-cloud/prowler/pull/8928)

---

## [v5.12.4] (Prowler UNRELEASED)

### Fixed
- Fix KeyError in `elb_ssl_listeners_use_acm_certificate` check and handle None cluster version in `eks_cluster_uses_a_supported_version` check [(#8791)](https://github.com/prowler-cloud/prowler/pull/8791)
- Fix file extension parsing for compliance reports [(#8791)](https://github.com/prowler-cloud/prowler/pull/8791)
- Add user pagination to Entra and Admincenter services [(#8858)](https://github.com/prowler-cloud/prowler/pull/8858)

@@ -49,10 +49,10 @@ from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
    AWSWellArchitected,
)
from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.ccc.ccc_aws import CCC_AWS
from prowler.lib.outputs.compliance.ccc.ccc_azure import CCC_Azure
from prowler.lib.outputs.compliance.ccc.ccc_gcp import CCC_GCP
from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
@@ -102,6 +102,7 @@ from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
from prowler.providers.aws.models import AWSOutputOptions
from prowler.providers.azure.models import AzureOutputOptions
from prowler.providers.cloudflare.models import CloudflareOutputOptions
from prowler.providers.common.provider import Provider
from prowler.providers.common.quick_inventory import run_provider_quick_inventory
from prowler.providers.gcp.models import GCPOutputOptions
@@ -336,6 +337,10 @@ def prowler():
        output_options = OCIOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )
    elif provider == "cloudflare":
        output_options = CloudflareOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )

    # Run the quick inventory for the provider if available
    if hasattr(args, "quick_inventory") and args.quick_inventory:

0
prowler/compliance/cloudflare/.gitkeep
Normal file
@@ -694,6 +694,37 @@ class CheckReportGithub(Check_Report):
        )


@dataclass
class CheckReportCloudflare(Check_Report):
    """Contains the Cloudflare Check's finding information."""

    resource_name: str
    resource_id: str
    zone_name: str

    def __init__(
        self,
        metadata: Dict,
        resource: Any,
        resource_name: str = None,
        resource_id: str = None,
        zone_name: str = None,
    ) -> None:
        """Initialize the Cloudflare Check's finding information.

        Args:
            metadata: The metadata of the check.
            resource: Basic information about the resource.
            resource_name: The name of the resource related to the finding.
            resource_id: The id of the resource related to the finding.
            zone_name: The zone name of the resource related to the finding.
        """
        super().__init__(metadata, resource)
        self.resource_name = resource_name or getattr(resource, "name", "")
        self.resource_id = resource_id or getattr(resource, "id", "")
        self.zone_name = zone_name or getattr(resource, "zone_name", "")


@dataclass
class CheckReportM365(Check_Report):
    """Contains the M365 Check's finding information."""

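A brief illustration of the fallback behavior implemented above: when the explicit keyword arguments are omitted, the report pulls its fields straight from the resource object. The resource below is a hypothetical stand-in, and metadata handling is elided because `Check_Report` derives it from the check's metadata JSON:

```python
from types import SimpleNamespace

# Stand-in for a Cloudflare zone object returned by a service.
zone = SimpleNamespace(id="zone_123", name="example.com", zone_name="example.com")

# report = CheckReportCloudflare(metadata=check.metadata(), resource=zone)
# report.resource_id   -> "zone_123"    (getattr(resource, "id", ""))
# report.resource_name -> "example.com" (getattr(resource, "name", ""))
# report.zone_name     -> "example.com" (getattr(resource, "zone_name", ""))
```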
@@ -337,6 +337,21 @@ class Finding(BaseModel):
            output_data["resource_uid"] = check_output.resource_id
            output_data["region"] = check_output.region

        elif provider.type == "cloudflare":
            output_data["auth_method"] = provider.auth_method
            output_data["account_uid"] = get_nested_attribute(
                provider, "identity.account_id"
            )
            output_data["account_name"] = get_nested_attribute(
                provider, "identity.account_name"
            )
            output_data["account_email"] = get_nested_attribute(
                provider, "identity.account_email"
            )
            output_data["resource_name"] = check_output.resource_name
            output_data["resource_uid"] = check_output.resource_id
            output_data["region"] = check_output.zone_name

        # check_output Unique ID
        # TODO: move this to a function
        # TODO: in Azure, GCP and K8s there are findings without resource_name

@@ -67,6 +67,9 @@ def display_summary_table(
    elif provider.type == "llm":
        entity_type = "LLM"
        audited_entities = provider.model
    elif provider.type == "cloudflare":
        entity_type = "Account"
        audited_entities = provider.identity.account_name
    elif provider.type == "oci":
        entity_type = "Tenancy"
        audited_entities = (
178
prowler/providers/cloudflare/README.md
Normal file
@@ -0,0 +1,178 @@
# Cloudflare Provider for Prowler

This directory contains the Cloudflare provider implementation for Prowler, enabling Cloud Security Posture Management (CSPM) for Cloudflare infrastructure.

## Overview

The Cloudflare provider allows Prowler to scan and assess the security posture of your Cloudflare zones, firewall rules, SSL/TLS settings, and other security configurations.

## Authentication

The Cloudflare provider supports two authentication methods:

### 1. API Token (Recommended)

Create an API token with the necessary permissions at https://dash.cloudflare.com/profile/api-tokens

```bash
export CLOUDFLARE_API_TOKEN="your-api-token"
prowler cloudflare
```

Or pass it directly:

```bash
prowler cloudflare --api-token "your-api-token"
```

### 2. API Key + Email

Use your Global API Key and email:

```bash
export CLOUDFLARE_API_KEY="your-api-key"
export CLOUDFLARE_API_EMAIL="your@email.com"
prowler cloudflare
```

Or pass them directly:

```bash
prowler cloudflare --api-key "your-api-key" --api-email "your@email.com"
```

## Scoping

You can scope your scan to specific accounts or zones:

```bash
# Scan specific zones
prowler cloudflare --zone-id zone_id_1 zone_id_2

# Scan specific accounts
prowler cloudflare --account-id account_id_1 account_id_2
```

## Available Services

The Cloudflare provider currently includes the following services:

- **firewall**: Firewall rules and Web Application Firewall (WAF) settings
- **ssl**: SSL/TLS configuration and certificate settings

## Security Checks

### Firewall Service

- `firewall_waf_enabled`: Ensures Web Application Firewall (WAF) is enabled for zones

### SSL Service

- `ssl_tls_minimum_version`: Ensures minimum TLS version is set to 1.2 or higher
- `ssl_always_use_https`: Ensures 'Always Use HTTPS' is enabled for automatic HTTP to HTTPS redirects

## Directory Structure

```
cloudflare/
├── cloudflare_provider.py # Main provider class
├── models.py # Cloudflare-specific models
├── exceptions/ # Cloudflare-specific exceptions
│   └── exceptions.py
├── lib/
│   ├── arguments/ # CLI argument definitions
│   ├── mutelist/ # Mutelist functionality
│   └── service/ # Base service class
└── services/ # Cloudflare services
    ├── firewall/ # Firewall service and checks
    │   ├── firewall_service.py
    │   ├── firewall_client.py
    │   └── firewall_waf_enabled/
    └── ssl/ # SSL/TLS service and checks
        ├── ssl_service.py
        ├── ssl_client.py
        ├── ssl_tls_minimum_version/
        └── ssl_always_use_https/
```

## Usage Examples

### Basic Scan

```bash
prowler cloudflare
```

### Scan with API Token

```bash
prowler cloudflare --api-token "your-api-token"
```

### Scan Specific Zones

```bash
prowler cloudflare --zone-id zone_123 zone_456
```

### Run Specific Checks

```bash
prowler cloudflare -c ssl_tls_minimum_version ssl_always_use_https
```

### Generate JSON Output

```bash
prowler cloudflare -o json
```

## Required Permissions

For the API token, you need the following permissions:

- **Zone:Read** - To list and read zone information
- **Zone Settings:Read** - To read zone settings including SSL/TLS configurations
- **Firewall Services:Read** - To read firewall rules and WAF settings
- **User:Read** - To verify authentication

## Adding New Checks

To add a new security check:

1. Create a new directory under the appropriate service (e.g., `services/firewall/new_check_name/`)
2. Create the check file: `new_check_name.py`
3. Create the metadata file: `new_check_name.metadata.json`
4. Implement the check class inheriting from `Check`
5. Use `CheckReportCloudflare` for findings

Example check structure:

```python
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.service_name.service_client import service_client


class check_name(Check):
    def execute(self) -> List[CheckReportCloudflare]:
        findings = []
        for resource_id, resource in service_client.resources.items():
            report = CheckReportCloudflare(metadata=self.metadata(), resource=resource)
            # Implement your check logic here
            findings.append(report)
        return findings
```

## Contributing

When contributing new services or checks:

1. Follow the existing directory structure
2. Include comprehensive metadata for each check
3. Add appropriate error handling
4. Update this README with new services/checks
5. Test thoroughly with various Cloudflare configurations

## Support

For issues, questions, or contributions, please refer to the main Prowler repository.
0
prowler/providers/cloudflare/__init__.py
Normal file
406
prowler/providers/cloudflare/cloudflare_provider.py
Normal file
@@ -0,0 +1,406 @@
import os
from os import environ

import requests
from colorama import Fore, Style

from prowler.config.config import (
    default_config_file_path,
    get_default_mute_file_path,
    load_and_validate_config_file,
)
from prowler.lib.logger import logger
from prowler.lib.mutelist.mutelist import Mutelist
from prowler.lib.utils.utils import print_boxes
from prowler.providers.cloudflare.exceptions.exceptions import (
    CloudflareEnvironmentVariableError,
    CloudflareInvalidCredentialsError,
    CloudflareSetUpIdentityError,
    CloudflareSetUpSessionError,
)
from prowler.providers.cloudflare.lib.mutelist.mutelist import CloudflareMutelist
from prowler.providers.cloudflare.models import (
    CloudflareIdentityInfo,
    CloudflareSession,
)
from prowler.providers.common.models import Audit_Metadata, Connection
from prowler.providers.common.provider import Provider


class CloudflareProvider(Provider):
    """
    Cloudflare Provider class

    This class is responsible for setting up the Cloudflare provider, including the session, identity,
    audit configuration, fixer configuration, and mutelist.

    Attributes:
        _type (str): The type of the provider.
        _auth_method (str): The authentication method used by the provider.
        _session (CloudflareSession): The session object for the provider.
        _identity (CloudflareIdentityInfo): The identity information for the provider.
        _audit_config (dict): The audit configuration for the provider.
        _fixer_config (dict): The fixer configuration for the provider.
        _mutelist (Mutelist): The mutelist for the provider.
        _account_ids (list): List of account IDs to scan.
        _zone_ids (list): List of zone IDs to scan.
        audit_metadata (Audit_Metadata): The audit metadata for the provider.
    """

    _type: str = "cloudflare"
    _auth_method: str = None
    _session: CloudflareSession
    _identity: CloudflareIdentityInfo
    _audit_config: dict
    _mutelist: Mutelist
    _account_ids: list
    _zone_ids: list
    audit_metadata: Audit_Metadata

    def __init__(
        self,
        # Authentication credentials
        api_token: str = "",
        api_key: str = "",
        api_email: str = "",
        # Provider configuration
        config_path: str = None,
        config_content: dict = None,
        fixer_config: dict = {},
        mutelist_path: str = None,
        mutelist_content: dict = None,
        account_ids: list = None,
        zone_ids: list = None,
    ):
        """
        Cloudflare Provider constructor

        Args:
            api_token (str): Cloudflare API Token.
            api_key (str): Cloudflare API Key.
            api_email (str): Cloudflare API Email (used with API Key).
            config_path (str): Path to the audit configuration file.
            config_content (dict): Audit configuration content.
            fixer_config (dict): Fixer configuration content.
            mutelist_path (str): Path to the mutelist file.
            mutelist_content (dict): Mutelist content.
            account_ids (list): List of account IDs to scan.
            zone_ids (list): List of zone IDs to scan.
        """
        logger.info("Instantiating Cloudflare Provider...")

        # Set scoping parameters
        self._account_ids = account_ids or []
        self._zone_ids = zone_ids or []

        self._session = CloudflareProvider.setup_session(api_token, api_key, api_email)

        # Set the authentication method
        if api_token:
            self._auth_method = "API Token"
        elif api_key and api_email:
            self._auth_method = "API Key + Email"
        elif environ.get("CLOUDFLARE_API_TOKEN", ""):
            self._auth_method = "Environment Variable for API Token"
        elif environ.get("CLOUDFLARE_API_KEY", "") and environ.get(
            "CLOUDFLARE_API_EMAIL", ""
        ):
            self._auth_method = "Environment Variables for API Key and Email"

        self._identity = CloudflareProvider.setup_identity(self._session)

        # Audit Config
        if config_content:
            self._audit_config = config_content
        else:
            if not config_path:
                config_path = default_config_file_path
            self._audit_config = load_and_validate_config_file(self._type, config_path)

        # Fixer Config
        self._fixer_config = fixer_config

        # Mutelist
        if mutelist_content:
            self._mutelist = CloudflareMutelist(
                mutelist_content=mutelist_content,
            )
        else:
            if not mutelist_path:
                mutelist_path = get_default_mute_file_path(self.type)
            self._mutelist = CloudflareMutelist(
                mutelist_path=mutelist_path,
            )
        Provider.set_global_provider(self)

    @property
    def auth_method(self):
        """Returns the authentication method for the Cloudflare provider."""
        return self._auth_method

    @property
    def session(self):
        """Returns the session object for the Cloudflare provider."""
        return self._session

    @property
    def identity(self):
        """Returns the identity information for the Cloudflare provider."""
        return self._identity

    @property
    def type(self):
        """Returns the type of the Cloudflare provider."""
        return self._type

    @property
    def audit_config(self):
        return self._audit_config

    @property
    def fixer_config(self):
        return self._fixer_config

    @property
    def mutelist(self) -> CloudflareMutelist:
        """
        mutelist method returns the provider's mutelist.
        """
        return self._mutelist

    @property
    def account_ids(self) -> list:
        """
        account_ids method returns the provider's account ID list for scoping.
        """
        return self._account_ids

    @property
    def zone_ids(self) -> list:
        """
        zone_ids method returns the provider's zone ID list for scoping.
        """
        return self._zone_ids

    @staticmethod
    def setup_session(
        api_token: str = None,
        api_key: str = None,
        api_email: str = None,
    ) -> CloudflareSession:
        """
        Returns the Cloudflare session with authentication credentials.

        Args:
            api_token (str): Cloudflare API Token.
            api_key (str): Cloudflare API Key.
            api_email (str): Cloudflare API Email.

        Returns:
            CloudflareSession: Authenticated session credentials for API requests.
        """

        session_api_token = ""
        session_api_key = ""
        session_api_email = ""

        try:
            # Ensure that at least one authentication method is selected
            if api_token:
                session_api_token = api_token
            elif api_key and api_email:
                session_api_key = api_key
                session_api_email = api_email
            else:
                # Try API Token from environment variable
                logger.info(
                    "Looking for CLOUDFLARE_API_TOKEN environment variable as user has not provided any credentials...."
                )
                session_api_token = environ.get("CLOUDFLARE_API_TOKEN", "")

                if not session_api_token:
                    # Try API Key + Email from environment variables
                    logger.info(
                        "Looking for CLOUDFLARE_API_KEY and CLOUDFLARE_API_EMAIL environment variables...."
                    )
                    session_api_key = environ.get("CLOUDFLARE_API_KEY", "")
                    session_api_email = environ.get("CLOUDFLARE_API_EMAIL", "")

            if not session_api_token and not (session_api_key and session_api_email):
                raise CloudflareEnvironmentVariableError(
                    file=os.path.basename(__file__),
                    message="No authentication method selected and no environment variables were found.",
                )

            credentials = CloudflareSession(
                api_token=session_api_token,
                api_key=session_api_key,
                api_email=session_api_email,
            )

            return credentials

        except Exception as error:
            logger.critical(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            raise CloudflareSetUpSessionError(
                original_exception=error,
            )

    @staticmethod
    def setup_identity(session: CloudflareSession) -> CloudflareIdentityInfo:
        """
        Returns the Cloudflare identity information

        Returns:
            CloudflareIdentityInfo: An instance of CloudflareIdentityInfo containing the identity information.
        """

        try:
            # Setup headers for API requests
            headers = CloudflareProvider._get_headers(session)

            # Verify user endpoint to get account information
            response = requests.get(
                "https://api.cloudflare.com/client/v4/user", headers=headers, timeout=10
            )

            if response.status_code != 200:
                raise CloudflareInvalidCredentialsError(
                    message=f"Failed to authenticate with Cloudflare API: {response.status_code} - {response.text}"
                )

            try:
                user_data = response.json()
            except Exception as json_error:
                raise CloudflareInvalidCredentialsError(
                    message=f"Failed to parse Cloudflare API response: {json_error}. Response text: {response.text[:200]}"
                )

            if not user_data:
                raise CloudflareInvalidCredentialsError(
                    message=f"Cloudflare API returned empty response. Status: {response.status_code}"
                )

            if not user_data.get("success", False):
                error_messages = user_data.get("errors", [])
                raise CloudflareInvalidCredentialsError(
                    message=f"Cloudflare API authentication failed: {error_messages}"
                )

            result = user_data.get("result")
            if not result:
                raise CloudflareInvalidCredentialsError(
                    message=f"Cloudflare API returned empty result. Full response: {user_data}"
                )

            identity = CloudflareIdentityInfo(
                account_id=str(result.get("id", "")),
                account_name=result.get("username") or result.get("email", "Unknown"),
                account_email=result.get("email", ""),
            )

            return identity

        except CloudflareInvalidCredentialsError:
            raise
        except Exception as error:
            # Get line number safely
            lineno = error.__traceback__.tb_lineno if error.__traceback__ else "unknown"
            logger.critical(f"{error.__class__.__name__}[{lineno}]: {error}")
            raise CloudflareSetUpIdentityError(
                original_exception=error,
            )

    @staticmethod
    def _get_headers(session: CloudflareSession) -> dict:
        """
        Returns HTTP headers for Cloudflare API requests.

        Args:
            session (CloudflareSession): The Cloudflare session with authentication.

        Returns:
            dict: Headers dictionary with authentication credentials.
        """
        headers = {"Content-Type": "application/json"}

        if session.api_token:
            headers["Authorization"] = f"Bearer {session.api_token}"
        elif session.api_key and session.api_email:
            headers["X-Auth-Key"] = session.api_key
            headers["X-Auth-Email"] = session.api_email

        return headers

    def print_credentials(self):
        """
        Prints the Cloudflare credentials.

        Usage:
            >>> self.print_credentials()
        """
        report_lines = [
            f"Cloudflare Account ID: {Fore.YELLOW}{self.identity.account_id}{Style.RESET_ALL}",
            f"Cloudflare Account Name: {Fore.YELLOW}{self.identity.account_name}{Style.RESET_ALL}",
            f"Cloudflare Account Email: {Fore.YELLOW}{self.identity.account_email}{Style.RESET_ALL}",
            f"Authentication Method: {Fore.YELLOW}{self.auth_method}{Style.RESET_ALL}",
        ]
        report_title = (
            f"{Style.BRIGHT}Using the Cloudflare credentials below:{Style.RESET_ALL}"
        )
        print_boxes(report_lines, report_title)

    @staticmethod
    def test_connection(
        api_token: str = "",
        api_key: str = "",
        api_email: str = "",
        raise_on_exception: bool = True,
    ) -> Connection:
        """Test connection to Cloudflare.

        Test the connection to Cloudflare using the provided credentials.

        Args:
            api_token (str): Cloudflare API Token.
            api_key (str): Cloudflare API Key.
            api_email (str): Cloudflare API Email.
            raise_on_exception (bool): Flag indicating whether to raise an exception if the connection fails.

        Returns:
            Connection: Connection object with success status or error information.

        Raises:
            Exception: If failed to test the connection to Cloudflare.
            CloudflareEnvironmentVariableError: If environment variables are missing.
            CloudflareInvalidCredentialsError: If the provided credentials are invalid.
            CloudflareSetUpSessionError: If there is an error setting up the session.
            CloudflareSetUpIdentityError: If there is an error setting up the identity.

        Examples:
            >>> CloudflareProvider.test_connection(api_token="your-api-token")
            Connection(is_connected=True)
            >>> CloudflareProvider.test_connection(api_key="your-api-key", api_email="your@email.com")
            Connection(is_connected=True)
        """
        try:
            # Set up the Cloudflare session
            session = CloudflareProvider.setup_session(
                api_token=api_token,
                api_key=api_key,
                api_email=api_email,
            )

            # Set up the identity to test the connection
            CloudflareProvider.setup_identity(session)

            return Connection(is_connected=True)
        except Exception as error:
            logger.critical(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            if raise_on_exception:
                raise error
            return Connection(error=error)
13
prowler/providers/cloudflare/exceptions/__init__.py
Normal file
13
prowler/providers/cloudflare/exceptions/__init__.py
Normal file
@@ -0,0 +1,13 @@
from prowler.providers.cloudflare.exceptions.exceptions import (
    CloudflareEnvironmentVariableError,
    CloudflareInvalidCredentialsError,
    CloudflareSetUpIdentityError,
    CloudflareSetUpSessionError,
)

__all__ = [
    "CloudflareEnvironmentVariableError",
    "CloudflareInvalidCredentialsError",
    "CloudflareSetUpIdentityError",
    "CloudflareSetUpSessionError",
]
71
prowler/providers/cloudflare/exceptions/exceptions.py
Normal file
71
prowler/providers/cloudflare/exceptions/exceptions.py
Normal file
@@ -0,0 +1,71 @@
from prowler.exceptions.exceptions import ProwlerException


class CloudflareException(ProwlerException):
    """Base class for Cloudflare Provider exceptions"""

    CLOUDFLARE_ERROR_CODES = {
        (1000, "CloudflareEnvironmentVariableError"): {
            "message": "Cloudflare environment variables are not set correctly",
            "remediation": "Ensure that CLOUDFLARE_API_TOKEN or CLOUDFLARE_API_KEY and CLOUDFLARE_API_EMAIL environment variables are set correctly.",
        },
        (1001, "CloudflareInvalidCredentialsError"): {
            "message": "Cloudflare credentials are invalid",
            "remediation": "Ensure that the provided Cloudflare API credentials are valid and have the necessary permissions.",
        },
        (1002, "CloudflareSetUpSessionError"): {
            "message": "Error setting up Cloudflare session",
            "remediation": "Check your Cloudflare API credentials and network connectivity.",
        },
        (1003, "CloudflareSetUpIdentityError"): {
            "message": "Error setting up Cloudflare identity",
            "remediation": "Ensure that your Cloudflare API credentials have the necessary permissions to retrieve account information.",
        },
    }

    def __init__(self, code, file=None, original_exception=None, message=None):
        provider = "Cloudflare"
        error_info = self.CLOUDFLARE_ERROR_CODES.get((code, self.__class__.__name__))
        if not error_info:
            error_info = {
                "message": "Unknown Cloudflare error",
                "remediation": "Please check your configuration.",
            }
        if message:
            error_info = error_info.copy()
            error_info["message"] = message
        super().__init__(
            code=code,
            source=provider,
            file=file,
            original_exception=original_exception,
            error_info=error_info,
        )


class CloudflareEnvironmentVariableError(CloudflareException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1000, file=file, original_exception=original_exception, message=message
        )


class CloudflareInvalidCredentialsError(CloudflareException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1001, file=file, original_exception=original_exception, message=message
        )


class CloudflareSetUpSessionError(CloudflareException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1002, file=file, original_exception=original_exception, message=message
        )


class CloudflareSetUpIdentityError(CloudflareException):
    def __init__(self, file=None, original_exception=None, message=None):
        super().__init__(
            1003, file=file, original_exception=original_exception, message=message
        )
76
prowler/providers/cloudflare/lib/arguments/arguments.py
Normal file
76
prowler/providers/cloudflare/lib/arguments/arguments.py
Normal file
@@ -0,0 +1,76 @@
def init_parser(self):
    """Init the Cloudflare Provider CLI parser"""
    cloudflare_parser = self.subparsers.add_parser(
        "cloudflare",
        parents=[self.common_providers_parser],
        help="Cloudflare Provider",
    )
    cloudflare_auth_subparser = cloudflare_parser.add_argument_group(
        "Authentication Modes"
    )
    # Authentication Modes
    cloudflare_auth_subparser.add_argument(
        "--api-token",
        nargs="?",
        help="Cloudflare API Token for authentication",
        default=None,
        metavar="CLOUDFLARE_API_TOKEN",
    )

    cloudflare_auth_subparser.add_argument(
        "--api-key",
        nargs="?",
        help="Cloudflare API Key for authentication (requires --api-email)",
        default=None,
        metavar="CLOUDFLARE_API_KEY",
    )

    cloudflare_auth_subparser.add_argument(
        "--api-email",
        nargs="?",
        help="Cloudflare API Email for authentication (used with --api-key)",
        default=None,
        metavar="CLOUDFLARE_API_EMAIL",
    )

    cloudflare_scoping_subparser = cloudflare_parser.add_argument_group("Scan Scoping")
    cloudflare_scoping_subparser.add_argument(
        "--account-id",
        "--account-ids",
        nargs="*",
        help="Cloudflare Account ID(s) to scan",
        default=None,
        metavar="ACCOUNT_ID",
    )

    cloudflare_scoping_subparser.add_argument(
        "--zone-id",
        "--zone-ids",
        nargs="*",
        help="Cloudflare Zone ID(s) to scan",
        default=None,
        metavar="ZONE_ID",
    )


def validate_arguments(arguments):
    """
    Validate Cloudflare provider arguments.

    Returns:
        tuple: (is_valid, error_message)
    """
    # If API key is provided, email must also be provided
    if arguments.api_key and not arguments.api_email:
        return (
            False,
            "Cloudflare API Key requires API Email. Please provide --api-email",
        )

    if arguments.api_email and not arguments.api_key:
        return (
            False,
            "Cloudflare API Email requires API Key. Please provide --api-key",
        )

    return (True, "")
34
prowler/providers/cloudflare/lib/mutelist/mutelist.py
Normal file
34
prowler/providers/cloudflare/lib/mutelist/mutelist.py
Normal file
@@ -0,0 +1,34 @@
from prowler.lib.check.models import CheckReportCloudflare
from prowler.lib.mutelist.mutelist import Mutelist
from prowler.lib.outputs.utils import unroll_dict, unroll_tags


class CloudflareMutelist(Mutelist):
    """
    CloudflareMutelist class extends the Mutelist class to provide Cloudflare-specific mutelist functionality.

    This class is used to manage muted findings for Cloudflare resources.
    """

    def is_finding_muted(
        self,
        finding: CheckReportCloudflare,
        account_name: str,
    ) -> bool:
        """
        Check if a finding is muted based on the mutelist configuration.

        Args:
            finding (CheckReportCloudflare): The finding to check
            account_name (str): The Cloudflare account name

        Returns:
            bool: True if the finding is muted, False otherwise
        """
        return self.is_muted(
            account_name,
            finding.check_metadata.CheckID,
            "*",  # Cloudflare doesn't have regions
            finding.resource_name,
            unroll_dict(unroll_tags(finding.resource_tags)),
        )
169
prowler/providers/cloudflare/lib/service/service.py
Normal file
169
prowler/providers/cloudflare/lib/service/service.py
Normal file
@@ -0,0 +1,169 @@
import requests
from colorama import Fore, Style

from prowler.lib.logger import logger


class CloudflareService:
    """
    Base class for Cloudflare services

    This class provides common functionality for all Cloudflare services,
    including API client setup and error handling.
    """

    def __init__(self, service_name: str, provider):
        """
        Initialize CloudflareService

        Args:
            service_name (str): Name of the service
            provider: Cloudflare provider instance
        """
        self.service = service_name
        self.provider = provider
        self.session = provider.session
        self.api_base_url = "https://api.cloudflare.com/client/v4"
        self.headers = self._get_headers()

    def _get_headers(self) -> dict:
        """
        Returns HTTP headers for Cloudflare API requests.

        Returns:
            dict: Headers dictionary with authentication credentials.
        """
        headers = {"Content-Type": "application/json"}

        if self.session.api_token:
            headers["Authorization"] = f"Bearer {self.session.api_token}"
        elif self.session.api_key and self.session.api_email:
            headers["X-Auth-Key"] = self.session.api_key
            headers["X-Auth-Email"] = self.session.api_email

        return headers

    def _api_request(
        self, method: str, endpoint: str, params: dict = None, json_data: dict = None
    ) -> dict:
        """
        Make an API request to Cloudflare

        Args:
            method (str): HTTP method (GET, POST, PUT, DELETE)
            endpoint (str): API endpoint (e.g., "/accounts")
            params (dict): Query parameters
            json_data (dict): JSON data for POST/PUT requests

        Returns:
            dict: API response data

        Raises:
            Exception: If the API request fails
        """
        url = f"{self.api_base_url}{endpoint}"

        try:
            response = requests.request(
                method=method,
                url=url,
                headers=self.headers,
                params=params,
                json=json_data,
                timeout=30,
            )

            response.raise_for_status()
            data = response.json()

            if not data.get("success"):
                errors = data.get("errors", [])
                logger.error(
                    f"{Fore.RED}Cloudflare API Error:{Style.RESET_ALL} {errors}"
                )
                return {}

            return data.get("result", {})

        except requests.exceptions.RequestException as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            return {}

    def _api_request_paginated(
        self, endpoint: str, params: dict = None, page_size: int = 50
    ) -> list:
        """
        Make a paginated API request to Cloudflare

        Args:
            endpoint (str): API endpoint
            params (dict): Query parameters
            page_size (int): Number of results per page

        Returns:
            list: Combined results from all pages
        """
        all_results = []
        page = 1

        if params is None:
            params = {}

        params["per_page"] = page_size

        while True:
            params["page"] = page
            url = f"{self.api_base_url}{endpoint}"

            try:
                response = requests.get(
                    url, headers=self.headers, params=params, timeout=30
                )
                response.raise_for_status()
                data = response.json()

                if not data.get("success"):
                    break

                result = data.get("result", [])
                if not result:
                    break

                all_results.extend(result)

                # Check if there are more pages
                result_info = data.get("result_info", {})
                if page >= result_info.get("total_pages", 0):
                    break

                page += 1

            except requests.exceptions.RequestException as error:
                logger.error(
                    f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                )
                break

        return all_results

    def _handle_cloudflare_api_error(
        self, error: Exception, action: str, resource: str = ""
    ):
        """
        Handle Cloudflare API errors with consistent logging

        Args:
            error (Exception): The exception that occurred
            action (str): Description of the action being performed
            resource (str): The resource being accessed
        """
        error_message = f"Error {action}"
        if resource:
            error_message += f" for {resource}"
        error_message += f": {error}"

        logger.error(
            f"{Fore.RED}{error_message}{Style.RESET_ALL} ({error.__class__.__name__})"
        )
40
prowler/providers/cloudflare/models.py
Normal file
40
prowler/providers/cloudflare/models.py
Normal file
@@ -0,0 +1,40 @@
from typing import Optional

from pydantic.v1 import BaseModel

from prowler.config.config import output_file_timestamp
from prowler.providers.common.models import ProviderOutputOptions


class CloudflareSession(BaseModel):
    """Cloudflare session model storing authentication credentials"""

    api_token: Optional[str] = None
    api_key: Optional[str] = None
    api_email: Optional[str] = None


class CloudflareIdentityInfo(BaseModel):
    """Cloudflare account identity information"""

    account_id: str
    account_name: str
    account_email: str


class CloudflareOutputOptions(ProviderOutputOptions):
    """Cloudflare-specific output options"""

    def __init__(self, arguments, bulk_checks_metadata, identity):
        # First call ProviderOutputOptions init
        super().__init__(arguments, bulk_checks_metadata)
        # Check if custom output filename was input, if not, set the default
        if (
            not hasattr(arguments, "output_filename")
            or arguments.output_filename is None
        ):
            self.output_filename = (
                f"prowler-output-{identity.account_name}-{output_file_timestamp}"
            )
        else:
            self.output_filename = arguments.output_filename
0
prowler/providers/cloudflare/services/__init__.py
Normal file
0
prowler/providers/cloudflare/services/__init__.py
Normal file
3
prowler/providers/cloudflare/services/dns/__init__.py
Normal file
3
prowler/providers/cloudflare/services/dns/__init__.py
Normal file
@@ -0,0 +1,3 @@
from .dns_service import DNS

__all__ = ["DNS"]
4
prowler/providers/cloudflare/services/dns/dns_client.py
Normal file
4
prowler/providers/cloudflare/services/dns/dns_client.py
Normal file
@@ -0,0 +1,4 @@
from prowler.providers.cloudflare.services.dns.dns_service import DNS
from prowler.providers.common.provider import Provider

dns_client = DNS(Provider.get_global_provider())
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "dns_dnssec_enabled",
  "CheckTitle": "Ensure DNSSEC is enabled to prevent DNS spoofing",
  "CheckType": [],
  "ServiceName": "dns",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "high",
  "ResourceType": "Zone",
  "Description": "This check ensures that DNSSEC (DNS Security Extensions) is enabled for Cloudflare zones to prevent DNS spoofing attacks and ensure data integrity by cryptographically signing DNS records.",
  "Risk": "Without DNSSEC enabled, attackers can perform DNS spoofing (cache poisoning) attacks, redirecting users to malicious sites and intercepting sensitive information.",
  "RelatedUrl": "https://developers.cloudflare.com/dns/dnssec/",
  "Remediation": {
    "Code": {
      "CLI": "cloudflare dns dnssec enable --zone-id <zone_id>",
      "NativeIaC": "",
      "Other": "https://dash.cloudflare.com/ -> Select Zone -> DNS -> Settings -> DNSSEC -> Enable DNSSEC",
      "Terraform": "resource \"cloudflare_zone_dnssec\" \"example\" {\n zone_id = var.zone_id\n}"
    },
    "Recommendation": {
      "Text": "Enable DNSSEC for all Cloudflare zones to prevent DNS spoofing and ensure DNS data integrity. After enabling, add DS records to your domain registrar.",
      "Url": "https://developers.cloudflare.com/dns/dnssec/"
    }
  },
  "Categories": [
    "internet-exposed"
  ],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "After enabling DNSSEC in Cloudflare, you must add the DS records to your domain registrar for DNSSEC to function properly."
}
@@ -0,0 +1,31 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.dns.dns_client import dns_client


class dns_dnssec_enabled(Check):
    """Check if DNSSEC is enabled to prevent DNS spoofing"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []

        for zone_id, dnssec_settings in dns_client.dnssec_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=dnssec_settings,
                resource_name=dnssec_settings.zone_name,
                resource_id=zone_id,
                zone_name=dnssec_settings.zone_name,
            )

            if dnssec_settings.dnssec_enabled:
                report.status = "PASS"
                report.status_extended = f"Zone {dnssec_settings.zone_name} has DNSSEC enabled (status: {dnssec_settings.dnssec_status}), preventing DNS spoofing and ensuring data integrity."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {dnssec_settings.zone_name} does not have DNSSEC enabled (status: {dnssec_settings.dnssec_status}). Enable DNSSEC to prevent DNS spoofing and ensure data integrity."

            findings.append(report)

        return findings
107
prowler/providers/cloudflare/services/dns/dns_service.py
Normal file
107
prowler/providers/cloudflare/services/dns/dns_service.py
Normal file
@@ -0,0 +1,107 @@
from pydantic.v1 import BaseModel

from prowler.lib.logger import logger
from prowler.providers.cloudflare.lib.service.service import CloudflareService


class DNS(CloudflareService):
    """Cloudflare DNS service for managing DNS settings"""

    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        self.zones = self._list_zones()
        self.dnssec_settings = self._get_dnssec_settings()

    def _list_zones(self) -> dict:
        """
        List all Cloudflare zones

        Returns:
            dict: Dictionary of zones keyed by zone ID
        """
        logger.info("DNS - Listing Zones...")
        zones = {}

        try:
            # If specific zone IDs are provided, use those
            if self.provider.zone_ids:
                for zone_id in self.provider.zone_ids:
                    zone_data = self._api_request("GET", f"/zones/{zone_id}")
                    if zone_data:
                        zones[zone_data["id"]] = Zone(
                            id=zone_data["id"],
                            name=zone_data["name"],
                            account_id=zone_data.get("account", {}).get("id", ""),
                        )
            else:
                # List all zones
                all_zones = self._api_request_paginated("/zones")
                for zone_data in all_zones:
                    zones[zone_data["id"]] = Zone(
                        id=zone_data["id"],
                        name=zone_data["name"],
                        account_id=zone_data.get("account", {}).get("id", ""),
                    )

            logger.info(f"Found {len(zones)} zone(s) for DNS checks")

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        return zones

    def _get_dnssec_settings(self) -> dict:
        """
        Get DNSSEC settings for all zones

        Returns:
            dict: Dictionary of DNSSEC settings keyed by zone ID
        """
        logger.info("DNS - Getting DNSSEC Settings...")
        dnssec_settings = {}

        try:
            for zone_id, zone in self.zones.items():
                # Get DNSSEC status
                dnssec = self._api_request("GET", f"/zones/{zone_id}/dnssec")

                dnssec_settings[zone_id] = DNSSECSettings(
                    zone_id=zone_id,
                    zone_name=zone.name,
                    dnssec_enabled=(
                        dnssec.get("status", "disabled") == "active"
                        if dnssec
                        else False
                    ),
                    dnssec_status=(
                        dnssec.get("status", "disabled") if dnssec else "disabled"
                    ),
                )

            logger.info(f"Retrieved DNSSEC settings for {len(dnssec_settings)} zone(s)")

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        return dnssec_settings


class Zone(BaseModel):
    """Model for Cloudflare Zone"""

    id: str
    name: str
    account_id: str


class DNSSECSettings(BaseModel):
    """Model for Cloudflare DNSSEC Settings"""

    zone_id: str
    zone_name: str
    dnssec_enabled: bool
    dnssec_status: str
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "firewall_browser_integrity_check_enabled",
  "CheckTitle": "Ensure Browser Integrity Check is enabled to filter malicious traffic",
  "CheckType": [],
  "ServiceName": "firewall",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "medium",
  "ResourceType": "Zone",
  "Description": "This check ensures that Browser Integrity Check is enabled for Cloudflare zones to filter malicious traffic based on HTTP header anomalies and known attack patterns.",
  "Risk": "Without Browser Integrity Check enabled, malicious bots and automated tools with suspicious HTTP headers can access your site, increasing the risk of attacks.",
  "RelatedUrl": "https://developers.cloudflare.com/waf/tools/browser-integrity-check/",
  "Remediation": {
    "Code": {
      "CLI": "cloudflare firewall browser-check enable --zone-id <zone_id>",
      "NativeIaC": "",
      "Other": "https://dash.cloudflare.com/ -> Select Zone -> Security -> Settings -> Browser Integrity Check -> Enable",
      "Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n browser_check = \"on\"\n }\n}"
    },
    "Recommendation": {
      "Text": "Enable Browser Integrity Check for all Cloudflare zones to filter malicious traffic based on HTTP header anomalies.",
      "Url": "https://developers.cloudflare.com/waf/tools/browser-integrity-check/"
    }
  },
  "Categories": [
    "internet-exposed"
  ],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Browser Integrity Check may occasionally block legitimate traffic from older browsers or automated tools. Monitor and adjust if needed."
}
@@ -0,0 +1,33 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.firewall.firewall_client import (
    firewall_client,
)


class firewall_browser_integrity_check_enabled(Check):
    """Check if Browser Integrity Check is enabled to filter malicious traffic"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []

        for zone_id, security_settings in firewall_client.security_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=security_settings,
                resource_name=security_settings.zone_name,
                resource_id=zone_id,
                zone_name=security_settings.zone_name,
            )

            if security_settings.browser_integrity_check:
                report.status = "PASS"
                report.status_extended = f"Zone {security_settings.zone_name} has Browser Integrity Check enabled, filtering malicious traffic based on HTTP header anomalies."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {security_settings.zone_name} does not have Browser Integrity Check enabled. Enable it to filter malicious traffic based on HTTP header anomalies."

            findings.append(report)

        return findings
@@ -0,0 +1,30 @@
{
  "Provider": "cloudflare",
  "CheckID": "firewall_challenge_passage_configured",
  "CheckTitle": "Ensure Challenge Passage is configured appropriately",
  "CheckType": [],
  "ServiceName": "firewall",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "low",
  "ResourceType": "Zone",
  "Description": "This check ensures that Challenge Passage (challenge TTL) is configured to an appropriate value (recommended: 1 hour / 3600 seconds) to reduce friction for verified visitors while maintaining a security window.",
  "Risk": "Setting Challenge Passage too short causes excessive challenges for legitimate users, degrading experience. Setting it too long may allow attackers more time to exploit compromised sessions.",
  "RelatedUrl": "https://developers.cloudflare.com/waf/tools/challenge-passage/",
  "Remediation": {
    "Code": {
      "CLI": "cloudflare firewall challenge-ttl set --zone-id <zone_id> --ttl 3600",
      "NativeIaC": "",
      "Other": "https://dash.cloudflare.com/ -> Select Zone -> Security -> Settings -> Challenge Passage -> Set to 1 hour",
      "Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n challenge_ttl = 3600\n }\n}"
    },
    "Recommendation": {
      "Text": "Set Challenge Passage to 1 hour (3600 seconds) for all Cloudflare zones to balance security with user experience.",
      "Url": "https://developers.cloudflare.com/waf/tools/challenge-passage/"
    }
  },
  "Categories": [],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Challenge Passage determines how long a visitor who passes a challenge can access the site without being challenged again."
}
@@ -0,0 +1,35 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.firewall.firewall_client import (
    firewall_client,
)


class firewall_challenge_passage_configured(Check):
    """Check if Challenge Passage is configured appropriately"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []
        # Recommended challenge TTL is 1 hour (3600 seconds) to balance security and user experience
        recommended_ttl = 3600

        for zone_id, security_settings in firewall_client.security_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=security_settings,
                resource_name=security_settings.zone_name,
                resource_id=zone_id,
                zone_name=security_settings.zone_name,
            )

            if security_settings.challenge_ttl == recommended_ttl:
                report.status = "PASS"
                report.status_extended = f"Zone {security_settings.zone_name} has Challenge Passage set to {security_settings.challenge_ttl} seconds (recommended: {recommended_ttl}), balancing security with user experience."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {security_settings.zone_name} has Challenge Passage set to {security_settings.challenge_ttl} seconds. Recommended: {recommended_ttl} seconds (1 hour) to reduce friction for verified visitors while maintaining security."

            findings.append(report)

        return findings
@@ -0,0 +1,4 @@
from prowler.providers.cloudflare.services.firewall.firewall_service import Firewall
from prowler.providers.common.provider import Provider

firewall_client = Firewall(Provider.get_global_provider())
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "firewall_security_level_medium_or_higher",
  "CheckTitle": "Ensure Security Level is set to Medium or higher",
  "CheckType": [],
  "ServiceName": "firewall",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "medium",
  "ResourceType": "Zone",
  "Description": "This check ensures that Security Level is set to Medium or higher for Cloudflare zones to balance protection with user accessibility by filtering suspicious traffic.",
  "Risk": "Setting Security Level too low (off, essentially off, or low) may allow malicious traffic to reach your origin server, increasing the risk of attacks.",
  "RelatedUrl": "https://developers.cloudflare.com/waf/tools/security-level/",
  "Remediation": {
    "Code": {
      "CLI": "cloudflare firewall security-level set --zone-id <zone_id> --level medium",
      "NativeIaC": "",
      "Other": "https://dash.cloudflare.com/ -> Select Zone -> Security -> Settings -> Security Level -> Set to Medium or higher",
      "Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n security_level = \"medium\"\n }\n}"
    },
    "Recommendation": {
      "Text": "Set Security Level to Medium for all Cloudflare zones. Adjust to High or Under Attack during active attacks.",
      "Url": "https://developers.cloudflare.com/waf/tools/security-level/"
    }
  },
  "Categories": [
    "internet-exposed"
  ],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "Security Level can be temporarily increased to High or Under Attack during active attacks, but Medium is recommended for normal operation."
}
@@ -0,0 +1,35 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.firewall.firewall_client import (
    firewall_client,
)


class firewall_security_level_medium_or_higher(Check):
    """Check if Security Level is set to Medium or higher"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []
        # Security levels in order: off, essentially_off, low, medium, high, under_attack
        acceptable_levels = ["medium", "high", "under_attack"]

        for zone_id, security_settings in firewall_client.security_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=security_settings,
                resource_name=security_settings.zone_name,
                resource_id=zone_id,
                zone_name=security_settings.zone_name,
            )

            if security_settings.security_level in acceptable_levels:
                report.status = "PASS"
                report.status_extended = f"Zone {security_settings.zone_name} has Security Level set to '{security_settings.security_level}', providing adequate protection."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {security_settings.zone_name} has Security Level set to '{security_settings.security_level}'. Recommended: 'medium' or higher to balance protection with user accessibility."

            findings.append(report)

        return findings
@@ -0,0 +1,191 @@
from pydantic.v1 import BaseModel

from prowler.lib.logger import logger
from prowler.providers.cloudflare.lib.service.service import CloudflareService


class Firewall(CloudflareService):
    """Cloudflare Firewall service for managing firewall rules and WAF settings"""

    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        self.zones = self._list_zones()
        self.firewall_rules = self._list_firewall_rules()
        self.security_settings = self._get_security_settings()

    def _list_zones(self) -> dict:
        """
        List all Cloudflare zones

        Returns:
            dict: Dictionary of zones keyed by zone ID
        """
        logger.info("Firewall - Listing Zones...")
        zones = {}

        try:
            # If specific zone IDs are provided, use those
            if self.provider.zone_ids:
                for zone_id in self.provider.zone_ids:
                    zone_data = self._api_request("GET", f"/zones/{zone_id}")
                    if zone_data:
                        zones[zone_data["id"]] = Zone(
                            id=zone_data["id"],
                            name=zone_data["name"],
                            account_id=zone_data.get("account", {}).get("id", ""),
                            status=zone_data.get("status", ""),
                            plan=zone_data.get("plan", {}).get("name", ""),
                        )
            else:
                # List all zones
                all_zones = self._api_request_paginated("/zones")
                for zone_data in all_zones:
                    zones[zone_data["id"]] = Zone(
                        id=zone_data["id"],
                        name=zone_data["name"],
                        account_id=zone_data.get("account", {}).get("id", ""),
                        status=zone_data.get("status", ""),
                        plan=zone_data.get("plan", {}).get("name", ""),
                    )

            logger.info(f"Found {len(zones)} zone(s)")

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        return zones

    def _list_firewall_rules(self) -> dict:
        """
        List firewall rules for all zones

        Returns:
            dict: Dictionary of firewall rules keyed by rule ID
        """
        logger.info("Firewall - Listing Firewall Rules...")
        firewall_rules = {}

        try:
            for zone_id, zone in self.zones.items():
                # Get firewall rules for the zone
                rules_data = self._api_request_paginated(
                    f"/zones/{zone_id}/firewall/rules"
                )

                for rule in rules_data:
                    firewall_rules[rule["id"]] = FirewallRule(
                        id=rule["id"],
                        zone_id=zone_id,
                        zone_name=zone.name,
                        paused=rule.get("paused", False),
                        description=rule.get("description", ""),
                        action=rule.get("action", ""),
                        priority=rule.get("priority", 0),
                        filter_id=rule.get("filter", {}).get("id", ""),
                    )

                # Get WAF settings for the zone
                waf_settings = self._api_request(
                    "GET", f"/zones/{zone_id}/firewall/waf/packages"
                )
                if waf_settings:
                    zone.waf_enabled = True

            logger.info(f"Found {len(firewall_rules)} firewall rule(s)")

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        return firewall_rules

    def _get_security_settings(self) -> dict:
        """
        Get security settings for all zones

        Returns:
            dict: Dictionary of security settings keyed by zone ID
        """
        logger.info("Firewall - Getting Security Settings...")
        security_settings = {}

        try:
            for zone_id, zone in self.zones.items():
                # Get security level
                security_level = self._api_request(
                    "GET", f"/zones/{zone_id}/settings/security_level"
                )

                # Get browser integrity check
                browser_check = self._api_request(
                    "GET", f"/zones/{zone_id}/settings/browser_check"
                )

                # Get challenge passage
                challenge_ttl = self._api_request(
                    "GET", f"/zones/{zone_id}/settings/challenge_ttl"
                )

                security_settings[zone_id] = SecuritySettings(
                    zone_id=zone_id,
                    zone_name=zone.name,
                    security_level=(
                        security_level.get("value", "") if security_level else ""
                    ),
                    browser_integrity_check=(
                        browser_check.get("value", "off") == "on"
                        if browser_check
                        else False
                    ),
                    challenge_ttl=(
                        challenge_ttl.get("value", 0) if challenge_ttl else 0
                    ),
                )

            logger.info(
                f"Retrieved security settings for {len(security_settings)} zone(s)"
            )

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        return security_settings


class Zone(BaseModel):
    """Model for Cloudflare Zone"""

    id: str
    name: str
    account_id: str
    status: str
    plan: str
    waf_enabled: bool = False


class FirewallRule(BaseModel):
    """Model for Cloudflare Firewall Rule"""

    id: str
    zone_id: str
    zone_name: str
    paused: bool
    description: str
    action: str
    priority: int
    filter_id: str


class SecuritySettings(BaseModel):
    """Model for Cloudflare Security Settings"""

    zone_id: str
    zone_name: str
    security_level: str
    browser_integrity_check: bool
    challenge_ttl: int
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "firewall_waf_enabled",
  "CheckTitle": "Ensure Web Application Firewall (WAF) is enabled",
  "CheckType": [],
  "ServiceName": "firewall",
  "SubServiceName": "",
  "ResourceIdTemplate": "zone_id",
  "Severity": "high",
  "ResourceType": "Zone",
  "Description": "This check ensures that Web Application Firewall (WAF) is enabled for Cloudflare zones to protect against common web application attacks such as SQL injection, cross-site scripting (XSS), and other OWASP Top 10 vulnerabilities.",
  "Risk": "Without WAF enabled, web applications are vulnerable to common attacks that could lead to data breaches, service disruptions, or unauthorized access.",
  "RelatedUrl": "https://developers.cloudflare.com/waf/",
  "Remediation": {
    "Code": {
      "CLI": "cloudflare firewall waf enable --zone-id <zone_id>",
      "NativeIaC": "",
      "Other": "https://dash.cloudflare.com/ -> Select Zone -> Security -> WAF -> Enable",
      "Terraform": ""
    },
    "Recommendation": {
      "Text": "Enable Web Application Firewall (WAF) for all Cloudflare zones to protect against common web application attacks.",
      "Url": "https://developers.cloudflare.com/waf/managed-rules/"
    }
  },
  "Categories": [
    "internet-exposed"
  ],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "WAF is available on Pro, Business, and Enterprise plans. Free plans have limited WAF capabilities."
}
@@ -0,0 +1,36 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.firewall.firewall_client import (
    firewall_client,
)


class firewall_waf_enabled(Check):
    """Check if Web Application Firewall (WAF) is enabled for Cloudflare zones

    This class verifies whether each Cloudflare zone has WAF enabled to protect
    against common web application attacks.
    """

    def execute(self) -> List[CheckReportCloudflare]:
        """Execute the Cloudflare WAF enabled check

        Iterates over all zones and checks if WAF is enabled.

        Returns:
            List[CheckReportCloudflare]: A list of reports for each zone
        """
        findings = []
        for zone_id, zone in firewall_client.zones.items():
            report = CheckReportCloudflare(metadata=self.metadata(), resource=zone)
            report.status = "FAIL"
            report.status_extended = f"Zone {zone.name} does not have WAF enabled."

            if zone.waf_enabled:
                report.status = "PASS"
                report.status_extended = f"Zone {zone.name} has WAF enabled."

            findings.append(report)

        return findings
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "ssl_always_use_https",
  "CheckTitle": "Ensure 'Always Use HTTPS' is enabled",
  "CheckType": [],
  "ServiceName": "ssl",
  "SubServiceName": "",
  "ResourceIdTemplate": "zone_id",
  "Severity": "medium",
  "ResourceType": "Zone",
  "Description": "This check ensures that 'Always Use HTTPS' is enabled for Cloudflare zones to automatically redirect all HTTP requests to HTTPS, ensuring all traffic is encrypted.",
  "Risk": "Without 'Always Use HTTPS' enabled, visitors may access the website over unencrypted HTTP connections, exposing sensitive data to interception and man-in-the-middle attacks.",
  "RelatedUrl": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/always-use-https/",
  "Remediation": {
    "Code": {
      "CLI": "curl -X PATCH \"https://api.cloudflare.com/client/v4/zones/<zone_id>/settings/always_use_https\" -H \"Authorization: Bearer <api_token>\" -H \"Content-Type: application/json\" -d '{\"value\":\"on\"}'",
      "NativeIaC": "",
      "Other": "https://dash.cloudflare.com/ -> Select Zone -> SSL/TLS -> Edge Certificates -> Always Use HTTPS -> On",
      "Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n always_use_https = \"on\"\n }\n}"
    },
    "Recommendation": {
      "Text": "Enable 'Always Use HTTPS' for all Cloudflare zones to ensure all traffic is encrypted and secure.",
      "Url": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/always-use-https/"
    }
  },
  "Categories": [
    "encryption"
  ],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "This setting redirects all HTTP requests to HTTPS using a 301 permanent redirect."
}
@@ -0,0 +1,42 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.ssl.ssl_client import ssl_client


class ssl_always_use_https(Check):
    """Check if Cloudflare zones have 'Always Use HTTPS' enabled

    This class verifies that each Cloudflare zone has 'Always Use HTTPS' enabled
    to automatically redirect HTTP requests to HTTPS.
    """

    def execute(self) -> List[CheckReportCloudflare]:
        """Execute the Cloudflare Always Use HTTPS check

        Iterates over all SSL settings and checks if Always Use HTTPS is enabled.

        Returns:
            List[CheckReportCloudflare]: A list of reports for each zone
        """
        findings = []
        for zone_id, ssl_settings in ssl_client.ssl_settings.items():
            zone = ssl_client.zones.get(zone_id)
            if not zone:
                continue

            report = CheckReportCloudflare(
                metadata=self.metadata(), resource=ssl_settings
            )
            report.status = "FAIL"
            report.status_extended = f"Zone {ssl_settings.zone_name} does not have 'Always Use HTTPS' enabled."

            if ssl_settings.always_use_https:
                report.status = "PASS"
                report.status_extended = (
                    f"Zone {ssl_settings.zone_name} has 'Always Use HTTPS' enabled."
                )

            findings.append(report)

        return findings
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "ssl_automatic_https_rewrites_enabled",
  "CheckTitle": "Ensure Automatic HTTPS Rewrites is enabled to resolve mixed content issues",
  "CheckType": [],
  "ServiceName": "ssl",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "medium",
  "ResourceType": "Zone",
  "Description": "This check ensures that Automatic HTTPS Rewrites is enabled for Cloudflare zones to automatically rewrite insecure HTTP links to secure HTTPS links, resolving mixed content issues and enhancing site security.",
  "Risk": "Without Automatic HTTPS Rewrites, pages may contain mixed content (HTTP resources loaded over HTTPS pages), which browsers block or warn about, degrading user experience and security.",
  "RelatedUrl": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/automatic-https-rewrites/",
  "Remediation": {
    "Code": {
      "CLI": "cloudflare ssl automatic-https-rewrites enable --zone-id <zone_id>",
      "NativeIaC": "",
      "Other": "https://dash.cloudflare.com/ -> Select Zone -> SSL/TLS -> Edge Certificates -> Enable Automatic HTTPS Rewrites",
      "Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n automatic_https_rewrites = \"on\"\n }\n}"
    },
    "Recommendation": {
      "Text": "Enable Automatic HTTPS Rewrites for all Cloudflare zones to prevent mixed content warnings and ensure all resources load securely.",
      "Url": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/automatic-https-rewrites/"
    }
  },
  "Categories": [
    "encryption"
  ],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": "This feature works best when combined with Always Use HTTPS to ensure the entire site is served over HTTPS."
}
@@ -0,0 +1,31 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.ssl.ssl_client import ssl_client


class ssl_automatic_https_rewrites_enabled(Check):
    """Check if Automatic HTTPS Rewrites is enabled to resolve mixed content issues"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []

        for zone_id, ssl_settings in ssl_client.ssl_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=ssl_settings,
                resource_name=ssl_settings.zone_name,
                resource_id=zone_id,
                zone_name=ssl_settings.zone_name,
            )

            if ssl_settings.automatic_https_rewrites:
                report.status = "PASS"
                report.status_extended = f"Zone {ssl_settings.zone_name} has Automatic HTTPS Rewrites enabled, resolving mixed content issues and enhancing site security."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {ssl_settings.zone_name} does not have Automatic HTTPS Rewrites enabled. Enable it to automatically rewrite HTTP links to HTTPS and prevent mixed content warnings."

            findings.append(report)

        return findings
4
prowler/providers/cloudflare/services/ssl/ssl_client.py
Normal file
@@ -0,0 +1,4 @@
from prowler.providers.cloudflare.services.ssl.ssl_service import SSL
from prowler.providers.common.provider import Provider

ssl_client = SSL(Provider.get_global_provider())
@@ -0,0 +1,33 @@
{
  "Provider": "cloudflare",
  "CheckID": "ssl_hsts_enabled",
  "CheckTitle": "Ensure HSTS (HTTP Strict Transport Security) is enabled with recommended max-age",
  "CheckType": [],
  "ServiceName": "ssl",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "high",
  "ResourceType": "Zone",
  "Description": "This check ensures that HSTS (HTTP Strict Transport Security) is enabled for Cloudflare zones with a recommended max-age of at least 6 months (15768000 seconds) to prevent SSL stripping and man-in-the-middle attacks.",
  "Risk": "Without HSTS enabled, browsers may initially connect over HTTP, making the connection vulnerable to SSL stripping attacks where an attacker downgrades the connection to unencrypted HTTP.",
  "RelatedUrl": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/http-strict-transport-security/",
  "Remediation": {
    "Code": {
"CLI": "cloudflare ssl hsts enable --zone-id <zone_id> --max-age 31536000",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://dash.cloudflare.com/ -> Select Zone -> SSL/TLS -> Edge Certificates -> Enable HSTS",
|
||||
"Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n security_header {\n enabled = true\n max_age = 31536000\n include_subdomains = true\n preload = true\n }\n }\n}"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable HSTS for all Cloudflare zones with a max-age of at least 6 months (15768000 seconds) to prevent SSL stripping attacks.",
|
||||
"Url": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/http-strict-transport-security/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption",
|
||||
"internet-exposed"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "HSTS requires HTTPS to be properly configured. Ensure all resources are accessible via HTTPS before enabling HSTS with a long max-age."
|
||||
}
|
||||
@@ -0,0 +1,37 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.ssl.ssl_client import ssl_client


class ssl_hsts_enabled(Check):
    """Check if HSTS (HTTP Strict Transport Security) is enabled with recommended max-age"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []
        # Recommended minimum max-age is 6 months (15768000 seconds)
        recommended_max_age = 15768000

        for zone_id, ssl_settings in ssl_client.ssl_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=ssl_settings,
                resource_name=ssl_settings.zone_name,
                resource_id=zone_id,
                zone_name=ssl_settings.zone_name,
            )

            if ssl_settings.hsts_enabled:
                if ssl_settings.hsts_max_age >= recommended_max_age:
                    report.status = "PASS"
                    report.status_extended = f"Zone {ssl_settings.zone_name} has HSTS enabled with max-age of {ssl_settings.hsts_max_age} seconds (>= {recommended_max_age} recommended)."
                else:
                    report.status = "FAIL"
                    report.status_extended = f"Zone {ssl_settings.zone_name} has HSTS enabled but max-age is {ssl_settings.hsts_max_age} seconds (< {recommended_max_age} recommended). Increase max-age for better security."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {ssl_settings.zone_name} does not have HSTS enabled. Enable HSTS to prevent SSL stripping and man-in-the-middle attacks."

            findings.append(report)

        return findings
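Where the 15768000-second threshold in the check above comes from (half of a 365-day year, i.e. the "6 months" named in the metadata):

# 6 months expressed in seconds, as used by recommended_max_age above:
seconds_per_year = 365 * 24 * 60 * 60  # 31536000
assert seconds_per_year // 2 == 15768000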
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "ssl_hsts_include_subdomains",
  "CheckTitle": "Ensure HSTS includes subdomains for comprehensive protection",
  "CheckType": [],
  "ServiceName": "ssl",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "medium",
  "ResourceType": "Zone",
  "Description": "This check ensures that HSTS (HTTP Strict Transport Security) is configured with the includeSubDomains directive to apply the HSTS policy uniformly across the entire domain, including all subdomains.",
  "Risk": "Without the includeSubDomains directive, subdomains may be vulnerable to SSL stripping attacks even if the main domain has HSTS enabled.",
  "RelatedUrl": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/http-strict-transport-security/",
  "Remediation": {
    "Code": {
"CLI": "cloudflare ssl hsts enable --zone-id <zone_id> --include-subdomains",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://dash.cloudflare.com/ -> Select Zone -> SSL/TLS -> Edge Certificates -> HSTS -> Enable 'Include subdomains'",
|
||||
"Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n security_header {\n enabled = true\n include_subdomains = true\n max_age = 31536000\n }\n }\n}"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable HSTS with includeSubDomains directive for all Cloudflare zones to ensure all subdomains are protected.",
|
||||
"Url": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/http-strict-transport-security/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "Ensure all subdomains are accessible via HTTPS before enabling includeSubDomains to avoid accessibility issues."
|
||||
}
|
||||
@@ -0,0 +1,34 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.ssl.ssl_client import ssl_client


class ssl_hsts_include_subdomains(Check):
    """Check if HSTS includes subdomains for comprehensive protection"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []

        for zone_id, ssl_settings in ssl_client.ssl_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=ssl_settings,
                resource_name=ssl_settings.zone_name,
                resource_id=zone_id,
                zone_name=ssl_settings.zone_name,
            )

            if ssl_settings.hsts_enabled and ssl_settings.hsts_include_subdomains:
                report.status = "PASS"
                report.status_extended = f"Zone {ssl_settings.zone_name} has HSTS enabled with includeSubDomains directive, protecting all subdomains."
            elif ssl_settings.hsts_enabled and not ssl_settings.hsts_include_subdomains:
                report.status = "FAIL"
                report.status_extended = f"Zone {ssl_settings.zone_name} has HSTS enabled but does not include subdomains. Enable includeSubDomains to protect all subdomains."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {ssl_settings.zone_name} does not have HSTS enabled. Enable HSTS with includeSubDomains directive."

            findings.append(report)

        return findings
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "ssl_mode_full_strict",
  "CheckTitle": "Ensure SSL/TLS mode is set to Full (strict) for end-to-end encryption",
  "CheckType": [],
  "ServiceName": "ssl",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "high",
  "ResourceType": "Zone",
  "Description": "This check ensures that SSL/TLS mode is set to Full (strict) for Cloudflare zones to ensure end-to-end encryption with certificate validation between Cloudflare and origin servers.",
  "Risk": "Using flexible or off SSL modes can expose traffic between Cloudflare and origin servers to interception. Full (strict) mode ensures encrypted connections and validates origin server certificates.",
  "RelatedUrl": "https://developers.cloudflare.com/ssl/origin-configuration/ssl-modes/",
  "Remediation": {
    "Code": {
"CLI": "cloudflare ssl mode update --zone-id <zone_id> --mode full",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://dash.cloudflare.com/ -> Select Zone -> SSL/TLS -> Overview -> Set to 'Full (strict)'",
|
||||
"Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n ssl = \"full\"\n }\n}"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Set SSL/TLS mode to Full (strict) for all Cloudflare zones to ensure end-to-end encryption with proper certificate validation.",
|
||||
"Url": "https://developers.cloudflare.com/ssl/origin-configuration/ssl-modes/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "Full (strict) mode requires a valid SSL certificate on the origin server. Ensure your origin has a trusted certificate before enabling."
|
||||
}
|
||||
@@ -0,0 +1,31 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.ssl.ssl_client import ssl_client


class ssl_mode_full_strict(Check):
    """Check if SSL/TLS mode is set to Full (strict) for end-to-end encryption"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []
        for zone_id, ssl_settings in ssl_client.ssl_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=ssl_settings,
                resource_name=ssl_settings.zone_name,
                resource_id=zone_id,
                zone_name=ssl_settings.zone_name,
            )

            # SSL mode should be "full" or "strict" for end-to-end encryption
            if ssl_settings.ssl_mode in ["full", "strict"]:
                report.status = "PASS"
                report.status_extended = f"Zone {ssl_settings.zone_name} has SSL/TLS mode set to '{ssl_settings.ssl_mode}' ensuring end-to-end encryption."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {ssl_settings.zone_name} has SSL/TLS mode set to '{ssl_settings.ssl_mode}'. Recommended: 'full' or 'strict' for end-to-end encryption with certificate validation."

            findings.append(report)

        return findings
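For context on the values tested above, an assumed summary of how the Cloudflare dashboard labels map to the "ssl" setting's API values; note that plain "full" encrypts to the origin but does not validate its certificate, which is why the check title asks for Full (strict):

# Assumed mapping of dashboard labels to "ssl" zone-setting values:
SSL_MODES = {
    "Off": "off",               # no encryption at all
    "Flexible": "flexible",     # visitor->Cloudflare encrypted, Cloudflare->origin plain HTTP
    "Full": "full",             # encrypted to origin, origin certificate not validated
    "Full (strict)": "strict",  # encrypted to origin, origin certificate validated
}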
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "ssl_opportunistic_encryption_enabled",
  "CheckTitle": "Ensure Opportunistic Encryption is enabled for HTTP/2 benefits",
  "CheckType": [],
  "ServiceName": "ssl",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "low",
  "ResourceType": "Zone",
  "Description": "This check ensures that Opportunistic Encryption is enabled for Cloudflare zones to provide HTTP/2 benefits over encrypted connections, even for visitors using HTTP.",
  "Risk": "Without Opportunistic Encryption, HTTP visitors cannot benefit from HTTP/2 performance improvements such as multiplexing and server push.",
  "RelatedUrl": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/opportunistic-encryption/",
  "Remediation": {
    "Code": {
"CLI": "cloudflare ssl opportunistic-encryption enable --zone-id <zone_id>",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://dash.cloudflare.com/ -> Select Zone -> SSL/TLS -> Edge Certificates -> Enable Opportunistic Encryption",
|
||||
"Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n opportunistic_encryption = \"on\"\n }\n}"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable Opportunistic Encryption for all Cloudflare zones to provide HTTP/2 benefits to all visitors.",
|
||||
"Url": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/opportunistic-encryption/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "Opportunistic Encryption allows HTTP/2 over TLS for HTTP visitors, providing performance benefits without requiring HTTPS."
|
||||
}
|
||||
@@ -0,0 +1,31 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.ssl.ssl_client import ssl_client


class ssl_opportunistic_encryption_enabled(Check):
    """Check if Opportunistic Encryption is enabled for HTTP/2 benefits"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []

        for zone_id, ssl_settings in ssl_client.ssl_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=ssl_settings,
                resource_name=ssl_settings.zone_name,
                resource_id=zone_id,
                zone_name=ssl_settings.zone_name,
            )

            if ssl_settings.opportunistic_encryption:
                report.status = "PASS"
                report.status_extended = f"Zone {ssl_settings.zone_name} has Opportunistic Encryption enabled, providing HTTP/2 benefits over encrypted connections."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {ssl_settings.zone_name} does not have Opportunistic Encryption enabled. Enable it to provide HTTP/2 benefits over encrypted connections."

            findings.append(report)

        return findings
181
prowler/providers/cloudflare/services/ssl/ssl_service.py
Normal file
@@ -0,0 +1,181 @@
from pydantic.v1 import BaseModel

from prowler.lib.logger import logger
from prowler.providers.cloudflare.lib.service.service import CloudflareService


class SSL(CloudflareService):
    """Cloudflare SSL/TLS service for managing SSL settings"""

    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        self.zones = self._list_zones()
        self.ssl_settings = self._get_ssl_settings()

    def _list_zones(self) -> dict:
        """
        List all Cloudflare zones

        Returns:
            dict: Dictionary of zones keyed by zone ID
        """
        logger.info("SSL - Listing Zones...")
        zones = {}

        try:
            # If specific zone IDs are provided, use those
            if self.provider.zone_ids:
                for zone_id in self.provider.zone_ids:
                    zone_data = self._api_request("GET", f"/zones/{zone_id}")
                    if zone_data:
                        zones[zone_data["id"]] = Zone(
                            id=zone_data["id"],
                            name=zone_data["name"],
                            account_id=zone_data.get("account", {}).get("id", ""),
                        )
            else:
                # List all zones
                all_zones = self._api_request_paginated("/zones")
                for zone_data in all_zones:
                    zones[zone_data["id"]] = Zone(
                        id=zone_data["id"],
                        name=zone_data["name"],
                        account_id=zone_data.get("account", {}).get("id", ""),
                    )

            logger.info(f"Found {len(zones)} zone(s) for SSL checks")

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        return zones

    def _get_ssl_settings(self) -> dict:
        """
        Get SSL/TLS settings for all zones

        Returns:
            dict: Dictionary of SSL settings keyed by zone ID
        """
        logger.info("SSL - Getting SSL/TLS Settings...")
        ssl_settings = {}

        try:
            for zone_id, zone in self.zones.items():
                # Get SSL/TLS mode
                ssl_mode = self._api_request("GET", f"/zones/{zone_id}/settings/ssl")

                # Get minimum TLS version
                min_tls = self._api_request(
                    "GET", f"/zones/{zone_id}/settings/min_tls_version"
                )

                # Get TLS 1.3 setting
                tls_1_3 = self._api_request("GET", f"/zones/{zone_id}/settings/tls_1_3")

                # Get automatic HTTPS rewrites
                auto_https = self._api_request(
                    "GET", f"/zones/{zone_id}/settings/automatic_https_rewrites"
                )

                # Get always use HTTPS
                always_https = self._api_request(
                    "GET", f"/zones/{zone_id}/settings/always_use_https"
                )

                # Get opportunistic encryption
                opportunistic = self._api_request(
                    "GET", f"/zones/{zone_id}/settings/opportunistic_encryption"
                )

                # Get HSTS settings
                hsts = self._api_request(
                    "GET", f"/zones/{zone_id}/settings/security_header"
                )

                ssl_settings[zone_id] = SSLSettings(
                    zone_id=zone_id,
                    zone_name=zone.name,
                    ssl_mode=ssl_mode.get("value", "") if ssl_mode else "",
                    min_tls_version=(
                        min_tls.get("value", "1.0") if min_tls else "1.0"
                    ),
                    tls_1_3_enabled=(
                        tls_1_3.get("value", "off") == "on" if tls_1_3 else False
                    ),
                    automatic_https_rewrites=(
                        auto_https.get("value", "off") == "on" if auto_https else False
                    ),
                    always_use_https=(
                        always_https.get("value", "off") == "on"
                        if always_https
                        else False
                    ),
                    opportunistic_encryption=(
                        opportunistic.get("value", "off") == "on"
                        if opportunistic
                        else False
                    ),
                    hsts_enabled=(
                        hsts.get("value", {})
                        .get("strict_transport_security", {})
                        .get("enabled", False)
                        if hsts
                        else False
                    ),
                    hsts_max_age=(
                        hsts.get("value", {})
                        .get("strict_transport_security", {})
                        .get("max_age", 0)
                        if hsts
                        else 0
                    ),
                    hsts_include_subdomains=(
                        hsts.get("value", {})
                        .get("strict_transport_security", {})
                        .get("include_subdomains", False)
                        if hsts
                        else False
                    ),
                    hsts_preload=(
                        hsts.get("value", {})
                        .get("strict_transport_security", {})
                        .get("preload", False)
                        if hsts
                        else False
                    ),
                )

            logger.info(f"Retrieved SSL settings for {len(ssl_settings)} zone(s)")

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        return ssl_settings


class Zone(BaseModel):
    """Model for Cloudflare Zone"""

    id: str
    name: str
    account_id: str


class SSLSettings(BaseModel):
    """Model for Cloudflare SSL/TLS Settings"""

    zone_id: str
    zone_name: str
    ssl_mode: str
    min_tls_version: str
    tls_1_3_enabled: bool
    automatic_https_rewrites: bool
    always_use_https: bool
    opportunistic_encryption: bool
    hsts_enabled: bool
    hsts_max_age: int
    hsts_include_subdomains: bool
    hsts_preload: bool
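The nested `.get()` chain in `_get_ssl_settings` assumes a `security_header` setting payload shaped like the following (illustrative values, not from the diff):

# Assumed shape of the /zones/<zone_id>/settings/security_header result:
example_security_header = {
    "id": "security_header",
    "value": {
        "strict_transport_security": {
            "enabled": True,
            "max_age": 31536000,
            "include_subdomains": True,
            "preload": False,
        }
    },
}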
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "ssl_tls_1_3_enabled",
  "CheckTitle": "Ensure TLS 1.3 is enabled for enhanced security and performance",
  "CheckType": [],
  "ServiceName": "ssl",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "medium",
  "ResourceType": "Zone",
  "Description": "This check ensures that TLS 1.3 is enabled for Cloudflare zones to activate the latest TLS protocol, which streamlines the TLS handshake, enhances security, and reduces connection time.",
  "Risk": "Without TLS 1.3 enabled, connections use older TLS versions, which have longer handshake times and may be vulnerable to known attacks.",
  "RelatedUrl": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/tls-13/",
  "Remediation": {
    "Code": {
"CLI": "cloudflare ssl tls-1-3 enable --zone-id <zone_id>",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://dash.cloudflare.com/ -> Select Zone -> SSL/TLS -> Edge Certificates -> Enable TLS 1.3",
|
||||
"Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n tls_1_3 = \"on\"\n }\n}"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable TLS 1.3 for all Cloudflare zones to take advantage of improved security and performance.",
|
||||
"Url": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/tls-13/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "TLS 1.3 is supported by modern browsers and provides significant security and performance improvements."
|
||||
}
|
||||
@@ -0,0 +1,31 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.ssl.ssl_client import ssl_client


class ssl_tls_1_3_enabled(Check):
    """Check if TLS 1.3 is enabled for enhanced security and performance"""

    def execute(self) -> List[CheckReportCloudflare]:
        findings = []

        for zone_id, ssl_settings in ssl_client.ssl_settings.items():
            report = CheckReportCloudflare(
                metadata=self.metadata(),
                resource=ssl_settings,
                resource_name=ssl_settings.zone_name,
                resource_id=zone_id,
                zone_name=ssl_settings.zone_name,
            )

            if ssl_settings.tls_1_3_enabled:
                report.status = "PASS"
                report.status_extended = f"Zone {ssl_settings.zone_name} has TLS 1.3 enabled, providing enhanced security and reduced connection time."
            else:
                report.status = "FAIL"
                report.status_extended = f"Zone {ssl_settings.zone_name} does not have TLS 1.3 enabled. Enable TLS 1.3 for improved security and performance."

            findings.append(report)

        return findings
@@ -0,0 +1,32 @@
{
  "Provider": "cloudflare",
  "CheckID": "ssl_tls_minimum_version",
  "CheckTitle": "Ensure minimum TLS version is set to 1.2 or higher",
  "CheckType": [],
  "ServiceName": "ssl",
  "SubServiceName": "",
  "ResourceIdTemplate": "zone_id",
  "Severity": "high",
  "ResourceType": "Zone",
  "Description": "This check ensures that Cloudflare zones enforce a minimum TLS version of 1.2 or higher. TLS 1.0 and 1.1 are deprecated and have known security vulnerabilities.",
  "Risk": "Using outdated TLS versions (1.0 and 1.1) exposes connections to known security vulnerabilities and does not meet modern security standards. This can lead to man-in-the-middle attacks and data interception.",
  "RelatedUrl": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/minimum-tls/",
  "Remediation": {
    "Code": {
"CLI": "curl -X PATCH \"https://api.cloudflare.com/v4/zones/<zone_id>/settings/min_tls_version\" -H \"Authorization: Bearer <api_token>\" -H \"Content-Type: application/json\" -d '{\"value\":\"1.2\"}'",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://dash.cloudflare.com/ -> Select Zone -> SSL/TLS -> Edge Certificates -> Minimum TLS Version -> Set to 1.2 or higher",
|
||||
"Terraform": "resource \"cloudflare_zone_settings_override\" \"example\" {\n zone_id = var.zone_id\n settings {\n min_tls_version = \"1.2\"\n }\n}"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Set the minimum TLS version to 1.2 or 1.3 for all Cloudflare zones to ensure secure encrypted connections.",
|
||||
"Url": "https://developers.cloudflare.com/ssl/edge-certificates/additional-options/minimum-tls/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "TLS 1.0 and 1.1 were deprecated by major browsers in 2020. TLS 1.2 is the current recommended minimum version."
|
||||
}
|
||||
@@ -0,0 +1,41 @@
from typing import List

from prowler.lib.check.models import Check, CheckReportCloudflare
from prowler.providers.cloudflare.services.ssl.ssl_client import ssl_client


class ssl_tls_minimum_version(Check):
    """Check if Cloudflare zones have minimum TLS version set to 1.2 or higher

    This class verifies that each Cloudflare zone enforces a minimum TLS version
    of 1.2 or higher to ensure secure connections.
    """

    def execute(self) -> List[CheckReportCloudflare]:
        """Execute the Cloudflare minimum TLS version check

        Iterates over all SSL settings and checks the minimum TLS version.

        Returns:
            List[CheckReportCloudflare]: A list of reports for each zone
        """
        findings = []
        for zone_id, ssl_settings in ssl_client.ssl_settings.items():
            zone = ssl_client.zones.get(zone_id)
            if not zone:
                continue

            report = CheckReportCloudflare(
                metadata=self.metadata(), resource=ssl_settings
            )
            report.status = "FAIL"
            report.status_extended = f"Zone {ssl_settings.zone_name} has minimum TLS version set to {ssl_settings.min_tls_version}, which is below the recommended 1.2."

            # Check if minimum TLS version is 1.2 or higher
            if ssl_settings.min_tls_version in ["1.2", "1.3"]:
                report.status = "PASS"
                report.status_extended = f"Zone {ssl_settings.zone_name} has minimum TLS version set to {ssl_settings.min_tls_version}."

            findings.append(report)

        return findings
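A design note on the membership test above: it works because the setting reports the version as one of a small fixed set of strings ("1.0", "1.1", "1.2", "1.3"). A numeric comparison would be a more future-proof alternative (an illustrative sketch, assuming the values stay bare numeric strings):

def meets_minimum_tls(version: str, minimum: float = 1.2) -> bool:
    # Treat unparseable or empty values as failing the minimum.
    try:
        return float(version) >= minimum
    except ValueError:
        return False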
@@ -302,6 +302,17 @@ class Provider(ABC):
                    fixer_config=fixer_config,
                    use_instance_principal=arguments.use_instance_principal,
                )
            elif "cloudflare" in provider_class_name.lower():
                provider_class(
                    api_token=arguments.api_token,
                    api_key=arguments.api_key,
                    api_email=arguments.api_email,
                    account_ids=arguments.account_id,
                    zone_ids=arguments.zone_id,
                    config_path=arguments.config_file,
                    mutelist_path=arguments.mutelist_file,
                    fixer_config=fixer_config,
                )

        except TypeError as error:
            logger.critical(
@@ -0,0 +1,34 @@
{
  "Provider": "gcp",
  "CheckID": "cloudstorage_bucket_lifecycle_management_enabled",
  "CheckTitle": "Cloud Storage buckets have lifecycle management enabled",
  "CheckType": [],
  "ServiceName": "cloudstorage",
  "SubServiceName": "",
  "ResourceIdTemplate": "",
  "Severity": "medium",
  "ResourceType": "storage.googleapis.com/Bucket",
  "Description": "**Google Cloud Storage buckets** are evaluated for the presence of **lifecycle management** with at least one valid rule (supported action and non-empty condition) to automatically transition or delete objects and optimize storage costs.",
  "Risk": "Buckets without lifecycle rules can accumulate stale data, increase storage costs, and fail to meet data retention and internal compliance requirements.",
  "RelatedUrl": "",
  "AdditionalURLs": [
    "https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudStorage/enable-lifecycle-management.html",
    "https://cloud.google.com/storage/docs/lifecycle"
  ],
  "Remediation": {
    "Code": {
      "CLI": "gcloud storage buckets update gs://<BUCKET_NAME> --lifecycle-file=<PATH_TO_JSON>",
      "NativeIaC": "",
      "Other": "1) Open Google Cloud Console → Storage → Buckets → <BUCKET_NAME>\n2) Tab 'Lifecycle'\n3) Add rule(s) to delete or transition objects (e.g., delete after 365 days; transition STANDARD→NEARLINE after 90 days)\n4) Save",
      "Terraform": "```hcl\n# Example: enable lifecycle to transition and delete objects\nresource \"google_storage_bucket\" \"example\" {\n  name     = var.bucket_name\n  location = var.location\n\n  # Transition STANDARD → NEARLINE after 90 days\n  lifecycle_rule {\n    action {\n      type          = \"SetStorageClass\"\n      storage_class = \"NEARLINE\"\n    }\n    condition {\n      age                   = 90\n      matches_storage_class = [\"STANDARD\"]\n    }\n  }\n\n  # Delete objects after 365 days\n  lifecycle_rule {\n    action {\n      type = \"Delete\"\n    }\n    condition {\n      age = 365\n    }\n  }\n}\n```"
    },
    "Recommendation": {
      "Text": "Configure lifecycle rules to automatically delete stale objects or transition them to colder storage classes according to your organization's retention and cost-optimization policy.",
      "Url": "https://hub.prowler.com/check/cloudstorage_bucket_lifecycle_management_enabled"
    }
  },
  "Categories": [],
  "DependsOn": [],
  "RelatedTo": [],
  "Notes": ""
}
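A sample of the `<PATH_TO_JSON>` lifecycle file expected by the gcloud command above (assumed contents, mirroring the Terraform example; the top-level "rule" key is the same one the service code below reads from the bucket's "lifecycle" metadata):

import json

# Illustrative lifecycle policy: transition STANDARD -> NEARLINE after 90 days,
# delete objects after 365 days.
lifecycle_policy = {
    "rule": [
        {
            "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"},
            "condition": {"age": 90, "matchesStorageClass": ["STANDARD"]},
        },
        {"action": {"type": "Delete"}, "condition": {"age": 365}},
    ]
}

with open("lifecycle.json", "w") as f:
    json.dump(lifecycle_policy, f, indent=2)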
@@ -0,0 +1,48 @@
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.cloudstorage.cloudstorage_client import (
    cloudstorage_client,
)


class cloudstorage_bucket_lifecycle_management_enabled(Check):
    """Ensure Cloud Storage buckets have lifecycle management enabled with at least one valid rule.

    Reports PASS if a bucket has at least one valid lifecycle rule
    (with a supported action and condition), otherwise FAIL.

    """

    def execute(self) -> list[Check_Report_GCP]:
        """Run the lifecycle management check for each Cloud Storage bucket.

        Returns:
            list[Check_Report_GCP]: Results for all evaluated buckets.
        """

        findings = []
        for bucket in cloudstorage_client.buckets:
            report = Check_Report_GCP(metadata=self.metadata(), resource=bucket)
            report.status = "FAIL"
            report.status_extended = (
                f"Bucket {bucket.name} does not have lifecycle management enabled."
            )

            rules = bucket.lifecycle_rules

            if rules:
                valid_rules = []
                for rule in rules:
                    action_type = rule.get("action", {}).get("type")
                    condition = rule.get("condition")
                    if action_type and condition:
                        valid_rules.append(rule)

                if valid_rules:
                    report.status = "PASS"
                    report.status_extended = f"Bucket {bucket.name} has lifecycle management enabled with {len(valid_rules)} valid rule(s)."
                else:
                    report.status = "FAIL"
                    report.status_extended = f"Bucket {bucket.name} has lifecycle rules configured but none are valid."

            findings.append(report)
        return findings
@@ -31,6 +31,14 @@ class CloudStorage(GCPService):
                        bucket_iam
                    ) or "allUsers" in str(bucket_iam):
                        public = True

                    lifecycle_rules = None
                    lifecycle = bucket.get("lifecycle")
                    if isinstance(lifecycle, dict):
                        rules = lifecycle.get("rule")
                        if isinstance(rules, list):
                            lifecycle_rules = rules

                    self.buckets.append(
                        Bucket(
                            name=bucket["name"],
@@ -42,6 +50,7 @@ class CloudStorage(GCPService):
                            public=public,
                            retention_policy=bucket.get("retentionPolicy"),
                            project_id=project_id,
                            lifecycle_rules=lifecycle_rules,
                        )
                    )

@@ -62,3 +71,4 @@ class Bucket(BaseModel):
    public: bool
    project_id: str
    retention_policy: Optional[dict] = None
    lifecycle_rules: Optional[list[dict]] = None
@@ -0,0 +1,223 @@
from unittest import mock

from tests.providers.gcp.gcp_fixtures import (
    GCP_PROJECT_ID,
    GCP_US_CENTER1_LOCATION,
    set_mocked_gcp_provider,
)


class TestCloudStorageBucketLifecycleManagementEnabled:
    def test_bucket_without_lifecycle_rules(self):
        cloudstorage_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
                new=cloudstorage_client,
            ),
        ):
            from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
                cloudstorage_bucket_lifecycle_management_enabled,
            )
            from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
                Bucket,
            )

            cloudstorage_client.project_ids = [GCP_PROJECT_ID]
            cloudstorage_client.region = GCP_US_CENTER1_LOCATION

            cloudstorage_client.buckets = [
                Bucket(
                    name="no-lifecycle",
                    id="no-lifecycle",
                    region=GCP_US_CENTER1_LOCATION,
                    uniform_bucket_level_access=True,
                    public=False,
                    retention_policy=None,
                    project_id=GCP_PROJECT_ID,
                    lifecycle_rules=[],
                )
            ]

            check = cloudstorage_bucket_lifecycle_management_enabled()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == f"Bucket {cloudstorage_client.buckets[0].name} does not have lifecycle management enabled."
            )
            assert result[0].resource_id == "no-lifecycle"
            assert result[0].resource_name == "no-lifecycle"
            assert result[0].location == GCP_US_CENTER1_LOCATION
            assert result[0].project_id == GCP_PROJECT_ID

    def test_bucket_with_minimal_delete_rule(self):
        cloudstorage_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
                new=cloudstorage_client,
            ),
        ):
            from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
                cloudstorage_bucket_lifecycle_management_enabled,
            )
            from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
                Bucket,
            )

            cloudstorage_client.project_ids = [GCP_PROJECT_ID]
            cloudstorage_client.region = GCP_US_CENTER1_LOCATION

            cloudstorage_client.buckets = [
                Bucket(
                    name="delete-rule",
                    id="delete-rule",
                    region=GCP_US_CENTER1_LOCATION,
                    uniform_bucket_level_access=True,
                    public=False,
                    retention_policy=None,
                    project_id=GCP_PROJECT_ID,
                    lifecycle_rules=[
                        {"action": {"type": "Delete"}, "condition": {"age": 30}}
                    ],
                )
            ]

            check = cloudstorage_bucket_lifecycle_management_enabled()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "PASS"
            assert (
                result[0].status_extended
                == f"Bucket {cloudstorage_client.buckets[0].name} has lifecycle management enabled with 1 valid rule(s)."
            )
            assert result[0].resource_id == "delete-rule"
            assert result[0].resource_name == "delete-rule"
            assert result[0].location == GCP_US_CENTER1_LOCATION
            assert result[0].project_id == GCP_PROJECT_ID

    def test_bucket_with_transition_and_delete_rules(self):
        cloudstorage_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
                new=cloudstorage_client,
            ),
        ):
            from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
                cloudstorage_bucket_lifecycle_management_enabled,
            )
            from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
                Bucket,
            )

            cloudstorage_client.project_ids = [GCP_PROJECT_ID]
            cloudstorage_client.region = GCP_US_CENTER1_LOCATION

            cloudstorage_client.buckets = [
                Bucket(
                    name="transition-delete",
                    id="transition-delete",
                    region=GCP_US_CENTER1_LOCATION,
                    uniform_bucket_level_access=True,
                    public=False,
                    retention_policy=None,
                    project_id=GCP_PROJECT_ID,
                    lifecycle_rules=[
                        {
                            "action": {
                                "type": "SetStorageClass",
                                "storageClass": "NEARLINE",
                            },
                            "condition": {"matchesStorageClass": ["STANDARD"]},
                        },
                        {"action": {"type": "Delete"}, "condition": {"age": 365}},
                    ],
                )
            ]

            check = cloudstorage_bucket_lifecycle_management_enabled()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "PASS"
            assert (
                result[0].status_extended
                == f"Bucket {cloudstorage_client.buckets[0].name} has lifecycle management enabled with 2 valid rule(s)."
            )
            assert result[0].resource_id == "transition-delete"
            assert result[0].resource_name == "transition-delete"
            assert result[0].location == GCP_US_CENTER1_LOCATION
            assert result[0].project_id == GCP_PROJECT_ID

    def test_bucket_with_invalid_lifecycle_rules(self):
        cloudstorage_client = mock.MagicMock()

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
                new=cloudstorage_client,
            ),
        ):
            from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
                cloudstorage_bucket_lifecycle_management_enabled,
            )
            from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
                Bucket,
            )

            cloudstorage_client.project_ids = [GCP_PROJECT_ID]
            cloudstorage_client.region = GCP_US_CENTER1_LOCATION

            cloudstorage_client.buckets = [
                Bucket(
                    name="invalid-rules",
                    id="invalid-rules",
                    region=GCP_US_CENTER1_LOCATION,
                    uniform_bucket_level_access=True,
                    public=False,
                    retention_policy=None,
                    project_id=GCP_PROJECT_ID,
                    lifecycle_rules=[
                        {"action": {}, "condition": {"age": 30}},
                        {"action": {"type": "Delete"}, "condition": {}},
                    ],
                )
            ]

            check = cloudstorage_bucket_lifecycle_management_enabled()
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == f"Bucket {cloudstorage_client.buckets[0].name} has lifecycle rules configured but none are valid."
            )
            assert result[0].resource_id == "invalid-rules"
            assert result[0].resource_name == "invalid-rules"
            assert result[0].location == GCP_US_CENTER1_LOCATION
            assert result[0].project_id == GCP_PROJECT_ID
@@ -12,6 +12,7 @@ All notable changes to the **Prowler UI** are documented in this file.
- React Compiler support for automatic optimization [(#8748)](https://github.com/prowler-cloud/prowler/pull/8748)
- Turbopack support for faster development builds [(#8748)](https://github.com/prowler-cloud/prowler/pull/8748)
- Add compliance name in compliance detail view [(#8775)](https://github.com/prowler-cloud/prowler/pull/8775)
- PDF reporting for Prowler ThreatScore [(#8867)](https://github.com/prowler-cloud/prowler/pull/8867)
- Support C5 compliance framework for the AWS provider [(#8830)](https://github.com/prowler-cloud/prowler/pull/8830)
- API key management in user profile [(#8308)](https://github.com/prowler-cloud/prowler/pull/8308)
- Refresh access token error handling [(#8864)](https://github.com/prowler-cloud/prowler/pull/8864)
@@ -268,3 +268,45 @@ export const getComplianceCsv = async (
    };
  }
};

export const getThreatScorePdf = async (scanId: string) => {
  const headers = await getAuthHeaders({ contentType: false });

  const url = new URL(`${apiBaseUrl}/scans/${scanId}/threatscore`);

  try {
    const response = await fetch(url.toString(), { headers });

    if (response.status === 202) {
      const json = await response.json();
      const taskId = json?.data?.id;
      const state = json?.data?.attributes?.state;
      return {
        pending: true,
        state,
        taskId,
      };
    }

    if (!response.ok) {
      const errorData = await response.json();
      throw new Error(
        errorData?.errors?.detail ||
          "Unable to retrieve ThreatScore PDF report. Contact support if the issue continues.",
      );
    }

    const arrayBuffer = await response.arrayBuffer();
    const base64 = Buffer.from(arrayBuffer).toString("base64");

    return {
      success: true,
      data: base64,
      filename: `scan-${scanId}-threatscore.pdf`,
    };
  } catch (error) {
    return {
      error: getErrorMessage(error),
    };
  }
};
@@ -29,6 +29,8 @@ import {
 } from "@/types/compliance";
 import { ScanEntity } from "@/types/scans";
 
+import { ThreatScoreDownloadButton } from "./threatscore-download-button";
+
 interface ComplianceDetailSearchParams {
   complianceId: string;
   version?: string;
@@ -143,13 +145,24 @@ export default async function ComplianceDetail({
           <Spacer y={8} />
         </div>
       )}
-      <ComplianceHeader
-        scans={[]}
-        uniqueRegions={uniqueRegions}
-        showSearch={false}
-        framework={compliancetitle}
-        showProviders={false}
-      />
+      <div className="flex items-start justify-between gap-4">
+        <div className="flex-1">
+          <ComplianceHeader
+            scans={[]}
+            uniqueRegions={uniqueRegions}
+            showSearch={false}
+            framework={compliancetitle}
+            showProviders={false}
+          />
+        </div>
+        {attributesData?.data?.[0]?.attributes?.framework ===
+          "ProwlerThreatScore" &&
+          selectedScanId && (
+            <div className="flex-shrink-0 pt-1">
+              <ThreatScoreDownloadButton scanId={selectedScanId} />
+            </div>
+          )}
+      </div>
 
       <Suspense
         key={searchParamsKey}