Compare commits


7 Commits

105 changed files with 1753 additions and 12335 deletions
+9 -23
@@ -1,33 +1,19 @@
name: 'Tools: TruffleHog'
on:
push:
branches:
- 'master'
- 'v5.*'
pull_request:
branches:
- 'master'
- 'v5.*'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
on: pull_request
jobs:
scan-secrets:
trufflehog:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
steps:
- name: Checkout repository
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
- name: Scan for secrets with TruffleHog
uses: trufflesecurity/trufflehog@ad6fc8fb446b8fafbf7ea8193d2d6bfd42f45690 # v3.90.11
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@466da5b0bb161144f6afca9afe5d57975828c410 # v3.90.8
with:
extra_args: '--results=verified,unknown'
path: ./
base: ${{ github.event.repository.default_branch }}
head: HEAD
extra_args: --only-verified
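One side of this hunk carries a concurrency block that the simpler variant drops. A minimal sketch of what that block does in any workflow (names taken from the diff; the behavior described is standard GitHub Actions semantics):

    # Allow only one run per workflow and ref; a newer run cancels the in-flight one
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true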
+8 -20
@@ -1,29 +1,17 @@
name: 'Tools: PR Labeler'
name: Prowler - PR Labeler
on:
pull_request_target:
branches:
- 'master'
- 'v5.*'
types:
- 'opened'
- 'reopened'
- 'synchronize'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
pull_request_target:
branches:
- "master"
- "v3"
- "v4.*"
jobs:
labeler:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- name: Apply labels to PR
uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
with:
sync-labels: true
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
+17 -14
@@ -3,13 +3,21 @@ name: 'MCP: Container Build and Push'
on:
push:
branches:
- 'master'
- "master"
paths:
- 'mcp_server/**'
- '.github/workflows/mcp-container-build-push.yml'
- "mcp_server/**"
- ".github/workflows/mcp-container-build-push.yml"
# Uncomment to test this workflow on PRs
# pull_request:
# branches:
# - "master"
# paths:
# - "mcp_server/**"
# - ".github/workflows/mcp-container-build-push.yml"
release:
types:
- 'published'
types: [published]
permissions:
contents: read
@@ -33,7 +41,6 @@ jobs:
setup:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 5
outputs:
short-sha: ${{ steps.set-short-sha.outputs.short-sha }}
steps:
@@ -44,12 +51,8 @@ jobs:
container-build-push:
needs: setup
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
packages: write
steps:
- name: Checkout repository
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Login to DockerHub
@@ -61,7 +64,7 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build and push MCP container (latest)
- name: Build and push container (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
@@ -80,7 +83,7 @@ jobs:
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push MCP container (release)
- name: Build and push container (release)
if: github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
@@ -100,7 +103,7 @@ jobs:
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Trigger MCP deployment
- name: Trigger deployment
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
with:
-103
@@ -1,103 +0,0 @@
name: 'Tools: Check Changelog'
on:
pull_request:
types:
- 'opened'
- 'synchronize'
- 'reopened'
- 'labeled'
- 'unlabeled'
branches:
- 'master'
- 'v5.*'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
jobs:
check-changelog:
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
env:
MONITORED_FOLDERS: 'api ui prowler mcp_server'
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
api/**
ui/**
prowler/**
mcp_server/**
- name: Check for folder changes and changelog presence
id: check-folders
run: |
missing_changelogs=""
# Check api folder
if [[ "${{ steps.changed-files.outputs.any_changed }}" == "true" ]]; then
for folder in $MONITORED_FOLDERS; do
# Get files changed in this folder
changed_in_folder=$(echo "${{ steps.changed-files.outputs.all_changed_files }}" | tr ' ' '\n' | grep "^${folder}/" || true)
if [ -n "$changed_in_folder" ]; then
echo "Detected changes in ${folder}/"
# Check if CHANGELOG.md was updated
if ! echo "$changed_in_folder" | grep -q "^${folder}/CHANGELOG.md$"; then
echo "No changelog update found for ${folder}/"
missing_changelogs="${missing_changelogs}- \`${folder}\`"$'\n'
fi
fi
done
fi
{
echo "missing_changelogs<<EOF"
echo -e "${missing_changelogs}"
echo "EOF"
} >> $GITHUB_OUTPUT
- name: Find existing changelog comment
if: github.event.pull_request.head.repo.full_name == github.repository
id: find-comment
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- changelog-check -->'
- name: Update PR comment with changelog status
if: github.event.pull_request.head.repo.full_name == github.repository
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.find-comment.outputs.comment-id }}
edit-mode: replace
body: |
<!-- changelog-check -->
${{ steps.check-folders.outputs.missing_changelogs != '' && format('⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**
{0}
Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.', steps.check-folders.outputs.missing_changelogs) || '✅ All necessary `CHANGELOG.md` files have been updated.' }}
- name: Fail if changelog is missing
if: steps.check-folders.outputs.missing_changelogs != ''
run: |
echo "::error::Missing changelog updates in some folders"
exit 1
+104 -52
@@ -1,40 +1,42 @@
name: 'Tools: PR Conflict Checker'
name: Prowler - PR Conflict Checker
on:
pull_request_target:
pull_request:
types:
- 'opened'
- 'synchronize'
- 'reopened'
- opened
- synchronize
- reopened
branches:
- 'master'
- 'v5.*'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: true
- "master"
- "v5.*"
# Leaving this commented until we find a way to run it for forks but in Prowler's context
# pull_request_target:
# types:
# - opened
# - synchronize
# - reopened
# branches:
# - "master"
# - "v5.*"
jobs:
check-conflicts:
conflict-checker:
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
issues: write
steps:
- name: Checkout PR head
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: '**'
files: |
**
- name: Check for conflict markers
id: conflict-check
@@ -49,10 +51,10 @@ jobs:
if [ -f "$file" ]; then
echo "Checking file: $file"
# Look for conflict markers (more precise regex)
if grep -qE '^(<<<<<<<|=======|>>>>>>>)' "$file" 2>/dev/null; then
# Look for conflict markers
if grep -l "^<<<<<<<\|^=======\|^>>>>>>>" "$file" 2>/dev/null; then
echo "Conflict markers found in: $file"
CONFLICT_FILES="${CONFLICT_FILES}- \`${file}\`"$'\n'
CONFLICT_FILES="$CONFLICT_FILES$file "
HAS_CONFLICTS=true
fi
fi
@@ -60,64 +62,114 @@ jobs:
if [ "$HAS_CONFLICTS" = true ]; then
echo "has_conflicts=true" >> $GITHUB_OUTPUT
{
echo "conflict_files<<EOF"
echo "$CONFLICT_FILES"
echo "EOF"
} >> $GITHUB_OUTPUT
echo "Conflict markers detected"
echo "conflict_files=$CONFLICT_FILES" >> $GITHUB_OUTPUT
echo "Conflict markers detected in files: $CONFLICT_FILES"
else
echo "has_conflicts=false" >> $GITHUB_OUTPUT
echo "No conflict markers found in changed files"
fi
- name: Manage conflict label
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
HAS_CONFLICTS: ${{ steps.conflict-check.outputs.has_conflicts }}
run: |
LABEL_NAME="has-conflicts"
- name: Add conflict label
if: steps.conflict-check.outputs.has_conflicts == 'true'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
script: |
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
});
# Add or remove label based on conflict status
if [ "$HAS_CONFLICTS" = "true" ]; then
echo "Adding conflict label to PR #${PR_NUMBER}..."
gh pr edit "$PR_NUMBER" --add-label "$LABEL_NAME" --repo ${{ github.repository }} || true
else
echo "Removing conflict label from PR #${PR_NUMBER}..."
gh pr edit "$PR_NUMBER" --remove-label "$LABEL_NAME" --repo ${{ github.repository }} || true
fi
const hasConflictLabel = labels.some(label => label.name === 'has-conflicts');
- name: Find existing comment
if (!hasConflictLabel) {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
labels: ['has-conflicts']
});
console.log('Added has-conflicts label');
} else {
console.log('has-conflicts label already exists');
}
- name: Remove conflict label
if: steps.conflict-check.outputs.has_conflicts == 'false'
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
script: |
try {
await github.rest.issues.removeLabel({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
name: 'has-conflicts'
});
console.log('Removed has-conflicts label');
} catch (error) {
if (error.status === 404) {
console.log('has-conflicts label was not present');
} else {
throw error;
}
}
- name: Find existing conflict comment
if: steps.conflict-check.outputs.has_conflicts == 'true'
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
id: find-comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- conflict-checker-comment -->'
body-regex: '(⚠️ \*\*Conflict Markers Detected\*\*|✅ \*\*Conflict Markers Resolved\*\*)'
- name: Create or update comment
- name: Create or update conflict comment
if: steps.conflict-check.outputs.has_conflicts == 'true'
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
comment-id: ${{ steps.find-comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
<!-- conflict-checker-comment -->
${{ steps.conflict-check.outputs.has_conflicts == 'true' && '⚠️ **Conflict Markers Detected**' || '✅ **Conflict Markers Resolved**' }}
⚠️ **Conflict Markers Detected**
${{ steps.conflict-check.outputs.has_conflicts == 'true' && format('This pull request contains unresolved conflict markers in the following files:
{0}
This pull request contains unresolved conflict markers in the following files:
```
${{ steps.conflict-check.outputs.conflict_files }}
```
Please resolve these conflicts by:
1. Locating the conflict markers: `<<<<<<<`, `=======`, and `>>>>>>>`
2. Manually editing the files to resolve the conflicts
3. Removing all conflict markers
4. Committing and pushing the changes', steps.conflict-check.outputs.conflict_files) || 'All conflict markers have been successfully resolved in this pull request.' }}
4. Committing and pushing the changes
- name: Find existing conflict comment when resolved
if: steps.conflict-check.outputs.has_conflicts == 'false'
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad # v4.0.0
id: find-resolved-comment
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-regex: '(⚠️ \*\*Conflict Markers Detected\*\*|✅ \*\*Conflict Markers Resolved\*\*)'
- name: Update comment when conflicts resolved
if: steps.conflict-check.outputs.has_conflicts == 'false' && steps.find-resolved-comment.outputs.comment-id != ''
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
comment-id: ${{ steps.find-resolved-comment.outputs.comment-id }}
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
✅ **Conflict Markers Resolved**
All conflict markers have been successfully resolved in this pull request.
- name: Fail workflow if conflicts detected
if: steps.conflict-check.outputs.has_conflicts == 'true'
run: |
echo "::error::Workflow failed due to conflict markers detected in the PR"
echo "::error::Workflow failed due to conflict markers in files: ${{ steps.conflict-check.outputs.conflict_files }}"
exit 1
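The conflict-check step in this file moves between an escaped grep form and a single extended regex. A local shell sketch of the stricter check, assuming origin/master as the comparison base:

    # Flag unresolved conflict markers in files changed against origin/master (sketch)
    git diff --name-only origin/master...HEAD | while read -r file; do
      [ -f "$file" ] || continue
      if grep -qE '^(<<<<<<<|=======|>>>>>>>)' "$file" 2>/dev/null; then
        echo "Conflict markers found in: $file"
      fi
    done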
@@ -1,6 +1,6 @@
name: 'Tools: Prepare Release'
name: Prowler - Release Preparation
run-name: 'Prepare Release for Prowler ${{ inputs.prowler_version }}'
run-name: Prowler Release Preparation for ${{ inputs.prowler_version }}
on:
workflow_dispatch:
@@ -10,23 +10,18 @@ on:
required: true
type: string
concurrency:
group: ${{ github.workflow }}-${{ inputs.prowler_version }}
cancel-in-progress: false
env:
PROWLER_VERSION: ${{ inputs.prowler_version }}
PROWLER_VERSION: ${{ github.event.inputs.prowler_version }}
jobs:
prepare-release:
if: github.event_name == 'workflow_dispatch' && github.repository == 'prowler-cloud/prowler'
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
- name: Checkout code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
@@ -39,15 +34,15 @@ jobs:
- name: Install Poetry
run: |
python3 -m pip install --user poetry==2.1.1
python3 -m pip install --user poetry
echo "$HOME/.local/bin" >> $GITHUB_PATH
- name: Configure Git
run: |
git config --global user.name 'prowler-bot'
git config --global user.email '179230569+prowler-bot@users.noreply.github.com'
git config --global user.name "prowler-bot"
git config --global user.email "179230569+prowler-bot@users.noreply.github.com"
- name: Parse version and read changelogs
- name: Parse version and determine branch
run: |
# Validate version format (reusing pattern from sdk-bump-version.yml)
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
@@ -124,7 +119,7 @@ jobs:
exit 1
fi
- name: Extract and combine changelog entries
- name: Extract changelog entries
run: |
set -e
@@ -250,7 +245,7 @@ jobs:
echo "Combined changelog preview:"
cat combined_changelog.md
- name: Checkout release branch for patch release
- name: Checkout existing branch for patch release
if: ${{ env.PATCH_VERSION != '0' }}
run: |
echo "Patch release detected, checking out existing branch $BRANCH_NAME..."
@@ -265,7 +260,7 @@ jobs:
exit 1
fi
- name: Verify SDK version in pyproject.toml
- name: Verify version in pyproject.toml
run: |
CURRENT_VERSION=$(grep '^version = ' pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
@@ -275,7 +270,7 @@ jobs:
fi
echo "✓ pyproject.toml version: $CURRENT_VERSION"
- name: Verify SDK version in prowler/config/config.py
- name: Verify version in prowler/config/config.py
run: |
CURRENT_VERSION=$(grep '^prowler_version = ' prowler/config/config.py | sed -E 's/prowler_version = "([^"]+)"/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
@@ -285,7 +280,7 @@ jobs:
fi
echo "✓ prowler/config/config.py version: $CURRENT_VERSION"
- name: Verify API version in api/pyproject.toml
- name: Verify version in api/pyproject.toml
if: ${{ env.HAS_API_CHANGES == 'true' }}
run: |
CURRENT_API_VERSION=$(grep '^version = ' api/pyproject.toml | sed -E 's/version = "([^"]+)"/\1/' | tr -d '[:space:]')
@@ -296,7 +291,7 @@ jobs:
fi
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
- name: Verify API prowler dependency in api/pyproject.toml
- name: Verify prowler dependency in api/pyproject.toml
if: ${{ env.PATCH_VERSION != '0' && env.HAS_API_CHANGES == 'true' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
@@ -307,7 +302,7 @@ jobs:
fi
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
- name: Verify API version in api/src/backend/api/v1/views.py
- name: Verify version in api/src/backend/api/v1/views.py
if: ${{ env.HAS_API_CHANGES == 'true' }}
run: |
CURRENT_API_VERSION=$(grep 'spectacular_settings.VERSION = ' api/src/backend/api/v1/views.py | sed -E 's/.*spectacular_settings.VERSION = "([^"]+)".*/\1/' | tr -d '[:space:]')
@@ -318,7 +313,7 @@ jobs:
fi
echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"
- name: Checkout release branch for minor release
- name: Checkout existing release branch for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |
echo "Minor release detected (patch = 0), checking out existing branch $BRANCH_NAME..."
@@ -330,7 +325,7 @@ jobs:
exit 1
fi
- name: Update API prowler dependency for minor release
- name: Prepare prowler dependency update for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
@@ -367,7 +362,7 @@ jobs:
echo "✓ Prepared prowler dependency update to: $UPDATED_PROWLER_REF"
- name: Create PR for API dependency update
- name: Create Pull Request against release branch
if: ${{ env.PATCH_VERSION == '0' }}
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
@@ -0,0 +1,77 @@
name: Prowler - Check Changelog
on:
pull_request:
types: [opened, synchronize, reopened, labeled, unlabeled]
jobs:
check-changelog:
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
pull-requests: write
env:
MONITORED_FOLDERS: "api ui prowler mcp_server"
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
- name: Get list of changed files
id: changed_files
run: |
git fetch origin ${{ github.base_ref }}
git diff --name-only origin/${{ github.base_ref }}...HEAD > changed_files.txt
cat changed_files.txt
- name: Check for folder changes and changelog presence
id: check_folders
run: |
missing_changelogs=""
for folder in $MONITORED_FOLDERS; do
if grep -q "^${folder}/" changed_files.txt; then
echo "Detected changes in ${folder}/"
if ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
echo "No changelog update found for ${folder}/"
missing_changelogs="${missing_changelogs}- \`${folder}\`\n"
fi
fi
done
echo "missing_changelogs<<EOF" >> $GITHUB_OUTPUT
echo -e "${missing_changelogs}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Find existing changelog comment
if: github.event.pull_request.head.repo.full_name == github.repository
id: find_comment
uses: peter-evans/find-comment@b30e6a3c0ed37e7c023ccd3f1db5c6c0b0c23aad #v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '<!-- changelog-check -->'
- name: Update PR comment with changelog status
if: github.event.pull_request.head.repo.full_name == github.repository
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
comment-id: ${{ steps.find_comment.outputs.comment-id }}
edit-mode: replace
body: |
<!-- changelog-check -->
${{ steps.check_folders.outputs.missing_changelogs != '' && format('⚠️ **Changes detected in the following folders without a corresponding update to the `CHANGELOG.md`:**
{0}
Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.', steps.check_folders.outputs.missing_changelogs) || '✅ All necessary `CHANGELOG.md` files have been updated. Great job! 🎉' }}
- name: Fail if changelog is missing
if: steps.check_folders.outputs.missing_changelogs != ''
run: |
echo "ERROR: Missing changelog updates in some folders."
exit 1
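The changelog check above can be reproduced locally with a few lines of shell; a sketch assuming origin/master as the base branch and the same MONITORED_FOLDERS list:

    MONITORED_FOLDERS="api ui prowler mcp_server"
    git diff --name-only origin/master...HEAD > changed_files.txt
    for folder in $MONITORED_FOLDERS; do
      # A change under the folder without a matching CHANGELOG.md update is reported
      if grep -q "^${folder}/" changed_files.txt && ! grep -q "^${folder}/CHANGELOG.md$" changed_files.txt; then
        echo "Missing CHANGELOG.md update for ${folder}/"
      fi
    done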
@@ -1,31 +1,27 @@
name: 'Tools: PR Merged'
name: Prowler - Merged Pull Request
on:
pull_request_target:
branches:
- 'master'
types:
- 'closed'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number }}
cancel-in-progress: false
branches: ['master']
types: ['closed']
jobs:
trigger-cloud-pull-request:
name: Trigger Cloud Pull Request
if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 10
permissions:
contents: read
steps:
- name: Calculate short commit SHA
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.merge_commit_sha }}
- name: Set short git commit SHA
id: vars
run: |
SHORT_SHA="${{ github.event.pull_request.merge_commit_sha }}"
echo "SHORT_SHA=${SHORT_SHA::7}" >> $GITHUB_ENV
shortSha=$(git rev-parse --short ${{ github.event.pull_request.merge_commit_sha }})
echo "SHORT_SHA=${shortSha}" >> $GITHUB_ENV
- name: Trigger Cloud repository pull request
- name: Trigger pull request
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
@@ -35,12 +31,8 @@ jobs:
{
"PROWLER_COMMIT_SHA": "${{ github.event.pull_request.merge_commit_sha }}",
"PROWLER_COMMIT_SHORT_SHA": "${{ env.SHORT_SHA }}",
"PROWLER_PR_NUMBER": "${{ github.event.pull_request.number }}",
"PROWLER_PR_TITLE": ${{ toJson(github.event.pull_request.title) }},
"PROWLER_PR_LABELS": ${{ toJson(github.event.pull_request.labels.*.name) }},
"PROWLER_PR_BODY": ${{ toJson(github.event.pull_request.body) }},
"PROWLER_PR_URL": ${{ toJson(github.event.pull_request.html_url) }},
"PROWLER_PR_MERGED_BY": "${{ github.event.pull_request.merged_by.login }}",
"PROWLER_PR_BASE_BRANCH": "${{ github.event.pull_request.base.ref }}",
"PROWLER_PR_HEAD_BRANCH": "${{ github.event.pull_request.head.ref }}"
"PROWLER_PR_URL": ${{ toJson(github.event.pull_request.html_url) }}
}
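Both sides of the short-SHA hunk derive the seven-character prefix placed in the dispatch payload; a sketch of the two approaches, using a hypothetical merge commit SHA:

    MERGE_SHA="0123456789abcdef0123456789abcdef01234567"   # hypothetical value
    echo "${MERGE_SHA::7}"               # bash substring: always exactly 7 characters, no checkout needed
    git rev-parse --short "$MERGE_SHA"   # needs the commit locally; returns the shortest unique abbreviation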
+243 -460
@@ -1,503 +1,286 @@
name: 'SDK: Pull Request'
name: SDK - Pull Request
on:
push:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- 'poetry.lock'
- 'Dockerfile'
- '.github/workflows/sdk-pull-request.yml'
- '!prowler/CHANGELOG.md'
- '!docs/**'
- '!permissions/**'
- '!api/**'
- '!ui/**'
- '!dashboard/**'
- '!mcp_server/**'
- '!README.md'
- '!mkdocs.yml'
- '!.backportrc.json'
- '!.env'
- '!docker-compose*'
- '!examples/**'
- '!.gitignore'
- '!contrib/**'
- "master"
- "v3"
- "v4.*"
- "v5.*"
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- 'poetry.lock'
- 'Dockerfile'
- '.github/workflows/sdk-pull-request.yml'
- '!prowler/CHANGELOG.md'
- '!docs/**'
- '!permissions/**'
- '!api/**'
- '!ui/**'
- '!dashboard/**'
- '!mcp_server/**'
- '!README.md'
- '!mkdocs.yml'
- '!.backportrc.json'
- '!.env'
- '!docker-compose*'
- '!examples/**'
- '!.gitignore'
- '!contrib/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
IMAGE_NAME: prowler
- "master"
- "v3"
- "v4.*"
- "v5.*"
jobs:
code-quality:
if: github.repository == 'prowler-cloud/prowler'
build:
runs-on: ubuntu-latest
timeout-minutes: 20
permissions:
contents: read
strategy:
matrix:
python-version:
- '3.9'
- '3.10'
- '3.11'
- '3.12'
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Poetry
run: pipx install poetry==2.1.1
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: ./**
files_ignore: |
.github/**
docs/**
permissions/**
api/**
ui/**
prowler/CHANGELOG.md
README.md
mkdocs.yml
.backportrc.json
.env
docker-compose*
examples/**
.gitignore
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
python -m pip install --upgrade pip
pipx install poetry==2.1.1
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'
cache: "poetry"
- name: Install dependencies
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry install --no-root
poetry run pip list
VERSION=$(curl --silent "https://api.github.com/repos/hadolint/hadolint/releases/latest" | \
grep '"tag_name":' | \
sed -E 's/.*"v([^"]+)".*/\1/' \
) && curl -L -o /tmp/hadolint "https://github.com/hadolint/hadolint/releases/download/v${VERSION}/hadolint-Linux-x86_64" \
&& chmod +x /tmp/hadolint
- name: Check Poetry lock file
run: poetry check --lock
- name: Poetry check
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry check --lock
- name: Lint with flake8
run: poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run flake8 . --ignore=E266,W503,E203,E501,W605,E128 --exclude contrib,ui,api
- name: Check format with black
run: poetry run black --exclude api ui --check .
- name: Checking format with black
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run black --exclude api ui --check .
- name: Lint with pylint
run: poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pylint --disable=W,C,R,E -j 0 -rn -sn prowler/
security-scans:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
- name: Bandit
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Safety
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run safety check --ignore 70612 -r pyproject.toml
- name: Install Poetry
run: pipx install poetry==2.1.1
- name: Vulture
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
- name: Set up Python 3.12
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: '3.12'
cache: 'poetry'
- name: Install dependencies
run: poetry install --no-root
- name: Security scan with Bandit
run: poetry run bandit -q -lll -x '*_test.py,./contrib/,./api/,./ui' -r .
- name: Security scan with Safety
run: poetry run safety check --ignore 70612 -r pyproject.toml
- name: Dead code detection with Vulture
run: poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
tests:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 120
permissions:
contents: read
strategy:
matrix:
python-version:
- '3.9'
- '3.10'
- '3.11'
- '3.12'
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Install Poetry
run: pipx install poetry==2.1.1
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'
- name: Install dependencies
run: poetry install --no-root
# AWS Provider
- name: Check if AWS files changed
id: changed-aws
- name: Dockerfile - Check if Dockerfile has changed
id: dockerfile-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/aws/**
./tests/**/aws/**
Dockerfile
- name: Hadolint
if: steps.dockerfile-changed-files.outputs.any_changed == 'true'
run: |
/tmp/hadolint Dockerfile --ignore=DL3013
# Test AWS
- name: AWS - Check if any file has changed
id: aws-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/aws/**
./tests/providers/aws/**
./poetry.lock
- name: Run AWS tests
if: steps.changed-aws.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
- name: AWS - Test
if: steps.aws-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
- name: Upload AWS coverage to Codecov
if: steps.changed-aws.outputs.any_changed == 'true'
# Test Azure
- name: Azure - Check if any file has changed
id: azure-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/azure/**
./tests/providers/azure/**
./poetry.lock
- name: Azure - Test
if: steps.azure-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure
# Test GCP
- name: GCP - Check if any file has changed
id: gcp-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/gcp/**
./tests/providers/gcp/**
./poetry.lock
- name: GCP - Test
if: steps.gcp-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp
# Test Kubernetes
- name: Kubernetes - Check if any file has changed
id: kubernetes-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/kubernetes/**
./tests/providers/kubernetes/**
./poetry.lock
- name: Kubernetes - Test
if: steps.kubernetes-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes
# Test GitHub
- name: GitHub - Check if any file has changed
id: github-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/github/**
./tests/providers/github/**
./poetry.lock
- name: GitHub - Test
if: steps.github-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/github --cov-report=xml:github_coverage.xml tests/providers/github
# Test NHN
- name: NHN - Check if any file has changed
id: nhn-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/nhn/**
./tests/providers/nhn/**
./poetry.lock
- name: NHN - Test
if: steps.nhn-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn
# Test M365
- name: M365 - Check if any file has changed
id: m365-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/m365/**
./tests/providers/m365/**
./poetry.lock
- name: M365 - Test
if: steps.m365-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
# Test IaC
- name: IaC - Check if any file has changed
id: iac-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/iac/**
./tests/providers/iac/**
./poetry.lock
- name: IaC - Test
if: steps.iac-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
# Test MongoDB Atlas
- name: MongoDB Atlas - Check if any file has changed
id: mongodb-atlas-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/mongodbatlas/**
./tests/providers/mongodbatlas/**
./poetry.lock
- name: MongoDB Atlas - Test
if: steps.mongodb-atlas-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/mongodbatlas --cov-report=xml:mongodb_atlas_coverage.xml tests/providers/mongodbatlas
# Test OCI
- name: OCI - Check if any file has changed
id: oci-changed-files
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/providers/oraclecloud/**
./tests/providers/oraclecloud/**
./poetry.lock
- name: OCI - Test
if: steps.oci-changed-files.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/providers/oraclecloud --cov-report=xml:oci_coverage.xml tests/providers/oraclecloud
# Common Tests
- name: Lib - Test
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib
- name: Config - Test
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config
# Codecov
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-aws
files: ./aws_coverage.xml
# Azure Provider
- name: Check if Azure files changed
id: changed-azure
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/azure/**
./tests/**/azure/**
./poetry.lock
- name: Run Azure tests
if: steps.changed-azure.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/azure --cov-report=xml:azure_coverage.xml tests/providers/azure
- name: Upload Azure coverage to Codecov
if: steps.changed-azure.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-azure
files: ./azure_coverage.xml
# GCP Provider
- name: Check if GCP files changed
id: changed-gcp
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/gcp/**
./tests/**/gcp/**
./poetry.lock
- name: Run GCP tests
if: steps.changed-gcp.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/gcp --cov-report=xml:gcp_coverage.xml tests/providers/gcp
- name: Upload GCP coverage to Codecov
if: steps.changed-gcp.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-gcp
files: ./gcp_coverage.xml
# Kubernetes Provider
- name: Check if Kubernetes files changed
id: changed-kubernetes
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/kubernetes/**
./tests/**/kubernetes/**
./poetry.lock
- name: Run Kubernetes tests
if: steps.changed-kubernetes.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/kubernetes --cov-report=xml:kubernetes_coverage.xml tests/providers/kubernetes
- name: Upload Kubernetes coverage to Codecov
if: steps.changed-kubernetes.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-kubernetes
files: ./kubernetes_coverage.xml
# GitHub Provider
- name: Check if GitHub files changed
id: changed-github
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/github/**
./tests/**/github/**
./poetry.lock
- name: Run GitHub tests
if: steps.changed-github.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/github --cov-report=xml:github_coverage.xml tests/providers/github
- name: Upload GitHub coverage to Codecov
if: steps.changed-github.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-github
files: ./github_coverage.xml
# NHN Provider
- name: Check if NHN files changed
id: changed-nhn
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/nhn/**
./tests/**/nhn/**
./poetry.lock
- name: Run NHN tests
if: steps.changed-nhn.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/nhn --cov-report=xml:nhn_coverage.xml tests/providers/nhn
- name: Upload NHN coverage to Codecov
if: steps.changed-nhn.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-nhn
files: ./nhn_coverage.xml
# M365 Provider
- name: Check if M365 files changed
id: changed-m365
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/m365/**
./tests/**/m365/**
./poetry.lock
- name: Run M365 tests
if: steps.changed-m365.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/m365 --cov-report=xml:m365_coverage.xml tests/providers/m365
- name: Upload M365 coverage to Codecov
if: steps.changed-m365.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-m365
files: ./m365_coverage.xml
# IaC Provider
- name: Check if IaC files changed
id: changed-iac
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/iac/**
./tests/**/iac/**
./poetry.lock
- name: Run IaC tests
if: steps.changed-iac.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/iac --cov-report=xml:iac_coverage.xml tests/providers/iac
- name: Upload IaC coverage to Codecov
if: steps.changed-iac.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-iac
files: ./iac_coverage.xml
# MongoDB Atlas Provider
- name: Check if MongoDB Atlas files changed
id: changed-mongodbatlas
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/mongodbatlas/**
./tests/**/mongodbatlas/**
./poetry.lock
- name: Run MongoDB Atlas tests
if: steps.changed-mongodbatlas.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/mongodbatlas --cov-report=xml:mongodbatlas_coverage.xml tests/providers/mongodbatlas
- name: Upload MongoDB Atlas coverage to Codecov
if: steps.changed-mongodbatlas.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-mongodbatlas
files: ./mongodbatlas_coverage.xml
# OCI Provider
- name: Check if OCI files changed
id: changed-oraclecloud
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/**/oraclecloud/**
./tests/**/oraclecloud/**
./poetry.lock
- name: Run OCI tests
if: steps.changed-oraclecloud.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/providers/oraclecloud --cov-report=xml:oraclecloud_coverage.xml tests/providers/oraclecloud
- name: Upload OCI coverage to Codecov
if: steps.changed-oraclecloud.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-oraclecloud
files: ./oraclecloud_coverage.xml
# Lib
- name: Check if Lib files changed
id: changed-lib
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/lib/**
./tests/lib/**
./poetry.lock
- name: Run Lib tests
if: steps.changed-lib.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/lib --cov-report=xml:lib_coverage.xml tests/lib
- name: Upload Lib coverage to Codecov
if: steps.changed-lib.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-lib
files: ./lib_coverage.xml
# Config
- name: Check if Config files changed
id: changed-config
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: |
./prowler/config/**
./tests/config/**
./poetry.lock
- name: Run Config tests
if: steps.changed-config.outputs.any_changed == 'true'
run: poetry run pytest -n auto --cov=./prowler/config --cov-report=xml:config_coverage.xml tests/config
- name: Upload Config coverage to Codecov
if: steps.changed-config.outputs.any_changed == 'true'
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
flags: prowler-py${{ matrix.python-version }}-config
files: ./config_coverage.xml
dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 10
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Check if Dockerfile changed
id: dockerfile-changed
uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
with:
files: Dockerfile
- name: Lint Dockerfile with Hadolint
if: steps.dockerfile-changed.outputs.any_changed == 'true'
uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 # v3.3.0
with:
dockerfile: Dockerfile
ignore: DL3013
container-build-and-scan:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
security-events: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build SDK container
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
push: false
load: true
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Scan SDK container with Trivy
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}
fail-on-critical: 'false'
severity: 'CRITICAL'
flags: prowler
files: ./aws_coverage.xml,./azure_coverage.xml,./gcp_coverage.xml,./kubernetes_coverage.xml,./github_coverage.xml,./nhn_coverage.xml,./m365_coverage.xml,./oci_coverage.xml,./lib_coverage.xml,./config_coverage.xml
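Every provider in this workflow, on both sides of the diff, follows the same three-step pattern: detect changed files, run pytest with coverage only when something changed, then upload the report to Codecov. A hedged sketch of that pattern for a hypothetical provider named exampleprovider:

    - name: Check if exampleprovider files changed
      id: changed-exampleprovider
      uses: tj-actions/changed-files@24d32ffd492484c1d75e0c0b894501ddb9d30d62 # v47.0.0
      with:
        files: |
          ./prowler/providers/exampleprovider/**
          ./tests/providers/exampleprovider/**
          ./poetry.lock
    - name: Run exampleprovider tests
      if: steps.changed-exampleprovider.outputs.any_changed == 'true'
      run: poetry run pytest -n auto --cov=./prowler/providers/exampleprovider --cov-report=xml:exampleprovider_coverage.xml tests/providers/exampleprovider
    - name: Upload exampleprovider coverage to Codecov
      if: steps.changed-exampleprovider.outputs.any_changed == 'true'
      uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
      env:
        CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
      with:
        flags: prowler-exampleprovider
        files: ./exampleprovider_coverage.xml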
+28 -78
@@ -1,115 +1,65 @@
name: 'UI: Pull Request'
name: UI - Pull Request
on:
push:
branches:
- 'master'
- 'v5.*'
- "master"
- "v5.*"
paths:
- 'ui/**'
- '.github/workflows/ui-pull-request.yml'
- '!ui/CHANGELOG.md'
- '!ui/README.md'
- ".github/workflows/ui-pull-request.yml"
- "ui/**"
pull_request:
branches:
- 'master'
- 'v5.*'
- master
- "v5.*"
paths:
- 'ui/**'
- '.github/workflows/ui-pull-request.yml'
- '!ui/CHANGELOG.md'
- '!ui/README.md'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
UI_WORKING_DIR: ./ui
IMAGE_NAME: prowler-ui
NODE_VERSION: '20.x'
jobs:
code-quality-and-build:
if: github.repository == 'prowler-cloud/prowler'
test-and-coverage:
runs-on: ubuntu-latest
timeout-minutes: 20
permissions:
contents: read
defaults:
run:
working-directory: ./ui
strategy:
matrix:
os: [ubuntu-latest]
node-version: [20.x]
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Setup Node.js ${{ env.NODE_VERSION }}
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: ${{ env.NODE_VERSION }}
persist-credentials: false
- name: Setup Node.js ${{ matrix.node-version }}
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version: ${{ matrix.node-version }}
cache: 'npm'
cache-dependency-path: './ui/package-lock.json'
- name: Install dependencies
working-directory: ./ui
run: npm ci
- name: Run healthcheck
- name: Run Healthcheck
working-directory: ./ui
run: npm run healthcheck
- name: Build application
- name: Build the application
working-directory: ./ui
run: npm run build
dockerfile-lint:
if: github.repository == 'prowler-cloud/prowler'
test-container-build:
runs-on: ubuntu-latest
timeout-minutes: 10
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Lint Dockerfile with Hadolint
uses: hadolint/hadolint-action@2332a7b74a6de0dda2e2221d575162eba76ba5e5 # v3.3.0
with:
dockerfile: ui/Dockerfile
ignore: DL3018
container-build-and-scan:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 30
permissions:
contents: read
security-events: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build UI container
- name: Build Container
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.UI_WORKING_DIR }}
# Always build using `prod` target
target: prod
push: false
load: true
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ env.IMAGE_NAME }}:latest
outputs: type=docker
build-args: |
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX
- name: Scan UI container with Trivy
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}
fail-on-critical: 'false'
severity: 'CRITICAL'
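Both variants of the container job build the prod target of ui/Dockerfile before scanning it; a local sketch of the equivalent build, with the image tag as an assumed placeholder:

    docker build --target prod -t prowler-ui:local ./ui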
-3
@@ -83,6 +83,3 @@ CLAUDE.md
# MCP Server
mcp_server/prowler_mcp_server/prowler_app/server.py
mcp_server/prowler_mcp_server/prowler_app/utils/schema.yaml
# Compliance report
*.pdf
+1 -9
@@ -46,14 +46,6 @@ help: ## Show this help.
@echo "Prowler Makefile"
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
##@ Build no cache
build-no-cache-dev:
docker compose -f docker-compose-dev.yml build --no-cache api-dev worker-dev worker-beat
##@ Development Environment
run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, and workers
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat
##@ Development Environment
build-and-run-api-dev: build-no-cache-dev run-api-dev
docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat --build
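Either form of the target reduces to a single docker compose invocation, so usage stays the same (sketch):

    make build-and-run-api-dev   # builds and starts api-dev, postgres, valkey, worker-dev and worker-beat from docker-compose-dev.yml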
+1 -2
@@ -2,7 +2,7 @@
All notable changes to the **Prowler API** are documented in this file.
## [1.14.0] (Prowler 5.13.0)
## [1.14.0] (Prowler UNRELEASED)
### Added
- Default JWT keys are generated and stored if they are missing from configuration [(#8655)](https://github.com/prowler-cloud/prowler/pull/8655)
@@ -12,7 +12,6 @@ All notable changes to the **Prowler API** are documented in this file.
- API Key support [(#8805)](https://github.com/prowler-cloud/prowler/pull/8805)
- SAML role mapping protection for single-admin tenants to prevent accidental lockout [(#8882)](https://github.com/prowler-cloud/prowler/pull/8882)
- Support for `passed_findings` and `total_findings` fields in compliance requirement overview for accurate Prowler ThreatScore calculation [(#8582)](https://github.com/prowler-cloud/prowler/pull/8582)
- PDF reporting for Prowler ThreatScore [(#8867)](https://github.com/prowler-cloud/prowler/pull/8867)
- Database read replica support [(#8869)](https://github.com/prowler-cloud/prowler/pull/8869)
- Support Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
- Add `provider_id__in` filter support to findings and findings severity overview endpoints [(#8951)](https://github.com/prowler-cloud/prowler/pull/8951)
+4 -586
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.2.0 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -1164,18 +1164,6 @@ files = [
{file = "charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14"},
]
[[package]]
name = "circuitbreaker"
version = "2.1.3"
description = "Python Circuit Breaker pattern implementation"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "circuitbreaker-2.1.3-py3-none-any.whl", hash = "sha256:87ba6a3ed03fdc7032bc175561c2b04d52ade9d5faf94ca2b035fbdc5e6b1dd1"},
{file = "circuitbreaker-2.1.3.tar.gz", hash = "sha256:1a4baee510f7bea3c91b194dcce7c07805fe96c4423ed5594b75af438531d084"},
]
[[package]]
name = "click"
version = "8.2.1"
@@ -1268,98 +1256,6 @@ files = [
{file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"},
]
[[package]]
name = "contourpy"
version = "1.3.3"
description = "Python library for calculating contours of 2D quadrilateral grids"
optional = false
python-versions = ">=3.11"
groups = ["main"]
files = [
{file = "contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1"},
{file = "contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381"},
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7"},
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1"},
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a"},
{file = "contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db"},
{file = "contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620"},
{file = "contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f"},
{file = "contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff"},
{file = "contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42"},
{file = "contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470"},
{file = "contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb"},
{file = "contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6"},
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7"},
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8"},
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea"},
{file = "contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1"},
{file = "contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7"},
{file = "contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411"},
{file = "contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69"},
{file = "contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b"},
{file = "contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc"},
{file = "contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5"},
{file = "contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1"},
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286"},
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5"},
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67"},
{file = "contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9"},
{file = "contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659"},
{file = "contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7"},
{file = "contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d"},
{file = "contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263"},
{file = "contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9"},
{file = "contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d"},
{file = "contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216"},
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae"},
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20"},
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99"},
{file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b"},
{file = "contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a"},
{file = "contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e"},
{file = "contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3"},
{file = "contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8"},
{file = "contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301"},
{file = "contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a"},
{file = "contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77"},
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5"},
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4"},
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36"},
{file = "contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3"},
{file = "contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b"},
{file = "contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36"},
{file = "contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d"},
{file = "contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd"},
{file = "contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339"},
{file = "contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772"},
{file = "contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77"},
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13"},
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe"},
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f"},
{file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0"},
{file = "contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4"},
{file = "contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f"},
{file = "contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae"},
{file = "contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc"},
{file = "contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989"},
{file = "contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77"},
{file = "contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880"},
]

[package.dependencies]
numpy = ">=1.25"

[package.extras]
bokeh = ["bokeh", "selenium"]
docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"]
mypy = ["bokeh", "contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.17.0)", "types-Pillow"]
test = ["Pillow", "contourpy[test-no-images]", "matplotlib"]
test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"]

[[package]]
name = "coverage"
version = "7.5.4"
@@ -1494,22 +1390,6 @@ ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]

[[package]]
name = "cycler"
version = "0.12.1"
description = "Composable style cycles"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
{file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
]

[package.extras]
docs = ["ipython", "matplotlib", "numpydoc", "sphinx"]
tests = ["pytest", "pytest-cov", "pytest-xdist"]

[[package]]
name = "dash"
version = "3.1.1"
@@ -2240,87 +2120,6 @@ werkzeug = ">=3.1.0"
async = ["asgiref (>=3.2)"]
dotenv = ["python-dotenv"]

[[package]]
name = "fonttools"
version = "4.60.1"
description = "Tools to manipulate font files"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "fonttools-4.60.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9a52f254ce051e196b8fe2af4634c2d2f02c981756c6464dc192f1b6050b4e28"},
{file = "fonttools-4.60.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7420a2696a44650120cdd269a5d2e56a477e2bfa9d95e86229059beb1c19e15"},
{file = "fonttools-4.60.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee0c0b3b35b34f782afc673d503167157094a16f442ace7c6c5e0ca80b08f50c"},
{file = "fonttools-4.60.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:282dafa55f9659e8999110bd8ed422ebe1c8aecd0dc396550b038e6c9a08b8ea"},
{file = "fonttools-4.60.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4ba4bd646e86de16160f0fb72e31c3b9b7d0721c3e5b26b9fa2fc931dfdb2652"},
{file = "fonttools-4.60.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0b0835ed15dd5b40d726bb61c846a688f5b4ce2208ec68779bc81860adb5851a"},
{file = "fonttools-4.60.1-cp310-cp310-win32.whl", hash = "sha256:1525796c3ffe27bb6268ed2a1bb0dcf214d561dfaf04728abf01489eb5339dce"},
{file = "fonttools-4.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:268ecda8ca6cb5c4f044b1fb9b3b376e8cd1b361cef275082429dc4174907038"},
{file = "fonttools-4.60.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7b4c32e232a71f63a5d00259ca3d88345ce2a43295bb049d21061f338124246f"},
{file = "fonttools-4.60.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3630e86c484263eaac71d117085d509cbcf7b18f677906824e4bace598fb70d2"},
{file = "fonttools-4.60.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5c1015318e4fec75dd4943ad5f6a206d9727adf97410d58b7e32ab644a807914"},
{file = "fonttools-4.60.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e6c58beb17380f7c2ea181ea11e7db8c0ceb474c9dd45f48e71e2cb577d146a1"},
{file = "fonttools-4.60.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ec3681a0cb34c255d76dd9d865a55f260164adb9fa02628415cdc2d43ee2c05d"},
{file = "fonttools-4.60.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4b5c37a5f40e4d733d3bbaaef082149bee5a5ea3156a785ff64d949bd1353fa"},
{file = "fonttools-4.60.1-cp311-cp311-win32.whl", hash = "sha256:398447f3d8c0c786cbf1209711e79080a40761eb44b27cdafffb48f52bcec258"},
{file = "fonttools-4.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:d066ea419f719ed87bc2c99a4a4bfd77c2e5949cb724588b9dd58f3fd90b92bf"},
{file = "fonttools-4.60.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7b0c6d57ab00dae9529f3faf187f2254ea0aa1e04215cf2f1a8ec277c96661bc"},
{file = "fonttools-4.60.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:839565cbf14645952d933853e8ade66a463684ed6ed6c9345d0faf1f0e868877"},
{file = "fonttools-4.60.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8177ec9676ea6e1793c8a084a90b65a9f778771998eb919d05db6d4b1c0b114c"},
{file = "fonttools-4.60.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:996a4d1834524adbb423385d5a629b868ef9d774670856c63c9a0408a3063401"},
{file = "fonttools-4.60.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a46b2f450bc79e06ef3b6394f0c68660529ed51692606ad7f953fc2e448bc903"},
{file = "fonttools-4.60.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ec722ee589e89a89f5b7574f5c45604030aa6ae24cb2c751e2707193b466fed"},
{file = "fonttools-4.60.1-cp312-cp312-win32.whl", hash = "sha256:b2cf105cee600d2de04ca3cfa1f74f1127f8455b71dbad02b9da6ec266e116d6"},
{file = "fonttools-4.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:992775c9fbe2cf794786fa0ffca7f09f564ba3499b8fe9f2f80bd7197db60383"},
{file = "fonttools-4.60.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f68576bb4bbf6060c7ab047b1574a1ebe5c50a17de62830079967b211059ebb"},
{file = "fonttools-4.60.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:eedacb5c5d22b7097482fa834bda0dafa3d914a4e829ec83cdea2a01f8c813c4"},
{file = "fonttools-4.60.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b33a7884fabd72bdf5f910d0cf46be50dce86a0362a65cfc746a4168c67eb96c"},
{file = "fonttools-4.60.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2409d5fb7b55fd70f715e6d34e7a6e4f7511b8ad29a49d6df225ee76da76dd77"},
{file = "fonttools-4.60.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c8651e0d4b3bdeda6602b85fdc2abbefc1b41e573ecb37b6779c4ca50753a199"},
{file = "fonttools-4.60.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:145daa14bf24824b677b9357c5e44fd8895c2a8f53596e1b9ea3496081dc692c"},
{file = "fonttools-4.60.1-cp313-cp313-win32.whl", hash = "sha256:2299df884c11162617a66b7c316957d74a18e3758c0274762d2cc87df7bc0272"},
{file = "fonttools-4.60.1-cp313-cp313-win_amd64.whl", hash = "sha256:a3db56f153bd4c5c2b619ab02c5db5192e222150ce5a1bc10f16164714bc39ac"},
{file = "fonttools-4.60.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:a884aef09d45ba1206712c7dbda5829562d3fea7726935d3289d343232ecb0d3"},
{file = "fonttools-4.60.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8a44788d9d91df72d1a5eac49b31aeb887a5f4aab761b4cffc4196c74907ea85"},
{file = "fonttools-4.60.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e852d9dda9f93ad3651ae1e3bb770eac544ec93c3807888798eccddf84596537"},
{file = "fonttools-4.60.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:154cb6ee417e417bf5f7c42fe25858c9140c26f647c7347c06f0cc2d47eff003"},
{file = "fonttools-4.60.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5664fd1a9ea7f244487ac8f10340c4e37664675e8667d6fee420766e0fb3cf08"},
{file = "fonttools-4.60.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:583b7f8e3c49486e4d489ad1deacfb8d5be54a8ef34d6df824f6a171f8511d99"},
{file = "fonttools-4.60.1-cp314-cp314-win32.whl", hash = "sha256:66929e2ea2810c6533a5184f938502cfdaea4bc3efb7130d8cc02e1c1b4108d6"},
{file = "fonttools-4.60.1-cp314-cp314-win_amd64.whl", hash = "sha256:f3d5be054c461d6a2268831f04091dc82753176f6ea06dc6047a5e168265a987"},
{file = "fonttools-4.60.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:b6379e7546ba4ae4b18f8ae2b9bc5960936007a1c0e30b342f662577e8bc3299"},
{file = "fonttools-4.60.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9d0ced62b59e0430b3690dbc5373df1c2aa7585e9a8ce38eff87f0fd993c5b01"},
{file = "fonttools-4.60.1-cp314-cp314t-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:875cb7764708b3132637f6c5fb385b16eeba0f7ac9fa45a69d35e09b47045801"},
{file = "fonttools-4.60.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a184b2ea57b13680ab6d5fbde99ccef152c95c06746cb7718c583abd8f945ccc"},
{file = "fonttools-4.60.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:026290e4ec76583881763fac284aca67365e0be9f13a7fb137257096114cb3bc"},
{file = "fonttools-4.60.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f0e8817c7d1a0c2eedebf57ef9a9896f3ea23324769a9a2061a80fe8852705ed"},
{file = "fonttools-4.60.1-cp314-cp314t-win32.whl", hash = "sha256:1410155d0e764a4615774e5c2c6fc516259fe3eca5882f034eb9bfdbee056259"},
{file = "fonttools-4.60.1-cp314-cp314t-win_amd64.whl", hash = "sha256:022beaea4b73a70295b688f817ddc24ed3e3418b5036ffcd5658141184ef0d0c"},
{file = "fonttools-4.60.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:122e1a8ada290423c493491d002f622b1992b1ab0b488c68e31c413390dc7eb2"},
{file = "fonttools-4.60.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a140761c4ff63d0cb9256ac752f230460ee225ccef4ad8f68affc723c88e2036"},
{file = "fonttools-4.60.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0eae96373e4b7c9e45d099d7a523444e3554360927225c1cdae221a58a45b856"},
{file = "fonttools-4.60.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:596ecaca36367027d525b3b426d8a8208169d09edcf8c7506aceb3a38bfb55c7"},
{file = "fonttools-4.60.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ee06fc57512144d8b0445194c2da9f190f61ad51e230f14836286470c99f854"},
{file = "fonttools-4.60.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b42d86938e8dda1cd9a1a87a6d82f1818eaf933348429653559a458d027446da"},
{file = "fonttools-4.60.1-cp39-cp39-win32.whl", hash = "sha256:8b4eb332f9501cb1cd3d4d099374a1e1306783ff95489a1026bde9eb02ccc34a"},
{file = "fonttools-4.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:7473a8ed9ed09aeaa191301244a5a9dbe46fe0bf54f9d6cd21d83044c3321217"},
{file = "fonttools-4.60.1-py3-none-any.whl", hash = "sha256:906306ac7afe2156fcf0042173d6ebbb05416af70f6b370967b47f8f00103bbb"},
{file = "fonttools-4.60.1.tar.gz", hash = "sha256:ef00af0439ebfee806b25f24c8f92109157ff3fac5731dc7867957812e87b8d9"},
]

[package.extras]
all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0) ; python_version <= \"3.12\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"]
graphite = ["lz4 (>=1.7.4.2)"]
interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""]
lxml = ["lxml (>=4.0)"]
pathops = ["skia-pathops (>=0.5.0)"]
plot = ["matplotlib"]
repacker = ["uharfbuzz (>=0.23.0)"]
symfont = ["sympy"]
type1 = ["xattr ; sys_platform == \"darwin\""]
unicode = ["unicodedata2 (>=15.1.0) ; python_version <= \"3.12\""]
woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"]

[[package]]
name = "freezegun"
version = "1.5.1"
@@ -2988,117 +2787,6 @@ files = [
[package.dependencies]
referencing = ">=0.31.0"

[[package]]
name = "kiwisolver"
version = "1.4.9"
description = "A fast implementation of the Cassowary constraint solver"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "kiwisolver-1.4.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b4b4d74bda2b8ebf4da5bd42af11d02d04428b2c32846e4c2c93219df8a7987b"},
{file = "kiwisolver-1.4.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fb3b8132019ea572f4611d770991000d7f58127560c4889729248eb5852a102f"},
{file = "kiwisolver-1.4.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84fd60810829c27ae375114cd379da1fa65e6918e1da405f356a775d49a62bcf"},
{file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b78efa4c6e804ecdf727e580dbb9cba85624d2e1c6b5cb059c66290063bd99a9"},
{file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d4efec7bcf21671db6a3294ff301d2fc861c31faa3c8740d1a94689234d1b415"},
{file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:90f47e70293fc3688b71271100a1a5453aa9944a81d27ff779c108372cf5567b"},
{file = "kiwisolver-1.4.9-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8fdca1def57a2e88ef339de1737a1449d6dbf5fab184c54a1fca01d541317154"},
{file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cf554f21be770f5111a1690d42313e140355e687e05cf82cb23d0a721a64a48"},
{file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fc1795ac5cd0510207482c3d1d3ed781143383b8cfd36f5c645f3897ce066220"},
{file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ccd09f20ccdbbd341b21a67ab50a119b64a403b09288c27481575105283c1586"},
{file = "kiwisolver-1.4.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:540c7c72324d864406a009d72f5d6856f49693db95d1fbb46cf86febef873634"},
{file = "kiwisolver-1.4.9-cp310-cp310-win_amd64.whl", hash = "sha256:ede8c6d533bc6601a47ad4046080d36b8fc99f81e6f1c17b0ac3c2dc91ac7611"},
{file = "kiwisolver-1.4.9-cp310-cp310-win_arm64.whl", hash = "sha256:7b4da0d01ac866a57dd61ac258c5607b4cd677f63abaec7b148354d2b2cdd536"},
{file = "kiwisolver-1.4.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:eb14a5da6dc7642b0f3a18f13654847cd8b7a2550e2645a5bda677862b03ba16"},
{file = "kiwisolver-1.4.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:39a219e1c81ae3b103643d2aedb90f1ef22650deb266ff12a19e7773f3e5f089"},
{file = "kiwisolver-1.4.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2405a7d98604b87f3fc28b1716783534b1b4b8510d8142adca34ee0bc3c87543"},
{file = "kiwisolver-1.4.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dc1ae486f9abcef254b5618dfb4113dd49f94c68e3e027d03cf0143f3f772b61"},
{file = "kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8a1f570ce4d62d718dce3f179ee78dac3b545ac16c0c04bb363b7607a949c0d1"},
{file = "kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb27e7b78d716c591e88e0a09a2139c6577865d7f2e152488c2cc6257f460872"},
{file = "kiwisolver-1.4.9-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:15163165efc2f627eb9687ea5f3a28137217d217ac4024893d753f46bce9de26"},
{file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bdee92c56a71d2b24c33a7d4c2856bd6419d017e08caa7802d2963870e315028"},
{file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:412f287c55a6f54b0650bd9b6dce5aceddb95864a1a90c87af16979d37c89771"},
{file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2c93f00dcba2eea70af2be5f11a830a742fe6b579a1d4e00f47760ef13be247a"},
{file = "kiwisolver-1.4.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f117e1a089d9411663a3207ba874f31be9ac8eaa5b533787024dc07aeb74f464"},
{file = "kiwisolver-1.4.9-cp311-cp311-win_amd64.whl", hash = "sha256:be6a04e6c79819c9a8c2373317d19a96048e5a3f90bec587787e86a1153883c2"},
{file = "kiwisolver-1.4.9-cp311-cp311-win_arm64.whl", hash = "sha256:0ae37737256ba2de764ddc12aed4956460277f00c4996d51a197e72f62f5eec7"},
{file = "kiwisolver-1.4.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ac5a486ac389dddcc5bef4f365b6ae3ffff2c433324fb38dd35e3fab7c957999"},
{file = "kiwisolver-1.4.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f2ba92255faa7309d06fe44c3a4a97efe1c8d640c2a79a5ef728b685762a6fd2"},
{file = "kiwisolver-1.4.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a2899935e724dd1074cb568ce7ac0dce28b2cd6ab539c8e001a8578eb106d14"},
{file = "kiwisolver-1.4.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f6008a4919fdbc0b0097089f67a1eb55d950ed7e90ce2cc3e640abadd2757a04"},
{file = "kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:67bb8b474b4181770f926f7b7d2f8c0248cbcb78b660fdd41a47054b28d2a752"},
{file = "kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2327a4a30d3ee07d2fbe2e7933e8a37c591663b96ce42a00bc67461a87d7df77"},
{file = "kiwisolver-1.4.9-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7a08b491ec91b1d5053ac177afe5290adacf1f0f6307d771ccac5de30592d198"},
{file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8fc5c867c22b828001b6a38d2eaeb88160bf5783c6cb4a5e440efc981ce286d"},
{file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3b3115b2581ea35bb6d1f24a4c90af37e5d9b49dcff267eeed14c3893c5b86ab"},
{file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858e4c22fb075920b96a291928cb7dea5644e94c0ee4fcd5af7e865655e4ccf2"},
{file = "kiwisolver-1.4.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ed0fecd28cc62c54b262e3736f8bb2512d8dcfdc2bcf08be5f47f96bf405b145"},
{file = "kiwisolver-1.4.9-cp312-cp312-win_amd64.whl", hash = "sha256:f68208a520c3d86ea51acf688a3e3002615a7f0238002cccc17affecc86a8a54"},
{file = "kiwisolver-1.4.9-cp312-cp312-win_arm64.whl", hash = "sha256:2c1a4f57df73965f3f14df20b80ee29e6a7930a57d2d9e8491a25f676e197c60"},
{file = "kiwisolver-1.4.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5d0432ccf1c7ab14f9949eec60c5d1f924f17c037e9f8b33352fa05799359b8"},
{file = "kiwisolver-1.4.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efb3a45b35622bb6c16dbfab491a8f5a391fe0e9d45ef32f4df85658232ca0e2"},
{file = "kiwisolver-1.4.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a12cf6398e8a0a001a059747a1cbf24705e18fe413bc22de7b3d15c67cffe3f"},
{file = "kiwisolver-1.4.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b67e6efbf68e077dd71d1a6b37e43e1a99d0bff1a3d51867d45ee8908b931098"},
{file = "kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5656aa670507437af0207645273ccdfee4f14bacd7f7c67a4306d0dcaeaf6eed"},
{file = "kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:bfc08add558155345129c7803b3671cf195e6a56e7a12f3dde7c57d9b417f525"},
{file = "kiwisolver-1.4.9-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:40092754720b174e6ccf9e845d0d8c7d8e12c3d71e7fc35f55f3813e96376f78"},
{file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:497d05f29a1300d14e02e6441cf0f5ee81c1ff5a304b0d9fb77423974684e08b"},
{file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdd1a81a1860476eb41ac4bc1e07b3f07259e6d55bbf739b79c8aaedcf512799"},
{file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e6b93f13371d341afee3be9f7c5964e3fe61d5fa30f6a30eb49856935dfe4fc3"},
{file = "kiwisolver-1.4.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d75aa530ccfaa593da12834b86a0724f58bff12706659baa9227c2ccaa06264c"},
{file = "kiwisolver-1.4.9-cp313-cp313-win_amd64.whl", hash = "sha256:dd0a578400839256df88c16abddf9ba14813ec5f21362e1fe65022e00c883d4d"},
{file = "kiwisolver-1.4.9-cp313-cp313-win_arm64.whl", hash = "sha256:d4188e73af84ca82468f09cadc5ac4db578109e52acb4518d8154698d3a87ca2"},
{file = "kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:5a0f2724dfd4e3b3ac5a82436a8e6fd16baa7d507117e4279b660fe8ca38a3a1"},
{file = "kiwisolver-1.4.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b11d6a633e4ed84fc0ddafd4ebfd8ea49b3f25082c04ad12b8315c11d504dc1"},
{file = "kiwisolver-1.4.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61874cdb0a36016354853593cffc38e56fc9ca5aa97d2c05d3dcf6922cd55a11"},
{file = "kiwisolver-1.4.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:60c439763a969a6af93b4881db0eed8fadf93ee98e18cbc35bc8da868d0c4f0c"},
{file = "kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92a2f997387a1b79a75e7803aa7ded2cfbe2823852ccf1ba3bcf613b62ae3197"},
{file = "kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31d512c812daea6d8b3be3b2bfcbeb091dbb09177706569bcfc6240dcf8b41c"},
{file = "kiwisolver-1.4.9-cp313-cp313t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:52a15b0f35dad39862d376df10c5230155243a2c1a436e39eb55623ccbd68185"},
{file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a30fd6fdef1430fd9e1ba7b3398b5ee4e2887783917a687d86ba69985fb08748"},
{file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cc9617b46837c6468197b5945e196ee9ca43057bb7d9d1ae688101e4e1dddf64"},
{file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:0ab74e19f6a2b027ea4f845a78827969af45ce790e6cb3e1ebab71bdf9f215ff"},
{file = "kiwisolver-1.4.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dba5ee5d3981160c28d5490f0d1b7ed730c22470ff7f6cc26cfcfaacb9896a07"},
{file = "kiwisolver-1.4.9-cp313-cp313t-win_arm64.whl", hash = "sha256:0749fd8f4218ad2e851e11cc4dc05c7cbc0cbc4267bdfdb31782e65aace4ee9c"},
{file = "kiwisolver-1.4.9-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:9928fe1eb816d11ae170885a74d074f57af3a0d65777ca47e9aeb854a1fba386"},
{file = "kiwisolver-1.4.9-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d0005b053977e7b43388ddec89fa567f43d4f6d5c2c0affe57de5ebf290dc552"},
{file = "kiwisolver-1.4.9-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2635d352d67458b66fd0667c14cb1d4145e9560d503219034a18a87e971ce4f3"},
{file = "kiwisolver-1.4.9-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:767c23ad1c58c9e827b649a9ab7809fd5fd9db266a9cf02b0e926ddc2c680d58"},
{file = "kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72d0eb9fba308b8311685c2268cf7d0a0639a6cd027d8128659f72bdd8a024b4"},
{file = "kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f68e4f3eeca8fb22cc3d731f9715a13b652795ef657a13df1ad0c7dc0e9731df"},
{file = "kiwisolver-1.4.9-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d84cd4061ae292d8ac367b2c3fa3aad11cb8625a95d135fe93f286f914f3f5a6"},
{file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a60ea74330b91bd22a29638940d115df9dc00af5035a9a2a6ad9399ffb4ceca5"},
{file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ce6a3a4e106cf35c2d9c4fa17c05ce0b180db622736845d4315519397a77beaf"},
{file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:77937e5e2a38a7b48eef0585114fe7930346993a88060d0bf886086d2aa49ef5"},
{file = "kiwisolver-1.4.9-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:24c175051354f4a28c5d6a31c93906dc653e2bf234e8a4bbfb964892078898ce"},
{file = "kiwisolver-1.4.9-cp314-cp314-win_amd64.whl", hash = "sha256:0763515d4df10edf6d06a3c19734e2566368980d21ebec439f33f9eb936c07b7"},
{file = "kiwisolver-1.4.9-cp314-cp314-win_arm64.whl", hash = "sha256:0e4e2bf29574a6a7b7f6cb5fa69293b9f96c928949ac4a53ba3f525dffb87f9c"},
{file = "kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d976bbb382b202f71c67f77b0ac11244021cfa3f7dfd9e562eefcea2df711548"},
{file = "kiwisolver-1.4.9-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2489e4e5d7ef9a1c300a5e0196e43d9c739f066ef23270607d45aba368b91f2d"},
{file = "kiwisolver-1.4.9-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:e2ea9f7ab7fbf18fffb1b5434ce7c69a07582f7acc7717720f1d69f3e806f90c"},
{file = "kiwisolver-1.4.9-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b34e51affded8faee0dfdb705416153819d8ea9250bbbf7ea1b249bdeb5f1122"},
{file = "kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8aacd3d4b33b772542b2e01beb50187536967b514b00003bdda7589722d2a64"},
{file = "kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7cf974dd4e35fa315563ac99d6287a1024e4dc2077b8a7d7cd3d2fb65d283134"},
{file = "kiwisolver-1.4.9-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:85bd218b5ecfbee8c8a82e121802dcb519a86044c9c3b2e4aef02fa05c6da370"},
{file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0856e241c2d3df4efef7c04a1e46b1936b6120c9bcf36dd216e3acd84bc4fb21"},
{file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9af39d6551f97d31a4deebeac6f45b156f9755ddc59c07b402c148f5dbb6482a"},
{file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:bb4ae2b57fc1d8cbd1cf7b1d9913803681ffa903e7488012be5b76dedf49297f"},
{file = "kiwisolver-1.4.9-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:aedff62918805fb62d43a4aa2ecd4482c380dc76cd31bd7c8878588a61bd0369"},
{file = "kiwisolver-1.4.9-cp314-cp314t-win_amd64.whl", hash = "sha256:1fa333e8b2ce4d9660f2cda9c0e1b6bafcfb2457a9d259faa82289e73ec24891"},
{file = "kiwisolver-1.4.9-cp314-cp314t-win_arm64.whl", hash = "sha256:4a48a2ce79d65d363597ef7b567ce3d14d68783d2b2263d98db3d9477805ba32"},
{file = "kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4d1d9e582ad4d63062d34077a9a1e9f3c34088a2ec5135b1f7190c07cf366527"},
{file = "kiwisolver-1.4.9-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:deed0c7258ceb4c44ad5ec7d9918f9f14fd05b2be86378d86cf50e63d1e7b771"},
{file = "kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a590506f303f512dff6b7f75fd2fd18e16943efee932008fe7140e5fa91d80e"},
{file = "kiwisolver-1.4.9-pp310-pypy310_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e09c2279a4d01f099f52d5c4b3d9e208e91edcbd1a175c9662a8b16e000fece9"},
{file = "kiwisolver-1.4.9-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c9e7cdf45d594ee04d5be1b24dd9d49f3d1590959b2271fb30b5ca2b262c00fb"},
{file = "kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:720e05574713db64c356e86732c0f3c5252818d05f9df320f0ad8380641acea5"},
{file = "kiwisolver-1.4.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:17680d737d5335b552994a2008fab4c851bcd7de33094a82067ef3a576ff02fa"},
{file = "kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85b5352f94e490c028926ea567fc569c52ec79ce131dadb968d3853e809518c2"},
{file = "kiwisolver-1.4.9-pp311-pypy311_pp73-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:464415881e4801295659462c49461a24fb107c140de781d55518c4b80cb6790f"},
{file = "kiwisolver-1.4.9-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:fb940820c63a9590d31d88b815e7a3aa5915cad3ce735ab45f0c730b39547de1"},
{file = "kiwisolver-1.4.9.tar.gz", hash = "sha256:c3b22c26c6fd6811b0ae8363b95ca8ce4ea3c202d3d0975b2914310ceb1bcc4d"},
]

[[package]]
name = "kombu"
version = "5.5.4"
@@ -3449,85 +3137,6 @@ dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
tests = ["pytest", "simplejson"]

[[package]]
name = "matplotlib"
version = "3.10.6"
description = "Python plotting package"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
{file = "matplotlib-3.10.6-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:bc7316c306d97463a9866b89d5cc217824e799fa0de346c8f68f4f3d27c8693d"},
{file = "matplotlib-3.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d00932b0d160ef03f59f9c0e16d1e3ac89646f7785165ce6ad40c842db16cc2e"},
{file = "matplotlib-3.10.6-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8fa4c43d6bfdbfec09c733bca8667de11bfa4970e8324c471f3a3632a0301c15"},
{file = "matplotlib-3.10.6-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ea117a9c1627acaa04dbf36265691921b999cbf515a015298e54e1a12c3af837"},
{file = "matplotlib-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:08fc803293b4e1694ee325896030de97f74c141ccff0be886bb5915269247676"},
{file = "matplotlib-3.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:2adf92d9b7527fbfb8818e050260f0ebaa460f79d61546374ce73506c9421d09"},
{file = "matplotlib-3.10.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:905b60d1cb0ee604ce65b297b61cf8be9f4e6cfecf95a3fe1c388b5266bc8f4f"},
{file = "matplotlib-3.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bac38d816637343e53d7185d0c66677ff30ffb131044a81898b5792c956ba76"},
{file = "matplotlib-3.10.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:942a8de2b5bfff1de31d95722f702e2966b8a7e31f4e68f7cd963c7cd8861cf6"},
{file = "matplotlib-3.10.6-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3276c85370bc0dfca051ec65c5817d1e0f8f5ce1b7787528ec8ed2d524bbc2f"},
{file = "matplotlib-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9df5851b219225731f564e4b9e7f2ac1e13c9e6481f941b5631a0f8e2d9387ce"},
{file = "matplotlib-3.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:abb5d9478625dd9c9eb51a06d39aae71eda749ae9b3138afb23eb38824026c7e"},
{file = "matplotlib-3.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:886f989ccfae63659183173bb3fced7fd65e9eb793c3cc21c273add368536951"},
{file = "matplotlib-3.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:31ca662df6a80bd426f871105fdd69db7543e28e73a9f2afe80de7e531eb2347"},
{file = "matplotlib-3.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1678bb61d897bb4ac4757b5ecfb02bfb3fddf7f808000fb81e09c510712fda75"},
{file = "matplotlib-3.10.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:56cd2d20842f58c03d2d6e6c1f1cf5548ad6f66b91e1e48f814e4fb5abd1cb95"},
{file = "matplotlib-3.10.6-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:662df55604a2f9a45435566d6e2660e41efe83cd94f4288dfbf1e6d1eae4b0bb"},
{file = "matplotlib-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:08f141d55148cd1fc870c3387d70ca4df16dee10e909b3b038782bd4bda6ea07"},
{file = "matplotlib-3.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:590f5925c2d650b5c9d813c5b3b5fc53f2929c3f8ef463e4ecfa7e052044fb2b"},
{file = "matplotlib-3.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:f44c8d264a71609c79a78d50349e724f5d5fc3684ead7c2a473665ee63d868aa"},
{file = "matplotlib-3.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:819e409653c1106c8deaf62e6de6b8611449c2cd9939acb0d7d4e57a3d95cc7a"},
{file = "matplotlib-3.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:59c8ac8382fefb9cb71308dde16a7c487432f5255d8f1fd32473523abecfecdf"},
{file = "matplotlib-3.10.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:84e82d9e0fd70c70bc55739defbd8055c54300750cbacf4740c9673a24d6933a"},
{file = "matplotlib-3.10.6-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:25f7a3eb42d6c1c56e89eacd495661fc815ffc08d9da750bca766771c0fd9110"},
{file = "matplotlib-3.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f9c862d91ec0b7842920a4cfdaaec29662195301914ea54c33e01f1a28d014b2"},
{file = "matplotlib-3.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:1b53bd6337eba483e2e7d29c5ab10eee644bc3a2491ec67cc55f7b44583ffb18"},
{file = "matplotlib-3.10.6-cp313-cp313-win_arm64.whl", hash = "sha256:cbd5eb50b7058b2892ce45c2f4e92557f395c9991f5c886d1bb74a1582e70fd6"},
{file = "matplotlib-3.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:acc86dd6e0e695c095001a7fccff158c49e45e0758fdf5dcdbb0103318b59c9f"},
{file = "matplotlib-3.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e228cd2ffb8f88b7d0b29e37f68ca9aaf83e33821f24a5ccc4f082dd8396bc27"},
{file = "matplotlib-3.10.6-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:658bc91894adeab669cf4bb4a186d049948262987e80f0857216387d7435d833"},
{file = "matplotlib-3.10.6-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8913b7474f6dd83ac444c9459c91f7f0f2859e839f41d642691b104e0af056aa"},
{file = "matplotlib-3.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:091cea22e059b89f6d7d1a18e2c33a7376c26eee60e401d92a4d6726c4e12706"},
{file = "matplotlib-3.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:491e25e02a23d7207629d942c666924a6b61e007a48177fdd231a0097b7f507e"},
{file = "matplotlib-3.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3d80d60d4e54cda462e2cd9a086d85cd9f20943ead92f575ce86885a43a565d5"},
{file = "matplotlib-3.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:70aaf890ce1d0efd482df969b28a5b30ea0b891224bb315810a3940f67182899"},
{file = "matplotlib-3.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1565aae810ab79cb72e402b22facfa6501365e73ebab70a0fdfb98488d2c3c0c"},
{file = "matplotlib-3.10.6-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3b23315a01981689aa4e1a179dbf6ef9fbd17143c3eea77548c2ecfb0499438"},
{file = "matplotlib-3.10.6-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:30fdd37edf41a4e6785f9b37969de57aea770696cb637d9946eb37470c94a453"},
{file = "matplotlib-3.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bc31e693da1c08012c764b053e702c1855378e04102238e6a5ee6a7117c53a47"},
{file = "matplotlib-3.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:05be9bdaa8b242bc6ff96330d18c52f1fc59c6fb3a4dd411d953d67e7e1baf98"},
{file = "matplotlib-3.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:f56a0d1ab05d34c628592435781d185cd99630bdfd76822cd686fb5a0aecd43a"},
{file = "matplotlib-3.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:94f0b4cacb23763b64b5dace50d5b7bfe98710fed5f0cef5c08135a03399d98b"},
{file = "matplotlib-3.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cc332891306b9fb39462673d8225d1b824c89783fee82840a709f96714f17a5c"},
{file = "matplotlib-3.10.6-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee1d607b3fb1590deb04b69f02ea1d53ed0b0bf75b2b1a5745f269afcbd3cdd3"},
{file = "matplotlib-3.10.6-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:376a624a218116461696b27b2bbf7a8945053e6d799f6502fc03226d077807bf"},
{file = "matplotlib-3.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:83847b47f6524c34b4f2d3ce726bb0541c48c8e7692729865c3df75bfa0f495a"},
{file = "matplotlib-3.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c7e0518e0d223683532a07f4b512e2e0729b62674f1b3a1a69869f98e6b1c7e3"},
{file = "matplotlib-3.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:4dd83e029f5b4801eeb87c64efd80e732452781c16a9cf7415b7b63ec8f374d7"},
{file = "matplotlib-3.10.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:13fcd07ccf17e354398358e0307a1f53f5325dca22982556ddb9c52837b5af41"},
{file = "matplotlib-3.10.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:470fc846d59d1406e34fa4c32ba371039cd12c2fe86801159a965956f2575bd1"},
{file = "matplotlib-3.10.6-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f7173f8551b88f4ef810a94adae3128c2530e0d07529f7141be7f8d8c365f051"},
{file = "matplotlib-3.10.6-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f2d684c3204fa62421bbf770ddfebc6b50130f9cad65531eeba19236d73bb488"},
{file = "matplotlib-3.10.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:6f4a69196e663a41d12a728fab8751177215357906436804217d6d9cf0d4d6cf"},
{file = "matplotlib-3.10.6-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d6ca6ef03dfd269f4ead566ec6f3fb9becf8dab146fb999022ed85ee9f6b3eb"},
{file = "matplotlib-3.10.6.tar.gz", hash = "sha256:ec01b645840dd1996df21ee37f208cd8ba57644779fa20464010638013d3203c"},
]

[package.dependencies]
contourpy = ">=1.0.1"
cycler = ">=0.10"
fonttools = ">=4.22.0"
kiwisolver = ">=1.3.1"
numpy = ">=1.23"
packaging = ">=20.0"
pillow = ">=8"
pyparsing = ">=2.3.1"
python-dateutil = ">=2.7"

[package.extras]
dev = ["meson-python (>=0.13.1,<0.17.0)", "pybind11 (>=2.13.2,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"]

[[package]]
name = "mccabe"
version = "0.7.0"
@@ -4058,29 +3667,6 @@ rsa = ["cryptography (>=3.0.0)"]
signals = ["blinker (>=1.4.0)"]
signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]

[[package]]
name = "oci"
version = "2.160.3"
description = "Oracle Cloud Infrastructure Python SDK"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "oci-2.160.3-py3-none-any.whl", hash = "sha256:858bff3e697098bdda44833d2476bfb4632126f0182178e7dbde4dbd156d71f0"},
{file = "oci-2.160.3.tar.gz", hash = "sha256:57514889be3b713a8385d86e3ba8a33cf46e3563c2a7e29a93027fb30b8a2537"},
]

[package.dependencies]
certifi = "*"
circuitbreaker = {version = ">=1.3.1,<3.0.0", markers = "python_version >= \"3.7\""}
cryptography = ">=3.2.1,<46.0.0"
pyOpenSSL = ">=17.5.0,<25.0.0"
python-dateutil = ">=2.5.3,<3.0.0"
pytz = ">=2016.10"

[package.extras]
adk = ["docstring-parser (>=0.16) ; python_version >= \"3.10\" and python_version < \"4\"", "mcp (>=1.6.0) ; python_version >= \"3.10\" and python_version < \"4\"", "pydantic (>=2.10.6) ; python_version >= \"3.10\" and python_version < \"4\"", "rich (>=13.9.4) ; python_version >= \"3.10\" and python_version < \"4\""]

[[package]]
name = "openai"
version = "1.101.0"
@@ -4271,131 +3857,6 @@ files = [
[package.dependencies]
setuptools = "*"

[[package]]
name = "pillow"
version = "11.3.0"
description = "Python Imaging Library (Fork)"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pillow-11.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b9c17fd4ace828b3003dfd1e30bff24863e0eb59b535e8f80194d9cc7ecf860"},
{file = "pillow-11.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:65dc69160114cdd0ca0f35cb434633c75e8e7fad4cf855177a05bf38678f73ad"},
{file = "pillow-11.3.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7107195ddc914f656c7fc8e4a5e1c25f32e9236ea3ea860f257b0436011fddd0"},
{file = "pillow-11.3.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc3e831b563b3114baac7ec2ee86819eb03caa1a2cef0b481a5675b59c4fe23b"},
{file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1f182ebd2303acf8c380a54f615ec883322593320a9b00438eb842c1f37ae50"},
{file = "pillow-11.3.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4445fa62e15936a028672fd48c4c11a66d641d2c05726c7ec1f8ba6a572036ae"},
{file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:71f511f6b3b91dd543282477be45a033e4845a40278fa8dcdbfdb07109bf18f9"},
{file = "pillow-11.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040a5b691b0713e1f6cbe222e0f4f74cd233421e105850ae3b3c0ceda520f42e"},
{file = "pillow-11.3.0-cp310-cp310-win32.whl", hash = "sha256:89bd777bc6624fe4115e9fac3352c79ed60f3bb18651420635f26e643e3dd1f6"},
{file = "pillow-11.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:19d2ff547c75b8e3ff46f4d9ef969a06c30ab2d4263a9e287733aa8b2429ce8f"},
{file = "pillow-11.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:819931d25e57b513242859ce1876c58c59dc31587847bf74cfe06b2e0cb22d2f"},
{file = "pillow-11.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1cd110edf822773368b396281a2293aeb91c90a2db00d78ea43e7e861631b722"},
{file = "pillow-11.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c412fddd1b77a75aa904615ebaa6001f169b26fd467b4be93aded278266b288"},
{file = "pillow-11.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1aa4de119a0ecac0a34a9c8bde33f34022e2e8f99104e47a3ca392fd60e37d"},
{file = "pillow-11.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91da1d88226663594e3f6b4b8c3c8d85bd504117d043740a8e0ec449087cc494"},
{file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:643f189248837533073c405ec2f0bb250ba54598cf80e8c1e043381a60632f58"},
{file = "pillow-11.3.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:106064daa23a745510dabce1d84f29137a37224831d88eb4ce94bb187b1d7e5f"},
{file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd8ff254faf15591e724dc7c4ddb6bf4793efcbe13802a4ae3e863cd300b493e"},
{file = "pillow-11.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:932c754c2d51ad2b2271fd01c3d121daaa35e27efae2a616f77bf164bc0b3e94"},
{file = "pillow-11.3.0-cp311-cp311-win32.whl", hash = "sha256:b4b8f3efc8d530a1544e5962bd6b403d5f7fe8b9e08227c6b255f98ad82b4ba0"},
{file = "pillow-11.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:1a992e86b0dd7aeb1f053cd506508c0999d710a8f07b4c791c63843fc6a807ac"},
{file = "pillow-11.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:30807c931ff7c095620fe04448e2c2fc673fcbb1ffe2a7da3fb39613489b1ddd"},
{file = "pillow-11.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdae223722da47b024b867c1ea0be64e0df702c5e0a60e27daad39bf960dd1e4"},
{file = "pillow-11.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:921bd305b10e82b4d1f5e802b6850677f965d8394203d182f078873851dada69"},
{file = "pillow-11.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eb76541cba2f958032d79d143b98a3a6b3ea87f0959bbe256c0b5e416599fd5d"},
{file = "pillow-11.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:67172f2944ebba3d4a7b54f2e95c786a3a50c21b88456329314caaa28cda70f6"},
{file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f07ed9f56a3b9b5f49d3661dc9607484e85c67e27f3e8be2c7d28ca032fec7"},
{file = "pillow-11.3.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:676b2815362456b5b3216b4fd5bd89d362100dc6f4945154ff172e206a22c024"},
{file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e184b2f26ff146363dd07bde8b711833d7b0202e27d13540bfe2e35a323a809"},
{file = "pillow-11.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6be31e3fc9a621e071bc17bb7de63b85cbe0bfae91bb0363c893cbe67247780d"},
{file = "pillow-11.3.0-cp312-cp312-win32.whl", hash = "sha256:7b161756381f0918e05e7cb8a371fff367e807770f8fe92ecb20d905d0e1c149"},
{file = "pillow-11.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a6444696fce635783440b7f7a9fc24b3ad10a9ea3f0ab66c5905be1c19ccf17d"},
{file = "pillow-11.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:2aceea54f957dd4448264f9bf40875da0415c83eb85f55069d89c0ed436e3542"},
{file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:1c627742b539bba4309df89171356fcb3cc5a9178355b2727d1b74a6cf155fbd"},
{file = "pillow-11.3.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:30b7c02f3899d10f13d7a48163c8969e4e653f8b43416d23d13d1bbfdc93b9f8"},
{file = "pillow-11.3.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:7859a4cc7c9295f5838015d8cc0a9c215b77e43d07a25e460f35cf516df8626f"},
{file = "pillow-11.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec1ee50470b0d050984394423d96325b744d55c701a439d2bd66089bff963d3c"},
{file = "pillow-11.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7db51d222548ccfd274e4572fdbf3e810a5e66b00608862f947b163e613b67dd"},
{file = "pillow-11.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2d6fcc902a24ac74495df63faad1884282239265c6839a0a6416d33faedfae7e"},
{file = "pillow-11.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f0f5d8f4a08090c6d6d578351a2b91acf519a54986c055af27e7a93feae6d3f1"},
{file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c37d8ba9411d6003bba9e518db0db0c58a680ab9fe5179f040b0463644bc9805"},
{file = "pillow-11.3.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13f87d581e71d9189ab21fe0efb5a23e9f28552d5be6979e84001d3b8505abe8"},
{file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:023f6d2d11784a465f09fd09a34b150ea4672e85fb3d05931d89f373ab14abb2"},
{file = "pillow-11.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:45dfc51ac5975b938e9809451c51734124e73b04d0f0ac621649821a63852e7b"},
{file = "pillow-11.3.0-cp313-cp313-win32.whl", hash = "sha256:a4d336baed65d50d37b88ca5b60c0fa9d81e3a87d4a7930d3880d1624d5b31f3"},
{file = "pillow-11.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0bce5c4fd0921f99d2e858dc4d4d64193407e1b99478bc5cacecba2311abde51"},
{file = "pillow-11.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:1904e1264881f682f02b7f8167935cce37bc97db457f8e7849dc3a6a52b99580"},
{file = "pillow-11.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4c834a3921375c48ee6b9624061076bc0a32a60b5532b322cc0ea64e639dd50e"},
{file = "pillow-11.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5e05688ccef30ea69b9317a9ead994b93975104a677a36a8ed8106be9260aa6d"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1019b04af07fc0163e2810167918cb5add8d74674b6267616021ab558dc98ced"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f944255db153ebb2b19c51fe85dd99ef0ce494123f21b9db4877ffdfc5590c7c"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f85acb69adf2aaee8b7da124efebbdb959a104db34d3a2cb0f3793dbae422a8"},
{file = "pillow-11.3.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:05f6ecbeff5005399bb48d198f098a9b4b6bdf27b8487c7f38ca16eeb070cd59"},
{file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a7bc6e6fd0395bc052f16b1a8670859964dbd7003bd0af2ff08342eb6e442cfe"},
{file = "pillow-11.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:83e1b0161c9d148125083a35c1c5a89db5b7054834fd4387499e06552035236c"},
{file = "pillow-11.3.0-cp313-cp313t-win32.whl", hash = "sha256:2a3117c06b8fb646639dce83694f2f9eac405472713fcb1ae887469c0d4f6788"},
{file = "pillow-11.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:857844335c95bea93fb39e0fa2726b4d9d758850b34075a7e3ff4f4fa3aa3b31"},
{file = "pillow-11.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:8797edc41f3e8536ae4b10897ee2f637235c94f27404cac7297f7b607dd0716e"},
{file = "pillow-11.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:d9da3df5f9ea2a89b81bb6087177fb1f4d1c7146d583a3fe5c672c0d94e55e12"},
{file = "pillow-11.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0b275ff9b04df7b640c59ec5a3cb113eefd3795a8df80bac69646ef699c6981a"},
{file = "pillow-11.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0743841cabd3dba6a83f38a92672cccbd69af56e3e91777b0ee7f4dba4385632"},
{file = "pillow-11.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2465a69cf967b8b49ee1b96d76718cd98c4e925414ead59fdf75cf0fd07df673"},
{file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41742638139424703b4d01665b807c6468e23e699e8e90cffefe291c5832b027"},
{file = "pillow-11.3.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:93efb0b4de7e340d99057415c749175e24c8864302369e05914682ba642e5d77"},
{file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7966e38dcd0fa11ca390aed7c6f20454443581d758242023cf36fcb319b1a874"},
{file = "pillow-11.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:98a9afa7b9007c67ed84c57c9e0ad86a6000da96eaa638e4f8abe5b65ff83f0a"},
{file = "pillow-11.3.0-cp314-cp314-win32.whl", hash = "sha256:02a723e6bf909e7cea0dac1b0e0310be9d7650cd66222a5f1c571455c0a45214"},
{file = "pillow-11.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a418486160228f64dd9e9efcd132679b7a02a5f22c982c78b6fc7dab3fefb635"},
{file = "pillow-11.3.0-cp314-cp314-win_arm64.whl", hash = "sha256:155658efb5e044669c08896c0c44231c5e9abcaadbc5cd3648df2f7c0b96b9a6"},
{file = "pillow-11.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:59a03cdf019efbfeeed910bf79c7c93255c3d54bc45898ac2a4140071b02b4ae"},
{file = "pillow-11.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f8a5827f84d973d8636e9dc5764af4f0cf2318d26744b3d902931701b0d46653"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ee92f2fd10f4adc4b43d07ec5e779932b4eb3dbfbc34790ada5a6669bc095aa6"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c96d333dcf42d01f47b37e0979b6bd73ec91eae18614864622d9b87bbd5bbf36"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4c96f993ab8c98460cd0c001447bff6194403e8b1d7e149ade5f00594918128b"},
{file = "pillow-11.3.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41342b64afeba938edb034d122b2dda5db2139b9a4af999729ba8818e0056477"},
{file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:068d9c39a2d1b358eb9f245ce7ab1b5c3246c7c8c7d9ba58cfa5b43146c06e50"},
{file = "pillow-11.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a1bc6ba083b145187f648b667e05a2534ecc4b9f2784c2cbe3089e44868f2b9b"},
{file = "pillow-11.3.0-cp314-cp314t-win32.whl", hash = "sha256:118ca10c0d60b06d006be10a501fd6bbdfef559251ed31b794668ed569c87e12"},
{file = "pillow-11.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:8924748b688aa210d79883357d102cd64690e56b923a186f35a82cbc10f997db"},
{file = "pillow-11.3.0-cp314-cp314t-win_arm64.whl", hash = "sha256:79ea0d14d3ebad43ec77ad5272e6ff9bba5b679ef73375ea760261207fa8e0aa"},
{file = "pillow-11.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:48d254f8a4c776de343051023eb61ffe818299eeac478da55227d96e241de53f"},
{file = "pillow-11.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7aee118e30a4cf54fdd873bd3a29de51e29105ab11f9aad8c32123f58c8f8081"},
{file = "pillow-11.3.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:23cff760a9049c502721bdb743a7cb3e03365fafcdfc2ef9784610714166e5a4"},
{file = "pillow-11.3.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6359a3bc43f57d5b375d1ad54a0074318a0844d11b76abccf478c37c986d3cfc"},
{file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:092c80c76635f5ecb10f3f83d76716165c96f5229addbd1ec2bdbbda7d496e06"},
{file = "pillow-11.3.0-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cadc9e0ea0a2431124cde7e1697106471fc4c1da01530e679b2391c37d3fbb3a"},
{file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6a418691000f2a418c9135a7cf0d797c1bb7d9a485e61fe8e7722845b95ef978"},
{file = "pillow-11.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:97afb3a00b65cc0804d1c7abddbf090a81eaac02768af58cbdcaaa0a931e0b6d"},
{file = "pillow-11.3.0-cp39-cp39-win32.whl", hash = "sha256:ea944117a7974ae78059fcc1800e5d3295172bb97035c0c1d9345fca1419da71"},
{file = "pillow-11.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:e5c5858ad8ec655450a7c7df532e9842cf8df7cc349df7225c60d5d348c8aada"},
{file = "pillow-11.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:6abdbfd3aea42be05702a8dd98832329c167ee84400a1d1f61ab11437f1717eb"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee80663f29e3843b68199b9d6f4f54bd1d4a6b59bdd91bceefc51238bcb967"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b5f56c3f344f2ccaf0dd875d3e180f631dc60a51b314295a3e681fe8cf851fbe"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e67d793d180c9df62f1f40aee3accca4829d3794c95098887edc18af4b8b780c"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d000f46e2917c705e9fb93a3606ee4a819d1e3aa7a9b442f6444f07e77cf5e25"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:527b37216b6ac3a12d7838dc3bd75208ec57c1c6d11ef01902266a5a0c14fc27"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:be5463ac478b623b9dd3937afd7fb7ab3d79dd290a28e2b6df292dc75063eb8a"},
{file = "pillow-11.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8dc70ca24c110503e16918a658b869019126ecfe03109b754c402daff12b3d9f"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c8ec7a017ad1bd562f93dbd8505763e688d388cde6e4a010ae1486916e713e6"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9ab6ae226de48019caa8074894544af5b53a117ccb9d3b3dcb2871464c829438"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe27fb049cdcca11f11a7bfda64043c37b30e6b91f10cb5bab275806c32f6ab3"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:465b9e8844e3c3519a983d58b80be3f668e2a7a5db97f2784e7079fbc9f9822c"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5418b53c0d59b3824d05e029669efa023bbef0f3e92e75ec8428f3799487f361"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:504b6f59505f08ae014f724b6207ff6222662aab5cc9542577fb084ed0676ac7"},
{file = "pillow-11.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c84d689db21a1c397d001aa08241044aa2069e7587b398c8cc63020390b1c1b8"},
{file = "pillow-11.3.0.tar.gz", hash = "sha256:3828ee7586cd0b2091b6209e5ad53e20d0649bbe87164a459d0676e035e8f523"},
]
[package.extras]
docs = ["furo", "olefile", "sphinx (>=8.2)", "sphinx-autobuild", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"]
fpx = ["olefile"]
mic = ["olefile"]
test-arrow = ["pyarrow"]
tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "trove-classifiers (>=2024.10.12)"]
typing = ["typing-extensions ; python_version < \"3.10\""]
xmp = ["defusedxml"]
[[package]]
name = "platformdirs"
version = "4.3.8"
@@ -4669,7 +4130,6 @@ markdown = "3.9.0"
microsoft-kiota-abstractions = "1.9.2"
msgraph-sdk = "1.23.0"
numpy = "2.0.2"
oci = "2.160.3"
pandas = "2.2.3"
py-iam-expand = "0.1.0"
py-ocsf-models = "0.5.0"
@@ -4686,8 +4146,8 @@ tzlocal = "5.3.1"
[package.source]
type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
reference = "v5.13"
resolved_reference = "b1856e42f0143a64e8cc26c7aa3c7643bd1083d3"
reference = "master"
resolved_reference = "a52697bfdfee83d14a49c11dcbe96888b5cd767e"
[[package]]
name = "psutil"
@@ -5172,25 +4632,6 @@ cffi = ">=1.4.1"
docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
[[package]]
name = "pyopenssl"
version = "24.3.0"
description = "Python wrapper module around the OpenSSL library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"},
{file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"},
]
[package.dependencies]
cryptography = ">=41.0.5,<45"
[package.extras]
docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"]
test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"]
[[package]]
name = "pyparsing"
version = "3.2.3"
@@ -5575,29 +5016,6 @@ attrs = ">=22.2.0"
rpds-py = ">=0.7.0"
typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""}
[[package]]
name = "reportlab"
version = "4.4.4"
description = "The Reportlab Toolkit"
optional = false
python-versions = "<4,>=3.9"
groups = ["main"]
files = [
{file = "reportlab-4.4.4-py3-none-any.whl", hash = "sha256:299b3b0534e7202bb94ed2ddcd7179b818dcda7de9d8518a57c85a58a1ebaadb"},
{file = "reportlab-4.4.4.tar.gz", hash = "sha256:cb2f658b7f4a15be2cc68f7203aa67faef67213edd4f2d4bdd3eb20dab75a80d"},
]
[package.dependencies]
charset-normalizer = "*"
pillow = ">=9.0.0"
[package.extras]
accel = ["rl_accel (>=0.9.0,<1.1)"]
bidi = ["rlbidi"]
pycairo = ["freetype-py (>=2.3.0,<2.4)", "rlPyCairo (>=0.2.0,<1)"]
renderpm = ["rl_renderPM (>=4.0.3,<4.1)"]
shaping = ["uharfbuzz"]
[[package]]
name = "requests"
version = "2.32.5"
@@ -6841,4 +6259,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "8fcb616e55530e7940019d3da33e955b026b9105e1216a3c5f39b411c015b6d7"
content-hash = "03442fd4673006c5a74374f90f53621fd1c9d117279fe6cc0355ef833eb7f9bb"
+2 -4
@@ -24,7 +24,7 @@ dependencies = [
"drf-spectacular-jsonapi==0.5.1",
"gunicorn==23.0.0",
"lxml==5.3.2",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.13",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
"psycopg2-binary==2.9.9",
"pytest-celery[redis] (>=1.0.1,<2.0.0)",
"sentry-sdk[django] (>=2.20.0,<3.0.0)",
@@ -33,9 +33,7 @@ dependencies = [
"xmlsec==1.3.14",
"h2 (==4.3.0)",
"markdown (>=3.9,<4.0)",
"drf-simple-apikey (==2.2.1)",
"matplotlib (>=3.10.6,<4.0.0)",
"reportlab (>=4.4.4,<5.0.0)"
"drf-simple-apikey (==2.2.1)"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
@@ -24,7 +24,7 @@ class Migration(migrations.Migration):
(
"name",
models.CharField(
max_length=100,
max_length=255,
validators=[django.core.validators.MinLengthValidator(3)],
),
),
-49
@@ -2689,55 +2689,6 @@ class TestScanViewSet:
== "There is a problem with credentials."
)
@patch("api.v1.views.ScanViewSet._get_task_status")
@patch("api.v1.views.get_s3_client")
@patch("api.v1.views.env.str")
def test_threatscore_s3_wildcard(
self,
mock_env_str,
mock_get_s3_client,
mock_get_task_status,
authenticated_client,
scans_fixture,
):
"""
When the threatscore endpoint is called with an S3 output_location,
the view should list objects in S3 using wildcard pattern matching,
retrieve the matching PDF file, and return it with HTTP 200 and proper headers.
"""
scan = scans_fixture[0]
scan.state = StateChoices.COMPLETED
bucket = "test-bucket"
zip_key = "tenant-id/scan-id/prowler-output-foo.zip"
scan.output_location = f"s3://{bucket}/{zip_key}"
scan.save()
pdf_key = os.path.join(
os.path.dirname(zip_key),
"threatscore",
"prowler-output-123_threatscore_report.pdf",
)
mock_s3_client = Mock()
mock_s3_client.list_objects_v2.return_value = {"Contents": [{"Key": pdf_key}]}
mock_s3_client.get_object.return_value = {"Body": io.BytesIO(b"pdf-bytes")}
mock_env_str.return_value = bucket
mock_get_s3_client.return_value = mock_s3_client
mock_get_task_status.return_value = None
url = reverse("scan-threatscore", kwargs={"pk": scan.id})
response = authenticated_client.get(url)
assert response.status_code == status.HTTP_200_OK
assert response["Content-Type"] == "application/pdf"
assert response["Content-Disposition"].endswith(
'"prowler-output-123_threatscore_report.pdf"'
)
assert response.content == b"pdf-bytes"
mock_s3_client.list_objects_v2.assert_called_once()
mock_s3_client.get_object.assert_called_once_with(Bucket=bucket, Key=pdf_key)
def test_report_s3_success(self, authenticated_client, scans_fixture, monkeypatch):
"""
When output_location is an S3 URL and the S3 client returns the file successfully,
+1 -74
@@ -1,4 +1,3 @@
import fnmatch
import glob
import logging
import os
@@ -1594,25 +1593,6 @@ class ProviderViewSet(BaseRLSViewSet):
},
request=None,
),
threatscore=extend_schema(
tags=["Scan"],
summary="Retrieve threatscore report",
description="Download a specific threatscore report (e.g., 'prowler_threatscore_aws') as a PDF file.",
request=None,
responses={
200: OpenApiResponse(
description="PDF file containing the threatscore report"
),
202: OpenApiResponse(description="The task is in progress"),
401: OpenApiResponse(
description="API key missing or user not Authenticated"
),
403: OpenApiResponse(description="There is a problem with credentials"),
404: OpenApiResponse(
description="The scan has no threatscore reports, or the threatscore report generation task has not started yet"
),
},
),
)
@method_decorator(CACHE_DECORATOR, name="list")
@method_decorator(CACHE_DECORATOR, name="retrieve")
@@ -1669,9 +1649,6 @@ class ScanViewSet(BaseRLSViewSet):
if hasattr(self, "response_serializer_class"):
return self.response_serializer_class
return ScanComplianceReportSerializer
elif self.action == "threatscore":
if hasattr(self, "response_serializer_class"):
return self.response_serializer_class
return super().get_serializer_class()
def partial_update(self, request, *args, **kwargs):
@@ -1776,18 +1753,7 @@ class ScanViewSet(BaseRLSViewSet):
status=status.HTTP_502_BAD_GATEWAY,
)
contents = resp.get("Contents", [])
keys = []
for obj in contents:
key = obj["Key"]
key_basename = os.path.basename(key)
if any(ch in suffix for ch in ("*", "?", "[")):
if fnmatch.fnmatch(key_basename, suffix):
keys.append(key)
elif key_basename == suffix:
keys.append(key)
elif key.endswith(suffix):
# Backward compatibility if suffix already includes directories
keys.append(key)
keys = [obj["Key"] for obj in contents if obj["Key"].endswith(suffix)]
if not keys:
return Response(
{
@@ -1914,45 +1880,6 @@ class ScanViewSet(BaseRLSViewSet):
content, filename = loader
return self._serve_file(content, filename, "text/csv")
@action(
detail=True,
methods=["get"],
url_name="threatscore",
)
def threatscore(self, request, pk=None):
scan = self.get_object()
running_resp = self._get_task_status(scan)
if running_resp:
return running_resp
if not scan.output_location:
return Response(
{
"detail": "The scan has no reports, or the threatscore report generation task has not started yet."
},
status=status.HTTP_404_NOT_FOUND,
)
if scan.output_location.startswith("s3://"):
bucket = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET", "")
key_prefix = scan.output_location.removeprefix(f"s3://{bucket}/")
prefix = os.path.join(
os.path.dirname(key_prefix),
"threatscore",
"*_threatscore_report.pdf",
)
loader = self._load_file(prefix, s3=True, bucket=bucket, list_objects=True)
else:
base = os.path.dirname(scan.output_location)
pattern = os.path.join(base, "threatscore", "*_threatscore_report.pdf")
loader = self._load_file(pattern, s3=False)
if isinstance(loader, Response):
return loader
content, filename = loader
return self._serve_file(content, filename, "application/pdf")
def create(self, request, *args, **kwargs):
input_serializer = self.get_serializer(data=request.data)
input_serializer.is_valid(raise_exception=True)
Binary file not shown (image, 24 KiB before this change).
+29 -31
@@ -20,10 +20,10 @@ from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
AWSWellArchitected,
)
from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.ccc.ccc_aws import CCC_AWS
from prowler.lib.outputs.compliance.ccc.ccc_azure import CCC_Azure
from prowler.lib.outputs.compliance.ccc.ccc_gcp import CCC_GCP
from prowler.lib.outputs.compliance.c5.c5_aws import AWSC5
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
@@ -183,21 +183,18 @@ def get_s3_client():
return s3_client
def _upload_to_s3(
tenant_id: str, scan_id: str, local_path: str, relative_key: str
) -> str | None:
def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str | None:
"""
Upload a local artifact to an S3 bucket under the tenant/scan prefix.
Upload the specified ZIP file to an S3 bucket.
If the S3 bucket environment variables are not configured,
the function returns None without performing an upload.
Args:
tenant_id (str): The tenant identifier used as the first segment of the S3 key.
scan_id (str): The scan identifier used as the second segment of the S3 key.
local_path (str): Filesystem path to the artifact to upload.
relative_key (str): Object key relative to `<tenant_id>/<scan_id>/`.
tenant_id (str): The tenant identifier, used as part of the S3 key prefix.
zip_path (str): The local file system path to the ZIP file to be uploaded.
scan_id (str): The scan identifier, used as part of the S3 key prefix.
Returns:
str | None: S3 URI of the uploaded artifact, or None if the upload is skipped.
str: The S3 URI of the uploaded file (e.g., "s3://<bucket>/<key>") if successful.
None: If the required environment variables for the S3 bucket are not set.
Raises:
botocore.exceptions.ClientError: If the upload attempt to S3 fails for any reason.
"""
@@ -205,26 +202,34 @@ def _upload_to_s3(
if not bucket:
return
if not relative_key:
return
if not os.path.isfile(local_path):
return
try:
s3 = get_s3_client()
s3_key = f"{tenant_id}/{scan_id}/{relative_key}"
s3.upload_file(Filename=local_path, Bucket=bucket, Key=s3_key)
# Upload the ZIP file (outputs) to the S3 bucket
zip_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"
s3.upload_file(
Filename=zip_path,
Bucket=bucket,
Key=zip_key,
)
return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{s3_key}"
# Upload the compliance directory to the S3 bucket
compliance_dir = os.path.join(os.path.dirname(zip_path), "compliance")
for filename in os.listdir(compliance_dir):
local_path = os.path.join(compliance_dir, filename)
if not os.path.isfile(local_path):
continue
file_key = f"{tenant_id}/{scan_id}/compliance/{filename}"
s3.upload_file(Filename=local_path, Bucket=bucket, Key=file_key)
return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{zip_key}"
except (ClientError, NoCredentialsError, ParamValidationError, ValueError) as e:
logger.error(f"S3 upload failed: {str(e)}")
def _generate_output_directory(
output_directory, prowler_provider: object, tenant_id: str, scan_id: str
) -> tuple[str, str, str]:
) -> tuple[str, str]:
"""
Generate a file system path for the output directory of a prowler scan.
@@ -251,7 +256,6 @@ def _generate_output_directory(
>>> _generate_output_directory("/tmp", "aws", "tenant-1234", "scan-5678")
'/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56',
'/tmp/tenant-1234/aws/scan-5678/compliance/prowler-output-2023-02-15T12:34:56'
'/tmp/tenant-1234/aws/scan-5678/threatscore/prowler-output-2023-02-15T12:34:56'
"""
# Sanitize the prowler provider name to ensure it is a valid directory name
prowler_provider_sanitized = re.sub(r"[^\w\-]", "-", prowler_provider)
@@ -272,10 +276,4 @@ def _generate_output_directory(
)
os.makedirs("/".join(compliance_path.split("/")[:-1]), exist_ok=True)
threatscore_path = (
f"{output_directory}/{tenant_id}/{scan_id}/threatscore/prowler-output-"
f"{prowler_provider_sanitized}-{timestamp}"
)
os.makedirs("/".join(threatscore_path.split("/")[:-1]), exist_ok=True)
return path, compliance_path, threatscore_path
return path, compliance_path
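As an aside, here is a minimal sketch of how the reverted helpers are expected to be used together after this change, inferred from the signatures, docstrings, and tests in this diff; the module path, directories, and identifiers below are illustrative assumptions rather than code from the repository.

```python
# Illustrative only: expected call pattern for the reverted helpers, inferred
# from the signatures and docstrings shown in this diff. The module path,
# directories, and IDs are assumptions for the sake of the example.
from tasks.jobs.export import _generate_output_directory, _upload_to_s3

out_dir, comp_dir = _generate_output_directory(
    "/tmp/prowler-api-output", "aws", "tenant-1234", "scan-5678"
)
# ... findings and compliance outputs are written under out_dir / comp_dir,
# then compressed into a single ZIP archive ...
zip_path = f"{out_dir}.zip"

# Returns "s3://<bucket>/tenant-1234/scan-5678/<zip basename>" on success, or
# None when the S3 output bucket environment variables are not configured.
upload_uri = _upload_to_s3("tenant-1234", zip_path, "scan-5678")
```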
File diff suppressed because it is too large.
+6 -48
@@ -1,4 +1,3 @@
import os
from datetime import datetime, timedelta, timezone
from pathlib import Path
from shutil import rmtree
@@ -27,7 +26,6 @@ from tasks.jobs.integrations import (
upload_s3_integration,
upload_security_hub_integration,
)
from tasks.jobs.report import generate_threatscore_report_job
from tasks.jobs.scan import (
aggregate_findings,
create_compliance_requirements,
@@ -66,15 +64,10 @@ def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str)
generate_outputs_task.si(
scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
),
group(
generate_threatscore_report_task.si(
tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
),
check_integrations_task.si(
tenant_id=tenant_id,
provider_id=provider_id,
scan_id=scan_id,
),
check_integrations_task.si(
tenant_id=tenant_id,
provider_id=provider_id,
scan_id=scan_id,
),
).apply_async()
@@ -311,7 +304,7 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
frameworks_bulk = Compliance.get_bulk(provider_type)
frameworks_avail = get_compliance_frameworks(provider_type)
out_dir, comp_dir, _ = _generate_output_directory(
out_dir, comp_dir = _generate_output_directory(
DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
)
@@ -414,24 +407,7 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
writer._data.clear()
compressed = _compress_output_files(out_dir)
upload_uri = _upload_to_s3(
tenant_id,
scan_id,
compressed,
os.path.basename(compressed),
)
compliance_dir_path = Path(comp_dir).parent
if compliance_dir_path.exists():
for artifact_path in sorted(compliance_dir_path.iterdir()):
if artifact_path.is_file():
_upload_to_s3(
tenant_id,
scan_id,
str(artifact_path),
f"compliance/{artifact_path.name}",
)
upload_uri = _upload_to_s3(tenant_id, compressed, scan_id)
# S3 integrations (need output_directory)
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
@@ -641,21 +617,3 @@ def jira_integration_task(
return send_findings_to_jira(
tenant_id, integration_id, project_key, issue_type, finding_ids
)
@shared_task(
base=RLSTask,
name="scan-threatscore-report",
queue="scan-reports",
)
def generate_threatscore_report_task(tenant_id: str, scan_id: str, provider_id: str):
"""
Task to generate a threatscore report for a given scan.
Args:
tenant_id (str): The tenant identifier.
scan_id (str): The scan identifier.
provider_id (str): The provider identifier.
"""
return generate_threatscore_report_job(
tenant_id=tenant_id, scan_id=scan_id, provider_id=provider_id
)
+10 -32
@@ -72,26 +72,17 @@ class TestOutputs:
client_mock = MagicMock()
mock_get_client.return_value = client_mock
result = _upload_to_s3(
"tenant-id",
"scan-id",
str(zip_path),
"outputs.zip",
)
result = _upload_to_s3("tenant-id", str(zip_path), "scan-id")
expected_uri = "s3://test-bucket/tenant-id/scan-id/outputs.zip"
assert result == expected_uri
client_mock.upload_file.assert_called_once_with(
Filename=str(zip_path),
Bucket="test-bucket",
Key="tenant-id/scan-id/outputs.zip",
)
assert client_mock.upload_file.call_count == 2
@patch("tasks.jobs.export.get_s3_client")
@patch("tasks.jobs.export.base")
def test_upload_to_s3_missing_bucket(self, mock_base, mock_get_client):
mock_base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET = ""
result = _upload_to_s3("tenant", "scan", "/tmp/fake.zip", "fake.zip")
result = _upload_to_s3("tenant", "/tmp/fake.zip", "scan")
assert result is None
@patch("tasks.jobs.export.get_s3_client")
@@ -110,15 +101,11 @@ class TestOutputs:
client_mock = MagicMock()
mock_get_client.return_value = client_mock
result = _upload_to_s3(
"tenant",
"scan",
str(compliance_dir / "subdir"),
"compliance/subdir",
)
result = _upload_to_s3("tenant", str(zip_path), "scan")
assert result is None
client_mock.upload_file.assert_not_called()
expected_uri = "s3://test-bucket/tenant/scan/results.zip"
assert result == expected_uri
client_mock.upload_file.assert_called_once()
@patch(
"tasks.jobs.export.get_s3_client",
@@ -139,12 +126,7 @@ class TestOutputs:
compliance_dir.mkdir()
(compliance_dir / "report.csv").write_text("csv")
_upload_to_s3(
"tenant",
"scan",
str(zip_path),
"zipfile.zip",
)
_upload_to_s3("tenant", str(zip_path), "scan")
mock_logger.assert_called()
@patch("tasks.jobs.export.rls_transaction")
@@ -168,17 +150,15 @@ class TestOutputs:
provider = "aws"
expected_timestamp = "20230615103045"
path, compliance, threatscore = _generate_output_directory(
path, compliance = _generate_output_directory(
base_dir, provider, tenant_id, scan_id
)
assert os.path.isdir(os.path.dirname(path))
assert os.path.isdir(os.path.dirname(compliance))
assert os.path.isdir(os.path.dirname(threatscore))
assert path.endswith(f"{provider}-{expected_timestamp}")
assert compliance.endswith(f"{provider}-{expected_timestamp}")
assert threatscore.endswith(f"{provider}-{expected_timestamp}")
@patch("tasks.jobs.export.rls_transaction")
@patch("tasks.jobs.export.Scan")
@@ -201,14 +181,12 @@ class TestOutputs:
provider = "aws/test@check"
expected_timestamp = "20230615103045"
path, compliance, threatscore = _generate_output_directory(
path, compliance = _generate_output_directory(
base_dir, provider, tenant_id, scan_id
)
assert os.path.isdir(os.path.dirname(path))
assert os.path.isdir(os.path.dirname(compliance))
assert os.path.isdir(os.path.dirname(threatscore))
assert path.endswith(f"aws-test-check-{expected_timestamp}")
assert compliance.endswith(f"aws-test-check-{expected_timestamp}")
assert threatscore.endswith(f"aws-test-check-{expected_timestamp}")
-963
@@ -1,963 +0,0 @@
import uuid
from pathlib import Path
from unittest.mock import MagicMock, patch
import matplotlib
import pytest
from tasks.jobs.report import (
_aggregate_requirement_statistics_from_database,
_calculate_requirements_data_from_statistics,
_load_findings_for_requirement_checks,
generate_threatscore_report,
generate_threatscore_report_job,
)
from tasks.tasks import generate_threatscore_report_task
from api.models import Finding, StatusChoices
from prowler.lib.check.models import Severity
matplotlib.use("Agg") # Use non-interactive backend for tests
@pytest.mark.django_db
class TestGenerateThreatscoreReport:
def setup_method(self):
self.scan_id = str(uuid.uuid4())
self.provider_id = str(uuid.uuid4())
self.tenant_id = str(uuid.uuid4())
def test_no_findings_returns_early(self):
with patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter:
mock_filter.return_value.exists.return_value = False
result = generate_threatscore_report_job(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
provider_id=self.provider_id,
)
assert result == {"upload": False}
mock_filter.assert_called_once_with(scan_id=self.scan_id)
@patch("tasks.jobs.report.rmtree")
@patch("tasks.jobs.report._upload_to_s3")
@patch("tasks.jobs.report.generate_threatscore_report")
@patch("tasks.jobs.report._generate_output_directory")
@patch("tasks.jobs.report.Provider.objects.get")
@patch("tasks.jobs.report.ScanSummary.objects.filter")
def test_generate_threatscore_report_happy_path(
self,
mock_scan_summary_filter,
mock_provider_get,
mock_generate_output_directory,
mock_generate_report,
mock_upload,
mock_rmtree,
):
mock_scan_summary_filter.return_value.exists.return_value = True
mock_provider = MagicMock()
mock_provider.uid = "provider-uid"
mock_provider.provider = "aws"
mock_provider_get.return_value = mock_provider
mock_generate_output_directory.return_value = (
"/tmp/output",
"/tmp/compressed",
"/tmp/threatscore_path",
)
mock_upload.return_value = "s3://bucket/threatscore_report.pdf"
result = generate_threatscore_report_job(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
provider_id=self.provider_id,
)
assert result == {"upload": True}
mock_generate_report.assert_called_once_with(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
compliance_id="prowler_threatscore_aws",
output_path="/tmp/threatscore_path_threatscore_report.pdf",
provider_id=self.provider_id,
only_failed=True,
min_risk_level=4,
)
mock_upload.assert_called_once_with(
self.tenant_id,
self.scan_id,
"/tmp/threatscore_path_threatscore_report.pdf",
"threatscore/threatscore_path_threatscore_report.pdf",
)
mock_rmtree.assert_called_once_with(
Path("/tmp/threatscore_path_threatscore_report.pdf").parent,
ignore_errors=True,
)
def test_generate_threatscore_report_fails_upload(self):
with (
patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
patch("tasks.jobs.report.generate_threatscore_report"),
patch("tasks.jobs.report._upload_to_s3", return_value=None),
):
mock_filter.return_value.exists.return_value = True
# Mock provider
mock_provider = MagicMock()
mock_provider.uid = "aws-provider-uid"
mock_provider.provider = "aws"
mock_provider_get.return_value = mock_provider
mock_gen_dir.return_value = (
"/tmp/output",
"/tmp/compressed",
"/tmp/threatscore_path",
)
result = generate_threatscore_report_job(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
provider_id=self.provider_id,
)
assert result == {"upload": False}
def test_generate_threatscore_report_logs_rmtree_exception(self, caplog):
with (
patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
patch("tasks.jobs.report.generate_threatscore_report"),
patch(
"tasks.jobs.report._upload_to_s3", return_value="s3://bucket/report.pdf"
),
patch(
"tasks.jobs.report.rmtree", side_effect=Exception("Test deletion error")
),
):
mock_filter.return_value.exists.return_value = True
# Mock provider
mock_provider = MagicMock()
mock_provider.uid = "aws-provider-uid"
mock_provider.provider = "aws"
mock_provider_get.return_value = mock_provider
mock_gen_dir.return_value = (
"/tmp/output",
"/tmp/compressed",
"/tmp/threatscore_path",
)
with caplog.at_level("ERROR"):
generate_threatscore_report_job(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
provider_id=self.provider_id,
)
assert "Error deleting output files" in caplog.text
def test_generate_threatscore_report_azure_provider(self):
with (
patch("tasks.jobs.report.ScanSummary.objects.filter") as mock_filter,
patch("tasks.jobs.report.Provider.objects.get") as mock_provider_get,
patch("tasks.jobs.report._generate_output_directory") as mock_gen_dir,
patch("tasks.jobs.report.generate_threatscore_report") as mock_generate,
patch(
"tasks.jobs.report._upload_to_s3", return_value="s3://bucket/report.pdf"
),
patch("tasks.jobs.report.rmtree"),
):
mock_filter.return_value.exists.return_value = True
mock_provider = MagicMock()
mock_provider.uid = "azure-provider-uid"
mock_provider.provider = "azure"
mock_provider_get.return_value = mock_provider
mock_gen_dir.return_value = (
"/tmp/output",
"/tmp/compressed",
"/tmp/threatscore_path",
)
generate_threatscore_report_job(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
provider_id=self.provider_id,
)
mock_generate.assert_called_once_with(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
compliance_id="prowler_threatscore_azure",
output_path="/tmp/threatscore_path_threatscore_report.pdf",
provider_id=self.provider_id,
only_failed=True,
min_risk_level=4,
)
@pytest.mark.django_db
class TestAggregateRequirementStatistics:
"""Test suite for _aggregate_requirement_statistics_from_database function."""
def test_aggregates_findings_correctly(self, tenants_fixture, scans_fixture):
"""Verify correct pass/total counts per check are aggregated from database."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
# Create findings with different check_ids and statuses
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-1",
check_id="check_1",
status=StatusChoices.PASS,
severity=Severity.high,
impact=Severity.high,
check_metadata={},
raw_result={},
)
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-2",
check_id="check_1",
status=StatusChoices.FAIL,
severity=Severity.high,
impact=Severity.high,
check_metadata={},
raw_result={},
)
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-3",
check_id="check_2",
status=StatusChoices.PASS,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
result = _aggregate_requirement_statistics_from_database(
str(tenant.id), str(scan.id)
)
assert result == {
"check_1": {"passed": 1, "total": 2},
"check_2": {"passed": 1, "total": 1},
}
def test_handles_empty_scan(self, tenants_fixture, scans_fixture):
"""Return empty dict when no findings exist for the scan."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
result = _aggregate_requirement_statistics_from_database(
str(tenant.id), str(scan.id)
)
assert result == {}
def test_multiple_findings_same_check(self, tenants_fixture, scans_fixture):
"""Aggregate multiple findings for same check_id correctly."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
# Create 5 findings for same check, 3 passed
for i in range(3):
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid=f"finding-pass-{i}",
check_id="check_same",
status=StatusChoices.PASS,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
for i in range(2):
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid=f"finding-fail-{i}",
check_id="check_same",
status=StatusChoices.FAIL,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
result = _aggregate_requirement_statistics_from_database(
str(tenant.id), str(scan.id)
)
assert result == {"check_same": {"passed": 3, "total": 5}}
def test_only_failed_findings(self, tenants_fixture, scans_fixture):
"""Correctly count when all findings are FAIL status."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-fail-1",
check_id="check_fail",
status=StatusChoices.FAIL,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-fail-2",
check_id="check_fail",
status=StatusChoices.FAIL,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
result = _aggregate_requirement_statistics_from_database(
str(tenant.id), str(scan.id)
)
assert result == {"check_fail": {"passed": 0, "total": 2}}
def test_mixed_statuses(self, tenants_fixture, scans_fixture):
"""Test with PASS, FAIL, and MANUAL statuses mixed."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-pass",
check_id="check_mixed",
status=StatusChoices.PASS,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-fail",
check_id="check_mixed",
status=StatusChoices.FAIL,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-manual",
check_id="check_mixed",
status=StatusChoices.MANUAL,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
result = _aggregate_requirement_statistics_from_database(
str(tenant.id), str(scan.id)
)
# Only PASS status is counted as passed
assert result == {"check_mixed": {"passed": 1, "total": 3}}
@pytest.mark.django_db
class TestLoadFindingsForChecks:
"""Test suite for _load_findings_for_requirement_checks function."""
def test_loads_only_requested_checks(
self, tenants_fixture, scans_fixture, providers_fixture
):
"""Verify only findings for specified check_ids are loaded."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
providers_fixture[0]
# Create findings with different check_ids
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-1",
check_id="check_requested",
status=StatusChoices.PASS,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-2",
check_id="check_not_requested",
status=StatusChoices.FAIL,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
mock_provider = MagicMock()
with patch(
"tasks.jobs.report.FindingOutput.transform_api_finding"
) as mock_transform:
mock_finding_output = MagicMock()
mock_finding_output.check_id = "check_requested"
mock_transform.return_value = mock_finding_output
result = _load_findings_for_requirement_checks(
str(tenant.id), str(scan.id), ["check_requested"], mock_provider
)
# Only one finding should be loaded
assert "check_requested" in result
assert "check_not_requested" not in result
assert len(result["check_requested"]) == 1
assert mock_transform.call_count == 1
def test_empty_check_ids_returns_empty(
self, tenants_fixture, scans_fixture, providers_fixture
):
"""Return empty dict when check_ids list is empty."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
mock_provider = MagicMock()
result = _load_findings_for_requirement_checks(
str(tenant.id), str(scan.id), [], mock_provider
)
assert result == {}
def test_groups_by_check_id(
self, tenants_fixture, scans_fixture, providers_fixture
):
"""Multiple findings for same check are grouped correctly."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
# Create multiple findings for same check
for i in range(3):
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid=f"finding-{i}",
check_id="check_group",
status=StatusChoices.PASS,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
mock_provider = MagicMock()
with patch(
"tasks.jobs.report.FindingOutput.transform_api_finding"
) as mock_transform:
mock_finding_output = MagicMock()
mock_finding_output.check_id = "check_group"
mock_transform.return_value = mock_finding_output
result = _load_findings_for_requirement_checks(
str(tenant.id), str(scan.id), ["check_group"], mock_provider
)
assert len(result["check_group"]) == 3
def test_transforms_to_finding_output(
self, tenants_fixture, scans_fixture, providers_fixture
):
"""Findings are transformed using FindingOutput.transform_api_finding."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid="finding-transform",
check_id="check_transform",
status=StatusChoices.PASS,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
mock_provider = MagicMock()
with patch(
"tasks.jobs.report.FindingOutput.transform_api_finding"
) as mock_transform:
mock_finding_output = MagicMock()
mock_finding_output.check_id = "check_transform"
mock_transform.return_value = mock_finding_output
result = _load_findings_for_requirement_checks(
str(tenant.id), str(scan.id), ["check_transform"], mock_provider
)
# Verify transform was called
mock_transform.assert_called_once()
# Verify the transformed output is in the result
assert result["check_transform"][0] == mock_finding_output
def test_batched_iteration(self, tenants_fixture, scans_fixture, providers_fixture):
"""Works correctly with multiple batches of findings."""
tenant = tenants_fixture[0]
scan = scans_fixture[0]
# Create enough findings to ensure batching (assuming batch size > 1)
for i in range(10):
Finding.objects.create(
tenant_id=tenant.id,
scan=scan,
uid=f"finding-batch-{i}",
check_id="check_batch",
status=StatusChoices.PASS,
severity=Severity.medium,
impact=Severity.medium,
check_metadata={},
raw_result={},
)
mock_provider = MagicMock()
with patch(
"tasks.jobs.report.FindingOutput.transform_api_finding"
) as mock_transform:
mock_finding_output = MagicMock()
mock_finding_output.check_id = "check_batch"
mock_transform.return_value = mock_finding_output
result = _load_findings_for_requirement_checks(
str(tenant.id), str(scan.id), ["check_batch"], mock_provider
)
# All 10 findings should be loaded regardless of batching
assert len(result["check_batch"]) == 10
assert mock_transform.call_count == 10
@pytest.mark.django_db
class TestCalculateRequirementsData:
"""Test suite for _calculate_requirements_data_from_statistics function."""
def test_requirement_status_all_pass(self):
"""Status is PASS when all findings for requirement checks pass."""
mock_compliance = MagicMock()
mock_compliance.Framework = "TestFramework"
mock_compliance.Version = "1.0"
mock_requirement = MagicMock()
mock_requirement.Id = "req_1"
mock_requirement.Description = "Test requirement"
mock_requirement.Checks = ["check_1", "check_2"]
mock_requirement.Attributes = [MagicMock()]
mock_compliance.Requirements = [mock_requirement]
requirement_statistics = {
"check_1": {"passed": 5, "total": 5},
"check_2": {"passed": 3, "total": 3},
}
attributes_by_id, requirements_list = (
_calculate_requirements_data_from_statistics(
mock_compliance, requirement_statistics
)
)
assert len(requirements_list) == 1
assert requirements_list[0]["attributes"]["status"] == StatusChoices.PASS
assert requirements_list[0]["attributes"]["passed_findings"] == 8
assert requirements_list[0]["attributes"]["total_findings"] == 8
def test_requirement_status_some_fail(self):
"""Status is FAIL when some findings fail."""
mock_compliance = MagicMock()
mock_compliance.Framework = "TestFramework"
mock_compliance.Version = "1.0"
mock_requirement = MagicMock()
mock_requirement.Id = "req_2"
mock_requirement.Description = "Test requirement with failures"
mock_requirement.Checks = ["check_3"]
mock_requirement.Attributes = [MagicMock()]
mock_compliance.Requirements = [mock_requirement]
requirement_statistics = {
"check_3": {"passed": 2, "total": 5},
}
attributes_by_id, requirements_list = (
_calculate_requirements_data_from_statistics(
mock_compliance, requirement_statistics
)
)
assert len(requirements_list) == 1
assert requirements_list[0]["attributes"]["status"] == StatusChoices.FAIL
assert requirements_list[0]["attributes"]["passed_findings"] == 2
assert requirements_list[0]["attributes"]["total_findings"] == 5
def test_requirement_status_no_findings(self):
"""Status is MANUAL when no findings exist for requirement."""
mock_compliance = MagicMock()
mock_compliance.Framework = "TestFramework"
mock_compliance.Version = "1.0"
mock_requirement = MagicMock()
mock_requirement.Id = "req_3"
mock_requirement.Description = "Manual requirement"
mock_requirement.Checks = ["check_nonexistent"]
mock_requirement.Attributes = [MagicMock()]
mock_compliance.Requirements = [mock_requirement]
requirement_statistics = {}
attributes_by_id, requirements_list = (
_calculate_requirements_data_from_statistics(
mock_compliance, requirement_statistics
)
)
assert len(requirements_list) == 1
assert requirements_list[0]["attributes"]["status"] == StatusChoices.MANUAL
assert requirements_list[0]["attributes"]["passed_findings"] == 0
assert requirements_list[0]["attributes"]["total_findings"] == 0
def test_aggregates_multiple_checks(self):
"""Correctly sum stats across multiple checks in requirement."""
mock_compliance = MagicMock()
mock_compliance.Framework = "TestFramework"
mock_compliance.Version = "1.0"
mock_requirement = MagicMock()
mock_requirement.Id = "req_4"
mock_requirement.Description = "Multi-check requirement"
mock_requirement.Checks = ["check_a", "check_b", "check_c"]
mock_requirement.Attributes = [MagicMock()]
mock_compliance.Requirements = [mock_requirement]
requirement_statistics = {
"check_a": {"passed": 10, "total": 15},
"check_b": {"passed": 5, "total": 10},
"check_c": {"passed": 0, "total": 5},
}
attributes_by_id, requirements_list = (
_calculate_requirements_data_from_statistics(
mock_compliance, requirement_statistics
)
)
assert len(requirements_list) == 1
# 10 + 5 + 0 = 15 passed
assert requirements_list[0]["attributes"]["passed_findings"] == 15
# 15 + 10 + 5 = 30 total
assert requirements_list[0]["attributes"]["total_findings"] == 30
# Not all passed, so should be FAIL
assert requirements_list[0]["attributes"]["status"] == StatusChoices.FAIL
def test_returns_correct_structure(self):
"""Verify tuple structure and dict keys are correct."""
mock_compliance = MagicMock()
mock_compliance.Framework = "TestFramework"
mock_compliance.Version = "1.0"
mock_attribute = MagicMock()
mock_requirement = MagicMock()
mock_requirement.Id = "req_5"
mock_requirement.Description = "Structure test"
mock_requirement.Checks = ["check_struct"]
mock_requirement.Attributes = [mock_attribute]
mock_compliance.Requirements = [mock_requirement]
requirement_statistics = {"check_struct": {"passed": 1, "total": 1}}
attributes_by_id, requirements_list = (
_calculate_requirements_data_from_statistics(
mock_compliance, requirement_statistics
)
)
# Verify attributes_by_id structure
assert "req_5" in attributes_by_id
assert "attributes" in attributes_by_id["req_5"]
assert "description" in attributes_by_id["req_5"]
assert "req_attributes" in attributes_by_id["req_5"]["attributes"]
assert "checks" in attributes_by_id["req_5"]["attributes"]
# Verify requirements_list structure
assert len(requirements_list) == 1
req = requirements_list[0]
assert "id" in req
assert "attributes" in req
assert "framework" in req["attributes"]
assert "version" in req["attributes"]
assert "status" in req["attributes"]
assert "description" in req["attributes"]
assert "passed_findings" in req["attributes"]
assert "total_findings" in req["attributes"]
@pytest.mark.django_db
class TestGenerateThreatscoreReportFunction:
def setup_method(self):
self.scan_id = str(uuid.uuid4())
self.provider_id = str(uuid.uuid4())
self.tenant_id = str(uuid.uuid4())
self.compliance_id = "prowler_threatscore_aws"
self.output_path = "/tmp/test_threatscore_report.pdf"
@patch("tasks.jobs.report.initialize_prowler_provider")
@patch("tasks.jobs.report.Provider.objects.get")
@patch("tasks.jobs.report.Compliance.get_bulk")
@patch("tasks.jobs.report._aggregate_requirement_statistics_from_database")
@patch("tasks.jobs.report._calculate_requirements_data_from_statistics")
@patch("tasks.jobs.report._load_findings_for_requirement_checks")
@patch("tasks.jobs.report.SimpleDocTemplate")
@patch("tasks.jobs.report.Image")
@patch("tasks.jobs.report.Spacer")
@patch("tasks.jobs.report.Paragraph")
@patch("tasks.jobs.report.PageBreak")
@patch("tasks.jobs.report.Table")
@patch("tasks.jobs.report.TableStyle")
@patch("tasks.jobs.report.plt.subplots")
@patch("tasks.jobs.report.plt.savefig")
@patch("tasks.jobs.report.io.BytesIO")
def test_generate_threatscore_report_success(
self,
mock_bytesio,
mock_savefig,
mock_subplots,
mock_table_style,
mock_table,
mock_page_break,
mock_paragraph,
mock_spacer,
mock_image,
mock_doc_template,
mock_load_findings,
mock_calculate_requirements,
mock_aggregate_statistics,
mock_compliance_get_bulk,
mock_provider_get,
mock_initialize_provider,
):
"""Test the updated generate_threatscore_report using new memory-efficient architecture."""
mock_provider = MagicMock()
mock_provider.provider = "aws"
mock_provider_get.return_value = mock_provider
prowler_provider = MagicMock()
mock_initialize_provider.return_value = prowler_provider
# Mock compliance object with requirements
mock_compliance_obj = MagicMock()
mock_compliance_obj.Framework = "ProwlerThreatScore"
mock_compliance_obj.Version = "1.0"
mock_compliance_obj.Description = "Test Description"
# Configure requirement with properly set numeric attributes for chart generation
mock_requirement = MagicMock()
mock_requirement.Id = "req_1"
mock_requirement.Description = "Test requirement"
mock_requirement.Checks = ["check_1"]
# Create a properly configured attribute mock with numeric values
mock_requirement_attr = MagicMock()
mock_requirement_attr.Section = "1. IAM"
mock_requirement_attr.SubSection = "1.1 Identity"
mock_requirement_attr.Title = "Test Requirement Title"
mock_requirement_attr.LevelOfRisk = 3
mock_requirement_attr.Weight = 100
mock_requirement_attr.AttributeDescription = "Test requirement description"
mock_requirement_attr.AdditionalInformation = "Additional test information"
mock_requirement.Attributes = [mock_requirement_attr]
mock_compliance_obj.Requirements = [mock_requirement]
mock_compliance_get_bulk.return_value = {
self.compliance_id: mock_compliance_obj
}
# Mock the aggregated statistics from database
mock_aggregate_statistics.return_value = {"check_1": {"passed": 5, "total": 10}}
# Mock the calculated requirements data with properly configured attributes
mock_attributes_by_id = {
"req_1": {
"attributes": {
"req_attributes": [mock_requirement_attr],
"checks": ["check_1"],
},
"description": "Test requirement",
}
}
mock_requirements_list = [
{
"id": "req_1",
"attributes": {
"framework": "ProwlerThreatScore",
"version": "1.0",
"status": StatusChoices.FAIL,
"description": "Test requirement",
"passed_findings": 5,
"total_findings": 10,
},
}
]
mock_calculate_requirements.return_value = (
mock_attributes_by_id,
mock_requirements_list,
)
# Mock the on-demand loaded findings
mock_finding_output = MagicMock()
mock_finding_output.check_id = "check_1"
mock_finding_output.status = "FAIL"
mock_finding_output.metadata = MagicMock()
mock_finding_output.metadata.CheckTitle = "Test Check"
mock_finding_output.metadata.Severity = "HIGH"
mock_finding_output.resource_name = "test-resource"
mock_finding_output.region = "us-east-1"
mock_load_findings.return_value = {"check_1": [mock_finding_output]}
# Mock PDF generation components
mock_doc = MagicMock()
mock_doc_template.return_value = mock_doc
mock_fig, mock_ax = MagicMock(), MagicMock()
mock_subplots.return_value = (mock_fig, mock_ax)
mock_buffer = MagicMock()
mock_bytesio.return_value = mock_buffer
mock_image.return_value = MagicMock()
mock_spacer.return_value = MagicMock()
mock_paragraph.return_value = MagicMock()
mock_page_break.return_value = MagicMock()
mock_table.return_value = MagicMock()
mock_table_style.return_value = MagicMock()
# Execute the function
generate_threatscore_report(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
compliance_id=self.compliance_id,
output_path=self.output_path,
provider_id=self.provider_id,
only_failed=True,
min_risk_level=4,
)
# Verify the new workflow was followed
mock_provider_get.assert_called_once_with(id=self.provider_id)
mock_initialize_provider.assert_called_once_with(mock_provider)
mock_compliance_get_bulk.assert_called_once_with("aws")
# Verify the new functions were called in correct order with correct parameters
mock_aggregate_statistics.assert_called_once_with(self.tenant_id, self.scan_id)
mock_calculate_requirements.assert_called_once_with(
mock_compliance_obj, {"check_1": {"passed": 5, "total": 10}}
)
mock_load_findings.assert_called_once_with(
self.tenant_id, self.scan_id, ["check_1"], prowler_provider
)
# Verify PDF was built
mock_doc_template.assert_called_once()
mock_doc.build.assert_called_once()
@patch("tasks.jobs.report.initialize_prowler_provider")
@patch("tasks.jobs.report.Provider.objects.get")
@patch("tasks.jobs.report.Compliance.get_bulk")
@patch("tasks.jobs.report.Finding.all_objects.filter")
def test_generate_threatscore_report_exception_handling(
self,
mock_finding_filter,
mock_compliance_get_bulk,
mock_provider_get,
mock_initialize_provider,
):
mock_provider_get.side_effect = Exception("Provider not found")
with pytest.raises(Exception, match="Provider not found"):
generate_threatscore_report(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
compliance_id=self.compliance_id,
output_path=self.output_path,
provider_id=self.provider_id,
only_failed=True,
min_risk_level=4,
)
@pytest.mark.django_db
class TestGenerateThreatscoreReportTask:
def setup_method(self):
self.scan_id = str(uuid.uuid4())
self.provider_id = str(uuid.uuid4())
self.tenant_id = str(uuid.uuid4())
@patch("tasks.tasks.generate_threatscore_report_job")
def test_generate_threatscore_report_task_calls_job(self, mock_generate_job):
mock_generate_job.return_value = {"upload": True}
result = generate_threatscore_report_task(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
provider_id=self.provider_id,
)
assert result == {"upload": True}
mock_generate_job.assert_called_once_with(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
provider_id=self.provider_id,
)
@patch("tasks.tasks.generate_threatscore_report_job")
def test_generate_threatscore_report_task_handles_job_exception(
self, mock_generate_job
):
mock_generate_job.side_effect = Exception("Job failed")
with pytest.raises(Exception, match="Job failed"):
generate_threatscore_report_task(
tenant_id=self.tenant_id,
scan_id=self.scan_id,
provider_id=self.provider_id,
)
+59 -127
@@ -98,11 +98,7 @@ class TestGenerateOutputs:
),
patch(
"tasks.tasks._generate_output_directory",
return_value=(
"/tmp/test/out-dir",
"/tmp/test/comp-dir",
"/tmp/test/threat-dir",
),
return_value=("out-dir", "comp-dir"),
),
patch("tasks.tasks.Scan.all_objects.filter") as mock_scan_update,
patch("tasks.tasks.rmtree"),
@@ -130,8 +126,7 @@ class TestGenerateOutputs:
patch("tasks.tasks.get_compliance_frameworks"),
patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
patch(
"tasks.tasks._generate_output_directory",
return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
"tasks.tasks._generate_output_directory", return_value=("out", "comp")
),
patch("tasks.tasks.FindingOutput._transform_findings_stats"),
patch("tasks.tasks.FindingOutput.transform_api_finding"),
@@ -173,35 +168,15 @@ class TestGenerateOutputs:
mock_finding_output = MagicMock()
mock_finding_output.compliance = {"cis": ["requirement-1", "requirement-2"]}
html_writer_mock = MagicMock()
html_writer_mock._data = []
html_writer_mock.close_file = False
html_writer_mock.transform = MagicMock()
html_writer_mock.batch_write_data_to_file = MagicMock()
compliance_writer_mock = MagicMock()
compliance_writer_mock._data = []
compliance_writer_mock.close_file = False
compliance_writer_mock.transform = MagicMock()
compliance_writer_mock.batch_write_data_to_file = MagicMock()
# Create a mock class that returns our mock instance when called
mock_compliance_class = MagicMock(return_value=compliance_writer_mock)
mock_provider = MagicMock()
mock_provider.provider = "aws"
mock_provider.uid = "test-provider-uid"
with (
patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter,
patch("tasks.tasks.Provider.objects.get", return_value=mock_provider),
patch("tasks.tasks.Provider.objects.get"),
patch("tasks.tasks.initialize_prowler_provider"),
patch("tasks.tasks.Compliance.get_bulk", return_value={"cis": MagicMock()}),
patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
patch(
"tasks.tasks._generate_output_directory",
return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
"tasks.tasks._generate_output_directory", return_value=("out", "comp")
),
patch(
"tasks.tasks.FindingOutput._transform_findings_stats",
@@ -215,20 +190,6 @@ class TestGenerateOutputs:
patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/f.zip"),
patch("tasks.tasks.Scan.all_objects.filter"),
patch("tasks.tasks.rmtree"),
patch(
"tasks.tasks.OUTPUT_FORMATS_MAPPING",
{
"html": {
"class": lambda *args, **kwargs: html_writer_mock,
"suffix": ".html",
"kwargs": {},
}
},
),
patch(
"tasks.tasks.COMPLIANCE_CLASS_MAP",
{"aws": [(lambda x: True, mock_compliance_class)]},
),
):
mock_filter.return_value.exists.return_value = True
mock_findings.return_value.order_by.return_value.iterator.return_value = [
@@ -236,12 +197,29 @@ class TestGenerateOutputs:
True,
]
generate_outputs_task(
scan_id=self.scan_id,
provider_id=self.provider_id,
tenant_id=self.tenant_id,
)
html_writer_mock.batch_write_data_to_file.assert_called_once()
html_writer_mock = MagicMock()
with (
patch(
"tasks.tasks.OUTPUT_FORMATS_MAPPING",
{
"html": {
"class": lambda *args, **kwargs: html_writer_mock,
"suffix": ".html",
"kwargs": {},
}
},
),
patch(
"tasks.tasks.COMPLIANCE_CLASS_MAP",
{"aws": [(lambda x: True, MagicMock())]},
),
):
generate_outputs_task(
scan_id=self.scan_id,
provider_id=self.provider_id,
tenant_id=self.tenant_id,
)
html_writer_mock.batch_write_data_to_file.assert_called_once()
def test_transform_called_only_on_second_batch(self):
raw1 = MagicMock()
@@ -278,11 +256,7 @@ class TestGenerateOutputs:
),
patch(
"tasks.tasks._generate_output_directory",
return_value=(
"/tmp/test/outdir",
"/tmp/test/compdir",
"/tmp/test/threatdir",
),
return_value=("outdir", "compdir"),
),
patch("tasks.tasks._compress_output_files", return_value="outdir.zip"),
patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/outdir.zip"),
@@ -329,14 +303,12 @@ class TestGenerateOutputs:
def __init__(self, *args, **kwargs):
self.transform_calls = []
self._data = []
self.close_file = False
writer_instances.append(self)
def transform(self, fos, comp_obj, name):
self.transform_calls.append((fos, comp_obj, name))
def batch_write_data_to_file(self):
# Mock implementation - do nothing
pass
two_batches = [
@@ -357,11 +329,7 @@ class TestGenerateOutputs:
patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
patch(
"tasks.tasks._generate_output_directory",
return_value=(
"/tmp/test/outdir",
"/tmp/test/compdir",
"/tmp/test/threatdir",
),
return_value=("outdir", "compdir"),
),
patch("tasks.tasks.FindingOutput._transform_findings_stats"),
patch(
@@ -400,35 +368,15 @@ class TestGenerateOutputs:
mock_finding_output = MagicMock()
mock_finding_output.compliance = {"cis": ["requirement-1", "requirement-2"]}
json_writer_mock = MagicMock()
json_writer_mock._data = []
json_writer_mock.close_file = False
json_writer_mock.transform = MagicMock()
json_writer_mock.batch_write_data_to_file = MagicMock()
compliance_writer_mock = MagicMock()
compliance_writer_mock._data = []
compliance_writer_mock.close_file = False
compliance_writer_mock.transform = MagicMock()
compliance_writer_mock.batch_write_data_to_file = MagicMock()
# Create a mock class that returns our mock instance when called
mock_compliance_class = MagicMock(return_value=compliance_writer_mock)
mock_provider = MagicMock()
mock_provider.provider = "aws"
mock_provider.uid = "test-provider-uid"
with (
patch("tasks.tasks.ScanSummary.objects.filter") as mock_filter,
patch("tasks.tasks.Provider.objects.get", return_value=mock_provider),
patch("tasks.tasks.Provider.objects.get"),
patch("tasks.tasks.initialize_prowler_provider"),
patch("tasks.tasks.Compliance.get_bulk", return_value={"cis": MagicMock()}),
patch("tasks.tasks.get_compliance_frameworks", return_value=["cis"]),
patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
patch(
"tasks.tasks._generate_output_directory",
return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
"tasks.tasks._generate_output_directory", return_value=("out", "comp")
),
patch(
"tasks.tasks.FindingOutput._transform_findings_stats",
@@ -442,20 +390,6 @@ class TestGenerateOutputs:
patch("tasks.tasks._upload_to_s3", return_value="s3://bucket/file.zip"),
patch("tasks.tasks.Scan.all_objects.filter"),
patch("tasks.tasks.rmtree", side_effect=Exception("Test deletion error")),
patch(
"tasks.tasks.OUTPUT_FORMATS_MAPPING",
{
"json": {
"class": lambda *args, **kwargs: json_writer_mock,
"suffix": ".json",
"kwargs": {},
}
},
),
patch(
"tasks.tasks.COMPLIANCE_CLASS_MAP",
{"aws": [(lambda x: True, mock_compliance_class)]},
),
):
mock_filter.return_value.exists.return_value = True
mock_findings.return_value.order_by.return_value.iterator.return_value = [
@@ -463,13 +397,29 @@ class TestGenerateOutputs:
True,
]
with caplog.at_level("ERROR"):
generate_outputs_task(
scan_id=self.scan_id,
provider_id=self.provider_id,
tenant_id=self.tenant_id,
)
assert "Error deleting output files" in caplog.text
with (
patch(
"tasks.tasks.OUTPUT_FORMATS_MAPPING",
{
"json": {
"class": lambda *args, **kwargs: MagicMock(),
"suffix": ".json",
"kwargs": {},
}
},
),
patch(
"tasks.tasks.COMPLIANCE_CLASS_MAP",
{"aws": [(lambda x: True, MagicMock())]},
),
):
with caplog.at_level("ERROR"):
generate_outputs_task(
scan_id=self.scan_id,
provider_id=self.provider_id,
tenant_id=self.tenant_id,
)
assert "Error deleting output files" in caplog.text
@patch("tasks.tasks.rls_transaction")
@patch("tasks.tasks.Integration.objects.filter")
@@ -485,8 +435,7 @@ class TestGenerateOutputs:
patch("tasks.tasks.get_compliance_frameworks", return_value=[]),
patch("tasks.tasks.Finding.all_objects.filter") as mock_findings,
patch(
"tasks.tasks._generate_output_directory",
return_value=("/tmp/test/out", "/tmp/test/comp", "/tmp/test/threat"),
"tasks.tasks._generate_output_directory", return_value=("out", "comp")
),
patch("tasks.tasks.FindingOutput._transform_findings_stats"),
patch("tasks.tasks.FindingOutput.transform_api_finding"),
@@ -527,15 +476,8 @@ class TestScanCompleteTasks:
@patch("tasks.tasks.create_compliance_requirements_task.apply_async")
@patch("tasks.tasks.perform_scan_summary_task.si")
@patch("tasks.tasks.generate_outputs_task.si")
@patch("tasks.tasks.generate_threatscore_report_task.si")
@patch("tasks.tasks.check_integrations_task.si")
def test_scan_complete_tasks(
self,
mock_check_integrations_task,
mock_threatscore_task,
mock_outputs_task,
mock_scan_summary_task,
mock_compliance_tasks,
self, mock_outputs_task, mock_scan_summary_task, mock_compliance_tasks
):
_perform_scan_complete_tasks("tenant-id", "scan-id", "provider-id")
mock_compliance_tasks.assert_called_once_with(
@@ -550,16 +492,6 @@ class TestScanCompleteTasks:
provider_id="provider-id",
tenant_id="tenant-id",
)
mock_threatscore_task.assert_called_once_with(
tenant_id="tenant-id",
scan_id="scan-id",
provider_id="provider-id",
)
mock_check_integrations_task.assert_called_once_with(
tenant_id="tenant-id",
provider_id="provider-id",
scan_id="scan-id",
)
@pytest.mark.django_db
@@ -730,7 +662,7 @@ class TestCheckIntegrationsTask:
mock_initialize_provider.return_value = MagicMock()
mock_compliance_bulk.return_value = {}
mock_get_frameworks.return_value = []
mock_generate_dir.return_value = ("out-dir", "comp-dir", "threat-dir")
mock_generate_dir.return_value = ("out-dir", "comp-dir")
mock_transform_stats.return_value = {"stats": "data"}
# Mock findings
@@ -855,7 +787,7 @@ class TestCheckIntegrationsTask:
mock_initialize_provider.return_value = MagicMock()
mock_compliance_bulk.return_value = {}
mock_get_frameworks.return_value = []
mock_generate_dir.return_value = ("out-dir", "comp-dir", "threat-dir")
mock_generate_dir.return_value = ("out-dir", "comp-dir")
mock_transform_stats.return_value = {"stats": "data"}
# Mock findings
@@ -971,7 +903,7 @@ class TestCheckIntegrationsTask:
mock_initialize_provider.return_value = MagicMock()
mock_compliance_bulk.return_value = {}
mock_get_frameworks.return_value = []
mock_generate_dir.return_value = ("out-dir", "comp-dir", "threat-dir")
mock_generate_dir.return_value = ("out-dir", "comp-dir")
mock_transform_stats.return_value = {"stats": "data"}
# Mock findings
File diff suppressed because it is too large
+4 -300
View File
@@ -5,7 +5,8 @@ title: 'Prowler Services'
Here you can find how to create a new service, or to complement an existing one, for a [Prowler Provider](/developer-guide/provider).
<Note>
First ensure that the provider to which you want to add the service already exists. You can check the existing providers [here](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers). If the provider is not present, please refer to the [Provider](./provider.md) documentation to create it from scratch.
First ensure that the provider to which you want to add the service already exists. You can check the existing providers [here](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers). If the provider is not present, please refer to the [Provider](/developer-guide/provider) documentation to create it from scratch.
</Note>
## Introduction
@@ -200,11 +201,11 @@ class <Item>(BaseModel):
#### Service Attributes
_Optimized Data Storage with Python Dictionaries_
*Optimized Data Storage with Python Dictionaries*
Each group of resources within a service should be structured as a Python [dictionary](https://docs.python.org/3/tutorial/datastructures.html#dictionaries) to enable efficient lookups. Dictionary lookups have [O(1) complexity](https://en.wikipedia.org/wiki/Big_O_notation#Orders_of_common_functions) and are performed constantly during check execution.
_Assigning Unique Identifiers_
*Assigning Unique Identifiers*
Each dictionary key must be a unique ID that identifies the resource unambiguously.
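As a minimal sketch of this pattern (the service, model, and field names below are illustrative, not taken from the Prowler codebase), a service stores each resource group in a dictionary keyed by a unique identifier such as the ARN:
```python
from pydantic import BaseModel


class Queue(BaseModel):
    """Illustrative resource model; field names are hypothetical."""

    arn: str
    name: str
    region: str


class ExampleService:
    """Stores each resource group as a dict keyed by a unique ID (here, the ARN)."""

    def __init__(self) -> None:
        self.queues: dict[str, Queue] = {}

    def _list_queues(self, discovered: list[Queue]) -> None:
        for queue in discovered:
            # O(1) insertion now, O(1) lookup later when checks run
            self.queues[queue.arn] = queue


service = ExampleService()
service._list_queues(
    [Queue(arn="arn:aws:sqs:us-east-1:123456789012:my-queue", name="my-queue", region="us-east-1")]
)
# Constant-time membership check by the unique key
assert "arn:aws:sqs:us-east-1:123456789012:my-queue" in service.queues
```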
@@ -240,301 +241,6 @@ Provider-Specific Permissions Documentation:
- [M365](/user-guide/providers/microsoft365/authentication#required-permissions)
- [GitHub](/user-guide/providers/github/authentication)
## Service Architecture and Cross-Service Communication
### Core Principle: Service Isolation with Client Communication
Each service must contain **ONLY** the information unique to that specific service. When a check requires information from multiple services, it must use the **client objects** of other services rather than directly accessing their data structures.
This architecture ensures:
- **Loose coupling** between services
- **Clear separation of concerns**
- **Maintainable and testable code**
- **Consistent data access patterns**
### Cross-Service Communication Pattern
Instead of services directly accessing each other's internal data, checks should import and use client objects:
**❌ INCORRECT - Direct data access:**
```python
# DON'T DO THIS
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import cloudtrail_service
from prowler.providers.aws.services.s3.s3_service import s3_service
class cloudtrail_bucket_requires_mfa_delete(Check):
def execute(self):
# WRONG: Directly accessing service data
for trail in cloudtrail_service.trails.values():
for bucket in s3_service.buckets.values():
# Direct access violates separation of concerns
```
**✅ CORRECT - Client-based communication:**
```python
# DO THIS INSTEAD
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import cloudtrail_client
from prowler.providers.aws.services.s3.s3_client import s3_client
class cloudtrail_bucket_requires_mfa_delete(Check):
def execute(self):
# CORRECT: Using client objects for cross-service communication
for trail in cloudtrail_client.trails.values():
trail_bucket = trail.s3_bucket
for bucket in s3_client.buckets.values():
if trail_bucket == bucket.name:
# Use bucket properties through s3_client
if bucket.mfa_delete:
# Implementation logic
```
### Real-World Example: CloudTrail + S3 Integration
This example demonstrates how CloudTrail checks validate S3 bucket configurations:
```python
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import cloudtrail_client
from prowler.providers.aws.services.s3.s3_client import s3_client
class cloudtrail_bucket_requires_mfa_delete(Check):
def execute(self):
findings = []
if cloudtrail_client.trails is not None:
for trail in cloudtrail_client.trails.values():
if trail.is_logging:
trail_bucket_is_in_account = False
trail_bucket = trail.s3_bucket
# Cross-service communication: CloudTrail check uses S3 client
for bucket in s3_client.buckets.values():
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.mfa_delete:
report.status = "PASS"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) has MFA delete enabled."
# Handle cross-account scenarios
if not trail_bucket_is_in_account:
report.status = "MANUAL"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket or out of Prowler's audit scope, please check it manually."
findings.append(report)
return findings
```
**Key Benefits:**
- **CloudTrail service** only contains CloudTrail-specific data (trails, configurations)
- **S3 service** only contains S3-specific data (buckets, policies, ACLs)
- **Check logic** orchestrates between services using their public client interfaces
- **Cross-account detection** is handled gracefully when resources span accounts
### Service Consolidation Guidelines
**When to combine services in the same file:**
Implement multiple services as **separate classes in the same file** when two services are **practically the same** or one is a **direct extension** of another.
**Example: S3 and S3Control**
S3Control is an extension of S3 that provides account-level controls and access points. Both are implemented in `s3_service.py`:
```python
# File: prowler/providers/aws/services/s3/s3_service.py
class S3(AWSService):
"""Standard S3 service for bucket operations"""
def __init__(self, provider):
super().__init__(__class__.__name__, provider)
self.buckets = {}
self.regions_with_buckets = []
# S3-specific initialization
self._list_buckets(provider)
self._get_bucket_versioning()
# ... other S3-specific operations
class S3Control(AWSService):
"""S3Control service for account-level and access point operations"""
def __init__(self, provider):
super().__init__(__class__.__name__, provider)
self.account_public_access_block = None
self.access_points = {}
# S3Control-specific initialization
self._get_public_access_block()
self._list_access_points()
# ... other S3Control-specific operations
```
**Separate client files:**
```python
# File: prowler/providers/aws/services/s3/s3_client.py
from prowler.providers.aws.services.s3.s3_service import S3
s3_client = S3(Provider.get_global_provider())
# File: prowler/providers/aws/services/s3/s3control_client.py
from prowler.providers.aws.services.s3.s3_service import S3Control
s3control_client = S3Control(Provider.get_global_provider())
```
**When NOT to consolidate services:**
Keep services separate when they:
- **Operate on different resource types** (EC2 vs RDS)
- **Have different authentication mechanisms** (different API endpoints)
- **Serve different operational domains** (IAM vs CloudTrail)
- **Have different regional behaviors** (global vs regional services)
### Cross-Service Dependencies Guidelines
**1. Always use client imports:**
```python
# Correct pattern
from prowler.providers.aws.services.service_a.service_a_client import service_a_client
from prowler.providers.aws.services.service_b.service_b_client import service_b_client
```
**2. Handle missing resources gracefully:**
```python
# Handle cross-service scenarios
resource_found_in_account = False
for external_resource in other_service_client.resources.values():
if target_resource_id == external_resource.id:
resource_found_in_account = True
# Process found resource
break
if not resource_found_in_account:
# Handle cross-account or missing resource scenarios
report.status = "MANUAL"
report.status_extended = "Resource is cross-account or out of audit scope"
```
**3. Document cross-service dependencies:**
```python
class check_with_dependencies(Check):
"""
Check Description
Dependencies:
- service_a_client: For primary resource information
- service_b_client: For related resource validation
- service_c_client: For policy analysis
"""
```
## Regional Service Implementation
When implementing services for regional providers (like AWS, Azure, GCP), special considerations are needed to handle resource discovery across multiple geographic locations. This section provides a complete guide using AWS as the reference example.
### Regional vs Non-Regional Services
**Regional Services:** Require iteration across multiple geographic locations where resources may exist (e.g., EC2 instances, VPC, RDS databases).
**Non-Regional/Global Services:** Operate at a global or tenant level without regional concepts (e.g., IAM users, Route53 hosted zones).
### AWS Regional Implementation Example
AWS is the perfect example of a regional provider. Here's how Prowler handles AWS's regional architecture:
```python
# File: prowler/providers/aws/services/ec2/ec2_service.py
class EC2(AWSService):
def __init__(self, provider):
super().__init__(__class__.__name__, provider)
self.instances = {}
self.security_groups = {}
# Regional resource discovery across all AWS regions
self.__threading_call__(self._describe_instances)
self.__threading_call__(self._describe_security_groups)
def _describe_instances(self, regional_client):
"""Discover EC2 instances in a specific region"""
try:
describe_instances_paginator = regional_client.get_paginator("describe_instances")
for page in describe_instances_paginator.paginate():
for reservation in page["Reservations"]:
for instance in reservation["Instances"]:
# Each instance includes its region
self.instances[instance["InstanceId"]] = Instance(
id=instance["InstanceId"],
region=regional_client.region,
state=instance["State"]["Name"],
# ... other properties
)
except Exception as error:
logger.error(f"Failed to describe instances in {regional_client.region}: {error}")
```
#### Regional Check Execution
```python
# File: prowler/providers/aws/services/ec2/ec2_instance_public_ip/ec2_instance_public_ip.py
class ec2_instance_public_ip(Check):
def execute(self):
findings = []
# Automatically iterates across ALL AWS regions where instances exist
for instance in ec2_client.instances.values():
report = Check_Report_AWS(metadata=self.metadata(), resource=instance)
report.region = instance.region # Critical: region attribution
report.resource_arn = f"arn:aws:ec2:{instance.region}:{instance.account_id}:instance/{instance.id}"
if instance.public_ip:
report.status = "FAIL"
report.status_extended = f"Instance {instance.id} in {instance.region} has public IP {instance.public_ip}"
else:
report.status = "PASS"
report.status_extended = f"Instance {instance.id} in {instance.region} does not have a public IP"
findings.append(report)
return findings
```
#### Key AWS Regional Features
**Region-Specific ARNs:**
```
arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0
arn:aws:s3:eu-west-1:123456789012:bucket/my-bucket
arn:aws:rds:ap-southeast-2:123456789012:db:my-database
```
**Parallel Processing:**
- Each region processed independently in separate threads
- Failed regions don't affect other regions
- Users can restrict the scan to specific regions: `-f us-east-1`
**Global vs Regional Services:**
- **Regional**: EC2, RDS, VPC (require region iteration)
- **Global**: IAM, Route53, CloudFront (single `us-east-1` call)
This architecture allows Prowler to efficiently scan AWS accounts with resources spread across multiple regions while maintaining performance and error isolation.
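The parallel, per-region discovery described above can be sketched with standard library primitives. This is a hedged illustration of the pattern, not the actual `__threading_call__` implementation; `RegionalClient`, `list_instances`, and `discover_all_regions` are hypothetical names:
```python
from concurrent.futures import ThreadPoolExecutor


class RegionalClient:
    """Hypothetical stand-in for a per-region SDK client."""

    def __init__(self, region: str) -> None:
        self.region = region

    def list_instances(self) -> list[str]:
        # A real service would call the provider API scoped to this region.
        return [f"i-{self.region}-example"]


def discover_all_regions(regions: list[str]) -> dict[str, list[str]]:
    """Run per-region discovery in parallel; a failure in one region does not abort the others."""
    results: dict[str, list[str]] = {}

    def _discover(region: str) -> None:
        client = RegionalClient(region)
        try:
            results[region] = client.list_instances()
        except Exception as error:  # keep regional failures isolated
            print(f"{region} -- {error}")

    with ThreadPoolExecutor() as executor:
        executor.map(_discover, regions)  # all regions scheduled; shutdown waits for completion
    return results


print(discover_all_regions(["us-east-1", "eu-west-1"]))
```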
### Regional Service Best Practices
1. **Use Threading for Regional Discovery**: Leverage the `__threading_call__` method to parallelize resource discovery across regions
2. **Store Region Information**: Always include region metadata in resource objects for proper attribution
3. **Handle Regional Failures Gracefully**: Ensure that failures in one region don't affect others
4. **Optimize for Performance**: Use paginated calls and efficient data structures for large-scale resource discovery
5. **Support Region Filtering**: Allow users to limit scans to specific regions for focused audits
## Best Practices
- When available in the provider, use threading or parallelization utilities for all methods that can be parallelized to maximize performance and reduce scan time.
@@ -546,5 +252,3 @@ This architecture allows Prowler to efficiently scan AWS accounts with resources
- Collect and store resource tags and additional attributes to support richer checks and reporting.
- Leverage shared utility helpers for session setup, identifier parsing, and other cross-cutting concerns to avoid code duplication. This kind of code is typically stored in a `lib` folder in the service folder (a small parsing sketch follows this list).
- Keep code modular, maintainable, and well-documented for ease of extension and troubleshooting.
- **Each service should contain only information unique to that specific service** - use client objects for cross-service communication.
- **Handle cross-account and missing resources gracefully** when checks span multiple services.
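As a hedged illustration of the kind of shared `lib` helper mentioned above (this is not the actual Prowler helper; `parse_arn` and `ParsedArn` are hypothetical names), an identifier-parsing routine for AWS ARNs could look like:
```python
from dataclasses import dataclass


@dataclass(frozen=True)
class ParsedArn:
    """Components of an AWS ARN: arn:partition:service:region:account-id:resource."""

    partition: str
    service: str
    region: str
    account_id: str
    resource: str


def parse_arn(arn: str) -> ParsedArn:
    """Split an ARN into its components; raises ValueError for malformed input."""
    parts = arn.split(":", 5)
    if len(parts) != 6 or parts[0] != "arn":
        raise ValueError(f"Not a valid ARN: {arn}")
    _, partition, service, region, account_id, resource = parts
    return ParsedArn(partition, service, region, account_id, resource)


parsed = parse_arn("arn:aws:ec2:us-east-1:123456789012:instance/i-1234567890abcdef0")
assert parsed.region == "us-east-1" and parsed.service == "ec2"
```
Keeping helpers like this in one place lets every check and service in the provider reuse the same parsing logic instead of re-implementing it.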
@@ -25,9 +25,6 @@ Prowler configuration is based in `.env` files. Every version of Prowler can hav
curl -LO https://raw.githubusercontent.com/prowler-cloud/prowler/refs/heads/master/.env
docker compose up -d
```
> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
</Tab>
<Tab title="GitHub">
_Requirements_:
Binary file not shown (before: 71 KiB image)
+1 -1
View File
@@ -2,7 +2,7 @@
All notable changes to the **Prowler MCP Server** are documented in this file.
## [0.1.0] (Prowler 5.13.0)
## [0.1.0] (Prowler UNRELEASED)
### Added
- Initial release of Prowler MCP Server [(#8695)](https://github.com/prowler-cloud/prowler/pull/8695)
+7 -26
View File
@@ -2,29 +2,7 @@
All notable changes to the **Prowler SDK** are documented in this file.
## [v5.14.0] (Prowler UNRELEASED)
### Added
- GitHub provider check `organization_default_repository_permission_strict` [(#8785)](https://github.com/prowler-cloud/prowler/pull/8785)
- `codepipeline_project_repo_private` check for AWS provider [(#5915)](https://github.com/prowler-cloud/prowler/pull/5915)
### Changed
- Update AWS Direct Connect service metadata to new format [(#8855)](https://github.com/prowler-cloud/prowler/pull/8855)
- Update AWS DRS service metadata to new format [(#8870)](https://github.com/prowler-cloud/prowler/pull/8870)
- Update AWS DynamoDB service metadata to new format [(#8871)](https://github.com/prowler-cloud/prowler/pull/8871)
---
## [v5.13.1] (Prowler UNRELEASED)
### Fixed
- Add `resource_name` for checks under `logging` for the GCP provider [(#9023)](https://github.com/prowler-cloud/prowler/pull/9023)
- Fix `ec2_instance_with_outdated_ami` check to handle None AMIs [(#9046)](https://github.com/prowler-cloud/prowler/pull/9046)
- Handle timestamp when transforming compliance findings in CCC [(#9042)](https://github.com/prowler-cloud/prowler/pull/9042)
---
## [v5.13.0] (Prowler v5.13.0)
## [v5.13.0] (Prowler UNRELEASED)
### Added
- Support for AdditionalURLs in outputs [(#8651)](https://github.com/prowler-cloud/prowler/pull/8651)
@@ -39,8 +17,6 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Oracle Cloud provider with CIS 3.0 benchmark [(#8893)](https://github.com/prowler-cloud/prowler/pull/8893)
- Support for Atlassian Document Format (ADF) in Jira integration [(#8878)](https://github.com/prowler-cloud/prowler/pull/8878)
- Add Common Cloud Controls for AWS, Azure and GCP [(#8000)](https://github.com/prowler-cloud/prowler/pull/8000)
- Improve Provider documentation guide [(#8430)](https://github.com/prowler-cloud/prowler/pull/8430)
- `cloudstorage_bucket_lifecycle_management_enabled` check for GCP provider [(#8936)](https://github.com/prowler-cloud/prowler/pull/8936)
### Changed
@@ -74,8 +50,13 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Prowler ThreatScore scoring calculation CLI [(#8582)](https://github.com/prowler-cloud/prowler/pull/8582)
- Add missing attributes for Mitre Attack AWS, Azure and GCP [(#8907)](https://github.com/prowler-cloud/prowler/pull/8907)
- Fix KeyError in CloudSQL and Monitoring services in GCP provider [(#8909)](https://github.com/prowler-cloud/prowler/pull/8909)
- Fix Value Errors in Entra service for M365 provider [(#8919)](https://github.com/prowler-cloud/prowler/pull/8919)
- Fix ResourceName in GCP provider [(#8928)](https://github.com/prowler-cloud/prowler/pull/8928)
---
## [v5.12.4] (Prowler UNRELEASED)
### Fixed
- Fix KeyError in `elb_ssl_listeners_use_acm_certificate` check and handle None cluster version in `eks_cluster_uses_a_supported_version` check [(#8791)](https://github.com/prowler-cloud/prowler/pull/8791)
- Fix file extension parsing for compliance reports [(#8791)](https://github.com/prowler-cloud/prowler/pull/8791)
- Added user pagination to Entra and Admincenter services [(#8858)](https://github.com/prowler-cloud/prowler/pull/8858)
+1 -1
View File
@@ -12,7 +12,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "5.13.1"
prowler_version = "5.13.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -1,4 +1,3 @@
from prowler.config.config import timestamp
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.ccc.models import CCC_AWSModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
@@ -45,7 +44,7 @@ class CCC_AWS(ComplianceOutput):
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(timestamp),
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_FamilyName=attribute.FamilyName,
@@ -74,7 +73,7 @@ class CCC_AWS(ComplianceOutput):
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(timestamp),
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_FamilyName=attribute.FamilyName,
@@ -1,4 +1,3 @@
from prowler.config.config import timestamp
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.ccc.models import CCC_AzureModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
@@ -45,7 +44,7 @@ class CCC_Azure(ComplianceOutput):
Description=compliance.Description,
SubscriptionId=finding.account_uid,
Location=finding.region,
AssessmentDate=str(timestamp),
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_FamilyName=attribute.FamilyName,
@@ -74,7 +73,7 @@ class CCC_Azure(ComplianceOutput):
Description=compliance.Description,
SubscriptionId="",
Location="",
AssessmentDate=str(timestamp),
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_FamilyName=attribute.FamilyName,
@@ -1,4 +1,3 @@
from prowler.config.config import timestamp
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.ccc.models import CCC_GCPModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
@@ -45,7 +44,7 @@ class CCC_GCP(ComplianceOutput):
Description=compliance.Description,
ProjectId=finding.account_uid,
Location=finding.region,
AssessmentDate=str(timestamp),
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_FamilyName=attribute.FamilyName,
@@ -74,7 +73,7 @@ class CCC_GCP(ComplianceOutput):
Description=compliance.Description,
ProjectId="",
Location="",
AssessmentDate=str(timestamp),
AssessmentDate=str(finding.timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_FamilyName=attribute.FamilyName,
@@ -31,7 +31,7 @@ class ec2_instance_with_outdated_ami(Check):
(image for image in ec2_client.images if image.id == instance.image_id),
None,
)
if ami and ami.owner == "amazon":
if ami.owner == "amazon":
report = Check_Report_AWS(metadata=self.metadata(), resource=instance)
report.status = "PASS"
report.status_extended = (
@@ -1,34 +0,0 @@
{
"Provider": "gcp",
"CheckID": "cloudstorage_bucket_lifecycle_management_enabled",
"CheckTitle": "Cloud Storage buckets have lifecycle management enabled",
"CheckType": [],
"ServiceName": "cloudstorage",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "storage.googleapis.com/Bucket",
"Description": "**Google Cloud Storage buckets** are evaluated for the presence of **lifecycle management** with at least one valid rule (supported action and non-empty condition) to automatically transition or delete objects and optimize storage costs.",
"Risk": "Buckets without lifecycle rules can accumulate stale data, increase storage costs, and fail to meet data retention and internal compliance requirements.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/CloudStorage/enable-lifecycle-management.html",
"https://cloud.google.com/storage/docs/lifecycle"
],
"Remediation": {
"Code": {
"CLI": "gcloud storage buckets update gs://<BUCKET_NAME> --lifecycle-file=<PATH_TO_JSON>",
"NativeIaC": "",
"Other": "1) Open Google Cloud Console → Storage → Buckets → <BUCKET_NAME>\n2) Tab 'Lifecycle'\n3) Add rule(s) to delete or transition objects (e.g., delete after 365 days; transition STANDARD→NEARLINE after 90 days)\n4) Save",
"Terraform": "```hcl\n# Example: enable lifecycle to transition and delete objects\nresource \"google_storage_bucket\" \"example\" {\n name = var.bucket_name\n location = var.location\n\n # Transition STANDARD → NEARLINE after 90 days\n lifecycle_rule {\n action {\n type = \"SetStorageClass\"\n storage_class = \"NEARLINE\"\n }\n condition {\n age = 90\n matches_storage_class = [\"STANDARD\"]\n }\n }\n\n # Delete objects after 365 days\n lifecycle_rule {\n action {\n type = \"Delete\"\n }\n condition {\n age = 365\n }\n }\n}\n```"
},
"Recommendation": {
"Text": "Configure lifecycle rules to automatically delete stale objects or transition them to colder storage classes according to your organization's retention and cost-optimization policy.",
"Url": "https://hub.prowler.com/check/cloudstorage_bucket_lifecycle_management_enabled"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}
@@ -1,48 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.cloudstorage.cloudstorage_client import (
cloudstorage_client,
)
class cloudstorage_bucket_lifecycle_management_enabled(Check):
"""Ensure Cloud Storage buckets have lifecycle management enabled with at least one valid rule.
Reports PASS if a bucket has at least one valid lifecycle rule
(with a supported action and condition), otherwise FAIL.
"""
def execute(self) -> list[Check_Report_GCP]:
"""Run the lifecycle management check for each Cloud Storage bucket.
Returns:
list[Check_Report_GCP]: Results for all evaluated buckets.
"""
findings = []
for bucket in cloudstorage_client.buckets:
report = Check_Report_GCP(metadata=self.metadata(), resource=bucket)
report.status = "FAIL"
report.status_extended = (
f"Bucket {bucket.name} does not have lifecycle management enabled."
)
rules = bucket.lifecycle_rules
if rules:
valid_rules = []
for rule in rules:
action_type = rule.get("action", {}).get("type")
condition = rule.get("condition")
if action_type and condition:
valid_rules.append(rule)
if valid_rules:
report.status = "PASS"
report.status_extended = f"Bucket {bucket.name} has lifecycle management enabled with {len(valid_rules)} valid rule(s)."
else:
report.status = "FAIL"
report.status_extended = f"Bucket {bucket.name} has lifecycle rules configured but none are valid."
findings.append(report)
return findings
@@ -31,14 +31,6 @@ class CloudStorage(GCPService):
bucket_iam
) or "allUsers" in str(bucket_iam):
public = True
lifecycle_rules = None
lifecycle = bucket.get("lifecycle")
if isinstance(lifecycle, dict):
rules = lifecycle.get("rule")
if isinstance(rules, list):
lifecycle_rules = rules
self.buckets.append(
Bucket(
name=bucket["name"],
@@ -50,7 +42,6 @@ class CloudStorage(GCPService):
public=public,
retention_policy=bucket.get("retentionPolicy"),
project_id=project_id,
lifecycle_rules=lifecycle_rules,
)
)
@@ -71,4 +62,3 @@ class Bucket(BaseModel):
public: bool
project_id: str
retention_policy: Optional[dict] = None
lifecycle_rules: Optional[list[dict]] = None
@@ -14,37 +14,35 @@ class logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled(
'resource.type="gcs_bucket" AND protoPayload.methodName="storage.setIamPermissions"'
in metric.filter
):
metric_name = getattr(metric, "name", None) or "unknown"
report = Check_Report_GCP(
metadata=self.metadata(),
resource=metric,
resource_id=metric_name,
project_id=metric.project_id,
location=logging_client.region,
resource_name=(
metric_name if metric_name != "unknown" else "Log Metric Filter"
),
resource_name=metric.name if metric.name else "Log Metric Filter",
)
projects_with_metric.add(metric.project_id)
report.status = "FAIL"
report.status_extended = f"Log metric filter {metric_name} found but no alerts associated in project {metric.project_id}."
report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
for alert_policy in monitoring_client.alert_policies:
for filter in alert_policy.filters:
if metric_name in filter:
if metric.name in filter:
report.status = "PASS"
report.status_extended = f"Log metric filter {metric_name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
break
findings.append(report)
for project in logging_client.project_ids:
if project not in projects_with_metric:
project_obj = logging_client.projects.get(project)
report = Check_Report_GCP(
metadata=self.metadata(),
resource=project_obj,
resource=logging_client.projects[project],
project_id=project,
location=logging_client.region,
resource_name=(getattr(project_obj, "name", None) or "GCP Project"),
resource_name=(
logging_client.projects[project].name
if logging_client.projects[project].name
else "GCP Project"
),
)
report.status = "FAIL"
report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
@@ -14,38 +14,35 @@ class logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled(
'(protoPayload.serviceName="cloudresourcemanager.googleapis.com") AND (ProjectOwnership OR projectOwnerInvitee) OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="REMOVE" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner") OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="ADD" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner")'
in metric.filter
):
metric_name = getattr(metric, "name", None) or "unknown"
report = Check_Report_GCP(
metadata=self.metadata(),
resource=metric,
resource_id=metric_name,
project_id=metric.project_id,
location=logging_client.region,
resource_name=(
metric_name if metric_name != "unknown" else "Log Metric Filter"
),
resource_name=metric.name if metric.name else "Log Metric Filter",
)
projects_with_metric.add(metric.project_id)
report.status = "FAIL"
report.status_extended = f"Log metric filter {metric_name} found but no alerts associated in project {metric.project_id}."
report.status_extended = f"Log metric filter {metric.name} found but no alerts associated in project {metric.project_id}."
for alert_policy in monitoring_client.alert_policies:
for filter in alert_policy.filters:
if metric_name in filter:
if metric.name in filter:
report.status = "PASS"
report.status_extended = f"Log metric filter {metric_name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
report.status_extended = f"Log metric filter {metric.name} found with alert policy {alert_policy.display_name} associated in project {metric.project_id}."
break
findings.append(report)
for project in logging_client.project_ids:
if project not in projects_with_metric:
project_obj = logging_client.projects.get(project)
report = Check_Report_GCP(
metadata=self.metadata(),
resource=project_obj,
resource_id=project,
resource=logging_client.projects[project],
project_id=project,
location=logging_client.region,
resource_name=(getattr(project_obj, "name", None) or "GCP Project"),
resource_name=(
logging_client.projects[project].name
if logging_client.projects[project].name
else "GCP Project"
),
)
report.status = "FAIL"
report.status_extended = f"There are no log metric filters or alerts associated in project {project}."
@@ -12,32 +12,32 @@ class logging_sink_created(Check):
for project in logging_client.project_ids:
if project not in projects_with_logging_sink.keys():
project_obj = logging_client.projects.get(project)
report = Check_Report_GCP(
metadata=self.metadata(),
resource=project_obj,
resource_id=project,
resource=logging_client.projects[project],
project_id=project,
location=logging_client.region,
resource_name=(getattr(project_obj, "name", None) or "GCP Project"),
resource_name=(
logging_client.projects[project].name
if logging_client.projects[project].name
else "GCP Project"
),
)
report.status = "FAIL"
report.status_extended = f"There are no logging sinks to export copies of all the log entries in project {project}."
findings.append(report)
else:
sink = projects_with_logging_sink[project]
sink_name = getattr(sink, "name", None) or "unknown"
report = Check_Report_GCP(
metadata=self.metadata(),
resource=sink,
resource_id=sink_name,
project_id=project,
resource=projects_with_logging_sink[project],
location=logging_client.region,
resource_name=(
sink_name if sink_name != "unknown" else "Logging Sink"
projects_with_logging_sink[project].name
if projects_with_logging_sink[project].name
else "Logging Sink"
),
)
report.status = "PASS"
report.status_extended = f"Sink {sink_name} is enabled exporting copies of all the log entries in project {project}."
report.status_extended = f"Sink {projects_with_logging_sink[project].name} is enabled exporting copies of all the log entries in project {project}."
findings.append(report)
return findings
@@ -2,6 +2,7 @@ from prowler.lib.check.models import Check, CheckReportM365
from prowler.providers.m365.services.entra.entra_client import entra_client
from prowler.providers.m365.services.entra.entra_service import (
AdminRoles,
AuthenticationStrength,
ConditionalAccessPolicyState,
)
@@ -46,25 +47,7 @@ class entra_admin_users_phishing_resistant_mfa_enabled(Check):
if (
policy.grant_controls.authentication_strength is not None
and policy.grant_controls.authentication_strength
!= "Multifactor authentication"
and policy.grant_controls.authentication_strength != "Passwordless MFA"
and policy.grant_controls.authentication_strength
!= "Phishing-resistant MFA"
):
report = CheckReportM365(
metadata=self.metadata(),
resource=policy,
resource_name=policy.display_name,
resource_id=policy.id,
)
report.status = "MANUAL"
report.status_extended = f"Conditional Access Policy '{policy.display_name}' has a custom authentication strength, review it is Phishing-resistant MFA."
continue
if (
policy.grant_controls.authentication_strength is not None
and policy.grant_controls.authentication_strength
== "Phishing-resistant MFA"
== AuthenticationStrength.PHISHING_RESISTANT_MFA
):
report = CheckReportM365(
metadata=self.metadata(),
@@ -253,7 +253,9 @@ class Entra(M365Service):
)
),
authentication_strength=(
policy.grant_controls.authentication_strength.display_name
AuthenticationStrength(
policy.grant_controls.authentication_strength.display_name
)
if policy.grant_controls is not None
and policy.grant_controls.authentication_strength
is not None
@@ -453,7 +455,6 @@ class ConditionalAccessPolicyState(Enum):
class UserAction(Enum):
REGISTER_SECURITY_INFO = "urn:user:registersecurityinfo"
REGISTER_DEVICE = "urn:user:registerdevice"
class ApplicationsConditions(BaseModel):
@@ -522,19 +523,11 @@ class SessionControls(BaseModel):
class ConditionalAccessGrantControl(Enum):
"""
Built-in grant controls for Conditional Access policies.
Reference: https://learn.microsoft.com/en-us/graph/api/resources/conditionalaccessgrantcontrols
"""
MFA = "mfa"
BLOCK = "block"
DOMAIN_JOINED_DEVICE = "domainJoinedDevice"
PASSWORD_CHANGE = "passwordChange"
COMPLIANT_DEVICE = "compliantDevice"
APPROVED_APPLICATION = "approvedApplication"
COMPLIANT_APPLICATION = "compliantApplication"
TERMS_OF_USE = "termsOfUse"
class GrantControlOperator(Enum):
@@ -542,10 +535,16 @@ class GrantControlOperator(Enum):
OR = "OR"
class AuthenticationStrength(Enum):
MFA = "Multifactor authentication"
PASSWORDLESS_MFA = "Passwordless MFA"
PHISHING_RESISTANT_MFA = "Phishing-resistant MFA"
class GrantControls(BaseModel):
built_in_controls: List[ConditionalAccessGrantControl]
operator: GrantControlOperator
authentication_strength: Optional[str]
authentication_strength: Optional[AuthenticationStrength]
class ConditionalAccessPolicy(BaseModel):
+1 -1
View File
@@ -76,7 +76,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">3.9.1,<3.13"
version = "5.13.1"
version = "5.13.0"
[project.scripts]
prowler = "prowler.__main__:prowler"
@@ -103,29 +103,6 @@ def mock_make_api_call_outdated_ami(self, operation_name, kwarg):
return make_api_call(self, operation_name, kwarg)
def mock_make_api_call_missing_ami(self, operation_name, kwarg):
if operation_name == "DescribeInstances":
return {
"Reservations": [
{
"Instances": [
{
"InstanceId": "i-0123456789abcdef0",
"State": {"Name": "running"},
"InstanceType": "t2.micro",
"ImageId": "ami-missing",
"LaunchTime": "2026-11-12T11:34:56.000Z",
"PrivateDnsName": "ip-172-31-32-101.ec2.internal",
}
]
}
]
}
elif operation_name == "DescribeImages":
return {"Images": []}
return make_api_call(self, operation_name, kwarg)
class Test_ec2_instance_with_outdated_ami:
@mock_aws
def test_ec2_no_instances(self):
@@ -242,30 +219,3 @@ class Test_ec2_instance_with_outdated_ami:
result[0].status_extended
== "EC2 Instance i-0123456789abcdef0 is using outdated AMI ami-87654321."
)
@mock.patch(
"botocore.client.BaseClient._make_api_call", new=mock_make_api_call_missing_ami
)
def test_instance_missing_ami_details(self):
from prowler.providers.aws.services.ec2.ec2_service import EC2
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.ec2.ec2_instance_with_outdated_ami.ec2_instance_with_outdated_ami.ec2_client",
new=EC2(aws_provider),
),
):
from prowler.providers.aws.services.ec2.ec2_instance_with_outdated_ami.ec2_instance_with_outdated_ami import (
ec2_instance_with_outdated_ami,
)
check = ec2_instance_with_outdated_ami()
result = check.execute()
assert result == []
@@ -1,223 +0,0 @@
from unittest import mock
from tests.providers.gcp.gcp_fixtures import (
GCP_PROJECT_ID,
GCP_US_CENTER1_LOCATION,
set_mocked_gcp_provider,
)
class TestCloudStorageBucketLifecycleManagementEnabled:
def test_bucket_without_lifecycle_rules(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
cloudstorage_bucket_lifecycle_management_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="no-lifecycle",
id="no-lifecycle",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[],
)
]
check = cloudstorage_bucket_lifecycle_management_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} does not have lifecycle management enabled."
)
assert result[0].resource_id == "no-lifecycle"
assert result[0].resource_name == "no-lifecycle"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_bucket_with_minimal_delete_rule(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
cloudstorage_bucket_lifecycle_management_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="delete-rule",
id="delete-rule",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[
{"action": {"type": "Delete"}, "condition": {"age": 30}}
],
)
]
check = cloudstorage_bucket_lifecycle_management_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} has lifecycle management enabled with 1 valid rule(s)."
)
assert result[0].resource_id == "delete-rule"
assert result[0].resource_name == "delete-rule"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_bucket_with_transition_and_delete_rules(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
cloudstorage_bucket_lifecycle_management_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="transition-delete",
id="transition-delete",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[
{
"action": {
"type": "SetStorageClass",
"storageClass": "NEARLINE",
},
"condition": {"matchesStorageClass": ["STANDARD"]},
},
{"action": {"type": "Delete"}, "condition": {"age": 365}},
],
)
]
check = cloudstorage_bucket_lifecycle_management_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} has lifecycle management enabled with 2 valid rule(s)."
)
assert result[0].resource_id == "transition-delete"
assert result[0].resource_name == "transition-delete"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_bucket_with_invalid_lifecycle_rules(self):
cloudstorage_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_client",
new=cloudstorage_client,
),
):
from prowler.providers.gcp.services.cloudstorage.cloudstorage_bucket_lifecycle_management_enabled.cloudstorage_bucket_lifecycle_management_enabled import (
cloudstorage_bucket_lifecycle_management_enabled,
)
from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import (
Bucket,
)
cloudstorage_client.project_ids = [GCP_PROJECT_ID]
cloudstorage_client.region = GCP_US_CENTER1_LOCATION
cloudstorage_client.buckets = [
Bucket(
name="invalid-rules",
id="invalid-rules",
region=GCP_US_CENTER1_LOCATION,
uniform_bucket_level_access=True,
public=False,
retention_policy=None,
project_id=GCP_PROJECT_ID,
lifecycle_rules=[
{"action": {}, "condition": {"age": 30}},
{"action": {"type": "Delete"}, "condition": {}},
],
)
]
check = cloudstorage_bucket_lifecycle_management_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Bucket {cloudstorage_client.buckets[0].name} has lifecycle rules configured but none are valid."
)
assert result[0].resource_id == "invalid-rules"
assert result[0].resource_name == "invalid-rules"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
@@ -259,141 +259,3 @@ class Test_logging_log_metric_filter_and_alert_for_bucket_permission_changes_ena
assert result[0].resource_name == "metric_name"
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
def test_log_metric_filters_with_none_name(self):
"""Test that metric with None name uses fallback 'Log Metric Filter'"""
logging_client = MagicMock()
monitoring_client = MagicMock()
with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_client",
new=logging_client,
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.monitoring_client",
new=monitoring_client,
),
):
from prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled import (
logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled,
)
# Create a MagicMock metric object with name=None
metric = MagicMock()
metric.name = None
metric.filter = 'resource.type="gcs_bucket" AND protoPayload.methodName="storage.setIamPermissions"'
metric.project_id = GCP_PROJECT_ID
logging_client.metrics = [metric]
logging_client.project_ids = [GCP_PROJECT_ID]
logging_client.region = GCP_EU1_LOCATION
monitoring_client.alert_policies = []
check = (
logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_name == "Log Metric Filter"
assert (
result[0].resource_id == "unknown"
) # resource_id should never be None
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
# When name is None, the 'or' pattern makes it use "unknown"
assert "unknown" in result[0].status_extended
def test_log_metric_filters_with_missing_name_attribute(self):
"""Test that metric without name attribute uses fallback 'Log Metric Filter'"""
logging_client = MagicMock()
monitoring_client = MagicMock()
with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_client",
new=logging_client,
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.monitoring_client",
new=monitoring_client,
),
):
from prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled import (
logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled,
)
# Create a MagicMock metric object without name attribute
metric = MagicMock(spec=["filter", "project_id"])
metric.filter = 'resource.type="gcs_bucket" AND protoPayload.methodName="storage.setIamPermissions"'
metric.project_id = GCP_PROJECT_ID
logging_client.metrics = [metric]
logging_client.project_ids = [GCP_PROJECT_ID]
logging_client.region = GCP_EU1_LOCATION
monitoring_client.alert_policies = []
check = (
logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_name == "Log Metric Filter"
assert (
result[0].resource_id == "unknown"
) # resource_id should never be None
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
def test_project_not_in_projects_dict(self):
"""Test that project not in projects dict uses None and fallback name"""
logging_client = MagicMock()
monitoring_client = MagicMock()
with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_client",
new=logging_client,
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.monitoring_client",
new=monitoring_client,
),
):
from prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled.logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled import (
logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled,
)
logging_client.metrics = []
logging_client.project_ids = [GCP_PROJECT_ID]
logging_client.region = GCP_EU1_LOCATION
# Project is in project_ids but NOT in projects dict
logging_client.projects = {}
monitoring_client.alert_policies = []
check = (
logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_name == "GCP Project"
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
@@ -259,136 +259,3 @@ class Test_logging_log_metric_filter_and_alert_for_project_ownership_changes_ena
assert result[0].resource_name == "metric_name"
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
def test_log_metric_filters_with_none_name(self):
"""Test that metric with None name uses fallback 'Log Metric Filter'"""
logging_client = MagicMock()
monitoring_client = MagicMock()
with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_client",
new=logging_client,
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.monitoring_client",
new=monitoring_client,
),
):
from prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled import (
logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled,
)
# Create a MagicMock metric object with name=None
metric = MagicMock()
metric.name = None
metric.filter = '(protoPayload.serviceName="cloudresourcemanager.googleapis.com") AND (ProjectOwnership OR projectOwnerInvitee) OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="REMOVE" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner") OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="ADD" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner")'
metric.project_id = GCP_PROJECT_ID
logging_client.metrics = [metric]
logging_client.project_ids = [GCP_PROJECT_ID]
logging_client.region = GCP_EU1_LOCATION
monitoring_client.alert_policies = []
check = (
logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_name == "Log Metric Filter"
assert result[0].resource_id == "unknown"
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
assert "unknown" in result[0].status_extended
def test_log_metric_filters_with_missing_name_attribute(self):
"""Test that metric without name attribute uses fallback 'Log Metric Filter'"""
logging_client = MagicMock()
monitoring_client = MagicMock()
with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_client",
new=logging_client,
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.monitoring_client",
new=monitoring_client,
),
):
from prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled import (
logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled,
)
# Create a MagicMock metric object without name attribute
metric = MagicMock(spec=["filter", "project_id"])
metric.filter = '(protoPayload.serviceName="cloudresourcemanager.googleapis.com") AND (ProjectOwnership OR projectOwnerInvitee) OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="REMOVE" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner") OR (protoPayload.serviceData.policyDelta.bindingDeltas.action="ADD" AND protoPayload.serviceData.policyDelta.bindingDeltas.role="roles/owner")'
metric.project_id = GCP_PROJECT_ID
logging_client.metrics = [metric]
logging_client.project_ids = [GCP_PROJECT_ID]
logging_client.region = GCP_EU1_LOCATION
monitoring_client.alert_policies = []
check = (
logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_name == "Log Metric Filter"
assert result[0].resource_id == "unknown"
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
def test_project_not_in_projects_dict(self):
"""Test that project not in projects dict uses None and fallback name"""
logging_client = MagicMock()
monitoring_client = MagicMock()
with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_client",
new=logging_client,
),
patch(
"prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.monitoring_client",
new=monitoring_client,
),
):
from prowler.providers.gcp.services.logging.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled.logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled import (
logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled,
)
logging_client.metrics = []
logging_client.project_ids = [GCP_PROJECT_ID]
logging_client.region = GCP_EU1_LOCATION
# Project is in project_ids but NOT in projects dict
logging_client.projects = {}
monitoring_client.alert_policies = []
check = (
logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled()
)
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_name == "GCP Project"
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
@@ -211,128 +211,3 @@ class Test_logging_sink_created:
result[0].status_extended
== f"There are no logging sinks to export copies of all the log entries in project {GCP_PROJECT_ID}."
)
def test_project_not_in_projects_dict(self):
"""Test that project not in projects dict uses None and fallback name"""
logging_client = MagicMock()
with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
patch(
"prowler.providers.gcp.services.logging.logging_sink_created.logging_sink_created.logging_client",
new=logging_client,
),
):
from prowler.providers.gcp.services.logging.logging_sink_created.logging_sink_created import (
logging_sink_created,
)
logging_client.project_ids = [GCP_PROJECT_ID]
logging_client.region = GCP_EU1_LOCATION
logging_client.sinks = []
# Project is in project_ids but NOT in projects dict
logging_client.projects = {}
check = logging_sink_created()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_name == "GCP Project"
assert result[0].resource_id == GCP_PROJECT_ID
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
def test_sink_with_none_name(self):
"""Test that sink with None name uses fallback 'Logging Sink'"""
logging_client = MagicMock()
with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
patch(
"prowler.providers.gcp.services.logging.logging_sink_created.logging_sink_created.logging_client",
new=logging_client,
),
):
from prowler.providers.gcp.services.logging.logging_sink_created.logging_sink_created import (
logging_sink_created,
)
# Create a MagicMock sink object with name=None
sink = MagicMock()
sink.name = None
sink.filter = "all"
sink.project_id = GCP_PROJECT_ID
logging_client.project_ids = [GCP_PROJECT_ID]
logging_client.region = GCP_EU1_LOCATION
logging_client.sinks = [sink]
logging_client.projects = {
GCP_PROJECT_ID: GCPProject(
id=GCP_PROJECT_ID,
number="123456789012",
name="test",
labels={},
lifecycle_state="ACTIVE",
)
}
check = logging_sink_created()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_name == "Logging Sink"
assert result[0].resource_id == "unknown"
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
assert "unknown" in result[0].status_extended
def test_sink_with_missing_name_attribute(self):
"""Test that sink without name attribute uses fallback 'Logging Sink'"""
logging_client = MagicMock()
with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
patch(
"prowler.providers.gcp.services.logging.logging_sink_created.logging_sink_created.logging_client",
new=logging_client,
),
):
from prowler.providers.gcp.services.logging.logging_sink_created.logging_sink_created import (
logging_sink_created,
)
# Create a MagicMock sink object without name attribute
sink = MagicMock(spec=["filter", "project_id"])
sink.filter = "all"
sink.project_id = GCP_PROJECT_ID
logging_client.project_ids = [GCP_PROJECT_ID]
logging_client.region = GCP_EU1_LOCATION
logging_client.sinks = [sink]
logging_client.projects = {
GCP_PROJECT_ID: GCPProject(
id=GCP_PROJECT_ID,
number="123456789012",
name="test",
labels={},
lifecycle_state="ACTIVE",
)
}
check = logging_sink_created()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_name == "Logging Sink"
assert result[0].resource_id == "unknown"
assert result[0].project_id == GCP_PROJECT_ID
assert result[0].location == GCP_EU1_LOCATION
@@ -3,6 +3,7 @@ from uuid import uuid4
from prowler.providers.m365.services.entra.entra_service import (
ApplicationsConditions,
AuthenticationStrength,
ConditionalAccessGrantControl,
ConditionalAccessPolicyState,
Conditions,
@@ -113,7 +114,7 @@ class Test_entra_admin_users_phishing_resistant_mfa_enabled:
grant_controls=GrantControls(
built_in_controls=[ConditionalAccessGrantControl.BLOCK],
operator=GrantControlOperator.AND,
authentication_strength="Phishing-resistant MFA",
authentication_strength=AuthenticationStrength.PHISHING_RESISTANT_MFA,
),
session_controls=SessionControls(
persistent_browser=PersistentBrowser(
@@ -205,7 +206,7 @@ class Test_entra_admin_users_phishing_resistant_mfa_enabled:
grant_controls=GrantControls(
built_in_controls=[ConditionalAccessGrantControl.BLOCK],
operator=GrantControlOperator.AND,
authentication_strength="Phishing-resistant MFA",
authentication_strength=AuthenticationStrength.PHISHING_RESISTANT_MFA,
),
session_controls=SessionControls(
persistent_browser=PersistentBrowser(
@@ -300,7 +301,7 @@ class Test_entra_admin_users_phishing_resistant_mfa_enabled:
grant_controls=GrantControls(
built_in_controls=[ConditionalAccessGrantControl.BLOCK],
operator=GrantControlOperator.AND,
authentication_strength="Phishing-resistant MFA",
authentication_strength=AuthenticationStrength.PHISHING_RESISTANT_MFA,
),
session_controls=SessionControls(
persistent_browser=PersistentBrowser(
@@ -7,6 +7,7 @@ from prowler.providers.m365.services.entra.entra_service import (
AdminConsentPolicy,
AdminRoles,
ApplicationsConditions,
AuthenticationStrength,
AuthorizationPolicy,
AuthPolicyRoles,
ConditionalAccessGrantControl,
@@ -74,7 +75,7 @@ async def mock_entra_get_conditional_access_policies(_):
ConditionalAccessGrantControl.COMPLIANT_DEVICE,
],
operator=GrantControlOperator.OR,
authentication_strength="Phishing-resistant MFA",
authentication_strength=AuthenticationStrength.PHISHING_RESISTANT_MFA,
),
session_controls=SessionControls(
persistent_browser=PersistentBrowser(
@@ -225,7 +226,7 @@ class Test_Entra_Service:
ConditionalAccessGrantControl.COMPLIANT_DEVICE,
],
operator=GrantControlOperator.OR,
authentication_strength="Phishing-resistant MFA",
authentication_strength=AuthenticationStrength.PHISHING_RESISTANT_MFA,
),
session_controls=SessionControls(
persistent_browser=PersistentBrowser(
+1 -2
@@ -2,7 +2,7 @@
All notable changes to the **Prowler UI** are documented in this file.
## [1.13.0] (Prowler v5.13.0)
## [1.13.0] (Prowler UNRELEASED)
### 🚀 Added
@@ -12,7 +12,6 @@ All notable changes to the **Prowler UI** are documented in this file.
- React Compiler support for automatic optimization [(#8748)](https://github.com/prowler-cloud/prowler/pull/8748)
- Turbopack support for faster development builds [(#8748)](https://github.com/prowler-cloud/prowler/pull/8748)
- Add compliance name in compliance detail view [(#8775)](https://github.com/prowler-cloud/prowler/pull/8775)
- PDF reporting for Prowler ThreatScore [(#8867)](https://github.com/prowler-cloud/prowler/pull/8867)
- Support C5 compliance framework for the AWS provider [(#8830)](https://github.com/prowler-cloud/prowler/pull/8830)
- API key management in user profile [(#8308)](https://github.com/prowler-cloud/prowler/pull/8308)
- Refresh access token error handling [(#8864)](https://github.com/prowler-cloud/prowler/pull/8864)
-42
@@ -268,45 +268,3 @@ export const getComplianceCsv = async (
};
}
};
export const getThreatScorePdf = async (scanId: string) => {
const headers = await getAuthHeaders({ contentType: false });
const url = new URL(`${apiBaseUrl}/scans/${scanId}/threatscore`);
try {
const response = await fetch(url.toString(), { headers });
if (response.status === 202) {
const json = await response.json();
const taskId = json?.data?.id;
const state = json?.data?.attributes?.state;
return {
pending: true,
state,
taskId,
};
}
if (!response.ok) {
const errorData = await response.json();
throw new Error(
errorData?.errors?.detail ||
"Unable to retrieve ThreatScore PDF report. Contact support if the issue continues.",
);
}
const arrayBuffer = await response.arrayBuffer();
const base64 = Buffer.from(arrayBuffer).toString("base64");
return {
success: true,
data: base64,
filename: `scan-${scanId}-threatscore.pdf`,
};
} catch (error) {
return {
error: getErrorMessage(error),
};
}
};
@@ -29,8 +29,6 @@ import {
} from "@/types/compliance";
import { ScanEntity } from "@/types/scans";
import { ThreatScoreDownloadButton } from "./threatscore-download-button";
interface ComplianceDetailSearchParams {
complianceId: string;
version?: string;
@@ -145,24 +143,13 @@ export default async function ComplianceDetail({
<Spacer y={8} />
</div>
)}
<div className="flex items-start justify-between gap-4">
<div className="flex-1">
<ComplianceHeader
scans={[]}
uniqueRegions={uniqueRegions}
showSearch={false}
framework={compliancetitle}
showProviders={false}
/>
</div>
{attributesData?.data?.[0]?.attributes?.framework ===
"ProwlerThreatScore" &&
selectedScanId && (
<div className="flex-shrink-0 pt-1">
<ThreatScoreDownloadButton scanId={selectedScanId} />
</div>
)}
</div>
<ComplianceHeader
scans={[]}
uniqueRegions={uniqueRegions}
showSearch={false}
framework={compliancetitle}
showProviders={false}
/>
<Suspense
key={searchParamsKey}
@@ -1,45 +0,0 @@
"use client";
import { Button } from "@heroui/button";
import { DownloadIcon } from "lucide-react";
import { useState } from "react";
import { toast } from "@/components/ui";
import { downloadThreatScorePdf } from "@/lib/helper";
interface ThreatScoreDownloadButtonProps {
scanId: string;
}
export const ThreatScoreDownloadButton = ({
scanId,
}: ThreatScoreDownloadButtonProps) => {
const [isDownloading, setIsDownloading] = useState<boolean>(false);
const handleDownload = async () => {
setIsDownloading(true);
try {
await downloadThreatScorePdf(scanId, toast);
} finally {
setIsDownloading(false);
}
};
return (
<Button
color="success"
variant="solid"
startContent={
<DownloadIcon
className={isDownloading ? "animate-download-icon" : ""}
size={16}
/>
}
onPress={handleDownload}
isLoading={isDownloading}
size="sm"
>
PDF ThreatScore Report
</Button>
);
};
+26 -81
@@ -1,22 +1,16 @@
export const dynamic = "force-dynamic";
import { Suspense } from "react";
import {
getComplianceAttributes,
getComplianceOverviewMetadataInfo,
getComplianceRequirements,
getCompliancesOverview,
} from "@/actions/compliances";
import { getCompliancesOverview } from "@/actions/compliances";
import { getComplianceOverviewMetadataInfo } from "@/actions/compliances";
import { getScans } from "@/actions/scans";
import {
ComplianceCard,
ComplianceSkeletonGrid,
NoScansAvailable,
ThreatScoreBadge,
} from "@/components/compliance";
import { ComplianceHeader } from "@/components/compliance/compliance-header/compliance-header";
import { ContentLayout } from "@/components/ui";
import { calculateThreatScore } from "@/lib/compliance/threatscore-calculator";
import {
ExpandedScanData,
ScanEntity,
@@ -80,7 +74,6 @@ export default async function Compliance({
})
.filter(Boolean) as ExpandedScanData[];
// Use scanId from URL, or select the first scan if not provided
const selectedScanId =
resolvedSearchParams.scanId || expandedScansData[0]?.id || null;
const query = (filters["filter[search]"] as string) || "";
@@ -101,7 +94,6 @@ export default async function Compliance({
}
: undefined;
// Fetch metadata if we have a selected scan
const metadataInfoData = selectedScanId
? await getComplianceOverviewMetadataInfo({
query,
@@ -113,52 +105,14 @@ export default async function Compliance({
const uniqueRegions = metadataInfoData?.data?.attributes?.regions || [];
// Fetch ThreatScore data if we have a selected scan
let threatScoreData = null;
if (
selectedScanId &&
typeof selectedScanId === "string" &&
selectedScan?.providerInfo?.provider
) {
const complianceId = `prowler_threatscore_${selectedScan.providerInfo.provider.toLowerCase()}`;
const [attributesData, requirementsData] = await Promise.all([
getComplianceAttributes(complianceId),
getComplianceRequirements({
complianceId,
scanId: selectedScanId,
}),
]);
threatScoreData = calculateThreatScore(attributesData, requirementsData);
}
return (
<ContentLayout title="Compliance" icon="lucide:shield-check">
{selectedScanId ? (
<>
<div className="mb-6 flex flex-col gap-6">
<div className="flex items-start justify-between gap-6">
<div className="flex-1">
<ComplianceHeader
scans={expandedScansData}
uniqueRegions={uniqueRegions}
/>
</div>
{threatScoreData &&
typeof selectedScanId === "string" &&
selectedScan && (
<div className="w-[360px] flex-shrink-0">
<ThreatScoreBadge
score={threatScoreData.score}
scanId={selectedScanId}
provider={selectedScan.providerInfo.provider}
selectedScan={selectedScanData}
/>
</div>
)}
</div>
</div>
<ComplianceHeader
scans={expandedScansData}
uniqueRegions={uniqueRegions}
/>
<Suspense key={searchParamsKey} fallback={<ComplianceSkeletonGrid />}>
<SSRComplianceGrid
searchParams={resolvedSearchParams}
@@ -230,36 +184,27 @@ const SSRComplianceGrid = async ({
return (
<div className="grid grid-cols-1 gap-4 sm:grid-cols-2 lg:grid-cols-3 2xl:grid-cols-4">
{compliancesData.data
.filter((compliance: ComplianceOverviewData) => {
// Filter out ProwlerThreatScore from the grid
return compliance.attributes.framework !== "ProwlerThreatScore";
})
.map((compliance: ComplianceOverviewData) => {
const { attributes, id } = compliance;
const {
framework,
version,
requirements_passed,
total_requirements,
} = attributes;
{compliancesData.data.map((compliance: ComplianceOverviewData) => {
const { attributes, id } = compliance;
const { framework, version, requirements_passed, total_requirements } =
attributes;
return (
<ComplianceCard
key={id}
title={framework}
version={version}
passingRequirements={requirements_passed}
totalRequirements={total_requirements}
prevPassingRequirements={requirements_passed}
prevTotalRequirements={total_requirements}
scanId={scanId}
complianceId={id}
id={id}
selectedScan={selectedScan}
/>
);
})}
return (
<ComplianceCard
key={id}
title={framework}
version={version}
passingRequirements={requirements_passed}
totalRequirements={total_requirements}
prevPassingRequirements={requirements_passed}
prevTotalRequirements={total_requirements}
scanId={scanId}
complianceId={id}
id={id}
selectedScan={selectedScan}
/>
);
})}
</div>
);
};
@@ -1,134 +0,0 @@
"use client";
import { Bell, BellOff, ShieldCheck, TriangleAlert } from "lucide-react";
import { DonutChart } from "@/components/graphs/donut-chart";
import { DonutDataPoint } from "@/components/graphs/types";
import {
BaseCard,
CardContent,
CardHeader,
CardTitle,
ResourceStatsCard,
StatsContainer,
} from "@/components/shadcn";
import { CardVariant } from "@/components/shadcn/card/resource-stats-card/resource-stats-card-content";
interface CheckFindingsProps {
failFindingsData: {
total: number;
new: number;
muted: number;
};
passFindingsData: {
total: number;
new: number;
muted: number;
};
}
export const CheckFindings = ({
failFindingsData,
passFindingsData,
}: CheckFindingsProps) => {
// Calculate total findings
const totalFindings = failFindingsData.total + passFindingsData.total;
// Calculate percentages
const failPercentage = Math.round(
(failFindingsData.total / totalFindings) * 100,
);
const passPercentage = Math.round(
(passFindingsData.total / totalFindings) * 100,
);
// Calculate change percentages (new findings as percentage change)
const failChange =
failFindingsData.total > 0
? Math.round((failFindingsData.new / failFindingsData.total) * 100)
: 0;
const passChange =
passFindingsData.total > 0
? Math.round((passFindingsData.new / passFindingsData.total) * 100)
: 0;
// Mock data for DonutChart
const donutData: DonutDataPoint[] = [
{
name: "Fail Findings",
value: failFindingsData.total,
color: "#f43f5e", // Rose-500
percentage: Number(failPercentage),
change: Number(failChange),
},
{
name: "Pass Findings",
value: passFindingsData.total,
color: "#4ade80", // Green-400
percentage: Number(passPercentage),
change: Number(passChange),
},
];
return (
<BaseCard>
{/* Header */}
<CardHeader>
<CardTitle>Check Findings</CardTitle>
</CardHeader>
{/* DonutChart Content */}
<CardContent className="space-y-4">
<div className="mx-auto max-h-[200px] max-w-[200px]">
<DonutChart
data={donutData}
showLegend={false}
innerRadius={66}
outerRadius={86}
centerLabel={{
value: totalFindings.toLocaleString(),
label: "Total Findings",
}}
/>
</div>
{/* Footer with ResourceStatsCards */}
<StatsContainer>
<ResourceStatsCard
containerless
badge={{
icon: TriangleAlert,
count: failFindingsData.total,
variant: CardVariant.fail,
}}
label="Fail Findings"
stats={[
{ icon: Bell, label: `${failFindingsData.new} New` },
{ icon: BellOff, label: `${failFindingsData.muted} Muted` },
]}
className="flex-1"
/>
<div className="flex items-center justify-center px-[46px]">
<div className="h-full w-px bg-slate-300 dark:bg-[rgba(39,39,42,1)]" />
</div>
<ResourceStatsCard
containerless
badge={{
icon: ShieldCheck,
count: passFindingsData.total,
variant: CardVariant.pass,
}}
label="Pass Findings"
stats={[
{ icon: Bell, label: `${passFindingsData.new} New` },
{ icon: BellOff, label: `${passFindingsData.muted} Muted` },
]}
className="flex-1"
/>
</StatsContainer>
</CardContent>
</BaseCard>
);
};
-87
@@ -1,87 +0,0 @@
import { Suspense } from "react";
import { getFindingsByStatus } from "@/actions/overview/overview";
import { ContentLayout } from "@/components/ui";
import { SearchParamsProps } from "@/types";
import { CheckFindings } from "./components/check-findings";
const FILTER_PREFIX = "filter[";
// Extract only query params that start with "filter[" for API calls
function pickFilterParams(
params: SearchParamsProps | undefined | null,
): Record<string, string | string[] | undefined> {
if (!params) return {};
return Object.fromEntries(
Object.entries(params).filter(([key]) => key.startsWith(FILTER_PREFIX)),
);
}
export default async function NewOverviewPage({
searchParams,
}: {
searchParams: Promise<SearchParamsProps>;
}) {
const resolvedSearchParams = await searchParams;
return (
<ContentLayout title="New Overview" icon="lucide:square-chart-gantt">
<div className="flex min-h-[60vh] items-center justify-center p-6">
<Suspense
fallback={
<div className="flex h-[400px] w-full max-w-md items-center justify-center rounded-xl border border-zinc-900 bg-stone-950">
<p className="text-zinc-400">Loading...</p>
</div>
}
>
<SSRCheckFindings searchParams={resolvedSearchParams} />
</Suspense>
</div>
</ContentLayout>
);
}
const SSRCheckFindings = async ({
searchParams,
}: {
searchParams: SearchParamsProps | undefined | null;
}) => {
const filters = pickFilterParams(searchParams);
const findingsByStatus = await getFindingsByStatus({ filters });
if (!findingsByStatus) {
return (
<div className="flex h-[400px] w-full max-w-md items-center justify-center rounded-xl border border-zinc-900 bg-stone-950">
<p className="text-zinc-400">Failed to load findings data</p>
</div>
);
}
const {
fail = 0,
pass = 0,
muted_new = 0,
muted_changed = 0,
fail_new = 0,
pass_new = 0,
} = findingsByStatus?.data?.attributes || {};
const mutedTotal = muted_new + muted_changed;
return (
<CheckFindings
failFindingsData={{
total: fail,
new: fail_new,
muted: mutedTotal,
}}
passFindingsData={{
total: pass,
new: pass_new,
muted: mutedTotal,
}}
/>
);
};
@@ -19,13 +19,11 @@ export const DataCompliance = ({ scans }: DataComplianceProps) => {
const selectedScanId = scanIdParam || (scans.length > 0 ? scans[0].id : "");
// Don't auto-push scanId to URL - the server already handles the default scan selection
// This avoids duplicate API calls caused by client-side navigation
useEffect(() => {
if (!scanIdParam && scans.length > 0) {
const params = new URLSearchParams(searchParams);
params.set("scanId", scans[0].id);
router.replace(`?${params.toString()}`, { scroll: false });
router.push(`?${params.toString()}`);
}
}, [scans, scanIdParam, searchParams, router]);
-2
@@ -19,5 +19,3 @@ export * from "./skeletons/compliance-accordion-skeleton";
export * from "./skeletons/compliance-grid-skeleton";
export * from "./skeletons/heatmap-chart-skeleton";
export * from "./skeletons/pie-chart-skeleton";
export * from "./threatscore-badge";
export * from "./threatscore-logo";
@@ -1,148 +0,0 @@
"use client";
import { Button } from "@heroui/button";
import { Card, CardBody } from "@heroui/card";
import { Progress } from "@heroui/progress";
import { DownloadIcon, FileTextIcon } from "lucide-react";
import { useRouter, useSearchParams } from "next/navigation";
import { useState } from "react";
import { ThreatScoreLogo } from "@/components/compliance/threatscore-logo";
import { toast } from "@/components/ui";
import { downloadComplianceCsv, downloadThreatScorePdf } from "@/lib/helper";
import type { ScanEntity } from "@/types/scans";
interface ThreatScoreBadgeProps {
score: number;
scanId: string;
provider: string;
selectedScan?: ScanEntity;
}
export const ThreatScoreBadge = ({
score,
scanId,
provider,
selectedScan,
}: ThreatScoreBadgeProps) => {
const router = useRouter();
const searchParams = useSearchParams();
const [isDownloadingPdf, setIsDownloadingPdf] = useState(false);
const [isDownloadingCsv, setIsDownloadingCsv] = useState(false);
const complianceId = `prowler_threatscore_${provider.toLowerCase()}`;
const getScoreColor = (): "success" | "warning" | "danger" => {
if (score >= 80) return "success";
if (score >= 40) return "warning";
return "danger";
};
const getTextColor = () => {
if (score >= 80) return "text-success";
if (score >= 40) return "text-warning";
return "text-danger";
};
const handleCardClick = () => {
const title = "ProwlerThreatScore";
const version = "1.0";
const formattedTitleForUrl = encodeURIComponent(title);
const path = `/compliance/${formattedTitleForUrl}`;
const params = new URLSearchParams();
params.set("complianceId", complianceId);
params.set("version", version);
params.set("scanId", scanId);
if (selectedScan) {
params.set(
"scanData",
JSON.stringify({
id: selectedScan.id,
providerInfo: selectedScan.providerInfo,
attributes: selectedScan.attributes,
}),
);
}
const regionFilter = searchParams.get("filter[region__in]");
if (regionFilter) {
params.set("filter[region__in]", regionFilter);
}
router.push(`${path}?${params.toString()}`);
};
const handleDownloadPdf = async () => {
setIsDownloadingPdf(true);
try {
await downloadThreatScorePdf(scanId, toast);
} finally {
setIsDownloadingPdf(false);
}
};
const handleDownloadCsv = async () => {
setIsDownloadingCsv(true);
try {
await downloadComplianceCsv(scanId, complianceId, toast);
} finally {
setIsDownloadingCsv(false);
}
};
return (
<Card
shadow="sm"
className="border-default-200 h-full border bg-transparent"
>
<CardBody className="flex flex-col gap-3 p-4">
<button
className="border-default-200 hover:border-default-300 hover:bg-default-50/50 flex cursor-pointer flex-row items-center gap-4 rounded-lg border bg-transparent p-3 transition-all"
onClick={handleCardClick}
type="button"
>
<ThreatScoreLogo />
<div className="flex flex-col items-end gap-1">
<span className={`text-2xl font-bold ${getTextColor()}`}>
{score.toFixed(1)}%
</span>
<Progress
aria-label="ThreatScore progress"
value={score}
color={getScoreColor()}
size="sm"
className="w-24"
/>
</div>
</button>
<div className="flex gap-2">
<Button
size="sm"
variant="ghost"
className="text-default-500 hover:text-primary flex-1"
startContent={<DownloadIcon size={14} className="text-primary" />}
onPress={handleDownloadPdf}
isLoading={isDownloadingPdf}
isDisabled={isDownloadingCsv}
>
PDF
</Button>
<Button
size="sm"
variant="ghost"
className="text-default-500 hover:text-primary flex-1"
startContent={<FileTextIcon size={14} className="text-primary" />}
onPress={handleDownloadCsv}
isLoading={isDownloadingCsv}
isDisabled={isDownloadingPdf}
>
CSV
</Button>
</div>
</CardBody>
</Card>
);
};
@@ -1,79 +0,0 @@
"use client";
import { useTheme } from "next-themes";
import { useEffect, useState } from "react";
export const ThreatScoreLogo = () => {
const { resolvedTheme } = useTheme();
const [mounted, setMounted] = useState(false);
// Avoid hydration mismatch by only rendering after mount
useEffect(() => {
setMounted(true);
}, []);
if (!mounted) {
return <div className="h-14" style={{ width: "280px", height: "56px" }} />;
}
const prowlerColor = resolvedTheme === "dark" ? "#fff" : "#000";
return (
<svg
viewBox="0 0 1000 280"
fill="none"
xmlns="http://www.w3.org/2000/svg"
className="h-14 w-auto"
preserveAspectRatio="xMinYMid meet"
>
{/* Prowler logo from the new SVG - scaled and positioned to match THREATSCORE size */}
<g transform="scale(0.50) translate(-60, 20)">
<path
fill={prowlerColor}
d="M1222.86,185.51c20.76-12.21,34.44-34.9,34.44-59.79,0-38.18-31.06-69.25-69.25-69.25l-216.9.23v145.17h-64.8V56.47h-79.95s-47.14,95.97-47.14,95.97V56.47h-52.09s-47.14,95.97-47.14,95.97V56.47h-53.48v69.6c-12.72-41.96-51.75-72.6-97.81-72.6-42.63,0-79.24,26.25-94.54,63.43-4.35-34.03-33.48-60.43-68.67-60.43h-100.01v47.43c-9.16-27.52-35.14-47.43-65.71-47.43H53.47s46.34,46.33,46.34,46.33v151.64h53.47v-76.68l17.21,17.21h29.33c30.56,0,56.54-19.91,65.71-47.43v106.91h53.48v-81.51l76.01,81.51h69.62l-64.29-68.94c11.14-6.56,20.22-16.15,26.26-27.46,1.27,55.26,46.58,99.82,102.14,99.82,46.06,0,85.09-30.64,97.81-72.6v69.18h60.88l38.34-78.06v78.06h60.88l66.2-134.78v135.69h95.41l22.86-22.86v22.86h95.05l21.84-21.84v20.93h53.48v-81.5l76.01,81.5h69.62l-64.29-68.94ZM199.83,141.5h-46.54v-31.54h46.54c8.7,0,15.77,7.07,15.77,15.77s-7.07,15.77-15.77,15.77ZM365.55,141.5l-46.54-.18v-31.36h46.54c8.7,0,15.77,7.07,15.77,15.77s-7.08,15.77-15.77,15.77ZM528.76,204.39c-26.86,0-48.72-21.86-48.72-48.72s21.86-48.72,48.72-48.72,48.72,21.86,48.72,48.72-21.86,48.72-48.72,48.72ZM1088.03,201.88h-63.41v-20.35h42.91v-50.88h-42.91v-20.46h63.41v91.69ZM1188.05,141.5l-46.54-.18v-31.36h46.54c8.7,0,15.77,7.07,15.77,15.77s-7.07,15.77-15.77,15.77Z"
/>
</g>
{/* THREATSCORE text */}
<text x="0" y="240" fontSize="80" fontWeight="700" fill="#22c55e">
THREATSCORE
</text>
{/* Gauge icon - semicircular meter - 1.5x larger */}
<g transform="translate(680, 0) scale(2)">
{/* Gauge arcs - drawing from left to right (orange, red, green) */}
<path
d="M 20 80 A 60 60 0 0 1 50 29.6"
stroke="#fb923c"
strokeWidth="16"
fill="none"
strokeLinecap="round"
/>
<path
d="M 50 29.6 A 60 60 0 0 1 110 29.6"
stroke="#ef4444"
strokeWidth="16"
fill="none"
strokeLinecap="round"
/>
<path
d="M 110 29.6 A 60 60 0 0 1 140 80"
stroke="#22c55e"
strokeWidth="16"
fill="none"
strokeLinecap="round"
/>
{/* Checkmark */}
<path
d="M 60 80 L 72 92 L 104 60"
stroke="#22c55e"
strokeWidth="8"
fill="none"
strokeLinecap="round"
strokeLinejoin="round"
/>
</g>
</svg>
);
};
+162
@@ -0,0 +1,162 @@
"use client";
import {
Bar,
BarChart as RechartsBar,
CartesianGrid,
Cell,
ResponsiveContainer,
Tooltip,
XAxis,
YAxis,
} from "recharts";
import { ChartTooltip } from "./shared/ChartTooltip";
import { CHART_COLORS, LAYOUT_OPTIONS } from "./shared/constants";
import { getSeverityColorByName } from "./shared/utils";
import { BarDataPoint, LayoutOption } from "./types";
interface BarChartProps {
data: BarDataPoint[];
layout?: LayoutOption;
xLabel?: string;
yLabel?: string;
height?: number;
showValues?: boolean;
}
const CustomLabel = ({ x, y, width, height, value, data }: any) => {
const percentage = data.percentage;
return (
<text
x={x + width + 10}
y={y + height / 2}
fill={CHART_COLORS.textSecondary}
fontSize={12}
textAnchor="start"
dominantBaseline="middle"
>
{percentage !== undefined
? `${percentage}% • ${value.toLocaleString()}`
: value.toLocaleString()}
</text>
);
};
export function BarChart({
data,
layout = LAYOUT_OPTIONS.horizontal,
xLabel,
yLabel,
height = 400,
showValues = true,
}: BarChartProps) {
const isHorizontal = layout === LAYOUT_OPTIONS.horizontal;
return (
<ResponsiveContainer width="100%" height={height}>
<RechartsBar
data={data}
layout={layout}
margin={{ top: 20, right: showValues ? 100 : 30, left: 20, bottom: 20 }}
>
<CartesianGrid
strokeDasharray="3 3"
stroke={CHART_COLORS.gridLine}
horizontal={isHorizontal}
vertical={!isHorizontal}
/>
{isHorizontal ? (
<>
<XAxis
type="number"
tick={{ fill: CHART_COLORS.textSecondary, fontSize: 12 }}
label={
xLabel
? {
value: xLabel,
position: "insideBottom",
offset: -10,
fill: CHART_COLORS.textSecondary,
}
: undefined
}
/>
<YAxis
dataKey="name"
type="category"
width={100}
tick={{ fill: CHART_COLORS.textSecondary, fontSize: 12 }}
label={
yLabel
? {
value: yLabel,
angle: -90,
position: "insideLeft",
fill: CHART_COLORS.textSecondary,
}
: undefined
}
/>
</>
) : (
<>
<XAxis
dataKey="name"
tick={{ fill: CHART_COLORS.textSecondary, fontSize: 12 }}
label={
xLabel
? {
value: xLabel,
position: "insideBottom",
offset: -10,
fill: CHART_COLORS.textSecondary,
}
: undefined
}
/>
<YAxis
type="number"
tick={{ fill: CHART_COLORS.textSecondary, fontSize: 12 }}
label={
yLabel
? {
value: yLabel,
angle: -90,
position: "insideLeft",
fill: CHART_COLORS.textSecondary,
}
: undefined
}
/>
</>
)}
<Tooltip content={<ChartTooltip />} />
<Bar
dataKey="value"
radius={4}
label={
showValues && isHorizontal
? (props: any) => (
<CustomLabel {...props} data={data[props.index]} />
)
: false
}
>
{data.map((entry, index) => (
<Cell
key={`cell-${index}`}
fill={
entry.color ||
getSeverityColorByName(entry.name) ||
CHART_COLORS.defaultColor
}
opacity={1}
className="transition-opacity hover:opacity-80"
/>
))}
</Bar>
</RechartsBar>
</ResponsiveContainer>
);
}
@@ -5,7 +5,7 @@ import { Cell, Label, Pie, PieChart, Tooltip } from "recharts";
import { ChartConfig, ChartContainer } from "@/components/ui/chart/Chart";
import { ChartLegend } from "./shared/chart-legend";
import { ChartLegend } from "./shared/ChartLegend";
import { DonutDataPoint } from "./types";
interface DonutChartProps {
@@ -21,39 +21,32 @@ interface DonutChartProps {
}
const CustomTooltip = ({ active, payload }: any) => {
if (!active || !payload || !payload.length) return null;
const entry = payload[0];
const name = entry.name;
const percentage = entry.payload?.percentage;
const color = entry.color || entry.payload?.color;
const change = entry.payload?.change;
return (
<div className="rounded-lg border border-slate-200 bg-white p-3 shadow-lg dark:border-slate-600 dark:bg-slate-800">
<div className="flex items-center gap-1">
<div
className="h-3 w-3 rounded-sm"
style={{ backgroundColor: color }}
/>
<span className="text-sm font-semibold text-slate-600 dark:text-zinc-300">
{percentage}%
</span>
<span>{name}</span>
</div>
<p className="mt-1 text-xs text-slate-600 dark:text-zinc-300">
{change !== undefined && (
<>
if (active && payload && payload.length) {
const data = payload[0].payload;
return (
<div className="rounded-lg border border-slate-700 bg-slate-800 p-3 shadow-lg">
<div className="flex items-center gap-2">
<div
className="h-3 w-3 rounded-sm"
style={{ backgroundColor: data.color }}
/>
<span className="text-sm font-semibold text-white">
{data.percentage}% {data.name}
</span>
</div>
{data.change !== undefined && (
<p className="mt-2 text-xs text-slate-400">
<span className="font-bold">
{change > 0 ? "+" : ""}
{change}%
</span>
<span> Since Last Scan</span>
</>
{data.change > 0 ? "+" : ""}
{data.change}%
</span>{" "}
Since last scan
</p>
)}
</p>
</div>
);
</div>
);
}
return null;
};
const CustomLegend = ({ payload }: any) => {
@@ -67,8 +60,8 @@ const CustomLegend = ({ payload }: any) => {
export function DonutChart({
data,
innerRadius = 68,
outerRadius = 86,
innerRadius = 80,
outerRadius = 120,
showLegend = true,
centerLabel,
}: DonutChartProps) {
@@ -103,7 +96,7 @@ export function DonutChart({
}));
return (
<>
<div>
<ChartContainer
config={chartConfig}
className="mx-auto aspect-square max-h-[350px]"
@@ -117,7 +110,7 @@ export function DonutChart({
innerRadius={innerRadius}
outerRadius={outerRadius}
strokeWidth={0}
paddingAngle={0}
paddingAngle={2}
>
{chartData.map((entry, index) => {
const opacity =
@@ -152,20 +145,14 @@ export function DonutChart({
<tspan
x={viewBox.cx}
y={viewBox.cy}
className="text-3xl font-bold text-black dark:text-white"
style={{
fill: "currentColor",
}}
className="fill-white text-3xl font-bold"
>
{formattedValue}
</tspan>
<tspan
x={viewBox.cx}
y={(viewBox.cy || 0) + 24}
className="text-black dark:text-white"
style={{
fill: "currentColor",
}}
className="fill-slate-400"
>
{centerLabel.label}
</tspan>
@@ -179,6 +166,6 @@ export function DonutChart({
</PieChart>
</ChartContainer>
{showLegend && <CustomLegend payload={legendPayload} />}
</>
</div>
);
}
@@ -26,12 +26,7 @@ export function HorizontalBarChart({ data, title }: HorizontalBarChartProps) {
<div className="w-full">
{title && (
<div className="mb-4">
<h3
className="text-lg font-semibold"
style={{ color: "var(--chart-text-primary)" }}
>
{title}
</h3>
<h3 className="text-lg font-semibold text-white">{title}</h3>
</div>
)}
@@ -53,9 +48,8 @@ export function HorizontalBarChart({ data, title }: HorizontalBarChartProps) {
>
<div className="w-24 text-right">
<span
className="text-sm"
className="text-sm text-white"
style={{
color: "var(--chart-text-primary)",
opacity: isFaded ? 0.5 : 1,
transition: "opacity 0.2s",
}}
@@ -76,44 +70,26 @@ export function HorizontalBarChart({ data, title }: HorizontalBarChartProps) {
/>
{isHovered && (
<div
className="absolute top-10 left-0 z-10 min-w-[200px] rounded-lg border p-3 shadow-lg"
style={{
backgroundColor: "var(--chart-background)",
borderColor: "var(--chart-border-emphasis)",
}}
>
<div className="absolute top-10 left-0 z-10 min-w-[200px] rounded-lg border border-slate-700 bg-slate-800 p-3 shadow-lg">
<div className="flex items-center gap-2">
<div
className="h-3 w-3 rounded-sm"
style={{ backgroundColor: barColor }}
/>
<span
className="font-semibold"
style={{ color: "var(--chart-text-primary)" }}
>
<span className="font-semibold text-white">
{item.value.toLocaleString()} {item.name} Risk
</span>
</div>
{item.newFindings !== undefined && (
<div className="mt-2 flex items-center gap-2">
<Bell
size={14}
style={{ color: "var(--chart-fail)" }}
/>
<span
className="text-sm"
style={{ color: "var(--chart-text-secondary)" }}
>
<Bell size={14} className="text-slate-400" />
<span className="text-sm text-slate-400">
{item.newFindings} New Findings
</span>
</div>
)}
{item.change !== undefined && (
<p
className="mt-1 text-sm"
style={{ color: "var(--chart-text-secondary)" }}
>
<p className="mt-1 text-sm text-slate-400">
<span className="font-bold">
{item.change > 0 ? "+" : ""}
{item.change}%
@@ -126,16 +102,15 @@ export function HorizontalBarChart({ data, title }: HorizontalBarChartProps) {
</div>
<div
className="flex w-40 items-center gap-2 text-sm"
className="flex w-40 items-center gap-2 text-sm text-white"
style={{
color: "var(--chart-text-primary)",
opacity: isFaded ? 0.5 : 1,
transition: "opacity 0.2s",
}}
>
<span className="font-semibold">{item.percentage}%</span>
<span style={{ color: "var(--chart-text-secondary)" }}></span>
<span className="font-bold">{item.value.toLocaleString()}</span>
<span className="text-slate-400"></span>
<span>{item.value.toLocaleString()}</span>
</div>
</div>
);
@@ -14,8 +14,8 @@ import {
YAxis,
} from "recharts";
import { AlertPill } from "./shared/alert-pill";
import { ChartLegend } from "./shared/chart-legend";
import { AlertPill } from "./shared/AlertPill";
import { ChartLegend } from "./shared/ChartLegend";
import { CHART_COLORS } from "./shared/constants";
import { LineConfig, LineDataPoint } from "./types";
@@ -48,19 +48,8 @@ const CustomLineTooltip = ({
const totalValue = typedPayload.reduce((sum, item) => sum + item.value, 0);
return (
<div
className="rounded-lg border p-3 shadow-lg"
style={{
backgroundColor: "var(--chart-background)",
borderColor: "var(--chart-border-emphasis)",
}}
>
<p
className="mb-3 text-xs"
style={{ color: "var(--chart-text-secondary)" }}
>
{label}
</p>
<div className="rounded-lg border border-slate-700 bg-slate-800 p-3 shadow-lg">
<p className="mb-3 text-xs text-slate-400">{label}</p>
<div className="mb-3">
<AlertPill value={totalValue} textSize="sm" />
@@ -78,29 +67,18 @@ const CustomLineTooltip = ({
className="h-2 w-2 rounded-full"
style={{ backgroundColor: item.stroke }}
/>
<span
className="text-sm"
style={{ color: "var(--chart-text-primary)" }}
>
{item.value}
</span>
<span className="text-sm text-white">{item.value}</span>
</div>
{newFindings !== undefined && (
<div className="flex items-center gap-2">
<Bell size={14} style={{ color: "var(--chart-fail)" }} />
<span
className="text-xs"
style={{ color: "var(--chart-text-secondary)" }}
>
<Bell size={14} className="text-slate-400" />
<span className="text-xs text-slate-400">
{newFindings} New Findings
</span>
</div>
)}
{change !== undefined && typeof change === "number" && (
<p
className="text-xs"
style={{ color: "var(--chart-text-secondary)" }}
>
<p className="text-xs text-slate-400">
<span className="font-bold">
{change > 0 ? "+" : ""}
{change}%
@@ -13,7 +13,7 @@ import {
ChartTooltip,
} from "@/components/ui/chart/Chart";
import { AlertPill } from "./shared/alert-pill";
import { AlertPill } from "./shared/AlertPill";
import { CHART_COLORS } from "./shared/constants";
import { RadarDataPoint } from "./types";
@@ -28,7 +28,7 @@ interface RadarChartProps {
const chartConfig = {
value: {
label: "Findings",
color: "var(--chart-radar-primary)",
color: "var(--color-magenta)",
},
} satisfies ChartConfig;
@@ -36,27 +36,15 @@ const CustomTooltip = ({ active, payload }: any) => {
if (active && payload && payload.length) {
const data = payload[0];
return (
<div
className="rounded-lg border p-3 shadow-lg"
style={{
borderColor: CHART_COLORS.tooltipBorder,
backgroundColor: CHART_COLORS.tooltipBackground,
}}
>
<p
className="text-sm font-semibold"
style={{ color: CHART_COLORS.textPrimary }}
>
<div className="rounded-lg border border-slate-700 bg-slate-800 p-3 shadow-lg">
<p className="text-sm font-semibold text-white">
{data.payload.category}
</p>
<div className="mt-1">
<AlertPill value={data.value} />
</div>
{data.payload.change !== undefined && (
<p
className="mt-1 text-xs"
style={{ color: CHART_COLORS.textSecondary }}
>
<p className="mt-1 text-xs text-slate-400">
<span className="font-bold">
{data.payload.change > 0 ? "+" : ""}
{data.payload.change}%
@@ -96,11 +84,8 @@ const CustomDot = (props: any) => {
cx={cx}
cy={cy}
r={isSelected ? 9 : 6}
fill={
isSelected ? "var(--chart-success-color)" : "var(--chart-radar-primary)"
}
fill={isSelected ? "var(--color-success)" : "var(--color-purple-dark)"}
fillOpacity={1}
className={isSelected ? "drop-shadow-[0_0_8px_#86da26]" : ""}
style={{
cursor: onSelectPoint ? "pointer" : "default",
pointerEvents: "all",
@@ -132,7 +117,7 @@ export function RadarChart({
<PolarGrid strokeOpacity={0.3} />
<Radar
dataKey={dataKey}
fill="var(--chart-radar-primary)"
fill="var(--color-magenta)"
fillOpacity={0.2}
activeDot={false}
dot={
@@ -150,7 +135,7 @@ export function RadarChart({
}
: {
r: 6,
fill: "var(--chart-radar-primary)",
fill: "var(--color-purple-dark)",
fillOpacity: 1,
}
}
@@ -23,7 +23,7 @@ interface RadialChartProps {
export function RadialChart({
percentage,
label = "Score",
color = "var(--chart-success-color)",
color = "var(--color-success)",
backgroundColor = CHART_COLORS.tooltipBackground,
height = 250,
innerRadius = 60,
@@ -68,10 +68,7 @@ export function RadialChart({
y="50%"
textAnchor="middle"
dominantBaseline="middle"
className="text-4xl font-bold"
style={{
fill: "var(--chart-text-primary)",
}}
className="fill-white text-4xl font-bold"
>
{percentage}%
</text>
+137
@@ -0,0 +1,137 @@
"use client";
import { Rectangle, ResponsiveContainer, Sankey, Tooltip } from "recharts";
import { CHART_COLORS, SEVERITY_COLORS } from "./shared/constants";
interface SankeyNode {
name: string;
}
interface SankeyLink {
source: number;
target: number;
value: number;
}
interface SankeyChartProps {
data: {
nodes: SankeyNode[];
links: SankeyLink[];
};
height?: number;
}
const COLORS: Record<string, string> = {
Success: "var(--color-success)",
Fail: "var(--color-destructive)",
AWS: "var(--color-orange)",
Azure: "var(--color-cyan)",
Google: "var(--color-red)",
...SEVERITY_COLORS,
};
const CustomTooltip = ({ active, payload }: any) => {
if (active && payload && payload.length) {
const data = payload[0].payload;
return (
<div className="rounded-lg border border-slate-700 bg-slate-800 p-3 shadow-lg">
<p className="text-sm font-semibold text-white">{data.name}</p>
{data.value && (
<p className="text-xs text-slate-400">Value: {data.value}</p>
)}
</div>
);
}
return null;
};
const CustomNode = ({ x, y, width, height, payload, containerWidth }: any) => {
const isOut = x + width + 6 > containerWidth;
const nodeName = payload.name;
const color = COLORS[nodeName] || CHART_COLORS.defaultColor;
return (
<g>
<Rectangle
x={x}
y={y}
width={width}
height={height}
fill={color}
fillOpacity="1"
/>
<text
textAnchor={isOut ? "end" : "start"}
x={isOut ? x - 6 : x + width + 6}
y={y + height / 2}
fontSize="14"
className="fill-white stroke-white"
>
{nodeName}
</text>
<text
textAnchor={isOut ? "end" : "start"}
x={isOut ? x - 6 : x + width + 6}
y={y + height / 2 + 13}
fontSize="12"
className="fill-slate-400 stroke-slate-400"
strokeOpacity="0.5"
>
{payload.value}
</text>
</g>
);
};
const CustomLink = (props: any) => {
const {
sourceX,
targetX,
sourceY,
targetY,
sourceControlX,
targetControlX,
linkWidth,
} = props;
const sourceName = props.payload.source?.name || "";
const color = COLORS[sourceName] || CHART_COLORS.defaultColor;
return (
<g>
<path
d={`
M${sourceX},${sourceY + linkWidth / 2}
C${sourceControlX},${sourceY + linkWidth / 2}
${targetControlX},${targetY + linkWidth / 2}
${targetX},${targetY + linkWidth / 2}
L${targetX},${targetY - linkWidth / 2}
C${targetControlX},${targetY - linkWidth / 2}
${sourceControlX},${sourceY - linkWidth / 2}
${sourceX},${sourceY - linkWidth / 2}
Z
`}
fill={color}
fillOpacity="0.4"
stroke="none"
/>
</g>
);
};
export function SankeyChart({ data, height = 400 }: SankeyChartProps) {
return (
<ResponsiveContainer width="100%" height={height}>
<Sankey
data={data}
node={<CustomNode />}
link={<CustomLink />}
nodePadding={50}
margin={{ top: 20, right: 160, bottom: 20, left: 160 }}
>
<Tooltip content={<CustomTooltip />} />
</Sankey>
</ResponsiveContainer>
);
}
@@ -11,11 +11,18 @@ import {
YAxis,
} from "recharts";
import { AlertPill } from "./shared/alert-pill";
import { ChartLegend } from "./shared/chart-legend";
import { AlertPill } from "./shared/AlertPill";
import { ChartLegend } from "./shared/ChartLegend";
import { CHART_COLORS } from "./shared/constants";
import { getSeverityColorByRiskScore } from "./shared/utils";
import type { ScatterDataPoint } from "./types";
interface ScatterDataPoint {
x: number;
y: number;
provider: string;
name: string;
size?: number;
}
interface ScatterPlotProps {
data: ScatterDataPoint[];
@@ -27,9 +34,9 @@ interface ScatterPlotProps {
}
const PROVIDER_COLORS = {
AWS: "var(--chart-provider-aws)",
Azure: "var(--chart-provider-azure)",
Google: "var(--chart-provider-google)",
AWS: "var(--color-orange)",
Azure: "var(--color-cyan)",
Google: "var(--color-red)",
};
const CustomTooltip = ({ active, payload }: any) => {
@@ -38,23 +45,9 @@ const CustomTooltip = ({ active, payload }: any) => {
const severityColor = getSeverityColorByRiskScore(data.x);
return (
<div
className="rounded-lg border p-3 shadow-lg"
style={{
borderColor: CHART_COLORS.tooltipBorder,
backgroundColor: CHART_COLORS.tooltipBackground,
}}
>
<p
className="text-sm font-semibold"
style={{ color: CHART_COLORS.textPrimary }}
>
{data.name}
</p>
<p
className="mt-1 text-xs"
style={{ color: CHART_COLORS.textSecondary }}
>
<div className="rounded-lg border border-slate-700 bg-slate-800 p-3 shadow-lg">
<p className="text-sm font-semibold text-white">{data.name}</p>
<p className="mt-1 text-xs text-slate-400">
<span style={{ color: severityColor }}>{data.x}</span> Risk Score
</p>
<div className="mt-2">
@@ -76,7 +69,7 @@ const CustomScatterDot = ({
const isSelected = selectedPoint?.name === payload.name;
const size = isSelected ? 18 : 8;
const fill = isSelected
? "#86DA26"
? "var(--color-success)"
: PROVIDER_COLORS[payload.provider as keyof typeof PROVIDER_COLORS] ||
CHART_COLORS.defaultColor;
@@ -86,9 +79,8 @@ const CustomScatterDot = ({
cy={cy}
r={size / 2}
fill={fill}
stroke={isSelected ? "#86DA26" : "transparent"}
stroke={isSelected ? "var(--color-success)" : "transparent"}
strokeWidth={2}
className={isSelected ? "drop-shadow-[0_0_8px_#86da26]" : ""}
style={{ cursor: "pointer" }}
onClick={() => onSelectPoint?.(payload)}
/>
+9 -9
@@ -1,9 +1,9 @@
export { DonutChart } from "./donut-chart";
export { HorizontalBarChart } from "./horizontal-bar-chart";
export { LineChart } from "./line-chart";
export { MapChart, type MapChartData, type MapChartProps } from "./map-chart";
export { RadarChart } from "./radar-chart";
export { RadialChart } from "./radial-chart";
export { SankeyChart } from "./sankey-chart";
export { ScatterPlot } from "./scatter-plot";
export { ChartLegend, type ChartLegendItem } from "./shared/chart-legend";
export { BarChart } from "./BarChart";
export { DonutChart } from "./DonutChart";
export { HorizontalBarChart } from "./HorizontalBarChart";
export { LineChart } from "./LineChart";
export { RadarChart } from "./RadarChart";
export { RadialChart } from "./RadialChart";
export { SankeyChart } from "./SankeyChart";
export { ScatterPlot } from "./ScatterPlot";
export { ChartLegend, type ChartLegendItem } from "./shared/ChartLegend";
-479
@@ -1,479 +0,0 @@
"use client";
import * as d3 from "d3";
import type {
Feature,
FeatureCollection,
GeoJsonProperties,
Geometry,
} from "geojson";
import { AlertTriangle, Info, MapPin } from "lucide-react";
import { useEffect, useRef, useState } from "react";
import { feature } from "topojson-client";
import type {
GeometryCollection,
Objects,
Topology,
} from "topojson-specification";
import { HorizontalBarChart } from "./horizontal-bar-chart";
import { BarDataPoint } from "./types";
// Constants
const MAP_CONFIG = {
defaultWidth: 688,
defaultHeight: 400,
pointRadius: 6,
selectedPointRadius: 8,
transitionDuration: 300,
} as const;
const MAP_COLORS = {
landFill: "var(--chart-border-emphasis)",
landStroke: "var(--chart-border)",
pointDefault: "#DB2B49",
pointSelected: "#86DA26",
pointHover: "#DB2B49",
} as const;
const RISK_LEVELS = {
LOW_HIGH: "low-high",
HIGH: "high",
CRITICAL: "critical",
} as const;
type RiskLevel = (typeof RISK_LEVELS)[keyof typeof RISK_LEVELS];
interface LocationPoint {
id: string;
name: string;
region: string;
coordinates: [number, number];
totalFindings: number;
riskLevel: RiskLevel;
severityData: BarDataPoint[];
change?: number;
}
export interface MapChartData {
locations: LocationPoint[];
regions: string[];
}
export interface MapChartProps {
data: MapChartData;
height?: number;
onLocationSelect?: (location: LocationPoint | null) => void;
}
// Utility functions
function createProjection(width: number, height: number) {
return d3
.geoNaturalEarth1()
.fitExtent(
[
[1, 1],
[width - 1, height - 1],
],
{ type: "Sphere" },
)
.precision(0.2);
}
async function fetchWorldData(): Promise<FeatureCollection | null> {
try {
const worldAtlasModule = await import("world-atlas/countries-110m.json");
const worldData = worldAtlasModule.default || worldAtlasModule;
const topology = worldData as unknown as Topology<Objects>;
return feature(
topology,
topology.objects.countries as GeometryCollection,
) as FeatureCollection;
} catch (error) {
console.error("Error loading world map data:", error);
return null;
}
}
// Helper: Create SVG element
function createSVGElement<T extends SVGElement>(
type: string,
attributes: Record<string, string>,
): T {
const element = document.createElementNS(
"http://www.w3.org/2000/svg",
type,
) as T;
Object.entries(attributes).forEach(([key, value]) => {
element.setAttribute(key, value);
});
return element;
}
// Components
function MapTooltip({
location,
position,
}: {
location: LocationPoint;
position: { x: number; y: number };
}) {
const CHART_COLORS = {
tooltipBorder: "var(--chart-border-emphasis)",
tooltipBackground: "var(--chart-background)",
textPrimary: "var(--chart-text-primary)",
textSecondary: "var(--chart-text-secondary)",
};
return (
<div
className="pointer-events-none absolute z-50 min-w-[200px] rounded-lg border p-3 shadow-lg"
style={{
left: `${position.x + 15}px`,
top: `${position.y + 15}px`,
transform: "translate(0, -50%)",
borderColor: CHART_COLORS.tooltipBorder,
backgroundColor: CHART_COLORS.tooltipBackground,
}}
>
<div className="flex items-center gap-2">
<MapPin size={14} style={{ color: CHART_COLORS.textSecondary }} />
<span
className="text-sm font-semibold"
style={{ color: CHART_COLORS.textPrimary }}
>
{location.name}
</span>
</div>
<div className="mt-1 flex items-center gap-2">
<AlertTriangle size={14} className="text-[#DB2B49]" />
<span className="text-sm" style={{ color: CHART_COLORS.textPrimary }}>
{location.totalFindings.toLocaleString()} Fail Findings
</span>
</div>
{location.change !== undefined && (
<p
className="mt-1 text-xs"
style={{ color: CHART_COLORS.textSecondary }}
>
<span className="font-bold">
{location.change > 0 ? "+" : ""}
{location.change}%
</span>{" "}
since last scan
</p>
)}
</div>
);
}
function EmptyState() {
const CHART_COLORS = {
tooltipBorder: "var(--chart-border-emphasis)",
tooltipBackground: "var(--chart-background)",
textSecondary: "var(--chart-text-secondary)",
};
return (
<div
className="flex h-full min-h-[400px] items-center justify-center rounded-lg border p-6"
style={{
borderColor: CHART_COLORS.tooltipBorder,
backgroundColor: CHART_COLORS.tooltipBackground,
}}
>
<div className="text-center">
<Info
size={48}
className="mx-auto mb-2"
style={{ color: CHART_COLORS.textSecondary }}
/>
<p className="text-sm" style={{ color: CHART_COLORS.textSecondary }}>
Select a location on the map to view details
</p>
</div>
</div>
);
}
function LoadingState({ height }: { height: number }) {
const CHART_COLORS = {
textSecondary: "var(--chart-text-secondary)",
};
return (
<div className="flex items-center justify-center" style={{ height }}>
<div className="text-center">
<div className="mb-2" style={{ color: CHART_COLORS.textSecondary }}>
Loading map...
</div>
</div>
</div>
);
}
export function MapChart({
data,
height = MAP_CONFIG.defaultHeight,
}: MapChartProps) {
const svgRef = useRef<SVGSVGElement>(null);
const containerRef = useRef<HTMLDivElement>(null);
const [selectedLocation, setSelectedLocation] =
useState<LocationPoint | null>(null);
const [hoveredLocation, setHoveredLocation] = useState<LocationPoint | null>(
null,
);
const [tooltipPosition, setTooltipPosition] = useState<{
x: number;
y: number;
} | null>(null);
const [worldData, setWorldData] = useState<FeatureCollection | null>(null);
const [isLoadingMap, setIsLoadingMap] = useState(true);
const [dimensions, setDimensions] = useState<{
width: number;
height: number;
}>({
width: MAP_CONFIG.defaultWidth,
height,
});
// Fetch world data once on mount
useEffect(() => {
let isMounted = true;
fetchWorldData()
.then((data) => {
if (isMounted && data) setWorldData(data);
})
.catch(console.error)
.finally(() => {
if (isMounted) setIsLoadingMap(false);
});
return () => {
isMounted = false;
};
}, []);
// Update dimensions on resize
useEffect(() => {
const updateDimensions = () => {
if (containerRef.current) {
setDimensions({ width: containerRef.current.clientWidth, height });
}
};
updateDimensions();
window.addEventListener("resize", updateDimensions);
return () => window.removeEventListener("resize", updateDimensions);
}, [height]);
// Render the map
useEffect(() => {
if (!svgRef.current || !worldData || isLoadingMap) return;
const svg = svgRef.current;
const { width, height } = dimensions;
svg.innerHTML = "";
const projection = createProjection(width, height);
const path = d3.geoPath().projection(projection);
// Render countries
const mapGroup = createSVGElement<SVGGElement>("g", {
class: "map-countries",
});
worldData.features?.forEach(
(feature: Feature<Geometry, GeoJsonProperties>) => {
const pathData = path(feature);
if (pathData) {
const pathElement = createSVGElement<SVGPathElement>("path", {
d: pathData,
fill: MAP_COLORS.landFill,
stroke: MAP_COLORS.landStroke,
"stroke-width": "0.5",
});
mapGroup.appendChild(pathElement);
}
},
);
svg.appendChild(mapGroup);
// Helper to update tooltip position
const updateTooltip = (e: MouseEvent) => {
const rect = svg.getBoundingClientRect();
setTooltipPosition({
x: e.clientX - rect.left,
y: e.clientY - rect.top,
});
};
// Helper to create circle
const createCircle = (location: LocationPoint) => {
const projected = projection(location.coordinates);
if (!projected) return null;
const [x, y] = projected;
if (x < 0 || x > width || y < 0 || y > height) return null;
const isSelected = selectedLocation?.id === location.id;
const isHovered = hoveredLocation?.id === location.id;
const classes = ["cursor-pointer"];
if (isSelected) classes.push("drop-shadow-[0_0_8px_#86da26]");
if (isHovered && !isSelected) classes.push("opacity-70");
const circle = createSVGElement<SVGCircleElement>("circle", {
cx: x.toString(),
cy: y.toString(),
r: (isSelected
? MAP_CONFIG.selectedPointRadius
: MAP_CONFIG.pointRadius
).toString(),
fill: isSelected ? MAP_COLORS.pointSelected : MAP_COLORS.pointDefault,
class: classes.join(" "),
});
circle.addEventListener("click", () =>
setSelectedLocation(isSelected ? null : location),
);
circle.addEventListener("mouseenter", (e) => {
setHoveredLocation(location);
updateTooltip(e);
});
circle.addEventListener("mousemove", updateTooltip);
circle.addEventListener("mouseleave", () => {
setHoveredLocation(null);
setTooltipPosition(null);
});
return circle;
};
// Render points
const pointsGroup = createSVGElement<SVGGElement>("g", {
class: "threat-points",
});
// Unselected points first
data.locations.forEach((location) => {
if (selectedLocation?.id !== location.id) {
const circle = createCircle(location);
if (circle) pointsGroup.appendChild(circle);
}
});
// Selected point last (on top)
if (selectedLocation) {
const selectedData = data.locations.find(
(loc) => loc.id === selectedLocation.id,
);
if (selectedData) {
const circle = createCircle(selectedData);
if (circle) pointsGroup.appendChild(circle);
}
}
svg.appendChild(pointsGroup);
}, [
data.locations,
dimensions,
selectedLocation,
hoveredLocation,
worldData,
isLoadingMap,
]);
const CHART_COLORS = {
tooltipBorder: "var(--chart-border-emphasis)",
tooltipBackground: "var(--chart-background)",
textPrimary: "var(--chart-text-primary)",
textSecondary: "var(--chart-text-secondary)",
};
return (
<div className="flex w-full flex-col gap-6 lg:flex-row lg:items-start">
{/* Map Section */}
<div className="flex-1">
<h3
className="mb-4 text-lg font-semibold"
style={{ color: CHART_COLORS.textPrimary }}
>
Threat Map
</h3>
<div
ref={containerRef}
className="rounded-lg border p-4"
style={{
borderColor: CHART_COLORS.tooltipBorder,
backgroundColor: CHART_COLORS.tooltipBackground,
}}
>
{isLoadingMap ? (
<LoadingState height={dimensions.height} />
) : (
<>
<div className="relative">
<svg
ref={svgRef}
width={dimensions.width}
height={dimensions.height}
className="w-full"
style={{ maxWidth: "100%" }}
/>
{hoveredLocation && tooltipPosition && (
<MapTooltip
location={hoveredLocation}
position={tooltipPosition}
/>
)}
</div>
<div className="mt-4 flex items-center gap-2">
<div className="h-3 w-3 rounded-full bg-[#DB2B49]" />
<span
className="text-sm"
style={{ color: CHART_COLORS.textSecondary }}
>
{data.locations.length} Locations
</span>
</div>
</>
)}
</div>
</div>
{/* Details Section */}
<div className="w-full lg:w-[400px]">
<div className="mb-4 h-10" />
{selectedLocation ? (
<div
className="rounded-lg border p-6"
style={{
borderColor: CHART_COLORS.tooltipBorder,
backgroundColor: CHART_COLORS.tooltipBackground,
}}
>
<div className="mb-6">
<div className="mb-1 flex items-center gap-2">
<div className="h-2 w-2 rounded-full bg-[#86DA26]" />
<h4
className="text-base font-semibold"
style={{ color: CHART_COLORS.textPrimary }}
>
{selectedLocation.name}
</h4>
</div>
<p
className="text-sm"
style={{ color: CHART_COLORS.textSecondary }}
>
{selectedLocation.totalFindings.toLocaleString()} Total Findings
</p>
</div>
<HorizontalBarChart data={selectedLocation.severityData} />
</div>
) : (
<EmptyState />
)}
</div>
</div>
);
}
@@ -1,50 +0,0 @@
"use client";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "../ui/select/Select";
interface MapRegionFilterProps {
regions: string[];
selectedRegion: string;
onRegionChange: (region: string) => void;
chartColors: {
tooltipBorder: string;
tooltipBackground: string;
textPrimary: string;
};
}
export function MapRegionFilter({
regions,
selectedRegion,
onRegionChange,
chartColors,
}: MapRegionFilterProps) {
return (
<Select value={selectedRegion} onValueChange={onRegionChange}>
<SelectTrigger
className="min-w-[200px] rounded-lg"
style={{
borderColor: chartColors.tooltipBorder,
backgroundColor: chartColors.tooltipBackground,
color: chartColors.textPrimary,
}}
>
<SelectValue placeholder="All Regions" />
</SelectTrigger>
<SelectContent>
<SelectItem value="All Regions">All Regions</SelectItem>
{regions.map((region) => (
<SelectItem key={region} value={region}>
{region}
</SelectItem>
))}
</SelectContent>
</Select>
);
}
-403
@@ -1,403 +0,0 @@
"use client";
import { useState } from "react";
import { Rectangle, ResponsiveContainer, Sankey, Tooltip } from "recharts";
import { ChartTooltip } from "./shared/chart-tooltip";
import { CHART_COLORS } from "./shared/constants";
interface SankeyNode {
name: string;
newFindings?: number;
change?: number;
}
interface SankeyLink {
source: number;
target: number;
value: number;
}
interface SankeyChartProps {
data: {
nodes: SankeyNode[];
links: SankeyLink[];
};
height?: number;
}
interface LinkTooltipState {
show: boolean;
x: number;
y: number;
sourceName: string;
targetName: string;
value: number;
color: string;
}
interface NodeTooltipState {
show: boolean;
x: number;
y: number;
name: string;
value: number;
color: string;
newFindings?: number;
change?: number;
}
// Note: Using hex colors directly because Recharts SVG fill doesn't resolve CSS variables
const COLORS: Record<string, string> = {
Success: "#86da26",
Fail: "#db2b49",
AWS: "#ff9900",
Azure: "#00bcd4",
Google: "#EA4335",
Critical: "#971348",
High: "#ff3077",
Medium: "#ff7d19",
Low: "#fdd34f",
Info: "#2e51b2",
Informational: "#2e51b2",
};
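// A minimal sketch, not from this file, of one possible alternative to the
// hard-coded hex map above: resolving a CSS custom property to its computed
// value at runtime so the resolved color can be handed to Recharts directly.
// The helper name and the "--chart-success" token are assumptions.
function resolveCssVariable(name: string, fallback = "#64748b"): string {
  if (typeof window === "undefined") return fallback; // guard for SSR
  const value = getComputedStyle(document.documentElement)
    .getPropertyValue(name)
    .trim();
  return value || fallback;
}
// Example usage: const successColor = resolveCssVariable("--chart-success");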
const CustomTooltip = ({ active, payload }: any) => {
if (active && payload && payload.length) {
const data = payload[0].payload;
return (
<div
className="rounded-lg border p-3 shadow-lg"
style={{
borderColor: CHART_COLORS.tooltipBorder,
backgroundColor: CHART_COLORS.tooltipBackground,
}}
>
<p
className="text-sm font-semibold"
style={{ color: CHART_COLORS.textPrimary }}
>
{data.name}
</p>
{data.value && (
<p className="text-xs" style={{ color: CHART_COLORS.textSecondary }}>
Value: {data.value}
</p>
)}
</div>
);
}
return null;
};
const CustomNode = (props: any) => {
const { x, y, width, height, payload, containerWidth } = props;
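// Labels normally render to the right of the node; if that would overflow the
// container, anchor them to the left of the node instead.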
const isOut = x + width + 6 > containerWidth;
const nodeName = payload.name;
const color = COLORS[nodeName] || CHART_COLORS.defaultColor;
const isHidden = nodeName === "";
const hasTooltip = !isHidden && payload.newFindings;
const handleMouseEnter = (e: React.MouseEvent) => {
if (!hasTooltip) return;
const rect = e.currentTarget.closest("svg") as SVGSVGElement;
if (rect) {
const bbox = rect.getBoundingClientRect();
props.onNodeHover?.({
x: e.clientX - bbox.left,
y: e.clientY - bbox.top,
name: nodeName,
value: payload.value,
color,
newFindings: payload.newFindings,
change: payload.change,
});
}
};
const handleMouseMove = (e: React.MouseEvent) => {
if (!hasTooltip) return;
const rect = e.currentTarget.closest("svg") as SVGSVGElement;
if (rect) {
const bbox = rect.getBoundingClientRect();
props.onNodeMove?.({
x: e.clientX - bbox.left,
y: e.clientY - bbox.top,
});
}
};
const handleMouseLeave = () => {
if (!hasTooltip) return;
props.onNodeLeave?.();
};
return (
<g
style={{ cursor: hasTooltip ? "pointer" : "default" }}
onMouseEnter={handleMouseEnter}
onMouseMove={handleMouseMove}
onMouseLeave={handleMouseLeave}
>
<Rectangle
x={x}
y={y}
width={width}
height={height}
fill={color}
fillOpacity={isHidden ? "0" : "1"}
/>
{!isHidden && (
<>
<text
textAnchor={isOut ? "end" : "start"}
x={isOut ? x - 6 : x + width + 6}
y={y + height / 2}
fontSize="14"
fill={CHART_COLORS.textPrimary}
>
{nodeName}
</text>
<text
textAnchor={isOut ? "end" : "start"}
x={isOut ? x - 6 : x + width + 6}
y={y + height / 2 + 13}
fontSize="12"
fill={CHART_COLORS.textSecondary}
>
{payload.value}
</text>
</>
)}
</g>
);
};
const CustomLink = (props: any) => {
const {
sourceX,
targetX,
sourceY,
targetY,
sourceControlX,
targetControlX,
linkWidth,
index,
} = props;
const sourceName = props.payload.source?.name || "";
const targetName = props.payload.target?.name || "";
const value = props.payload.value || 0;
const color = COLORS[sourceName] || CHART_COLORS.defaultColor;
const isHidden = targetName === "";
const isHovered = props.hoveredLink !== null && props.hoveredLink === index;
const hasHoveredLink = props.hoveredLink !== null;
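// Closed ribbon between the two nodes: the bottom edge curves from source to
// target, a vertical line spans the link width at the target, the top edge
// curves back to the source, and Z closes the shape.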
const pathD = `
M${sourceX},${sourceY + linkWidth / 2}
C${sourceControlX},${sourceY + linkWidth / 2}
${targetControlX},${targetY + linkWidth / 2}
${targetX},${targetY + linkWidth / 2}
L${targetX},${targetY - linkWidth / 2}
C${targetControlX},${targetY - linkWidth / 2}
${sourceControlX},${sourceY - linkWidth / 2}
${sourceX},${sourceY - linkWidth / 2}
Z
`;
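// Hide links that feed the placeholder node; while any link is hovered, dim
// the others so the hovered flow stands out.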
const getOpacity = () => {
if (isHidden) return "0";
if (!hasHoveredLink) return "0.4";
return isHovered ? "0.8" : "0.1";
};
const handleMouseEnter = (e: React.MouseEvent) => {
const rect = e.currentTarget.parentElement?.parentElement
?.parentElement as unknown as SVGSVGElement;
if (rect) {
const bbox = rect.getBoundingClientRect();
props.onLinkHover?.(index, {
x: e.clientX - bbox.left,
y: e.clientY - bbox.top,
sourceName,
targetName,
value,
color,
});
}
};
const handleMouseMove = (e: React.MouseEvent) => {
const rect = e.currentTarget.parentElement?.parentElement
?.parentElement as unknown as SVGSVGElement;
if (rect && isHovered) {
const bbox = rect.getBoundingClientRect();
props.onLinkMove?.({
x: e.clientX - bbox.left,
y: e.clientY - bbox.top,
});
}
};
const handleMouseLeave = () => {
props.onLinkLeave?.();
};
return (
<g>
<path
d={pathD}
fill={color}
fillOpacity={getOpacity()}
stroke="none"
style={{ cursor: "pointer", transition: "fill-opacity 0.2s" }}
onMouseEnter={handleMouseEnter}
onMouseMove={handleMouseMove}
onMouseLeave={handleMouseLeave}
/>
</g>
);
};
export function SankeyChart({ data, height = 400 }: SankeyChartProps) {
const [hoveredLink, setHoveredLink] = useState<number | null>(null);
const [linkTooltip, setLinkTooltip] = useState<LinkTooltipState>({
show: false,
x: 0,
y: 0,
sourceName: "",
targetName: "",
value: 0,
color: "",
});
const [nodeTooltip, setNodeTooltip] = useState<NodeTooltipState>({
show: false,
x: 0,
y: 0,
name: "",
value: 0,
color: "",
});
const handleLinkHover = (
index: number,
data: Omit<LinkTooltipState, "show">,
) => {
setHoveredLink(index);
setLinkTooltip({ show: true, ...data });
};
const handleLinkMove = (position: { x: number; y: number }) => {
setLinkTooltip((prev) => ({
...prev,
x: position.x,
y: position.y,
}));
};
const handleLinkLeave = () => {
setHoveredLink(null);
setLinkTooltip((prev) => ({ ...prev, show: false }));
};
const handleNodeHover = (data: Omit<NodeTooltipState, "show">) => {
setNodeTooltip({ show: true, ...data });
};
const handleNodeMove = (position: { x: number; y: number }) => {
setNodeTooltip((prev) => ({
...prev,
x: position.x,
y: position.y,
}));
};
const handleNodeLeave = () => {
setNodeTooltip((prev) => ({ ...prev, show: false }));
};
return (
<div className="relative">
<ResponsiveContainer width="100%" height={height}>
<Sankey
data={data}
node={
<CustomNode
onNodeHover={handleNodeHover}
onNodeMove={handleNodeMove}
onNodeLeave={handleNodeLeave}
/>
}
link={
<CustomLink
hoveredLink={hoveredLink}
onLinkHover={handleLinkHover}
onLinkMove={handleLinkMove}
onLinkLeave={handleLinkLeave}
/>
}
nodePadding={50}
margin={{ top: 20, right: 160, bottom: 20, left: 160 }}
sort={false}
>
<Tooltip content={<CustomTooltip />} />
</Sankey>
</ResponsiveContainer>
{linkTooltip.show && (
<div
className="pointer-events-none absolute z-50"
style={{
left: `${Math.max(125, Math.min(linkTooltip.x, window.innerWidth - 125))}px`,
top: `${Math.max(linkTooltip.y - 80, 10)}px`,
transform: "translate(-50%, -100%)",
}}
>
<ChartTooltip
active={true}
payload={[
{
payload: {
name: linkTooltip.targetName,
value: linkTooltip.value,
color: linkTooltip.color,
},
color: linkTooltip.color,
},
]}
label={`${linkTooltip.sourceName} → ${linkTooltip.targetName}`}
/>
</div>
)}
{nodeTooltip.show && (
<div
className="pointer-events-none absolute z-50"
style={{
left: `${Math.max(125, Math.min(nodeTooltip.x, window.innerWidth - 125))}px`,
top: `${Math.max(nodeTooltip.y - 80, 10)}px`,
transform: "translate(-50%, -100%)",
}}
>
<ChartTooltip
active={true}
payload={[
{
payload: {
name: nodeTooltip.name,
value: nodeTooltip.value,
color: nodeTooltip.color,
newFindings: nodeTooltip.newFindings,
change: nodeTooltip.change,
},
color: nodeTooltip.color,
},
]}
/>
</div>
)}
</div>
);
}
@@ -17,17 +17,13 @@ export function AlertPill({
}: AlertPillProps) {
return (
<div className="flex items-center gap-2">
<div
className="flex items-center gap-1 rounded-full px-2 py-1"
style={{ backgroundColor: "var(--chart-alert-bg)" }}
>
<AlertTriangle
size={iconSize}
style={{ color: "var(--chart-alert-text)" }}
/>
<div className="bg-alert-pill-bg flex items-center gap-1 rounded-full px-2 py-1">
<AlertTriangle size={iconSize} className="text-alert-pill-text" />
<span
className={cn(`text-${textSize}`, "font-semibold")}
style={{ color: "var(--chart-alert-text)" }}
className={cn(
`text-${textSize}`,
"text-alert-pill-text font-semibold",
)}
>
{value}
</span>
@@ -9,22 +9,14 @@ interface ChartLegendProps {
export function ChartLegend({ items }: ChartLegendProps) {
return (
<div
className="mt-4 inline-flex gap-[46px] rounded-full border-2 px-[19px] py-[9px]"
style={{ borderColor: "var(--chart-border)" }}
>
<div className="bg-card-border mt-4 inline-flex gap-[46px] rounded-full border-2 px-[19px] py-[9px]">
{items.map((item, index) => (
<div key={`legend-${index}`} className="flex items-center gap-1">
<div
className="h-3 w-3 rounded"
style={{ backgroundColor: item.color }}
/>
<span
className="text-xs"
style={{ color: "var(--chart-text-secondary)" }}
>
{item.label}
</span>
<span className="text-xs text-gray-300">{item.label}</span>
</div>
))}
</div>
@@ -3,7 +3,6 @@ import { Bell, VolumeX } from "lucide-react";
import { cn } from "@/lib/utils";
import { TooltipData } from "../types";
import { CHART_COLORS } from "./constants";
interface ChartTooltipProps {
active?: boolean;
@@ -28,13 +27,7 @@ export function ChartTooltip({
const color = payload[0].color || data.color;
return (
<div
className="min-w-[200px] rounded-lg border border-slate-200 bg-white p-3 shadow-lg dark:border-slate-600 dark:bg-slate-800"
style={{
borderColor: CHART_COLORS.tooltipBorder,
backgroundColor: CHART_COLORS.tooltipBackground,
}}
>
<div className="min-w-[200px] rounded-lg border border-slate-700 bg-slate-800 p-3 shadow-lg">
<div className="flex items-center gap-2">
{showColorIndicator && color && (
<div
@@ -45,12 +38,10 @@ export function ChartTooltip({
style={{ backgroundColor: color }}
/>
)}
<p className="text-sm font-semibold text-slate-900 dark:text-white">
{label || data.name}
</p>
<p className="text-sm font-semibold text-white">{label || data.name}</p>
</div>
<p className="mt-1 text-xs text-slate-900 dark:text-white">
<p className="mt-1 text-xs text-white">
{typeof data.value === "number"
? data.value.toLocaleString()
: data.value}
@@ -59,8 +50,8 @@ export function ChartTooltip({
{data.newFindings !== undefined && data.newFindings > 0 && (
<div className="mt-1 flex items-center gap-2">
<Bell size={14} className="text-slate-600 dark:text-slate-400" />
<span className="text-xs text-slate-600 dark:text-slate-400">
<Bell size={14} className="text-slate-400" />
<span className="text-xs text-slate-400">
{data.newFindings} New Findings
</span>
</div>
@@ -68,24 +59,20 @@ export function ChartTooltip({
{data.new !== undefined && data.new > 0 && (
<div className="mt-1 flex items-center gap-2">
<Bell size={14} className="text-slate-600 dark:text-slate-400" />
<span className="text-xs text-slate-600 dark:text-slate-400">
{data.new} New
</span>
<Bell size={14} className="text-slate-400" />
<span className="text-xs text-slate-400">{data.new} New</span>
</div>
)}
{data.muted !== undefined && data.muted > 0 && (
<div className="mt-1 flex items-center gap-2">
<VolumeX size={14} className="text-slate-600 dark:text-slate-400" />
<span className="text-xs text-slate-600 dark:text-slate-400">
{data.muted} Muted
</span>
<VolumeX size={14} className="text-slate-400" />
<span className="text-xs text-slate-400">{data.muted} Muted</span>
</div>
)}
{data.change !== undefined && (
<p className="mt-1 text-xs text-slate-600 dark:text-slate-400">
<p className="mt-1 text-xs text-slate-400">
<span className="font-bold">
{data.change > 0 ? "+" : ""}
{data.change}%
@@ -110,10 +97,8 @@ export function MultiSeriesChartTooltip({
}
return (
<div className="min-w-[200px] rounded-lg border border-slate-200 bg-white p-3 shadow-lg dark:border-slate-600 dark:bg-slate-800">
<p className="mb-2 text-sm font-semibold text-slate-900 dark:text-white">
{label}
</p>
<div className="min-w-[200px] rounded-lg border border-slate-700 bg-slate-800 p-3 shadow-lg">
<p className="mb-2 text-sm font-semibold text-white">{label}</p>
{payload.map((entry: any, index: number) => (
<div key={index} className="flex items-center gap-2">
@@ -121,14 +106,12 @@ export function MultiSeriesChartTooltip({
className="h-2 w-2 rounded-full"
style={{ backgroundColor: entry.color }}
/>
<span className="text-xs text-slate-900 dark:text-white">
{entry.name}:
</span>
<span className="text-xs font-semibold text-slate-900 dark:text-white">
<span className="text-xs text-white">{entry.name}:</span>
<span className="text-xs font-semibold text-white">
{entry.value}
</span>
{entry.payload[`${entry.dataKey}_change`] && (
<span className="text-xs text-slate-600 dark:text-slate-400">
<span className="text-xs text-slate-400">
({entry.payload[`${entry.dataKey}_change`] > 0 ? "+" : ""}
{entry.payload[`${entry.dataKey}_change`]}%)
</span>
+13 -25
View File
@@ -1,33 +1,21 @@
export const SEVERITY_COLORS = {
Informational: "var(--chart-info)",
Info: "var(--chart-info)",
Low: "var(--chart-warning)",
Medium: "var(--chart-warning-emphasis)",
High: "var(--chart-danger)",
Critical: "var(--chart-danger-emphasis)",
} as const;
export const PROVIDER_COLORS = {
AWS: "var(--chart-provider-aws)",
Azure: "var(--chart-provider-azure)",
Google: "var(--chart-provider-google)",
} as const;
export const STATUS_COLORS = {
Success: "var(--chart-success-color)",
Fail: "var(--chart-fail)",
Informational: "var(--color-info)",
Low: "var(--color-warning)",
Medium: "var(--color-warning-emphasis)",
High: "var(--color-danger)",
Critical: "var(--color-danger-emphasis)",
} as const;
export const CHART_COLORS = {
tooltipBorder: "var(--chart-border-emphasis)",
tooltipBackground: "var(--chart-background)",
textPrimary: "var(--chart-text-primary)",
textSecondary: "var(--chart-text-secondary)",
gridLine: "var(--chart-border-emphasis)",
tooltipBorder: "var(--color-slate-700)",
tooltipBackground: "var(--color-slate-800)",
textPrimary: "var(--color-white)",
textSecondary: "var(--color-slate-400)",
gridLine: "var(--color-slate-700)",
backgroundTrack: "rgba(51, 65, 85, 0.5)", // slate-700 with 50% opacity
alertPillBg: "var(--chart-alert-bg)",
alertPillText: "var(--chart-alert-text)",
defaultColor: "#64748b", // slate-500
alertPillBg: "var(--color-alert-pill-bg)",
alertPillText: "var(--color-alert-pill-text)",
defaultColor: "var(--color-slate-500)", // Default fallback color for charts
} as const;
export const CHART_DIMENSIONS = {
-8
View File
@@ -36,14 +36,6 @@ export interface RadarDataPoint {
change?: number;
}
export interface ScatterDataPoint {
x: number;
y: number;
provider: string;
name: string;
size?: number;
}
export interface LineConfig {
dataKey: string;
color: string;
+53
View File
@@ -0,0 +1,53 @@
import { tv } from "tailwind-variants";
export const title = tv({
base: "tracking-tight inline font-semibold",
variants: {
color: {
violet: "from-[#FF1CF7] to-[#b249f8]",
yellow: "from-[#FF705B] to-[#FFB457]",
blue: "from-[#5EA2EF] to-[#0072F5]",
cyan: "from-[#00b7fa] to-[#01cfea]",
green: "from-[#6FEE8D] to-[#17c964]",
pink: "from-[#FF72E1] to-[#F54C7A]",
foreground: "dark:from-[#FFFFFF] dark:to-[#4B4B4B]",
},
size: {
sm: "text-3xl lg:text-4xl",
md: "text-[2.3rem] lg:text-5xl leading-9",
lg: "text-4xl lg:text-6xl",
},
fullWidth: {
true: "w-full block",
},
},
defaultVariants: {
size: "md",
},
compoundVariants: [
{
color: [
"violet",
"yellow",
"blue",
"cyan",
"green",
"pink",
"foreground",
],
class: "bg-clip-text text-transparent bg-linear-to-b",
},
],
});
export const subtitle = tv({
base: "w-full md:w-1/2 my-2 text-lg lg:text-xl text-default-600 block max-w-full",
variants: {
fullWidth: {
true: "w-full!",
},
},
defaultVariants: {
fullWidth: true,
},
});
+5 -10
View File
@@ -4,18 +4,13 @@ This directory contains all shadcn/ui based components for the Prowler application
## Directory Structure
Example of a custom component:
```
shadcn/
├── card/
│ ├── base-card/
│ ├── base-card.tsx
│ ├── card/
│ ├── card.tsx
│ └── resource-stats-card/
│ ├── resource-stats-card.tsx
│ ├── resource-stats-card.example.tsx
├── card.tsx # shadcn Card component
├── resource-stats-card/ # Custom ResourceStatsCard built on shadcn
│ ├── resource-stats-card.tsx
│ ├── resource-stats-card.example.tsx
└── index.ts
├── index.ts # Barrel exports
└── README.md
```
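For illustration, a minimal sketch of what a custom component following the layout above might look like, assuming the shadcn `Card` primitives live in the sibling `card.tsx`; the `ExampleStatsCard` name, its props, and the import path are assumptions, not an actual file in this directory:

```tsx
// Illustrative only: a custom component built on the shadcn Card, following
// the directory layout above. Name, props, and relative import path are
// assumptions (the path assumes a sibling folder of card.tsx).
import { Card, CardContent, CardHeader, CardTitle } from "../card";

interface ExampleStatsCardProps {
  title: string;
  value: number;
}

export function ExampleStatsCard({ title, value }: ExampleStatsCardProps) {
  return (
    <Card className="gap-2 px-4 py-3">
      <CardHeader>
        <CardTitle>{title}</CardTitle>
      </CardHeader>
      <CardContent>{value.toLocaleString()}</CardContent>
    </Card>
  );
}
```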
@@ -1,3 +1,5 @@
import * as React from "react";
import { cn } from "@/lib/utils";
function Card({ className, ...props }: React.ComponentProps<"div">) {
@@ -18,7 +20,7 @@ function CardHeader({ className, ...props }: React.ComponentProps<"div">) {
<div
data-slot="card-header"
className={cn(
"@container/card-header grid auto-rows-min grid-rows-[auto_auto] items-start gap-2 has-data-[slot=card-action]:grid-cols-[1fr_auto] [.border-b]:pb-6",
"@container/card-header grid auto-rows-min grid-rows-[auto_auto] items-start gap-2 px-6 has-data-[slot=card-action]:grid-cols-[1fr_auto] [.border-b]:pb-6",
className,
)}
{...props}
@@ -30,10 +32,7 @@ function CardTitle({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="card-title"
className={cn(
"my-2 text-[18px] leading-none text-slate-900 dark:text-white",
className,
)}
className={cn("leading-none font-semibold", className)}
{...props}
/>
);
@@ -1,36 +0,0 @@
import { cva, type VariantProps } from "class-variance-authority";
import { cn } from "@/lib/utils";
import { Card } from "../card";
const baseCardVariants = cva("", {
variants: {
variant: {
default:
"border-slate-200 bg-white dark:border-zinc-900 dark:bg-stone-950",
},
},
defaultVariants: {
variant: "default",
},
});
interface BaseCardProps
extends React.ComponentProps<typeof Card>,
VariantProps<typeof baseCardVariants> {}
const BaseCard = ({ className, variant, ...props }: BaseCardProps) => {
return (
<Card
className={cn(
baseCardVariants({ variant }),
"gap-2 px-[18px] pt-3 pb-4",
className,
)}
{...props}
/>
);
};
export { BaseCard };
@@ -1,25 +0,0 @@
import { cn } from "@/lib/utils";
interface StatsContainerProps extends React.HTMLAttributes<HTMLDivElement> {
children: React.ReactNode;
}
const StatsContainer = ({
className,
children,
...props
}: StatsContainerProps) => {
return (
<div
className={cn(
"flex rounded-xl border border-slate-200 bg-white px-[19px] py-[9px] dark:border-[rgba(38,38,38,0.7)] dark:bg-[rgba(23,23,23,0.5)] dark:backdrop-blur-[46px]",
className,
)}
{...props}
>
{children}
</div>
);
};
export { StatsContainer };
+21 -8
View File
@@ -1,8 +1,21 @@
export * from "./card/base-card/base-card";
export * from "./card/card";
export * from "./card/resource-stats-card/resource-stats-card";
export * from "./card/resource-stats-card/resource-stats-card-container";
export * from "./card/resource-stats-card/resource-stats-card-content";
export * from "./card/resource-stats-card/resource-stats-card-divider";
export * from "./card/resource-stats-card/resource-stats-card-header";
export * from "./card/stats-container";
export {
Card,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "./card";
export {
ResourceStatsCard,
ResourceStatsCardContainer,
type ResourceStatsCardContainerProps,
ResourceStatsCardContent,
type ResourceStatsCardContentProps,
ResourceStatsCardDivider,
type ResourceStatsCardDividerProps,
ResourceStatsCardHeader,
type ResourceStatsCardHeaderProps,
type ResourceStatsCardProps,
type StatItem,
} from "./resource-stats-card";
@@ -0,0 +1,13 @@
export type { ResourceStatsCardProps } from "./resource-stats-card";
export { ResourceStatsCard } from "./resource-stats-card";
export type { ResourceStatsCardContainerProps } from "./resource-stats-card-container";
export { ResourceStatsCardContainer } from "./resource-stats-card-container";
export type {
ResourceStatsCardContentProps,
StatItem,
} from "./resource-stats-card-content";
export { ResourceStatsCardContent } from "./resource-stats-card-content";
export type { ResourceStatsCardDividerProps } from "./resource-stats-card-divider";
export { ResourceStatsCardDivider } from "./resource-stats-card-divider";
export type { ResourceStatsCardHeaderProps } from "./resource-stats-card-header";
export { ResourceStatsCardHeader } from "./resource-stats-card-header";
@@ -33,11 +33,11 @@ const badgeVariants = cva(
{
variants: {
variant: {
[CardVariant.default]: "bg-slate-100 dark:bg-[#535359]",
[CardVariant.fail]: "bg-red-100 dark:bg-[#432232]",
[CardVariant.pass]: "bg-green-100 dark:bg-[#204237]",
[CardVariant.warning]: "bg-amber-100 dark:bg-[#3d3520]",
[CardVariant.info]: "bg-blue-100 dark:bg-[#1e3a5f]",
[CardVariant.default]: "bg-[#535359]",
[CardVariant.fail]: "bg-[#432232]",
[CardVariant.pass]: "bg-[#204237]",
[CardVariant.warning]: "bg-[#3d3520]",
[CardVariant.info]: "bg-[#1e3a5f]",
},
size: {
sm: "px-1 text-xs",
@@ -66,7 +66,7 @@ const badgeIconVariants = cva("", {
});
const labelTextVariants = cva(
"leading-6 font-semibold text-slate-900 dark:text-zinc-300 whitespace-nowrap",
"leading-6 font-semibold text-zinc-300 dark:text-zinc-300",
{
variants: {
size: {
@@ -81,7 +81,7 @@ const labelTextVariants = cva(
},
);
const statIconVariants = cva("text-slate-600 dark:text-zinc-300", {
const statIconVariants = cva("text-zinc-300 dark:text-zinc-300", {
variants: {
size: {
sm: "h-2.5 w-2.5",
@@ -95,7 +95,7 @@ const statIconVariants = cva("text-slate-600 dark:text-zinc-300", {
});
const statLabelVariants = cva(
"leading-5 font-medium text-slate-700 dark:text-zinc-300",
"leading-5 font-medium text-zinc-300 dark:text-zinc-300",
{
variants: {
size: {
@@ -111,7 +111,7 @@ export const ResourceStatsCard = ({
{header && <ResourceStatsCardHeader {...header} size={resolvedSize} />}
{emptyState ? (
<div className="flex h-[51px] w-full flex-col items-center justify-center">
<p className="text-center text-sm leading-5 font-medium text-slate-600 dark:text-zinc-300">
<p className="text-center text-sm leading-5 font-medium text-zinc-300 dark:text-zinc-300">
{emptyState.message}
</p>
</div>
@@ -141,7 +141,7 @@ export const ResourceStatsCard = ({
{header && <ResourceStatsCardHeader {...header} size={resolvedSize} />}
{emptyState ? (
<div className="flex h-[51px] w-full flex-col items-center justify-center">
<p className="text-center text-sm leading-5 font-medium text-slate-600 dark:text-zinc-300">
<p className="text-center text-sm leading-5 font-medium text-zinc-300 dark:text-zinc-300">
{emptyState.message}
</p>
</div>
+1 -2
View File
@@ -70,7 +70,6 @@ const ChartContainer = React.forwardRef<
</ChartContext.Provider>
);
});
ChartContainer.displayName = "Chart";
const ChartStyle = ({ id, config }: { id: string; config: ChartConfig }) => {
@@ -185,7 +184,7 @@ const ChartTooltipContent = React.forwardRef<
<div
ref={ref}
className={cn(
"grid min-w-32 items-start gap-1.5 rounded-lg border border-slate-200/50 bg-white px-2.5 py-1.5 text-xs shadow-xl dark:border-slate-800/50 dark:bg-slate-950",
"grid min-w-32 items-start gap-1.5 rounded-lg border border-slate-200 border-slate-200/50 bg-white px-2.5 py-1.5 text-xs shadow-xl dark:border-slate-800 dark:border-slate-800/50 dark:bg-slate-950",
className,
)}
>
+1 -1
View File
@@ -86,7 +86,7 @@ export const CustomButton = React.forwardRef<
) => (
<Button
as={asLink ? Link : undefined}
{...(asLink && { href: asLink })}
href={asLink}
target={target}
type={type}
aria-label={ariaLabel}
+1 -1
View File
@@ -12,6 +12,6 @@ export * from "./feedback-banner/feedback-banner";
export * from "./headers/navigation-header";
export * from "./label/Label";
export * from "./main-layout/main-layout";
export * from "./select";
export * from "./select/Select";
export * from "./sidebar";
export * from "./toast";
-12
View File
@@ -1,12 +0,0 @@
export {
Select,
SelectContent,
SelectGroup,
SelectItem,
SelectLabel,
SelectScrollDownButton,
SelectScrollUpButton,
SelectSeparator,
SelectTrigger,
SelectValue,
} from "./Select";
+107 -147
View File
@@ -5,7 +5,7 @@
"from": "1.0.59",
"to": "1.0.59",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-10-01T11:13:12.025Z"
},
{
"section": "dependencies",
@@ -13,7 +13,7 @@
"from": "2.0.59",
"to": "2.0.59",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-10-01T11:13:12.025Z"
},
{
"section": "dependencies",
@@ -21,15 +21,15 @@
"from": "2.8.4",
"to": "2.8.4",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-29T14:26:25.838Z"
},
{
"section": "dependencies",
"name": "@hookform/resolvers",
"from": "5.2.2",
"from": "3.10.0",
"to": "5.2.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-10-01T15:09:44.056Z"
},
{
"section": "dependencies",
@@ -37,7 +37,7 @@
"from": "0.3.77",
"to": "0.3.77",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "dependencies",
@@ -45,23 +45,23 @@
"from": "0.4.9",
"to": "0.4.9",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "dependencies",
"name": "@langchain/langgraph-supervisor",
"from": "0.0.20",
"from": "0.0.12",
"to": "0.0.20",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "dependencies",
"name": "@langchain/openai",
"from": "0.5.18",
"from": "0.6.9",
"to": "0.5.18",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "dependencies",
@@ -69,7 +69,7 @@
"from": "15.3.5",
"to": "15.3.5",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -77,7 +77,7 @@
"from": "1.1.14",
"to": "1.1.14",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -85,7 +85,7 @@
"from": "1.1.14",
"to": "1.1.14",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -93,7 +93,7 @@
"from": "2.1.15",
"to": "2.1.15",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -101,7 +101,7 @@
"from": "1.3.2",
"to": "1.3.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -109,7 +109,7 @@
"from": "2.1.7",
"to": "2.1.7",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -117,7 +117,7 @@
"from": "2.2.5",
"to": "2.2.5",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -125,7 +125,7 @@
"from": "1.2.3",
"to": "1.2.3",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -133,7 +133,7 @@
"from": "1.2.14",
"to": "1.2.14",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -141,7 +141,7 @@
"from": "3.9.4",
"to": "3.9.4",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -149,7 +149,7 @@
"from": "3.8.12",
"to": "3.8.12",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -157,7 +157,7 @@
"from": "4.1.13",
"to": "4.1.13",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-24T15:04:48.761Z"
},
{
"section": "dependencies",
@@ -165,7 +165,7 @@
"from": "0.5.16",
"to": "0.5.16",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -173,7 +173,7 @@
"from": "8.21.3",
"to": "8.21.3",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -181,15 +181,15 @@
"from": "4.0.9",
"to": "4.0.9",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
"name": "ai",
"from": "5.0.59",
"from": "4.3.16",
"to": "5.0.59",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-10-01T10:03:22.788Z"
},
{
"section": "dependencies",
@@ -197,7 +197,7 @@
"from": "6.0.2",
"to": "6.0.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -205,7 +205,7 @@
"from": "0.7.1",
"to": "0.7.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -213,15 +213,7 @@
"from": "2.1.1",
"to": "2.1.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
},
{
"section": "dependencies",
"name": "d3",
"from": "7.9.0",
"to": "7.9.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -229,7 +221,7 @@
"from": "4.1.0",
"to": "4.1.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -237,7 +229,7 @@
"from": "11.18.2",
"to": "11.18.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -245,7 +237,7 @@
"from": "10.7.16",
"to": "10.7.16",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -253,7 +245,7 @@
"from": "5.10.0",
"to": "5.10.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -261,7 +253,7 @@
"from": "4.1.0",
"to": "4.1.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -269,7 +261,7 @@
"from": "4.0.0",
"to": "4.0.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -277,7 +269,7 @@
"from": "0.543.0",
"to": "0.543.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -285,15 +277,15 @@
"from": "15.0.12",
"to": "15.0.12",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
"name": "next",
"from": "15.5.3",
"from": "14.2.32",
"to": "15.5.3",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "dependencies",
@@ -301,7 +293,7 @@
"from": "5.0.0-beta.29",
"to": "5.0.0-beta.29",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -309,7 +301,7 @@
"from": "0.2.1",
"to": "0.2.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -317,23 +309,23 @@
"from": "1.4.2",
"to": "1.4.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
"name": "react",
"from": "19.1.1",
"from": "18.3.1",
"to": "19.1.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "dependencies",
"name": "react-dom",
"from": "19.1.1",
"from": "18.3.1",
"to": "19.1.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "dependencies",
@@ -341,7 +333,7 @@
"from": "7.62.0",
"to": "7.62.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -349,7 +341,7 @@
"from": "10.1.0",
"to": "10.1.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -357,7 +349,7 @@
"from": "2.15.4",
"to": "2.15.4",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -365,7 +357,7 @@
"from": "3.13.0",
"to": "3.13.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -373,7 +365,15 @@
"from": "0.0.1",
"to": "0.0.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
"name": "shadcn",
"from": "3.2.1",
"to": "3.2.1",
"strategy": "installed",
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -381,7 +381,7 @@
"from": "0.33.5",
"to": "0.33.5",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -389,7 +389,7 @@
"from": "3.3.1",
"to": "3.3.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -397,15 +397,7 @@
"from": "1.0.7",
"to": "1.0.7",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
},
{
"section": "dependencies",
"name": "topojson-client",
"from": "3.1.0",
"to": "3.1.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
@@ -413,7 +405,7 @@
"from": "1.4.0",
"to": "1.4.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-10-15T07:57:13.225Z"
},
{
"section": "dependencies",
@@ -421,31 +413,23 @@
"from": "11.1.0",
"to": "11.1.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
},
{
"section": "dependencies",
"name": "world-atlas",
"from": "2.0.2",
"to": "2.0.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.548Z"
},
{
"section": "dependencies",
"name": "zod",
"from": "4.1.11",
"from": "3.25.73",
"to": "4.1.11",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-10-01T09:40:25.207Z"
},
{
"section": "dependencies",
"name": "zustand",
"from": "5.0.8",
"from": "4.5.7",
"to": "5.0.8",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-10-01T09:40:25.207Z"
},
{
"section": "devDependencies",
@@ -453,23 +437,15 @@
"from": "5.2.1",
"to": "5.2.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
"name": "@playwright/test",
"from": "1.56.1",
"to": "1.56.1",
"from": "1.53.2",
"to": "1.53.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
},
{
"section": "devDependencies",
"name": "@types/d3",
"from": "7.4.3",
"to": "7.4.3",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -477,31 +453,23 @@
"from": "20.5.7",
"to": "20.5.7",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
"name": "@types/react",
"from": "19.1.13",
"from": "18.3.3",
"to": "19.1.13",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "devDependencies",
"name": "@types/react-dom",
"from": "19.1.9",
"from": "18.3.0",
"to": "19.1.9",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
},
{
"section": "devDependencies",
"name": "@types/topojson-client",
"from": "3.1.5",
"to": "3.1.5",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "devDependencies",
@@ -509,7 +477,7 @@
"from": "10.0.0",
"to": "10.0.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -517,7 +485,7 @@
"from": "7.18.0",
"to": "7.18.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -525,7 +493,7 @@
"from": "7.18.0",
"to": "7.18.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -533,7 +501,7 @@
"from": "10.4.19",
"to": "10.4.19",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -541,7 +509,7 @@
"from": "19.1.0-rc.3",
"to": "19.1.0-rc.3",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "devDependencies",
@@ -549,15 +517,15 @@
"from": "8.57.1",
"to": "8.57.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
"name": "eslint-config-next",
"from": "15.5.3",
"from": "14.2.32",
"to": "15.5.3",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-23T10:22:08.630Z"
},
{
"section": "devDependencies",
@@ -565,7 +533,7 @@
"from": "10.1.5",
"to": "10.1.5",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -573,7 +541,7 @@
"from": "2.32.0",
"to": "2.32.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -581,7 +549,7 @@
"from": "6.10.2",
"to": "6.10.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -589,7 +557,7 @@
"from": "11.1.0",
"to": "11.1.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -597,7 +565,7 @@
"from": "5.5.1",
"to": "5.5.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -605,7 +573,7 @@
"from": "7.37.5",
"to": "7.37.5",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -613,7 +581,7 @@
"from": "4.6.2",
"to": "4.6.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -621,7 +589,7 @@
"from": "3.0.1",
"to": "3.0.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -629,7 +597,7 @@
"from": "12.1.1",
"to": "12.1.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -637,7 +605,7 @@
"from": "3.2.0",
"to": "3.2.0",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -645,7 +613,7 @@
"from": "9.1.7",
"to": "9.1.7",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -653,7 +621,7 @@
"from": "15.5.2",
"to": "15.5.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -661,7 +629,7 @@
"from": "8.4.38",
"to": "8.4.38",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
@@ -669,23 +637,15 @@
"from": "3.6.2",
"to": "3.6.2",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
"name": "prettier-plugin-tailwindcss",
"from": "0.6.14",
"from": "0.6.13",
"to": "0.6.14",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
},
{
"section": "devDependencies",
"name": "shadcn",
"from": "3.4.1",
"to": "3.4.1",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-24T13:59:11.231Z"
},
{
"section": "devDependencies",
@@ -693,15 +653,15 @@
"from": "0.1.20",
"to": "0.1.20",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
},
{
"section": "devDependencies",
"name": "tailwindcss",
"from": "4.1.13",
"from": "3.4.3",
"to": "4.1.13",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-24T13:59:11.231Z"
},
{
"section": "devDependencies",
@@ -709,6 +669,6 @@
"from": "5.5.4",
"to": "5.5.4",
"strategy": "installed",
"generatedAt": "2025-10-22T12:36:37.962Z"
"generatedAt": "2025-09-10T11:50:17.554Z"
}
]

Some files were not shown because too many files have changed in this diff.