Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-05-14 00:02:47 +00:00)
Compare commits
200 Commits
| SHA1 | Author | Date | |
|---|---|---|---|
| 55327704dd | |||
| a8c244849f | |||
| df8d82345d | |||
| 3e4458c8f3 | |||
| e12e0dc1aa | |||
| beb2daa30d | |||
| 14b60b8bee | |||
| cab9b008d1 | |||
| ced0b8def4 | |||
| f31e230537 | |||
| c6cc82c527 | |||
| 5cc3cdc466 | |||
| b7f83da012 | |||
| 4169611a6a | |||
| 9ad2e1ef98 | |||
| 78ce4d8d9b | |||
| 49585ac6c7 | |||
| 0c3c6aea0e | |||
| 144d59de45 | |||
| e3027190de | |||
| 9f4b5e01cf | |||
| 8acdf8e65b | |||
| 35c727c7e4 | |||
| 18fa788268 | |||
| b6e04f507c | |||
| 85c90cac31 | |||
| 4ed27e1aaa | |||
| 53b5030f00 | |||
| 627d6da699 | |||
| 352f136a0f | |||
| ab4d7e0c19 | |||
| 47532cf498 | |||
| afb8701450 | |||
| 942177ae59 | |||
| 750182cd6d | |||
| 9bfa1e740c | |||
| e58e939f55 | |||
| d7f0b5b190 | |||
| a37aea84e7 | |||
| 8d1d041092 | |||
| 6f018183cd | |||
| 8ce56b5ed6 | |||
| ad5095595c | |||
| 3fbe157d10 | |||
| 83d04753ef | |||
| de8e2219c2 | |||
| 2850c40dd5 | |||
| e213afd4e1 | |||
| deada62d66 | |||
| b8d9860a2f | |||
| be759216c4 | |||
| ca9211b5ed | |||
| 3cf7f7845e | |||
| 81e046ecf6 | |||
| 0d363e6100 | |||
| 0719e31b58 | |||
| 19ceb7db88 | |||
| 43875b6ae7 | |||
| 641dc78c3a | |||
| 57b9a2ea10 | |||
| 19e9a9965b | |||
| 3eb2595f6d | |||
| d776356d16 | |||
| 5118d0ecb4 | |||
| df8e465366 | |||
| f4a78d64f1 | |||
| e5cd25e60c | |||
| 7d963751aa | |||
| fa4371bbf6 | |||
| ff6fbcbf48 | |||
| 9bf3702d71 | |||
| ec32be2f1d | |||
| d93c7dcc4d | |||
| 4abead2787 | |||
| d1d03ba421 | |||
| bd47fe2072 | |||
| b395f52a00 | |||
| d14bf31844 | |||
| fcea8dba12 | |||
| 83dac0c59f | |||
| 0bdd1c3f35 | |||
| c6b4b9c94f | |||
| 1c241bb53c | |||
| d15dd53708 | |||
| 15eac061fc | |||
| 597364fb09 | |||
| 13ec7c13b9 | |||
| 89b3b5a81f | |||
| c58ca136f0 | |||
| 594188f7ed | |||
| b9bfdc1a5a | |||
| c83374d4ed | |||
| c1e1fb00c6 | |||
| cbc621cb43 | |||
| 433853493b | |||
| 5aa112d438 | |||
| 1b2c73d2e3 | |||
| 90e3fabc33 | |||
| d4b90abd10 | |||
| 251fc6d4e3 | |||
| dd85da703e | |||
| b549c8dbad | |||
| 79ac7cf6d4 | |||
| d292c6e58a | |||
| 8f361e7e8d | |||
| 3eb278cb9f | |||
| 2f7eec8bca | |||
| 00063c57de | |||
| 2341b5bc7d | |||
| 4015beff20 | |||
| ab475bafc3 | |||
| b4ce01afd4 | |||
| 2b4b23c719 | |||
| 4398b00801 | |||
| e0cf8bffd4 | |||
| 6761f0ffd0 | |||
| 51bbaeb403 | |||
| 6158c16108 | |||
| 0c2c5ea265 | |||
| 3b56166c34 | |||
| b5151a8ee5 | |||
| 0495267351 | |||
| eefe045c18 | |||
| d7d1b22c45 | |||
| 439dbe679b | |||
| 0e9ba4b116 | |||
| 89295f7e7d | |||
| 7cf7758851 | |||
| 06142094cd | |||
| 93f1c02f44 | |||
| e2f30e0987 | |||
| c80710adfc | |||
| 1410fe2ff1 | |||
| 284910d402 | |||
| 04f795bd49 | |||
| 8b5e00163e | |||
| 57d7f77c81 | |||
| 16b1052ff1 | |||
| 978e2c82af | |||
| 0c3ba0b737 | |||
| 4addfcc848 | |||
| 8588cc03f4 | |||
| 7507fea24b | |||
| 18f0fc693e | |||
| 606f505ba3 | |||
| bfce602859 | |||
| ba45b86a82 | |||
| d786bb4440 | |||
| 9424289416 | |||
| 3cbb6175a5 | |||
| 438deef3f8 | |||
| 1cdf4e65b2 | |||
| dbdd02ebd1 | |||
| d264f3daff | |||
| 01fe379b55 | |||
| 50286846e0 | |||
| 20ed8b3d2d | |||
| 45cc6e8b85 | |||
| 962c64eae5 | |||
| 7b56f0640f | |||
| 49c75cc418 | |||
| 56bca7c104 | |||
| faaa172b86 | |||
| 219ce0ba89 | |||
| 2170e5fe12 | |||
| e9efb12aa8 | |||
| 74d72dd56b | |||
| 06d1d214fd | |||
| 902bc9ad57 | |||
| 3616c0a8c0 | |||
| 7288585fec | |||
| 6400dc1059 | |||
| 379c1dc7dd | |||
| eb247360c3 | |||
| 7f12832808 | |||
| 9c387d5742 | |||
| 4a5801c519 | |||
| 85cb39af28 | |||
| c7abd77a1c | |||
| a622b9d965 | |||
| 8bd95a04ce | |||
| 340454ba68 | |||
| 6dff4bfd8b | |||
| 22c88e66a1 | |||
| 3b711f6143 | |||
| dbdce98cf2 | |||
| 53404dfa62 | |||
| c8872dd6ac | |||
| 26fd7d3adc | |||
| cb84bd0f94 | |||
| cb3f3ab35d | |||
| f58c1fddfb | |||
| c1bb51cf1a | |||
| a4e12a94f9 | |||
| 7b1915e489 | |||
| 56d092c87e | |||
| 29a1034658 | |||
| f5c2146d19 | |||
| 069f0d106c | |||
| 803ada7b16 |
@@ -15,6 +15,13 @@ AUTH_SECRET="N/c6mnaS5+SWq81+819OrzQZlmx1Vxtp/orjttJSmw8="
# Google Tag Manager ID
NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID=""

#### MCP Server ####
PROWLER_MCP_VERSION=stable
# For UI and MCP running on docker:
PROWLER_MCP_SERVER_URL=http://mcp-server:8000/mcp
# For UI running on host, MCP in docker:
# PROWLER_MCP_SERVER_URL=http://localhost:8000/mcp

#### Code Review Configuration ####
# Enable Claude Code standards validation on pre-push hook
# Set to 'true' to validate changes against AGENTS.md standards via Claude Code

@@ -112,7 +119,7 @@ NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}

#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.12.2
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.16.0

# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"

@@ -87,7 +87,7 @@ runs:
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: trivy-scan-report-${{ inputs.image-name }}
name: trivy-scan-report-${{ inputs.image-name }}-${{ inputs.image-tag }}
path: trivy-report.json
retention-days: ${{ inputs.artifact-retention-days }}

@@ -46,6 +46,11 @@ provider/oci:
- changed-files:
- any-glob-to-any-file: "prowler/providers/oraclecloud/**"
- any-glob-to-any-file: "tests/providers/oraclecloud/**"

provider/alibabacloud:
- changed-files:
- any-glob-to-any-file: "prowler/providers/alibabacloud/**"
- any-glob-to-any-file: "tests/providers/alibabacloud/**"

github_actions:
- changed-files:

@@ -69,6 +74,8 @@ mutelist:
- any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/mongodbatlas/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/oci/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/alibabacloud/lib/mutelist/**"

integration/s3:
- changed-files:

@@ -1,5 +1,6 @@
{
"channel": "${{ env.SLACK_CHANNEL_ID }}",
"ts": "${{ env.MESSAGE_TS }}",
"attachments": [
{
"color": "${{ env.STATUS_COLOR }}",

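The added `ts` field is what lets the completion payload update the Slack message that was posted when the release started, instead of sending a second message. As a rough illustration only (channel and timestamp values are made up, and the actual call is made by the shared slack-notification action, not by this file), updating an existing message is a `chat.update` call keyed by channel and `ts`:

```bash
# Illustrative sketch, not part of the workflow: update the message
# identified by channel + ts, as the completion payload is designed to do.
curl -s -X POST https://slack.com/api/chat.update \
  -H "Authorization: Bearer ${SLACK_BOT_TOKEN}" \
  -H "Content-Type: application/json; charset=utf-8" \
  -d '{
        "channel": "C0123456789",
        "ts": "1712345678.000100",
        "attachments": [{"color": "#2eb886", "text": "Container release completed"}]
      }'
```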
@@ -0,0 +1,254 @@
|
||||
name: 'API: Bump Version'
|
||||
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- 'published'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
|
||||
cancel-in-progress: false
|
||||
|
||||
env:
|
||||
PROWLER_VERSION: ${{ github.event.release.tag_name }}
|
||||
BASE_BRANCH: master
|
||||
|
||||
jobs:
|
||||
detect-release-type:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
is_minor: ${{ steps.detect.outputs.is_minor }}
|
||||
is_patch: ${{ steps.detect.outputs.is_patch }}
|
||||
major_version: ${{ steps.detect.outputs.major_version }}
|
||||
minor_version: ${{ steps.detect.outputs.minor_version }}
|
||||
patch_version: ${{ steps.detect.outputs.patch_version }}
|
||||
current_api_version: ${{ steps.get_api_version.outputs.current_api_version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Get current API version
|
||||
id: get_api_version
|
||||
run: |
|
||||
CURRENT_API_VERSION=$(grep -oP '^version = "\K[^"]+' api/pyproject.toml)
|
||||
echo "current_api_version=${CURRENT_API_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
echo "Current API version: $CURRENT_API_VERSION"
|
||||
|
||||
- name: Detect release type and parse version
|
||||
id: detect
|
||||
run: |
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
MAJOR_VERSION=${BASH_REMATCH[1]}
|
||||
MINOR_VERSION=${BASH_REMATCH[2]}
|
||||
PATCH_VERSION=${BASH_REMATCH[3]}
|
||||
|
||||
echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
if (( MAJOR_VERSION != 5 )); then
|
||||
echo "::error::Releasing another Prowler major version, aborting..."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if (( PATCH_VERSION == 0 )); then
|
||||
echo "is_minor=true" >> "${GITHUB_OUTPUT}"
|
||||
echo "is_patch=false" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Minor release detected: $PROWLER_VERSION"
|
||||
else
|
||||
echo "is_minor=false" >> "${GITHUB_OUTPUT}"
|
||||
echo "is_patch=true" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Patch release detected: $PROWLER_VERSION"
|
||||
fi
|
||||
else
|
||||
echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
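Everything downstream keys off this detect step: a plain `X.Y.Z` tag with patch `0` is treated as a minor release, any other patch value as a patch release, and any major version other than 5 aborts the run. A minimal standalone sketch of that logic, with a hypothetical tag value, behaves like this:

```bash
#!/usr/bin/env bash
# Standalone sketch of the release-type detection above; the tag value is hypothetical.
PROWLER_VERSION="5.17.3"

if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
  MAJOR=${BASH_REMATCH[1]}; MINOR=${BASH_REMATCH[2]}; PATCH=${BASH_REMATCH[3]}
  (( MAJOR == 5 )) || { echo "unsupported major version: ${MAJOR}" >&2; exit 1; }
  if (( PATCH == 0 )); then
    echo "minor release: ${PROWLER_VERSION}"   # would trigger bump-minor-version
  else
    echo "patch release: ${PROWLER_VERSION}"   # would trigger bump-patch-version
  fi
else
  echo "invalid version syntax: '${PROWLER_VERSION}' (must be X.Y.Z)" >&2
  exit 1
fi
```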
bump-minor-version:
|
||||
needs: detect-release-type
|
||||
if: needs.detect-release-type.outputs.is_minor == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Calculate next API minor version
|
||||
run: |
|
||||
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
|
||||
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
|
||||
CURRENT_API_VERSION="${{ needs.detect-release-type.outputs.current_api_version }}"
|
||||
|
||||
# API version follows Prowler minor + 1
|
||||
# For Prowler 5.17.0 -> API 1.18.0
|
||||
# For next master (Prowler 5.18.0) -> API 1.19.0
|
||||
NEXT_API_VERSION=1.$((MINOR_VERSION + 2)).0
|
||||
|
||||
echo "CURRENT_API_VERSION=${CURRENT_API_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "NEXT_API_VERSION=${NEXT_API_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Prowler release version: ${MAJOR_VERSION}.${MINOR_VERSION}.0"
|
||||
echo "Current API version: $CURRENT_API_VERSION"
|
||||
echo "Next API minor version (for master): $NEXT_API_VERSION"
|
||||
|
||||
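The `MINOR_VERSION + 2` above encodes the stated rule that the API minor tracks the Prowler minor plus one: the new release branch carries API `1.(minor+1).x`, so master, which is already working toward the next Prowler minor, jumps to `1.(minor+2).0`. A quick sanity check of that arithmetic with an example value:

```bash
# Example only: for a hypothetical Prowler 5.17.0 minor release.
MINOR_VERSION=17
echo "API on the v5.17 release branch: 1.$((MINOR_VERSION + 1)).1"   # -> 1.18.1
echo "API on master:                   1.$((MINOR_VERSION + 2)).0"   # -> 1.19.0
```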
- name: Bump API versions in files for master
|
||||
run: |
|
||||
set -e
|
||||
|
||||
sed -i "s|version = \"${CURRENT_API_VERSION}\"|version = \"${NEXT_API_VERSION}\"|" api/pyproject.toml
|
||||
sed -i "s|spectacular_settings.VERSION = \"${CURRENT_API_VERSION}\"|spectacular_settings.VERSION = \"${NEXT_API_VERSION}\"|" api/src/backend/api/v1/views.py
|
||||
sed -i "s| version: ${CURRENT_API_VERSION}| version: ${NEXT_API_VERSION}|" api/src/backend/api/specs/v1.yaml
|
||||
|
||||
echo "Files modified:"
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create PR for next API minor version to master
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: master
|
||||
commit-message: 'chore(api): Bump version to v${{ env.NEXT_API_VERSION }}'
|
||||
branch: api-version-bump-to-v${{ env.NEXT_API_VERSION }}
|
||||
title: 'chore(api): Bump version to v${{ env.NEXT_API_VERSION }}'
|
||||
labels: no-changelog,skip-sync
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler API version to v${{ env.NEXT_API_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
|
||||
- name: Checkout version branch
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
|
||||
|
||||
- name: Calculate first API patch version
|
||||
run: |
|
||||
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
|
||||
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
|
||||
CURRENT_API_VERSION="${{ needs.detect-release-type.outputs.current_api_version }}"
|
||||
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
|
||||
# API version follows Prowler minor + 1
|
||||
# For Prowler 5.17.0 release -> version branch v5.17 should have API 1.18.1
|
||||
FIRST_API_PATCH_VERSION=1.$((MINOR_VERSION + 1)).1
|
||||
|
||||
echo "CURRENT_API_VERSION=${CURRENT_API_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "FIRST_API_PATCH_VERSION=${FIRST_API_PATCH_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Prowler release version: ${MAJOR_VERSION}.${MINOR_VERSION}.0"
|
||||
echo "First API patch version (for ${VERSION_BRANCH}): $FIRST_API_PATCH_VERSION"
|
||||
echo "Version branch: $VERSION_BRANCH"
|
||||
|
||||
- name: Bump API versions in files for version branch
|
||||
run: |
|
||||
set -e
|
||||
|
||||
sed -i "s|version = \"${CURRENT_API_VERSION}\"|version = \"${FIRST_API_PATCH_VERSION}\"|" api/pyproject.toml
|
||||
sed -i "s|spectacular_settings.VERSION = \"${CURRENT_API_VERSION}\"|spectacular_settings.VERSION = \"${FIRST_API_PATCH_VERSION}\"|" api/src/backend/api/v1/views.py
|
||||
sed -i "s| version: ${CURRENT_API_VERSION}| version: ${FIRST_API_PATCH_VERSION}|" api/src/backend/api/specs/v1.yaml
|
||||
|
||||
echo "Files modified:"
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create PR for first API patch version to version branch
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.VERSION_BRANCH }}
|
||||
commit-message: 'chore(api): Bump version to v${{ env.FIRST_API_PATCH_VERSION }}'
|
||||
branch: api-version-bump-to-v${{ env.FIRST_API_PATCH_VERSION }}
|
||||
title: 'chore(api): Bump version to v${{ env.FIRST_API_PATCH_VERSION }}'
|
||||
labels: no-changelog,skip-sync
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler API version to v${{ env.FIRST_API_PATCH_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
|
||||
bump-patch-version:
|
||||
needs: detect-release-type
|
||||
if: needs.detect-release-type.outputs.is_patch == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Calculate next API patch version
|
||||
run: |
|
||||
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
|
||||
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
|
||||
PATCH_VERSION=${{ needs.detect-release-type.outputs.patch_version }}
|
||||
CURRENT_API_VERSION="${{ needs.detect-release-type.outputs.current_api_version }}"
|
||||
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
|
||||
# Extract current API patch to increment it
|
||||
if [[ $CURRENT_API_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
API_PATCH=${BASH_REMATCH[3]}
|
||||
|
||||
# API version follows Prowler minor + 1
|
||||
# Keep same API minor (based on Prowler minor), increment patch
|
||||
NEXT_API_PATCH_VERSION=1.$((MINOR_VERSION + 1)).$((API_PATCH + 1))
|
||||
|
||||
echo "CURRENT_API_VERSION=${CURRENT_API_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "NEXT_API_PATCH_VERSION=${NEXT_API_PATCH_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Prowler release version: ${MAJOR_VERSION}.${MINOR_VERSION}.${PATCH_VERSION}"
|
||||
echo "Current API version: $CURRENT_API_VERSION"
|
||||
echo "Next API patch version: $NEXT_API_PATCH_VERSION"
|
||||
echo "Target branch: $VERSION_BRANCH"
|
||||
else
|
||||
echo "::error::Invalid API version format: $CURRENT_API_VERSION"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Bump API versions in files for version branch
|
||||
run: |
|
||||
set -e
|
||||
|
||||
sed -i "s|version = \"${CURRENT_API_VERSION}\"|version = \"${NEXT_API_PATCH_VERSION}\"|" api/pyproject.toml
|
||||
sed -i "s|spectacular_settings.VERSION = \"${CURRENT_API_VERSION}\"|spectacular_settings.VERSION = \"${NEXT_API_PATCH_VERSION}\"|" api/src/backend/api/v1/views.py
|
||||
sed -i "s| version: ${CURRENT_API_VERSION}| version: ${NEXT_API_PATCH_VERSION}|" api/src/backend/api/specs/v1.yaml
|
||||
|
||||
echo "Files modified:"
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create PR for next API patch version to version branch
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.VERSION_BRANCH }}
|
||||
commit-message: 'chore(api): Bump version to v${{ env.NEXT_API_PATCH_VERSION }}'
|
||||
branch: api-version-bump-to-v${{ env.NEXT_API_PATCH_VERSION }}
|
||||
title: 'chore(api): Bump version to v${{ env.NEXT_API_PATCH_VERSION }}'
|
||||
labels: no-changelog,skip-sync
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler API version to v${{ env.NEXT_API_PATCH_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
@@ -33,7 +33,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Check for API changes
id: check-changes

@@ -42,15 +42,15 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: '/language:${{ matrix.language }}'

@@ -7,7 +7,7 @@ on:
paths:
- 'api/**'
- 'prowler/**'
- '.github/workflows/api-build-lint-push-containers.yml'
- '.github/workflows/api-container-build-push.yml'
release:
types:
- 'published'

@@ -48,8 +48,34 @@ jobs:
|
||||
id: set-short-sha
|
||||
run: echo "short-sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
|
||||
|
||||
container-build-push:
|
||||
notify-release-started:
|
||||
if: github.repository == 'prowler-cloud/prowler' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
|
||||
needs: setup
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
outputs:
|
||||
message-ts: ${{ steps.slack-notification.outputs.ts }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Notify container push started
|
||||
id: slack-notification
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: API
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"
|
||||
|
||||
container-build-push:
|
||||
needs: [setup, notify-release-started]
|
||||
if: always() && needs.setup.result == 'success' && (needs.notify-release-started.result == 'success' || needs.notify-release-started.result == 'skipped')
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -67,7 +93,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
@@ -78,20 +104,6 @@ jobs:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Notify container push started
|
||||
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: API
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"
|
||||
|
||||
- name: Build and push API container for ${{ matrix.arch }}
|
||||
id: container-push
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
@@ -105,25 +117,10 @@ jobs:
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
|
||||
- name: Notify container push completed
|
||||
if: (github.event_name == 'release' || github.event_name == 'workflow_dispatch') && always()
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: API
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
|
||||
step-outcome: ${{ steps.container-push.outcome }}
|
||||
|
||||
# Create and push multi-architecture manifest
|
||||
create-manifest:
|
||||
needs: [setup, container-build-push]
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
if: always() && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -166,9 +163,43 @@ jobs:
|
||||
regctl tag delete "${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-arm64" || true
|
||||
echo "Cleanup completed"
|
||||
|
||||
notify-release-completed:
|
||||
if: always() && needs.notify-release-started.result == 'success' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
|
||||
needs: [setup, notify-release-started, container-build-push, create-manifest]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Determine overall outcome
|
||||
id: outcome
|
||||
run: |
|
||||
if [[ "${{ needs.container-build-push.result }}" == "success" && "${{ needs.create-manifest.result }}" == "success" ]]; then
|
||||
echo "outcome=success" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "outcome=failure" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Notify container push completed
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
MESSAGE_TS: ${{ needs.notify-release-started.outputs.message-ts }}
|
||||
COMPONENT: API
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
|
||||
step-outcome: ${{ steps.outcome.outputs.outcome }}
|
||||
update-ts: ${{ needs.notify-release-started.outputs.message-ts }}
|
||||
|
||||
trigger-deployment:
|
||||
if: github.event_name == 'push'
|
||||
needs: [setup, container-build-push]
|
||||
if: always() && github.event_name == 'push' && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
@@ -176,7 +207,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Trigger API deployment
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
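The pinned `repository-dispatch` action boils down to a call against GitHub's repository dispatch endpoint on the target repository. A hedged sketch of the equivalent raw request (owner/repo, event type, and payload are placeholders, not values taken from this workflow):

```bash
# Illustrative only: placeholder repository and event type, real endpoint.
curl -s -X POST \
  -H "Authorization: Bearer ${PROWLER_BOT_ACCESS_TOKEN}" \
  -H "Accept: application/vnd.github+json" \
  https://api.github.com/repos/OWNER/TARGET-REPO/dispatches \
  -d '{"event_type": "api-release", "client_payload": {"sha": "'"${GITHUB_SHA}"'"}}'
```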
@@ -20,6 +20,7 @@ env:
|
||||
|
||||
jobs:
|
||||
api-dockerfile-lint:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
@@ -27,7 +28,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check if Dockerfile changed
|
||||
id: dockerfile-changed
|
||||
@@ -43,7 +44,17 @@ jobs:
|
||||
ignore: DL3013
|
||||
|
||||
api-container-build-and-scan:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
arch: amd64
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
arch: arm64
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -52,7 +63,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check for API changes
|
||||
id: check-changes
|
||||
@@ -68,22 +79,23 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build container
|
||||
- name: Build container for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.API_WORKING_DIR }}
|
||||
push: false
|
||||
load: true
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
platforms: ${{ matrix.platform }}
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
|
||||
- name: Scan container with Trivy
|
||||
if: github.repository == 'prowler-cloud/prowler' && steps.check-changes.outputs.any_changed == 'true'
|
||||
- name: Scan container with Trivy for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/trivy-scan
|
||||
with:
|
||||
image-name: ${{ env.IMAGE_NAME }}
|
||||
image-tag: ${{ github.sha }}
|
||||
image-tag: ${{ github.sha }}-${{ matrix.arch }}
|
||||
fail-on-critical: 'false'
|
||||
severity: 'CRITICAL'
|
||||
|
||||
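With this change each matrix leg builds and scans a single-architecture image on a native runner, and the GitHub Actions layer cache is scoped per arch so amd64 and arm64 builds no longer evict each other's layers. A rough local equivalent of one leg (image name, tag, and build context are assumptions, not workflow values):

```bash
# Illustrative single-arch build, mirroring one matrix leg (names assumed).
docker buildx build ./api \
  --platform linux/arm64 \
  --load \
  -t prowler-api:local-arm64

# Report CRITICAL findings without failing, matching fail-on-critical: 'false'.
trivy image --severity CRITICAL --exit-code 0 prowler-api:local-arm64
```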
@@ -33,7 +33,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check for API changes
|
||||
id: check-changes
|
||||
|
||||
@@ -73,7 +73,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check for API changes
|
||||
id: check-changes
|
||||
|
||||
@@ -0,0 +1,247 @@
|
||||
name: 'Docs: Bump Version'
|
||||
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- 'published'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
|
||||
cancel-in-progress: false
|
||||
|
||||
env:
|
||||
PROWLER_VERSION: ${{ github.event.release.tag_name }}
|
||||
BASE_BRANCH: master
|
||||
|
||||
jobs:
|
||||
detect-release-type:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
is_minor: ${{ steps.detect.outputs.is_minor }}
|
||||
is_patch: ${{ steps.detect.outputs.is_patch }}
|
||||
major_version: ${{ steps.detect.outputs.major_version }}
|
||||
minor_version: ${{ steps.detect.outputs.minor_version }}
|
||||
patch_version: ${{ steps.detect.outputs.patch_version }}
|
||||
current_docs_version: ${{ steps.get_docs_version.outputs.current_docs_version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Get current documentation version
|
||||
id: get_docs_version
|
||||
run: |
|
||||
CURRENT_DOCS_VERSION=$(grep -oP 'PROWLER_UI_VERSION="\K[^"]+' docs/getting-started/installation/prowler-app.mdx)
|
||||
echo "current_docs_version=${CURRENT_DOCS_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
echo "Current documentation version: $CURRENT_DOCS_VERSION"
|
||||
|
||||
- name: Detect release type and parse version
|
||||
id: detect
|
||||
run: |
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
MAJOR_VERSION=${BASH_REMATCH[1]}
|
||||
MINOR_VERSION=${BASH_REMATCH[2]}
|
||||
PATCH_VERSION=${BASH_REMATCH[3]}
|
||||
|
||||
echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
if (( MAJOR_VERSION != 5 )); then
|
||||
echo "::error::Releasing another Prowler major version, aborting..."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if (( PATCH_VERSION == 0 )); then
|
||||
echo "is_minor=true" >> "${GITHUB_OUTPUT}"
|
||||
echo "is_patch=false" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Minor release detected: $PROWLER_VERSION"
|
||||
else
|
||||
echo "is_minor=false" >> "${GITHUB_OUTPUT}"
|
||||
echo "is_patch=true" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Patch release detected: $PROWLER_VERSION"
|
||||
fi
|
||||
else
|
||||
echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
bump-minor-version:
|
||||
needs: detect-release-type
|
||||
if: needs.detect-release-type.outputs.is_minor == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Calculate next minor version
|
||||
run: |
|
||||
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
|
||||
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
|
||||
CURRENT_DOCS_VERSION="${{ needs.detect-release-type.outputs.current_docs_version }}"
|
||||
|
||||
NEXT_MINOR_VERSION=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).0
|
||||
echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "NEXT_MINOR_VERSION=${NEXT_MINOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Current documentation version: $CURRENT_DOCS_VERSION"
|
||||
echo "Current release version: $PROWLER_VERSION"
|
||||
echo "Next minor version: $NEXT_MINOR_VERSION"
|
||||
|
||||
- name: Bump versions in documentation for master
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Update prowler-app.mdx with current release version
|
||||
sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
|
||||
sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
|
||||
|
||||
echo "Files modified:"
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create PR for documentation update to master
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: master
|
||||
commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
|
||||
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
|
||||
title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
|
||||
labels: no-changelog,skip-sync
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
|
||||
|
||||
### Files Updated
|
||||
- `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
|
||||
- All `*.mdx` files with `<VersionBadge>` components
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
|
||||
- name: Checkout version branch
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
|
||||
|
||||
- name: Calculate first patch version
|
||||
run: |
|
||||
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
|
||||
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
|
||||
CURRENT_DOCS_VERSION="${{ needs.detect-release-type.outputs.current_docs_version }}"
|
||||
|
||||
FIRST_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.1
|
||||
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
|
||||
echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "FIRST_PATCH_VERSION=${FIRST_PATCH_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "First patch version: $FIRST_PATCH_VERSION"
|
||||
echo "Version branch: $VERSION_BRANCH"
|
||||
|
||||
- name: Bump versions in documentation for version branch
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Update prowler-app.mdx with current release version
|
||||
sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
|
||||
sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
|
||||
|
||||
echo "Files modified:"
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create PR for documentation update to version branch
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.VERSION_BRANCH }}
|
||||
commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
|
||||
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}-branch
|
||||
title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
|
||||
labels: no-changelog,skip-sync
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.
|
||||
|
||||
### Files Updated
|
||||
- `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
|
||||
bump-patch-version:
|
||||
needs: detect-release-type
|
||||
if: needs.detect-release-type.outputs.is_patch == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Calculate next patch version
|
||||
run: |
|
||||
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
|
||||
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
|
||||
PATCH_VERSION=${{ needs.detect-release-type.outputs.patch_version }}
|
||||
CURRENT_DOCS_VERSION="${{ needs.detect-release-type.outputs.current_docs_version }}"
|
||||
|
||||
NEXT_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.$((PATCH_VERSION + 1))
|
||||
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
|
||||
echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "NEXT_PATCH_VERSION=${NEXT_PATCH_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Current documentation version: $CURRENT_DOCS_VERSION"
|
||||
echo "Current release version: $PROWLER_VERSION"
|
||||
echo "Next patch version: $NEXT_PATCH_VERSION"
|
||||
echo "Target branch: $VERSION_BRANCH"
|
||||
|
||||
- name: Bump versions in documentation for patch version
|
||||
run: |
|
||||
set -e
|
||||
|
||||
# Update prowler-app.mdx with current release version
|
||||
sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
|
||||
sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
|
||||
|
||||
echo "Files modified:"
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create PR for documentation update to version branch
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.VERSION_BRANCH }}
|
||||
commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
|
||||
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
|
||||
title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
|
||||
labels: no-changelog,skip-sync
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
|
||||
|
||||
### Files Updated
|
||||
- `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
@@ -23,11 +23,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 0

- name: Scan for secrets with TruffleHog
uses: trufflesecurity/trufflehog@b84c3d14d189e16da175e2c27fa8136603783ffc # v3.90.12
uses: trufflesecurity/trufflehog@aade3bff5594fe8808578dd4db3dfeae9bf2abdc # v3.91.1
with:
extra_args: '--results=verified,unknown'

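The action wraps the TruffleHog CLI, so a rough local equivalent of the scan configured above (flags taken from `extra_args`, repository path assumed to be the current checkout) would be:

```bash
# Approximate local equivalent of the workflow's secret scan.
trufflehog git file://. --results=verified,unknown
```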
@@ -47,8 +47,34 @@ jobs:
|
||||
id: set-short-sha
|
||||
run: echo "short-sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
|
||||
|
||||
container-build-push:
|
||||
notify-release-started:
|
||||
if: github.repository == 'prowler-cloud/prowler' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
|
||||
needs: setup
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
outputs:
|
||||
message-ts: ${{ steps.slack-notification.outputs.ts }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Notify container push started
|
||||
id: slack-notification
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: MCP
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"
|
||||
|
||||
container-build-push:
|
||||
needs: [setup, notify-release-started]
|
||||
if: always() && needs.setup.result == 'success' && (needs.notify-release-started.result == 'success' || needs.notify-release-started.result == 'skipped')
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -65,7 +91,7 @@ jobs:
|
||||
packages: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
@@ -76,20 +102,6 @@ jobs:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Notify container push started
|
||||
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: MCP
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"
|
||||
|
||||
- name: Build and push MCP container for ${{ matrix.arch }}
|
||||
id: container-push
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
@@ -111,25 +123,10 @@ jobs:
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
|
||||
- name: Notify container push completed
|
||||
if: (github.event_name == 'release' || github.event_name == 'workflow_dispatch') && always()
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: MCP
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
|
||||
step-outcome: ${{ steps.container-push.outcome }}
|
||||
|
||||
# Create and push multi-architecture manifest
|
||||
create-manifest:
|
||||
needs: [setup, container-build-push]
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
if: always() && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -172,9 +169,43 @@ jobs:
|
||||
regctl tag delete "${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-arm64" || true
|
||||
echo "Cleanup completed"
|
||||
|
||||
notify-release-completed:
|
||||
if: always() && needs.notify-release-started.result == 'success' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
|
||||
needs: [setup, notify-release-started, container-build-push, create-manifest]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Determine overall outcome
|
||||
id: outcome
|
||||
run: |
|
||||
if [[ "${{ needs.container-build-push.result }}" == "success" && "${{ needs.create-manifest.result }}" == "success" ]]; then
|
||||
echo "outcome=success" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "outcome=failure" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Notify container push completed
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
MESSAGE_TS: ${{ needs.notify-release-started.outputs.message-ts }}
|
||||
COMPONENT: MCP
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
|
||||
step-outcome: ${{ steps.outcome.outputs.outcome }}
|
||||
update-ts: ${{ needs.notify-release-started.outputs.message-ts }}
|
||||
|
||||
trigger-deployment:
|
||||
if: github.event_name == 'push'
|
||||
needs: [setup, container-build-push]
|
||||
if: always() && github.event_name == 'push' && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
@@ -182,7 +213,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Trigger MCP deployment
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
@@ -20,6 +20,7 @@ env:
|
||||
|
||||
jobs:
|
||||
mcp-dockerfile-lint:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
@@ -27,7 +28,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check if Dockerfile changed
|
||||
id: dockerfile-changed
|
||||
@@ -42,7 +43,17 @@ jobs:
|
||||
dockerfile: mcp_server/Dockerfile
|
||||
|
||||
mcp-container-build-and-scan:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
arch: amd64
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
arch: arm64
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -51,7 +62,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check for MCP changes
|
||||
id: check-changes
|
||||
@@ -66,22 +77,23 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build MCP container
|
||||
- name: Build MCP container for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: ${{ env.MCP_WORKING_DIR }}
|
||||
push: false
|
||||
load: true
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
platforms: ${{ matrix.platform }}
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
|
||||
- name: Scan MCP container with Trivy
|
||||
if: github.repository == 'prowler-cloud/prowler' && steps.check-changes.outputs.any_changed == 'true'
|
||||
- name: Scan MCP container with Trivy for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/trivy-scan
|
||||
with:
|
||||
image-name: ${{ env.IMAGE_NAME }}
|
||||
image-tag: ${{ github.sha }}
|
||||
image-tag: ${{ github.sha }}-${{ matrix.arch }}
|
||||
fail-on-critical: 'false'
|
||||
severity: 'CRITICAL'
|
||||
|
||||
@@ -0,0 +1,81 @@
name: "MCP: PyPI Release"

on:
release:
types:
- "published"

concurrency:
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
cancel-in-progress: false

env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: "3.12"
WORKING_DIRECTORY: ./mcp_server

jobs:
validate-release:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read
outputs:
prowler_version: ${{ steps.parse-version.outputs.version }}
major_version: ${{ steps.parse-version.outputs.major }}

steps:
- name: Parse and validate version
id: parse-version
run: |
PROWLER_VERSION="${{ env.RELEASE_TAG }}"
echo "version=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"

# Extract major version
MAJOR_VERSION="${PROWLER_VERSION%%.*}"
echo "major=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"

# Validate major version (only Prowler 3, 4, 5 supported)
case ${MAJOR_VERSION} in
3|4|5)
echo "✓ Releasing Prowler MCP for tag ${PROWLER_VERSION}"
;;
*)
echo "::error::Unsupported Prowler major version: ${MAJOR_VERSION}"
exit 1
;;
esac

publish-prowler-mcp:
needs: validate-release
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
id-token: write
environment:
name: pypi-prowler-mcp
url: https://pypi.org/project/prowler-mcp/

steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Install uv
uses: astral-sh/setup-uv@v7

- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.PYTHON_VERSION }}

- name: Build prowler-mcp package
working-directory: ${{ env.WORKING_DIRECTORY }}
run: uv build

- name: Publish prowler-mcp package to PyPI
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
with:
packages-dir: ${{ env.WORKING_DIRECTORY }}/dist/
print-hash: true

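The publish job builds the sdist and wheel with `uv` and hands the resulting `dist/` directory to the PyPI publish action under the `pypi-prowler-mcp` environment; with `id-token: write` this presumably relies on trusted publishing rather than a stored API token. A local dry run of the packaging half only (publishing stays in the workflow) might look like:

```bash
# Local packaging check only; publishing is done by the workflow via the
# pinned pypa/gh-action-pypi-publish action.
cd mcp_server
uv build
ls dist/   # expect a prowler_mcp wheel and source tarball
```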
@@ -29,7 +29,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 0

@@ -25,7 +25,7 @@ jobs:
steps:
- name: Checkout PR head
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0

@@ -13,7 +13,10 @@ concurrency:
jobs:
trigger-cloud-pull-request:
if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
if: |
github.event.pull_request.merged == true &&
github.repository == 'prowler-cloud/prowler' &&
!contains(github.event.pull_request.labels.*.name, 'skip-sync')
runs-on: ubuntu-latest
timeout-minutes: 10
permissions:

@@ -26,7 +29,7 @@ jobs:
echo "SHORT_SHA=${SHORT_SHA::7}" >> $GITHUB_ENV

- name: Trigger Cloud repository pull request
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}

@@ -27,13 +27,13 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 0
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}

- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: '3.12'

@@ -88,59 +88,56 @@ jobs:

- name: Read changelog versions from release branch
run: |
# Function to extract the latest version from changelog
extract_latest_version() {
# Function to extract the version for a specific Prowler release from changelog
# This looks for entries with "(Prowler X.Y.Z)" to find the released version
extract_version_for_release() {
local changelog_file="$1"
local prowler_version="$2"
if [ -f "$changelog_file" ]; then
# Extract the first version entry (most recent) from changelog
# Format: ## [version] (1.2.3) or ## [vversion] (v1.2.3)
local version=$(grep -m 1 '^## \[' "$changelog_file" | sed 's/^## \[\(.*\)\].*/\1/' | sed 's/^v//' | tr -d '[:space:]')
# Extract version that matches this Prowler release
# Format: ## [version] (Prowler X.Y.Z) or ## [vversion] (Prowler vX.Y.Z)
local version=$(grep '^## \[' "$changelog_file" | grep "(Prowler v\?${prowler_version})" | head -1 | sed 's/^## \[\(.*\)\].*/\1/' | sed 's/^v//' | tr -d '[:space:]')
echo "$version"
else
echo ""
fi
}

# Read actual versions from changelogs (source of truth)
UI_VERSION=$(extract_latest_version "ui/CHANGELOG.md")
API_VERSION=$(extract_latest_version "api/CHANGELOG.md")
SDK_VERSION=$(extract_latest_version "prowler/CHANGELOG.md")
MCP_VERSION=$(extract_latest_version "mcp_server/CHANGELOG.md")
# Read versions from changelogs for this specific Prowler release
SDK_VERSION=$(extract_version_for_release "prowler/CHANGELOG.md" "$PROWLER_VERSION")
API_VERSION=$(extract_version_for_release "api/CHANGELOG.md" "$PROWLER_VERSION")
UI_VERSION=$(extract_version_for_release "ui/CHANGELOG.md" "$PROWLER_VERSION")
MCP_VERSION=$(extract_version_for_release "mcp_server/CHANGELOG.md" "$PROWLER_VERSION")

echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
echo "SDK_VERSION=${SDK_VERSION}" >> "${GITHUB_ENV}"
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
echo "MCP_VERSION=${MCP_VERSION}" >> "${GITHUB_ENV}"

if [ -n "$UI_VERSION" ]; then
echo "Read UI version from changelog: $UI_VERSION"
if [ -n "$SDK_VERSION" ]; then
echo "✓ SDK version for Prowler $PROWLER_VERSION: $SDK_VERSION"
else
echo "Warning: No UI version found in ui/CHANGELOG.md"
echo "ℹ No SDK version found for Prowler $PROWLER_VERSION in prowler/CHANGELOG.md"
fi

if [ -n "$API_VERSION" ]; then
echo "Read API version from changelog: $API_VERSION"
echo "✓ API version for Prowler $PROWLER_VERSION: $API_VERSION"
else
echo "Warning: No API version found in api/CHANGELOG.md"
echo "ℹ No API version found for Prowler $PROWLER_VERSION in api/CHANGELOG.md"
fi

if [ -n "$SDK_VERSION" ]; then
echo "Read SDK version from changelog: $SDK_VERSION"
if [ -n "$UI_VERSION" ]; then
echo "✓ UI version for Prowler $PROWLER_VERSION: $UI_VERSION"
else
echo "Warning: No SDK version found in prowler/CHANGELOG.md"
echo "ℹ No UI version found for Prowler $PROWLER_VERSION in ui/CHANGELOG.md"
fi

if [ -n "$MCP_VERSION" ]; then
echo "Read MCP version from changelog: $MCP_VERSION"
echo "✓ MCP version for Prowler $PROWLER_VERSION: $MCP_VERSION"
else
echo "Warning: No MCP version found in mcp_server/CHANGELOG.md"
echo "ℹ No MCP version found for Prowler $PROWLER_VERSION in mcp_server/CHANGELOG.md"
fi

echo "UI version: $UI_VERSION"
echo "API version: $API_VERSION"
echo "SDK version: $SDK_VERSION"
echo "MCP version: $MCP_VERSION"

- name: Extract and combine changelog entries
run: |
set -e
@@ -166,70 +163,54 @@ jobs:

# Remove --- separators
sed -i '/^---$/d' "$output_file"

# Remove only trailing empty lines (not all empty lines)
sed -i -e :a -e '/^\s*$/d;N;ba' "$output_file"
}

# Calculate expected versions for this release
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
EXPECTED_UI_VERSION="1.${BASH_REMATCH[2]}.${BASH_REMATCH[3]}"
EXPECTED_API_VERSION="1.$((${BASH_REMATCH[2]} + 1)).${BASH_REMATCH[3]}"

echo "Expected UI version for this release: $EXPECTED_UI_VERSION"
echo "Expected API version for this release: $EXPECTED_API_VERSION"
fi

# Determine if components have changes for this specific release
# UI has changes if its current version matches what we expect for this release
if [ -n "$UI_VERSION" ] && [ "$UI_VERSION" = "$EXPECTED_UI_VERSION" ]; then
echo "HAS_UI_CHANGES=true" >> $GITHUB_ENV
echo "✓ UI changes detected - version matches expected: $UI_VERSION"
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
else
echo "HAS_UI_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No UI changes for this release (current: $UI_VERSION, expected: $EXPECTED_UI_VERSION)"
touch "ui_changelog.md"
fi

# API has changes if its current version matches what we expect for this release
if [ -n "$API_VERSION" ] && [ "$API_VERSION" = "$EXPECTED_API_VERSION" ]; then
echo "HAS_API_CHANGES=true" >> $GITHUB_ENV
echo "✓ API changes detected - version matches expected: $API_VERSION"
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
else
echo "HAS_API_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No API changes for this release (current: $API_VERSION, expected: $EXPECTED_API_VERSION)"
touch "api_changelog.md"
fi

# SDK has changes if its current version matches the input version
if [ -n "$SDK_VERSION" ] && [ "$SDK_VERSION" = "$PROWLER_VERSION" ]; then
if [ -n "$SDK_VERSION" ]; then
echo "HAS_SDK_CHANGES=true" >> $GITHUB_ENV
echo "✓ SDK changes detected - version matches input: $SDK_VERSION"
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
HAS_SDK_CHANGES="true"
echo "✓ SDK changes detected - version: $SDK_VERSION"
extract_changelog "prowler/CHANGELOG.md" "$SDK_VERSION" "prowler_changelog.md"
else
echo "HAS_SDK_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No SDK changes for this release (current: $SDK_VERSION, input: $PROWLER_VERSION)"
HAS_SDK_CHANGES="false"
echo "ℹ No SDK changes for this release"
touch "prowler_changelog.md"
fi

# MCP has changes if the changelog references this Prowler version
# Check if the changelog contains "(Prowler X.Y.Z)" or "(Prowler UNRELEASED)"
if [ -f "mcp_server/CHANGELOG.md" ]; then
MCP_PROWLER_REF=$(grep -m 1 "^## \[.*\] (Prowler" mcp_server/CHANGELOG.md | sed -E 's/.*\(Prowler ([^)]+)\).*/\1/' | tr -d '[:space:]')
if [ "$MCP_PROWLER_REF" = "$PROWLER_VERSION" ] || [ "$MCP_PROWLER_REF" = "UNRELEASED" ]; then
echo "HAS_MCP_CHANGES=true" >> $GITHUB_ENV
echo "✓ MCP changes detected - Prowler reference: $MCP_PROWLER_REF (version: $MCP_VERSION)"
extract_changelog "mcp_server/CHANGELOG.md" "$MCP_VERSION" "mcp_changelog.md"
else
echo "HAS_MCP_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No MCP changes for this release (Prowler reference: $MCP_PROWLER_REF, input: $PROWLER_VERSION)"
touch "mcp_changelog.md"
fi
if [ -n "$API_VERSION" ]; then
echo "HAS_API_CHANGES=true" >> $GITHUB_ENV
HAS_API_CHANGES="true"
echo "✓ API changes detected - version: $API_VERSION"
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
else
echo "HAS_API_CHANGES=false" >> $GITHUB_ENV
HAS_API_CHANGES="false"
echo "ℹ No API changes for this release"
touch "api_changelog.md"
fi

if [ -n "$UI_VERSION" ]; then
echo "HAS_UI_CHANGES=true" >> $GITHUB_ENV
HAS_UI_CHANGES="true"
echo "✓ UI changes detected - version: $UI_VERSION"
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
else
echo "HAS_UI_CHANGES=false" >> $GITHUB_ENV
HAS_UI_CHANGES="false"
echo "ℹ No UI changes for this release"
touch "ui_changelog.md"
fi

if [ -n "$MCP_VERSION" ]; then
echo "HAS_MCP_CHANGES=true" >> $GITHUB_ENV
HAS_MCP_CHANGES="true"
echo "✓ MCP changes detected - version: $MCP_VERSION"
extract_changelog "mcp_server/CHANGELOG.md" "$MCP_VERSION" "mcp_changelog.md"
else
echo "HAS_MCP_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No MCP changelog found"
HAS_MCP_CHANGES="false"
echo "ℹ No MCP changes for this release"
touch "mcp_changelog.md"
fi

@@ -325,6 +306,17 @@ jobs:
fi
echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"

- name: Verify API version in api/src/backend/api/specs/v1.yaml
if: ${{ env.HAS_API_CHANGES == 'true' }}
run: |
CURRENT_API_VERSION=$(grep '^ version: ' api/src/backend/api/specs/v1.yaml | sed -E 's/ version: ([0-9]+\.[0-9]+\.[0-9]+)/\1/' | tr -d '[:space:]')
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
echo "ERROR: API version mismatch in api/src/backend/api/specs/v1.yaml (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
exit 1
fi
echo "✓ api/src/backend/api/specs/v1.yaml version: $CURRENT_API_VERSION"

- name: Update API prowler dependency for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |

@@ -67,7 +67,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Calculate next minor version
run: |
@@ -86,7 +86,6 @@ jobs:

sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${NEXT_MINOR_VERSION}\"|" pyproject.toml
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${NEXT_MINOR_VERSION}\"|" prowler/config/config.py
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_MINOR_VERSION}|" .env

echo "Files modified:"
git --no-pager diff
@@ -100,7 +99,7 @@ jobs:
commit-message: 'chore(release): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
branch: version-bump-to-v${{ env.NEXT_MINOR_VERSION }}
title: 'chore(release): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
labels: no-changelog
labels: no-changelog,skip-sync
body: |
### Description

@@ -111,7 +110,7 @@ jobs:
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

- name: Checkout version branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}

@@ -135,7 +134,6 @@ jobs:

sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${FIRST_PATCH_VERSION}\"|" pyproject.toml
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${FIRST_PATCH_VERSION}\"|" prowler/config/config.py
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${FIRST_PATCH_VERSION}|" .env

echo "Files modified:"
git --no-pager diff
@@ -149,7 +147,7 @@ jobs:
commit-message: 'chore(release): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
branch: version-bump-to-v${{ env.FIRST_PATCH_VERSION }}
title: 'chore(release): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
labels: no-changelog
labels: no-changelog,skip-sync
body: |
### Description

@@ -169,7 +167,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Calculate next patch version
run: |
@@ -193,7 +191,6 @@ jobs:

sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${NEXT_PATCH_VERSION}\"|" pyproject.toml
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${NEXT_PATCH_VERSION}\"|" prowler/config/config.py
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_PATCH_VERSION}|" .env

echo "Files modified:"
git --no-pager diff
@@ -207,7 +204,7 @@ jobs:
commit-message: 'chore(release): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
branch: version-bump-to-v${{ env.NEXT_PATCH_VERSION }}
title: 'chore(release): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
labels: no-changelog
labels: no-changelog,skip-sync
body: |
### Description


@@ -31,7 +31,7 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Check for SDK changes
id: check-changes
@@ -62,7 +62,7 @@ jobs:

- name: Set up Python ${{ matrix.python-version }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'

@@ -49,15 +49,15 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/sdk-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: '/language:${{ matrix.language }}'

@@ -50,8 +50,90 @@ env:
|
||||
AWS_REGION: us-east-1
|
||||
|
||||
jobs:
|
||||
container-build-push:
|
||||
setup:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
outputs:
|
||||
prowler_version: ${{ steps.get-prowler-version.outputs.prowler_version }}
|
||||
prowler_version_major: ${{ steps.get-prowler-version.outputs.prowler_version_major }}
|
||||
latest_tag: ${{ steps.get-prowler-version.outputs.latest_tag }}
|
||||
stable_tag: ${{ steps.get-prowler-version.outputs.stable_tag }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Set up Python ${{ env.PYTHON_VERSION }}
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
pipx install poetry==2.1.1
|
||||
pipx inject poetry poetry-bumpversion
|
||||
|
||||
- name: Get Prowler version and set tags
|
||||
id: get-prowler-version
|
||||
run: |
|
||||
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
|
||||
echo "prowler_version=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
# Extract major version
|
||||
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
|
||||
echo "prowler_version_major=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
# Set version-specific tags
|
||||
case ${PROWLER_VERSION_MAJOR} in
|
||||
3)
|
||||
echo "latest_tag=v3-latest" >> "${GITHUB_OUTPUT}"
|
||||
echo "stable_tag=v3-stable" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Prowler v3 detected - tags: v3-latest, v3-stable"
|
||||
;;
|
||||
4)
|
||||
echo "latest_tag=v4-latest" >> "${GITHUB_OUTPUT}"
|
||||
echo "stable_tag=v4-stable" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Prowler v4 detected - tags: v4-latest, v4-stable"
|
||||
;;
|
||||
5)
|
||||
echo "latest_tag=latest" >> "${GITHUB_OUTPUT}"
|
||||
echo "stable_tag=stable" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Prowler v5 detected - tags: latest, stable"
|
||||
;;
|
||||
*)
|
||||
echo "::error::Unsupported Prowler major version: ${PROWLER_VERSION_MAJOR}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
notify-release-started:
|
||||
if: github.repository == 'prowler-cloud/prowler' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
|
||||
needs: setup
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
outputs:
|
||||
message-ts: ${{ steps.slack-notification.outputs.ts }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Notify container push started
|
||||
id: slack-notification
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: SDK
|
||||
RELEASE_TAG: ${{ needs.setup.outputs.prowler_version }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"
|
||||
|
||||
container-build-push:
|
||||
needs: [setup, notify-release-started]
|
||||
if: always() && needs.setup.result == 'success' && (needs.notify-release-started.result == 'success' || needs.notify-release-started.result == 'skipped')
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -66,68 +148,10 @@ jobs:
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
outputs:
|
||||
prowler_version: ${{ steps.get-prowler-version.outputs.prowler_version }}
|
||||
prowler_version_major: ${{ steps.get-prowler-version.outputs.prowler_version_major }}
|
||||
latest_tag: ${{ steps.get-prowler-version.outputs.latest_tag }}
|
||||
stable_tag: ${{ steps.get-prowler-version.outputs.stable_tag }}
|
||||
env:
|
||||
POETRY_VIRTUALENVS_CREATE: 'false'
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Set up Python ${{ env.PYTHON_VERSION }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
pipx install poetry==2.1.1
|
||||
pipx inject poetry poetry-bumpversion
|
||||
|
||||
- name: Get Prowler version and set tags
|
||||
id: get-prowler-version
|
||||
run: |
|
||||
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
|
||||
echo "prowler_version=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Extract major version
|
||||
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
|
||||
echo "prowler_version_major=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_OUTPUT}"
|
||||
echo "PROWLER_VERSION_MAJOR=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_ENV}"
|
||||
|
||||
# Set version-specific tags
|
||||
case ${PROWLER_VERSION_MAJOR} in
|
||||
3)
|
||||
echo "LATEST_TAG=v3-latest" >> "${GITHUB_ENV}"
|
||||
echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
|
||||
echo "latest_tag=v3-latest" >> "${GITHUB_OUTPUT}"
|
||||
echo "stable_tag=v3-stable" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Prowler v3 detected - tags: v3-latest, v3-stable"
|
||||
;;
|
||||
4)
|
||||
echo "LATEST_TAG=v4-latest" >> "${GITHUB_ENV}"
|
||||
echo "STABLE_TAG=v4-stable" >> "${GITHUB_ENV}"
|
||||
echo "latest_tag=v4-latest" >> "${GITHUB_OUTPUT}"
|
||||
echo "stable_tag=v4-stable" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Prowler v4 detected - tags: v4-latest, v4-stable"
|
||||
;;
|
||||
5)
|
||||
echo "LATEST_TAG=latest" >> "${GITHUB_ENV}"
|
||||
echo "STABLE_TAG=stable" >> "${GITHUB_ENV}"
|
||||
echo "latest_tag=latest" >> "${GITHUB_OUTPUT}"
|
||||
echo "stable_tag=stable" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Prowler v5 detected - tags: latest, stable"
|
||||
;;
|
||||
*)
|
||||
echo "::error::Unsupported Prowler major version: ${PROWLER_VERSION_MAJOR}"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
@@ -147,20 +171,6 @@ jobs:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Notify container push started
|
||||
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: SDK
|
||||
RELEASE_TAG: ${{ env.PROWLER_VERSION }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"
|
||||
|
||||
- name: Build and push SDK container for ${{ matrix.arch }}
|
||||
id: container-push
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
@@ -171,29 +181,14 @@ jobs:
|
||||
push: true
|
||||
platforms: ${{ matrix.platform }}
|
||||
tags: |
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}-${{ matrix.arch }}
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }}-${{ matrix.arch }}
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
|
||||
- name: Notify container push completed
|
||||
if: (github.event_name == 'release' || github.event_name == 'workflow_dispatch') && always()
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: SDK
|
||||
RELEASE_TAG: ${{ env.PROWLER_VERSION }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
|
||||
step-outcome: ${{ steps.container-push.outcome }}
|
||||
|
||||
# Create and push multi-architecture manifest
|
||||
create-manifest:
|
||||
needs: [container-build-push]
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
needs: [setup, container-build-push]
|
||||
if: always() && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -219,24 +214,24 @@ jobs:
|
||||
if: github.event_name == 'push'
|
||||
run: |
|
||||
docker buildx imagetools create \
|
||||
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }} \
|
||||
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }} \
|
||||
-t ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }} \
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }}-amd64 \
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }}-arm64
|
||||
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }} \
|
||||
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }} \
|
||||
-t ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }} \
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }}-amd64 \
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }}-arm64
|
||||
|
||||
- name: Create and push manifests for release event
|
||||
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
run: |
|
||||
docker buildx imagetools create \
|
||||
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ needs.container-build-push.outputs.prowler_version }} \
|
||||
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ needs.container-build-push.outputs.stable_tag }} \
|
||||
-t ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ needs.container-build-push.outputs.prowler_version }} \
|
||||
-t ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ needs.container-build-push.outputs.stable_tag }} \
|
||||
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.prowler_version }} \
|
||||
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.stable_tag }} \
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }}-amd64 \
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }}-arm64
|
||||
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ needs.setup.outputs.prowler_version }} \
|
||||
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ needs.setup.outputs.stable_tag }} \
|
||||
-t ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ needs.setup.outputs.prowler_version }} \
|
||||
-t ${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ needs.setup.outputs.stable_tag }} \
|
||||
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.prowler_version }} \
|
||||
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.stable_tag }} \
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }}-amd64 \
|
||||
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }}-arm64
|
||||
|
||||
- name: Install regctl
|
||||
if: always()
|
||||
@@ -246,13 +241,47 @@ jobs:
|
||||
if: always()
|
||||
run: |
|
||||
echo "Cleaning up intermediate tags..."
|
||||
regctl tag delete "${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }}-amd64" || true
|
||||
regctl tag delete "${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }}-arm64" || true
|
||||
regctl tag delete "${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }}-amd64" || true
|
||||
regctl tag delete "${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.latest_tag }}-arm64" || true
|
||||
echo "Cleanup completed"
|
||||
|
||||
notify-release-completed:
|
||||
if: always() && needs.notify-release-started.result == 'success' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
|
||||
needs: [setup, notify-release-started, container-build-push, create-manifest]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Determine overall outcome
|
||||
id: outcome
|
||||
run: |
|
||||
if [[ "${{ needs.container-build-push.result }}" == "success" && "${{ needs.create-manifest.result }}" == "success" ]]; then
|
||||
echo "outcome=success" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "outcome=failure" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Notify container push completed
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
MESSAGE_TS: ${{ needs.notify-release-started.outputs.message-ts }}
|
||||
COMPONENT: SDK
|
||||
RELEASE_TAG: ${{ needs.setup.outputs.prowler_version }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
|
||||
step-outcome: ${{ steps.outcome.outputs.outcome }}
|
||||
update-ts: ${{ needs.notify-release-started.outputs.message-ts }}
|
||||
|
||||
dispatch-v3-deployment:
|
||||
if: needs.container-build-push.outputs.prowler_version_major == '3'
|
||||
needs: container-build-push
|
||||
needs: [setup, container-build-push]
|
||||
if: always() && needs.setup.outputs.prowler_version_major == '3' && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
@@ -265,7 +294,7 @@ jobs:
|
||||
|
||||
- name: Dispatch v3 deployment (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}
|
||||
@@ -274,9 +303,9 @@ jobs:
|
||||
|
||||
- name: Dispatch v3 deployment (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}
|
||||
event-type: dispatch
|
||||
client-payload: '{"version":"release","tag":"${{ needs.container-build-push.outputs.prowler_version }}"}'
|
||||
client-payload: '{"version":"release","tag":"${{ needs.setup.outputs.prowler_version }}"}'
|
||||
|
||||
@@ -27,7 +27,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check if Dockerfile changed
|
||||
id: dockerfile-changed
|
||||
@@ -44,7 +44,16 @@ jobs:
|
||||
|
||||
sdk-container-build-and-scan:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
arch: amd64
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
arch: arm64
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -53,7 +62,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check for SDK changes
|
||||
id: check-changes
|
||||
@@ -82,22 +91,23 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build SDK container
|
||||
- name: Build SDK container for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
load: true
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
platforms: ${{ matrix.platform }}
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
|
||||
- name: Scan SDK container with Trivy
|
||||
- name: Scan SDK container with Trivy for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/trivy-scan
|
||||
with:
|
||||
image-name: ${{ env.IMAGE_NAME }}
|
||||
image-tag: ${{ github.sha }}
|
||||
image-tag: ${{ github.sha }}-${{ matrix.arch }}
|
||||
fail-on-critical: 'false'
|
||||
severity: 'CRITICAL'
|
||||
|
||||
@@ -59,13 +59,13 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Install Poetry
|
||||
run: pipx install poetry==2.1.1
|
||||
|
||||
- name: Set up Python ${{ env.PYTHON_VERSION }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
cache: 'poetry'
|
||||
@@ -91,13 +91,13 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Install Poetry
|
||||
run: pipx install poetry==2.1.1
|
||||
|
||||
- name: Set up Python ${{ env.PYTHON_VERSION }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
cache: 'poetry'
|
||||
|
||||
@@ -25,12 +25,12 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
ref: 'master'
|
||||
|
||||
- name: Set up Python ${{ env.PYTHON_VERSION }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
cache: 'pip'
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
run: pip install boto3
|
||||
|
||||
- name: Configure AWS credentials
|
||||
uses: aws-actions/configure-aws-credentials@00943011d9042930efac3dcd3a170e4273319bc8 # v5.1.0
|
||||
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 # v5.1.1
|
||||
with:
|
||||
aws-region: ${{ env.AWS_REGION }}
|
||||
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
|
||||
|
||||
@@ -24,7 +24,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check for SDK changes
|
||||
id: check-changes
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
|
||||
- name: Set up Python 3.12
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: '3.12'
|
||||
cache: 'poetry'
|
||||
|
||||
@@ -31,7 +31,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check for SDK changes
|
||||
id: check-changes
|
||||
@@ -62,7 +62,7 @@ jobs:
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
cache: 'poetry'
|
||||
@@ -82,9 +82,110 @@ jobs:
|
||||
./tests/**/aws/**
|
||||
./poetry.lock
|
||||
|
||||
- name: Resolve AWS services under test
|
||||
if: steps.changed-aws.outputs.any_changed == 'true'
|
||||
id: aws-services
|
||||
shell: bash
|
||||
run: |
|
||||
python3 <<'PY'
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
dependents = {
|
||||
"acm": ["elb"],
|
||||
"autoscaling": ["dynamodb"],
|
||||
"awslambda": ["ec2", "inspector2"],
|
||||
"backup": ["dynamodb", "ec2", "rds"],
|
||||
"cloudfront": ["shield"],
|
||||
"cloudtrail": ["awslambda", "cloudwatch"],
|
||||
"cloudwatch": ["bedrock"],
|
||||
"ec2": ["dlm", "dms", "elbv2", "emr", "inspector2", "rds", "redshift", "route53", "shield", "ssm"],
|
||||
"ecr": ["inspector2"],
|
||||
"elb": ["shield"],
|
||||
"elbv2": ["shield"],
|
||||
"globalaccelerator": ["shield"],
|
||||
"iam": ["bedrock", "cloudtrail", "cloudwatch", "codebuild"],
|
||||
"kafka": ["firehose"],
|
||||
"kinesis": ["firehose"],
|
||||
"kms": ["kafka"],
|
||||
"organizations": ["iam", "servicecatalog"],
|
||||
"route53": ["shield"],
|
||||
"s3": ["bedrock", "cloudfront", "cloudtrail", "macie"],
|
||||
"ssm": ["ec2"],
|
||||
"vpc": ["awslambda", "ec2", "efs", "elasticache", "neptune", "networkfirewall", "rds", "redshift", "workspaces"],
|
||||
"waf": ["elbv2"],
|
||||
"wafv2": ["cognito", "elbv2"],
|
||||
}
|
||||
|
||||
changed_raw = """${{ steps.changed-aws.outputs.all_changed_files }}"""
|
||||
# all_changed_files is space-separated, not newline-separated
|
||||
# Strip leading "./" if present for consistent path handling
|
||||
changed_files = [Path(f.lstrip("./")) for f in changed_raw.split() if f]
|
||||
|
||||
services = set()
|
||||
run_all = False
|
||||
|
||||
for path in changed_files:
|
||||
path_str = path.as_posix()
|
||||
parts = path.parts
|
||||
if path_str.startswith("prowler/providers/aws/services/"):
|
||||
if len(parts) > 4 and "." not in parts[4]:
|
||||
services.add(parts[4])
|
||||
else:
|
||||
run_all = True
|
||||
elif path_str.startswith("tests/providers/aws/services/"):
|
||||
if len(parts) > 4 and "." not in parts[4]:
|
||||
services.add(parts[4])
|
||||
else:
|
||||
run_all = True
|
||||
elif path_str.startswith("prowler/providers/aws/") or path_str.startswith("tests/providers/aws/"):
|
||||
run_all = True
|
||||
|
||||
# Expand with direct dependent services (one level only)
|
||||
# We only test services that directly depend on the changed services,
|
||||
# not transitive dependencies (services that depend on dependents)
|
||||
original_services = set(services)
|
||||
for svc in original_services:
|
||||
for dep in dependents.get(svc, []):
|
||||
services.add(dep)
|
||||
|
||||
if run_all or not services:
|
||||
run_all = True
|
||||
services = set()
|
||||
|
||||
service_paths = " ".join(sorted(f"tests/providers/aws/services/{svc}" for svc in services))
|
||||
|
||||
output_lines = [
|
||||
f"run_all={'true' if run_all else 'false'}",
|
||||
f"services={' '.join(sorted(services))}",
|
||||
f"service_paths={service_paths}",
|
||||
]
|
||||
|
||||
with open(os.environ["GITHUB_OUTPUT"], "a") as gh_out:
|
||||
for line in output_lines:
|
||||
gh_out.write(line + "\n")
|
||||
|
||||
print(f"AWS changed files (filtered): {changed_raw or 'none'}")
|
||||
print(f"Run all AWS tests: {run_all}")
|
||||
if services:
|
||||
print(f"AWS service test paths: {service_paths}")
|
||||
else:
|
||||
print("AWS service test paths: none detected")
|
||||
PY
|
||||
|
||||
- name: Run AWS tests
|
||||
if: steps.changed-aws.outputs.any_changed == 'true'
|
||||
run: poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
|
||||
run: |
|
||||
echo "AWS run_all=${{ steps.aws-services.outputs.run_all }}"
|
||||
echo "AWS service_paths='${{ steps.aws-services.outputs.service_paths }}'"
|
||||
|
||||
if [ "${{ steps.aws-services.outputs.run_all }}" = "true" ]; then
|
||||
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml tests/providers/aws
|
||||
elif [ -z "${{ steps.aws-services.outputs.service_paths }}" ]; then
|
||||
echo "No AWS service paths detected; skipping AWS tests."
|
||||
else
|
||||
poetry run pytest -n auto --cov=./prowler/providers/aws --cov-report=xml:aws_coverage.xml ${{ steps.aws-services.outputs.service_paths }}
|
||||
fi
|
||||
|
||||
- name: Upload AWS coverage to Codecov
|
||||
if: steps.changed-aws.outputs.any_changed == 'true'
|
||||
|
||||
@@ -0,0 +1,221 @@
|
||||
name: 'UI: Bump Version'
|
||||
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- 'published'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
|
||||
cancel-in-progress: false
|
||||
|
||||
env:
|
||||
PROWLER_VERSION: ${{ github.event.release.tag_name }}
|
||||
BASE_BRANCH: master
|
||||
|
||||
jobs:
|
||||
detect-release-type:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
is_minor: ${{ steps.detect.outputs.is_minor }}
|
||||
is_patch: ${{ steps.detect.outputs.is_patch }}
|
||||
major_version: ${{ steps.detect.outputs.major_version }}
|
||||
minor_version: ${{ steps.detect.outputs.minor_version }}
|
||||
patch_version: ${{ steps.detect.outputs.patch_version }}
|
||||
steps:
|
||||
- name: Detect release type and parse version
|
||||
id: detect
|
||||
run: |
|
||||
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
|
||||
MAJOR_VERSION=${BASH_REMATCH[1]}
|
||||
MINOR_VERSION=${BASH_REMATCH[2]}
|
||||
PATCH_VERSION=${BASH_REMATCH[3]}
|
||||
|
||||
echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
if (( MAJOR_VERSION != 5 )); then
|
||||
echo "::error::Releasing another Prowler major version, aborting..."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if (( PATCH_VERSION == 0 )); then
|
||||
echo "is_minor=true" >> "${GITHUB_OUTPUT}"
|
||||
echo "is_patch=false" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Minor release detected: $PROWLER_VERSION"
|
||||
else
|
||||
echo "is_minor=false" >> "${GITHUB_OUTPUT}"
|
||||
echo "is_patch=true" >> "${GITHUB_OUTPUT}"
|
||||
echo "✓ Patch release detected: $PROWLER_VERSION"
|
||||
fi
|
||||
else
|
||||
echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
bump-minor-version:
|
||||
needs: detect-release-type
|
||||
if: needs.detect-release-type.outputs.is_minor == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Calculate next minor version
|
||||
run: |
|
||||
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
|
||||
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
|
||||
|
||||
NEXT_MINOR_VERSION=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).0
|
||||
echo "NEXT_MINOR_VERSION=${NEXT_MINOR_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Current version: $PROWLER_VERSION"
|
||||
echo "Next minor version: $NEXT_MINOR_VERSION"
|
||||
|
||||
- name: Bump UI version in .env for master
|
||||
run: |
|
||||
set -e
|
||||
|
||||
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_MINOR_VERSION}|" .env
|
||||
|
||||
echo "Files modified:"
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create PR for next minor version to master
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: master
|
||||
commit-message: 'chore(ui): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
|
||||
branch: ui-version-bump-to-v${{ env.NEXT_MINOR_VERSION }}
|
||||
title: 'chore(ui): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
|
||||
labels: no-changelog,skip-sync
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler UI version to v${{ env.NEXT_MINOR_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
|
||||
|
||||
### Files Updated
|
||||
- `.env`: `NEXT_PUBLIC_PROWLER_RELEASE_VERSION`
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
|
||||
- name: Checkout version branch
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
with:
|
||||
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
|
||||
|
||||
- name: Calculate first patch version
|
||||
run: |
|
||||
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
|
||||
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
|
||||
|
||||
FIRST_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.1
|
||||
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
|
||||
echo "FIRST_PATCH_VERSION=${FIRST_PATCH_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "First patch version: $FIRST_PATCH_VERSION"
|
||||
echo "Version branch: $VERSION_BRANCH"
|
||||
|
||||
- name: Bump UI version in .env for version branch
|
||||
run: |
|
||||
set -e
|
||||
|
||||
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${FIRST_PATCH_VERSION}|" .env
|
||||
|
||||
echo "Files modified:"
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create PR for first patch version to version branch
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.VERSION_BRANCH }}
|
||||
commit-message: 'chore(ui): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
|
||||
branch: ui-version-bump-to-v${{ env.FIRST_PATCH_VERSION }}
|
||||
title: 'chore(ui): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
|
||||
labels: no-changelog,skip-sync
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler UI version to v${{ env.FIRST_PATCH_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.
|
||||
|
||||
### Files Updated
|
||||
- `.env`: `NEXT_PUBLIC_PROWLER_RELEASE_VERSION`
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
|
||||
bump-patch-version:
|
||||
needs: detect-release-type
|
||||
if: needs.detect-release-type.outputs.is_patch == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Calculate next patch version
|
||||
run: |
|
||||
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
|
||||
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
|
||||
PATCH_VERSION=${{ needs.detect-release-type.outputs.patch_version }}
|
||||
|
||||
NEXT_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.$((PATCH_VERSION + 1))
|
||||
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
|
||||
|
||||
echo "NEXT_PATCH_VERSION=${NEXT_PATCH_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
|
||||
|
||||
echo "Current version: $PROWLER_VERSION"
|
||||
echo "Next patch version: $NEXT_PATCH_VERSION"
|
||||
echo "Target branch: $VERSION_BRANCH"
|
||||
|
||||
- name: Bump UI version in .env for version branch
|
||||
run: |
|
||||
set -e
|
||||
|
||||
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_PATCH_VERSION}|" .env
|
||||
|
||||
echo "Files modified:"
|
||||
git --no-pager diff
|
||||
|
||||
- name: Create PR for next patch version to version branch
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
base: ${{ env.VERSION_BRANCH }}
|
||||
commit-message: 'chore(ui): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
|
||||
branch: ui-version-bump-to-v${{ env.NEXT_PATCH_VERSION }}
|
||||
title: 'chore(ui): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
|
||||
labels: no-changelog,skip-sync
|
||||
body: |
|
||||
### Description
|
||||
|
||||
Bump Prowler UI version to v${{ env.NEXT_PATCH_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
|
||||
|
||||
### Files Updated
|
||||
- `.env`: `NEXT_PUBLIC_PROWLER_RELEASE_VERSION`
|
||||
|
||||
### License
|
||||
|
||||
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
|
||||
@@ -45,15 +45,15 @@ jobs:

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/ui-codeql-config.yml

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: '/language:${{ matrix.language }}'

@@ -50,8 +50,34 @@ jobs:
|
||||
id: set-short-sha
|
||||
run: echo "short-sha=${GITHUB_SHA::7}" >> $GITHUB_OUTPUT
|
||||
|
||||
container-build-push:
|
||||
notify-release-started:
|
||||
if: github.repository == 'prowler-cloud/prowler' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
|
||||
needs: setup
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
outputs:
|
||||
message-ts: ${{ steps.slack-notification.outputs.ts }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Notify container push started
|
||||
id: slack-notification
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: UI
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"
|
||||
|
||||
container-build-push:
|
||||
needs: [setup, notify-release-started]
|
||||
if: always() && needs.setup.result == 'success' && (needs.notify-release-started.result == 'success' || needs.notify-release-started.result == 'skipped')
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
@@ -69,7 +95,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
@@ -80,20 +106,6 @@ jobs:
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Notify container push started
|
||||
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: UI
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"
|
||||
|
||||
- name: Build and push UI container for ${{ matrix.arch }}
|
||||
id: container-push
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
@@ -110,25 +122,10 @@ jobs:
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
|
||||
- name: Notify container push completed
|
||||
if: (github.event_name == 'release' || github.event_name == 'workflow_dispatch') && always()
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
COMPONENT: UI
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
|
||||
step-outcome: ${{ steps.container-push.outcome }}
|
||||
|
||||
# Create and push multi-architecture manifest
|
||||
create-manifest:
|
||||
needs: [setup, container-build-push]
|
||||
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
|
||||
if: always() && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -171,9 +168,43 @@ jobs:
|
||||
regctl tag delete "${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-arm64" || true
|
||||
echo "Cleanup completed"
|
||||
|
||||
notify-release-completed:
|
||||
if: always() && needs.notify-release-started.result == 'success' && (github.event_name == 'release' || github.event_name == 'workflow_dispatch')
|
||||
needs: [setup, notify-release-started, container-build-push, create-manifest]
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Determine overall outcome
|
||||
id: outcome
|
||||
run: |
|
||||
if [[ "${{ needs.container-build-push.result }}" == "success" && "${{ needs.create-manifest.result }}" == "success" ]]; then
|
||||
echo "outcome=success" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "outcome=failure" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Notify container push completed
|
||||
uses: ./.github/actions/slack-notification
|
||||
env:
|
||||
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
|
||||
MESSAGE_TS: ${{ needs.notify-release-started.outputs.message-ts }}
|
||||
COMPONENT: UI
|
||||
RELEASE_TAG: ${{ env.RELEASE_TAG }}
|
||||
GITHUB_SERVER_URL: ${{ github.server_url }}
|
||||
GITHUB_REPOSITORY: ${{ github.repository }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
with:
|
||||
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
|
||||
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
|
||||
step-outcome: ${{ steps.outcome.outputs.outcome }}
|
||||
update-ts: ${{ needs.notify-release-started.outputs.message-ts }}
|
||||
|
||||
trigger-deployment:
|
||||
if: github.event_name == 'push'
|
||||
needs: [setup, container-build-push]
|
||||
if: always() && github.event_name == 'push' && needs.setup.result == 'success' && needs.container-build-push.result == 'success'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 5
|
||||
permissions:
|
||||
@@ -181,7 +212,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Trigger UI deployment
|
||||
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
|
||||
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
|
||||
with:
|
||||
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
|
||||
repository: ${{ secrets.CLOUD_DISPATCH }}
|
||||
|
||||
@@ -20,6 +20,7 @@ env:
|
||||
|
||||
jobs:
|
||||
ui-dockerfile-lint:
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
permissions:
|
||||
@@ -27,7 +28,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check if Dockerfile changed
|
||||
id: dockerfile-changed
|
||||
@@ -43,7 +44,17 @@ jobs:
|
||||
ignore: DL3018
|
||||
|
||||
ui-container-build-and-scan:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository == 'prowler-cloud/prowler'
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
arch: amd64
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
arch: arm64
|
||||
timeout-minutes: 30
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -52,7 +63,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check for UI changes
|
||||
id: check-changes
|
||||
@@ -67,7 +78,7 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build UI container
|
||||
- name: Build UI container for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
@@ -75,17 +86,18 @@ jobs:
|
||||
target: prod
|
||||
push: false
|
||||
load: true
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
platforms: ${{ matrix.platform }}
|
||||
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
|
||||
cache-from: type=gha,scope=${{ matrix.arch }}
|
||||
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
|
||||
build-args: |
|
||||
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX
|
||||
|
||||
- name: Scan UI container with Trivy
|
||||
if: github.repository == 'prowler-cloud/prowler' && steps.check-changes.outputs.any_changed == 'true'
|
||||
- name: Scan UI container with Trivy for ${{ matrix.arch }}
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: ./.github/actions/trivy-scan
|
||||
with:
|
||||
image-name: ${{ env.IMAGE_NAME }}
|
||||
image-tag: ${{ github.sha }}
|
||||
image-tag: ${{ github.sha }}-${{ matrix.arch }}
|
||||
fail-on-critical: 'false'
|
||||
severity: 'CRITICAL'
|
||||
|
||||
@@ -54,7 +54,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
- name: Create k8s Kind Cluster
|
||||
uses: helm/kind-action@v1
|
||||
with:
|
||||
@@ -117,29 +117,42 @@ jobs:
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
|
||||
with:
|
||||
node-version: '20.x'
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10
|
||||
run_install: false
|
||||
- name: Get pnpm store directory
|
||||
shell: bash
|
||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
- name: Setup pnpm cache
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ${{ env.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('ui/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
- name: Install UI dependencies
|
||||
working-directory: ./ui
|
||||
run: npm ci
|
||||
run: pnpm install --frozen-lockfile
|
||||
- name: Build UI application
|
||||
working-directory: ./ui
|
||||
run: npm run build
|
||||
run: pnpm run build
|
||||
- name: Cache Playwright browsers
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
id: playwright-cache
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
|
||||
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-playwright-
|
||||
- name: Install Playwright browsers
|
||||
working-directory: ./ui
|
||||
if: steps.playwright-cache.outputs.cache-hit != 'true'
|
||||
run: npm run test:e2e:install
|
||||
run: pnpm run test:e2e:install
|
||||
- name: Run E2E tests
|
||||
working-directory: ./ui
|
||||
run: npm run test:e2e
|
||||
run: pnpm run test:e2e
|
||||
- name: Upload test reports
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
if: failure()
|
||||
|
||||
@@ -30,7 +30,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Check for UI changes
|
||||
id: check-changes
|
||||
@@ -48,17 +48,36 @@ jobs:
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
|
||||
with:
|
||||
node-version: ${{ env.NODE_VERSION }}
|
||||
cache: 'npm'
|
||||
cache-dependency-path: './ui/package-lock.json'
|
||||
|
||||
- name: Setup pnpm
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 10
|
||||
run_install: false
|
||||
|
||||
- name: Get pnpm store directory
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
shell: bash
|
||||
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup pnpm cache
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
|
||||
with:
|
||||
path: ${{ env.STORE_PATH }}
|
||||
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('ui/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-store-
|
||||
|
||||
- name: Install dependencies
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: npm ci
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Run healthcheck
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: npm run healthcheck
|
||||
run: pnpm run healthcheck
|
||||
|
||||
- name: Build application
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: npm run build
|
||||
run: pnpm run build
|
||||
|
||||
@@ -150,9 +150,5 @@ _data/
# Claude
CLAUDE.md

# MCP Server
mcp_server/prowler_mcp_server/prowler_app/server.py
mcp_server/prowler_mcp_server/prowler_app/utils/schema.yaml

# Compliance report
*.pdf
@@ -12,6 +12,7 @@ ENV TRIVY_VERSION=${TRIVY_VERSION}
# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
    wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
    build-essential pkg-config libzstd-dev zlib1g-dev \
    && rm -rf /var/lib/apt/lists/*

# Install PowerShell
@@ -47,12 +47,12 @@ help: ## Show this help.
	@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

##@ Build no cache
build-no-cache-dev:
	docker compose -f docker-compose-dev.yml build --no-cache api-dev worker-dev worker-beat
build-no-cache-dev:
	docker compose -f docker-compose-dev.yml build --no-cache api-dev worker-dev worker-beat mcp-server

##@ Development Environment
run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, and workers
	docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat
run-api-dev: ## Start development environment with API, PostgreSQL, Valkey, MCP, and workers
	docker compose -f docker-compose-dev.yml up api-dev postgres valkey worker-dev worker-beat mcp-server

##@ Development Environment
build-and-run-api-dev: build-no-cache-dev run-api-dev
@@ -6,7 +6,7 @@
|
||||
<b><i>Prowler</b> is the Open Cloud Security platform trusted by thousands to automate security and compliance in any cloud environment. With hundreds of ready-to-use checks and compliance frameworks, Prowler delivers real-time, customizable monitoring and seamless integrations, making cloud security simple, scalable, and cost-effective for organizations of any size.
|
||||
</p>
|
||||
<p align="center">
|
||||
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
|
||||
<b>Secure ANY cloud at AI Speed at <a href="https://prowler.com">prowler.com</i></b>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
@@ -23,6 +23,7 @@
|
||||
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
|
||||
<a href="https://gallery.ecr.aws/prowler-cloud/prowler"><img width="120" height=19" alt="AWS ECR Gallery" src="https://user-images.githubusercontent.com/3985464/151531396-b6535a68-c907-44eb-95a1-a09508178616.png"></a>
|
||||
<a href="https://codecov.io/gh/prowler-cloud/prowler"><img src="https://codecov.io/gh/prowler-cloud/prowler/graph/badge.svg?token=OflBGsdpDl"/></a>
|
||||
<a href="https://insights.linuxfoundation.org/project/prowler-cloud-prowler"><img src="https://insights.linuxfoundation.org/api/badge/health-score?project=prowler-cloud-prowler"/></a>
|
||||
</p>
|
||||
<p align="center">
|
||||
<a href="https://github.com/prowler-cloud/prowler/releases"><img alt="Version" src="https://img.shields.io/github/v/release/prowler-cloud/prowler"></a>
|
||||
@@ -35,28 +36,32 @@
</p>
<hr>
<p align="center">
<img align="center" src="/docs/img/prowler-cli-quick.gif" width="100%" height="100%">
<img align="center" src="/docs/img/prowler-cloud.gif" width="100%" height="100%">
</p>

# Description

**Prowler** is an open-source security tool designed to assess and enforce security best practices across AWS, Azure, Google Cloud, and Kubernetes. It supports tasks such as security audits, incident response, continuous monitoring, system hardening, forensic readiness, and remediation processes.
**Prowler** is the world’s most widely used _open-source cloud security platform_ that automates security and compliance across **any cloud environment**. With hundreds of ready-to-use security checks, remediation guidance, and compliance frameworks, Prowler is built to _“Secure ANY cloud at AI Speed”_. Prowler delivers **AI-driven**, **customizable**, and **easy-to-use** assessments, dashboards, reports, and integrations, making cloud security **simple**, **scalable**, and **cost-effective** for organizations of any size.

Prowler includes hundreds of built-in controls to ensure compliance with standards and frameworks, including:

- **Industry Standards:** CIS, NIST 800, NIST CSF, and CISA
- **Regulatory Compliance and Governance:** RBI, FedRAMP, and PCI-DSS
- **Prowler ThreatScore:** Weighted risk prioritization scoring that helps you focus on the most critical security findings first
- **Industry Standards:** CIS, NIST 800, NIST CSF, CISA, and MITRE ATT&CK
- **Regulatory Compliance and Governance:** RBI, FedRAMP, PCI-DSS, and NIS2
- **Frameworks for Sensitive Data and Privacy:** GDPR, HIPAA, and FFIEC
- **Frameworks for Organizational Governance and Quality Control:** SOC2 and GXP
- **AWS-Specific Frameworks:** AWS Foundational Technical Review (FTR) and AWS Well-Architected Framework (Security Pillar)
- **National Security Standards:** ENS (Spanish National Security Scheme)
- **Frameworks for Organizational Governance and Quality Control:** SOC2, GXP, and ISO 27001
- **Cloud-Specific Frameworks:** AWS Foundational Technical Review (FTR), AWS Well-Architected Framework, and BSI C5
- **National Security Standards:** ENS (Spanish National Security Scheme) and KISA ISMS-P (Korean)
- **Custom Security Frameworks:** Tailored to your needs
## Prowler App
## Prowler App / Prowler Cloud

Prowler App is a web-based application that simplifies running Prowler across your cloud provider accounts. It provides a user-friendly interface to visualize the results and streamline your security assessments.
Prowler App / [Prowler Cloud](https://cloud.prowler.com/) is a web-based application that simplifies running Prowler across your cloud provider accounts. It provides a user-friendly interface to visualize the results and streamline your security assessments.




> For more details, refer to the [Prowler App Documentation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-app-installation)
@@ -82,15 +87,16 @@ prowler dashboard

| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) | Support | Interface |
|---|---|---|---|---|---|---|
| AWS | 576 | 82 | 39 | 10 | Official | UI, API, CLI |
| GCP | 79 | 13 | 13 | 3 | Official | UI, API, CLI |
| Azure | 162 | 19 | 13 | 4 | Official | UI, API, CLI |
| Kubernetes | 83 | 7 | 5 | 7 | Official | UI, API, CLI |
| GitHub | 17 | 2 | 1 | 0 | Official | UI, API, CLI |
| AWS | 584 | 85 | 40 | 17 | Official | UI, API, CLI |
| GCP | 89 | 17 | 14 | 5 | Official | UI, API, CLI |
| Azure | 169 | 22 | 15 | 8 | Official | UI, API, CLI |
| Kubernetes | 84 | 7 | 6 | 9 | Official | UI, API, CLI |
| GitHub | 20 | 2 | 1 | 2 | Official | UI, API, CLI |
| M365 | 70 | 7 | 3 | 2 | Official | UI, API, CLI |
| OCI | 51 | 13 | 1 | 10 | Official | UI, API, CLI |
| OCI | 52 | 15 | 1 | 12 | Official | UI, API, CLI |
| Alibaba Cloud | 63 | 10 | 1 | 9 | Official | CLI |
| IaC | [See `trivy` docs.](https://trivy.dev/latest/docs/coverage/iac/) | N/A | N/A | N/A | Official | UI, API, CLI |
| MongoDB Atlas | 10 | 3 | 0 | 0 | Official | UI, API, CLI |
| MongoDB Atlas | 10 | 4 | 0 | 3 | Official | UI, API, CLI |
| LLM | [See `promptfoo` docs.](https://www.promptfoo.dev/docs/red-team/plugins/) | N/A | N/A | N/A | Official | CLI |
| NHN | 6 | 2 | 1 | 0 | Unofficial | CLI |
@@ -142,9 +148,9 @@ If your workstation's architecture is incompatible, you can resolve this by:
### Common Issues with Docker Pull Installation

> [!Note]
If you want to use AWS role assumption (e.g., with the "Connect assuming IAM Role" option), you may need to mount your local `.aws` directory into the container as a volume (e.g., `- "${HOME}/.aws:/home/prowler/.aws:ro"`). There are several ways to configure credentials for Docker containers. See the [Troubleshooting](./docs/troubleshooting.md) section for more details and examples.
If you want to use AWS role assumption (e.g., with the "Connect assuming IAM Role" option), you may need to mount your local `.aws` directory into the container as a volume (e.g., `- "${HOME}/.aws:/home/prowler/.aws:ro"`). There are several ways to configure credentials for Docker containers. See the [Troubleshooting](./docs/troubleshooting.mdx) section for more details and examples.

You can find more information in the [Troubleshooting](./docs/troubleshooting.md) section.
You can find more information in the [Troubleshooting](./docs/troubleshooting.mdx) section.
### From GitHub
@@ -153,7 +159,7 @@ You can find more information in the [Troubleshooting](./docs/troubleshooting.md

* `git` installed.
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `pnpm` installed: [pnpm installation](https://pnpm.io/installation).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.

**Commands to run the API**
@@ -209,9 +215,9 @@ python -m celery -A config.celery beat -l info --scheduler django_celery_beat.sc
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/ui
npm install
npm run build
npm start
pnpm install
pnpm run build
pnpm start
```

> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
@@ -271,11 +277,12 @@ python prowler-cli.py -v
# ✏️ High level architecture

## Prowler App
**Prowler App** is composed of three key components:
**Prowler App** is composed of four key components:

- **Prowler UI**: A web-based interface, built with Next.js, providing a user-friendly experience for executing Prowler scans and visualizing results.
- **Prowler API**: A backend service, developed with Django REST Framework, responsible for running Prowler scans and storing the generated results.
- **Prowler SDK**: A Python SDK designed to extend the functionality of the Prowler CLI for advanced capabilities.
- **Prowler MCP Server**: A Model Context Protocol server that provides AI tools for Lighthouse, the AI-powered security assistant. This is a critical dependency for Lighthouse functionality.


+62
-2
@@ -2,13 +2,73 @@

All notable changes to the **Prowler API** are documented in this file.

## [1.16.0] (Unreleased)
## [1.18.0] (Prowler UNRELEASED)

### Added
- Support AlibabaCloud provider [(#9485)](https://github.com/prowler-cloud/prowler/pull/9485)

---

## [1.17.2] (Prowler v5.16.2)

### Security
- Updated dependencies to patch security vulnerabilities: Django 5.1.15 (CVE-2025-64460, CVE-2025-13372), Werkzeug 3.1.4 (CVE-2025-66221), sqlparse 0.5.5 (PVE-2025-82038), fonttools 4.60.2 (CVE-2025-66034) [(#9730)](https://github.com/prowler-cloud/prowler/pull/9730)

---

## [1.17.1] (Prowler v5.16.1)

### Changed
- Security Hub integration error when no regions [(#9635)](https://github.com/prowler-cloud/prowler/pull/9635)

### Fixed
- Orphan scheduled scans caused by transaction isolation during provider creation [(#9633)](https://github.com/prowler-cloud/prowler/pull/9633)

---

## [1.17.0] (Prowler v5.16.0)

### Added
- New endpoint to retrieve an overview of the categories based on finding severities [(#9529)](https://github.com/prowler-cloud/prowler/pull/9529)
- Endpoints `GET /findings` and `GET /findings/latests` can now use the category filter [(#9529)](https://github.com/prowler-cloud/prowler/pull/9529)
- Account ID, alias, and provider name to PDF reporting table [(#9574)](https://github.com/prowler-cloud/prowler/pull/9574)

### Changed
- Endpoint `GET /overviews/attack-surfaces` no longer returns the related check IDs [(#9529)](https://github.com/prowler-cloud/prowler/pull/9529)
- OpenAI provider to only load chat-compatible models with tool calling support [(#9523)](https://github.com/prowler-cloud/prowler/pull/9523)
- Increased execution delay for the first scheduled scan tasks to 5 seconds [(#9558)](https://github.com/prowler-cloud/prowler/pull/9558)

### Fixed
- Made `scan_id` a required filter in the compliance overview endpoint [(#9560)](https://github.com/prowler-cloud/prowler/pull/9560)
- Reduced unnecessary UPDATE operations on resources by only saving when tag mappings change, lowering write load during scans [(#9569)](https://github.com/prowler-cloud/prowler/pull/9569)

---

## [1.16.1] (Prowler v5.15.1)

### Fixed
- Race condition in scheduled scan creation by adding a countdown to the task [(#9516)](https://github.com/prowler-cloud/prowler/pull/9516)

## [1.16.0] (Prowler v5.15.0)

### Added
- New endpoint to retrieve an overview of the attack surfaces [(#9309)](https://github.com/prowler-cloud/prowler/pull/9309)
- New endpoint `GET /api/v1/overviews/findings_severity/timeseries` to retrieve daily aggregated findings by severity level [(#9363)](https://github.com/prowler-cloud/prowler/pull/9363)
- Lighthouse AI support for Amazon Bedrock API key [(#9343)](https://github.com/prowler-cloud/prowler/pull/9343)
- Exception handler for provider deletions during scans [(#9414)](https://github.com/prowler-cloud/prowler/pull/9414)
- Support to use admin credentials through the read replica database [(#9440)](https://github.com/prowler-cloud/prowler/pull/9440)

### Changed
- Restore the compliance overview endpoint's mandatory filters [(#9330)](https://github.com/prowler-cloud/prowler/pull/9330)
- Error messages from Lighthouse celery tasks [(#9165)](https://github.com/prowler-cloud/prowler/pull/9165)
- Restore the compliance overview endpoint's mandatory filters [(#9338)](https://github.com/prowler-cloud/prowler/pull/9338)

---

## [1.15.2] (Prowler v5.14.2)

### Fixed
- Unique constraint violation during compliance overviews task [(#9436)](https://github.com/prowler-cloud/prowler/pull/9436)
- Division by zero error in ENS PDF report when all requirements are manual [(#9443)](https://github.com/prowler-cloud/prowler/pull/9443)

---
Generated
+847
-78
File diff suppressed because it is too large
+6
-3
@@ -7,7 +7,7 @@ authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
dependencies = [
    "celery[pytest] (>=5.4.0,<6.0.0)",
    "dj-rest-auth[with_social,jwt] (==7.0.1)",
    "django (==5.1.14)",
    "django (==5.1.15)",
    "django-allauth[saml] (>=65.8.0,<66.0.0)",
    "django-celery-beat (>=2.7.0,<3.0.0)",
    "django-celery-results (>=2.5.1,<3.0.0)",
@@ -36,7 +36,10 @@ dependencies = [
    "drf-simple-apikey (==2.2.1)",
    "matplotlib (>=3.10.6,<4.0.0)",
    "reportlab (>=4.4.4,<5.0.0)",
    "gevent (>=25.9.1,<26.0.0)"
    "gevent (>=25.9.1,<26.0.0)",
    "werkzeug (>=3.1.4)",
    "sqlparse (>=0.5.4)",
    "fonttools (>=4.60.2)"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
@@ -44,7 +47,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.16.0"
version = "1.18.0"

[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -40,7 +40,6 @@ class ApiConfig(AppConfig):
|
||||
self._ensure_crypto_keys()
|
||||
|
||||
load_prowler_compliance()
|
||||
self._initialize_attack_surface_mapping()
|
||||
|
||||
def _ensure_crypto_keys(self):
|
||||
"""
|
||||
@@ -168,13 +167,3 @@ class ApiConfig(AppConfig):
|
||||
f"Error generating JWT keys: {e}. Please set '{SIGNING_KEY_ENV}' and '{VERIFYING_KEY_ENV}' manually."
|
||||
)
|
||||
raise e
|
||||
|
||||
def _initialize_attack_surface_mapping(self):
|
||||
from tasks.jobs.scan import ( # noqa: F401
|
||||
_get_attack_surface_mapping_from_provider,
|
||||
)
|
||||
|
||||
from api.models import Provider # noqa: F401
|
||||
|
||||
for provider_type, _label in Provider.ProviderChoices.choices:
|
||||
_get_attack_surface_mapping_from_provider(provider_type)
|
||||
|
||||
@@ -26,6 +26,7 @@ class MainRouter:
|
||||
default_db = "default"
|
||||
admin_db = "admin"
|
||||
replica_db = "replica"
|
||||
admin_replica_db = "admin_replica"
|
||||
|
||||
def db_for_read(self, model, **hints): # noqa: F841
|
||||
model_table_name = model._meta.db_table
|
||||
@@ -49,7 +50,12 @@ class MainRouter:
|
||||
|
||||
def allow_relation(self, obj1, obj2, **hints): # noqa: F841
|
||||
# Allow relations when both objects originate from allowed connectors
|
||||
allowed_dbs = {self.default_db, self.admin_db, self.replica_db}
|
||||
allowed_dbs = {
|
||||
self.default_db,
|
||||
self.admin_db,
|
||||
self.replica_db,
|
||||
self.admin_replica_db,
|
||||
}
|
||||
if {obj1._state.db, obj2._state.db} <= allowed_dbs:
|
||||
return True
|
||||
return None
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
import uuid
|
||||
from functools import wraps
|
||||
|
||||
from django.db import connection, transaction
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db import IntegrityError, connection, transaction
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
|
||||
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
|
||||
from api.db_router import READ_REPLICA_ALIAS
|
||||
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY, rls_transaction
|
||||
from api.exceptions import ProviderDeletedException
|
||||
from api.models import Provider, Scan
|
||||
|
||||
|
||||
def set_tenant(func=None, *, keep_tenant=False):
|
||||
@@ -66,3 +70,49 @@ def set_tenant(func=None, *, keep_tenant=False):
|
||||
return decorator
|
||||
else:
|
||||
return decorator(func)
|
||||
|
||||
|
||||
def handle_provider_deletion(func):
|
||||
"""
|
||||
Decorator that raises ProviderDeletedException if provider was deleted during execution.
|
||||
|
||||
Catches ObjectDoesNotExist and IntegrityError, checks if provider still exists,
|
||||
and raises ProviderDeletedException if not. Otherwise, re-raises original exception.
|
||||
|
||||
Requires tenant_id and provider_id in kwargs.
|
||||
|
||||
Example:
|
||||
@shared_task
|
||||
@handle_provider_deletion
|
||||
def scan_task(scan_id, tenant_id, provider_id):
|
||||
...
|
||||
"""
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except (ObjectDoesNotExist, IntegrityError):
|
||||
tenant_id = kwargs.get("tenant_id")
|
||||
provider_id = kwargs.get("provider_id")
|
||||
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
if provider_id is None:
|
||||
scan_id = kwargs.get("scan_id")
|
||||
if scan_id is None:
|
||||
raise AssertionError(
|
||||
"This task does not have provider or scan in the kwargs"
|
||||
)
|
||||
scan = Scan.objects.filter(pk=scan_id).first()
|
||||
if scan is None:
|
||||
raise ProviderDeletedException(
|
||||
f"Provider for scan '{scan_id}' was deleted during the scan"
|
||||
) from None
|
||||
provider_id = str(scan.provider_id)
|
||||
if not Provider.objects.filter(pk=provider_id).exists():
|
||||
raise ProviderDeletedException(
|
||||
f"Provider '{provider_id}' was deleted during the scan"
|
||||
) from None
|
||||
raise
|
||||
|
||||
return wrapper
|
||||
|
||||
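For orientation, a minimal usage sketch of the decorator above; the task name and body are hypothetical, and only the decorator contract (tenant_id/provider_id keyword arguments) comes from this changeset:

```python
# Hypothetical task; only the decorator usage and kwargs follow the diff above.
from celery import shared_task

from api.decorators import handle_provider_deletion


@shared_task
@handle_provider_deletion
def perform_provider_scan(scan_id, tenant_id, provider_id):
    # Any ObjectDoesNotExist or IntegrityError raised here is re-checked by the
    # decorator: if the provider row is gone, ProviderDeletedException is raised;
    # otherwise the original exception propagates unchanged.
    ...


# Callers must pass the IDs as keyword arguments so the decorator can read them
# (values below are placeholders).
perform_provider_scan.delay(scan_id="...", tenant_id="...", provider_id="...")
```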
@@ -66,6 +66,10 @@ class ProviderConnectionError(Exception):
    """Base exception for provider connection errors."""


class ProviderDeletedException(Exception):
    """Raised when a provider has been deleted during scan/task execution."""


def custom_exception_handler(exc, context):
    if isinstance(exc, django_validation_error):
        if hasattr(exc, "error_dict"):
@@ -25,6 +25,7 @@ from api.db_utils import (
|
||||
from api.models import (
|
||||
AttackSurfaceOverview,
|
||||
ComplianceRequirementOverview,
|
||||
DailySeveritySummary,
|
||||
Finding,
|
||||
Integration,
|
||||
Invitation,
|
||||
@@ -42,6 +43,7 @@ from api.models import (
|
||||
ResourceTag,
|
||||
Role,
|
||||
Scan,
|
||||
ScanCategorySummary,
|
||||
ScanSummary,
|
||||
SeverityChoices,
|
||||
StateChoices,
|
||||
@@ -156,6 +158,9 @@ class CommonFindingFilters(FilterSet):
|
||||
field_name="resources__type", lookup_expr="icontains"
|
||||
)
|
||||
|
||||
category = CharFilter(method="filter_category")
|
||||
category__in = CharInFilter(field_name="categories", lookup_expr="overlap")
|
||||
|
||||
# Temporarily disabled until we implement tag filtering in the UI
|
||||
# resource_tag_key = CharFilter(field_name="resources__tags__key")
|
||||
# resource_tag_key__in = CharInFilter(
|
||||
@@ -187,6 +192,9 @@ class CommonFindingFilters(FilterSet):
|
||||
def filter_resource_type(self, queryset, name, value):
|
||||
return queryset.filter(resource_types__contains=[value])
|
||||
|
||||
def filter_category(self, queryset, name, value):
|
||||
return queryset.filter(categories__contains=[value])
|
||||
|
||||
def filter_resource_tag(self, queryset, name, value):
|
||||
overall_query = Q()
|
||||
for key_value_pair in value:
|
||||
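To make the new category filtering in this hunk concrete: `filter_category` resolves to an array-containment lookup on the denormalized `Finding.categories` field added later in this changeset, while `category__in` uses the PostgreSQL array overlap lookup. A rough ORM equivalent, with placeholder category values:

```python
# Rough equivalents of ?filter[category]=... and ?filter[category__in]=...;
# "internet-exposed" and "secrets" are placeholder values, not taken from this diff.
from api.models import Finding

# category: findings whose categories array contains the given value
exposed = Finding.objects.filter(categories__contains=["internet-exposed"])

# category__in: findings whose categories array overlaps any of the given values
any_of = Finding.objects.filter(categories__overlap=["internet-exposed", "secrets"])
```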
@@ -761,7 +769,7 @@ class RoleFilter(FilterSet):
|
||||
|
||||
class ComplianceOverviewFilter(FilterSet):
|
||||
inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
|
||||
scan_id = UUIDFilter(field_name="scan_id")
|
||||
scan_id = UUIDFilter(field_name="scan_id", required=True)
|
||||
region = CharFilter(field_name="region")
|
||||
|
||||
class Meta:
|
||||
@@ -795,6 +803,68 @@ class ScanSummaryFilter(FilterSet):
|
||||
}
|
||||
|
||||
|
||||
class DailySeveritySummaryFilter(FilterSet):
|
||||
"""Filter for findings_severity/timeseries endpoint."""
|
||||
|
||||
MAX_DATE_RANGE_DAYS = 365
|
||||
|
||||
provider_id = UUIDFilter(field_name="provider_id", lookup_expr="exact")
|
||||
provider_id__in = UUIDInFilter(field_name="provider_id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
field_name="provider__provider", choices=Provider.ProviderChoices.choices
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
field_name="provider__provider", choices=Provider.ProviderChoices.choices
|
||||
)
|
||||
date_from = DateFilter(method="filter_noop")
|
||||
date_to = DateFilter(method="filter_noop")
|
||||
|
||||
class Meta:
|
||||
model = DailySeveritySummary
|
||||
fields = ["provider_id"]
|
||||
|
||||
def filter_noop(self, queryset, name, value):
|
||||
return queryset
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if not self.data.get("date_from"):
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": "This query parameter is required.",
|
||||
"status": "400",
|
||||
"source": {"pointer": "filter[date_from]"},
|
||||
"code": "required",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
today = date.today()
|
||||
date_from = self.form.cleaned_data.get("date_from")
|
||||
date_to = min(self.form.cleaned_data.get("date_to") or today, today)
|
||||
|
||||
if (date_to - date_from).days > self.MAX_DATE_RANGE_DAYS:
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": f"Date range cannot exceed {self.MAX_DATE_RANGE_DAYS} days.",
|
||||
"status": "400",
|
||||
"source": {"pointer": "filter[date_from]"},
|
||||
"code": "invalid",
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
# View access
|
||||
self.request._date_from = date_from
|
||||
self.request._date_to = date_to
|
||||
|
||||
# Apply date filter (only lte for fill-forward logic)
|
||||
queryset = queryset.filter(date__lte=date_to)
|
||||
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
|
||||
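A hedged sketch of how the validation above surfaces at the timeseries endpoint named in the changelog (`GET /api/v1/overviews/findings_severity/timeseries`); authentication is omitted and the client setup is illustrative:

```python
# Illustrative only: exercises the filter rules above with DRF's test client.
from rest_framework.test import APIClient

client = APIClient()

# Missing filter[date_from] -> 400 with code "required" (see filter_queryset above).
client.get("/api/v1/overviews/findings_severity/timeseries")

# A valid request: date_to is clamped to today and the range is capped at 365 days.
client.get(
    "/api/v1/overviews/findings_severity/timeseries",
    {"filter[date_from]": "2025-01-01", "filter[date_to]": "2025-03-31"},
)
```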
class ScanSummarySeverityFilter(ScanSummaryFilter):
|
||||
"""Filter for findings_severity ScanSummary endpoint - includes status filters"""
|
||||
|
||||
@@ -887,6 +957,26 @@ class ProcessorFilter(FilterSet):
|
||||
)
|
||||
|
||||
|
||||
class IntegrationGitHubFindingsFilter(FilterSet):
|
||||
# To be expanded as needed
|
||||
finding_id = UUIDFilter(field_name="id", lookup_expr="exact")
|
||||
finding_id__in = UUIDInFilter(field_name="id", lookup_expr="in")
|
||||
|
||||
class Meta:
|
||||
model = Finding
|
||||
fields = {}
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
# Validate that there is at least one filter provided
|
||||
if not self.data:
|
||||
raise ValidationError(
|
||||
{
|
||||
"findings": "No finding filters provided. At least one filter is required."
|
||||
}
|
||||
)
|
||||
return super().filter_queryset(queryset)
|
||||
|
||||
|
||||
class IntegrationJiraFindingsFilter(FilterSet):
|
||||
# To be expanded as needed
|
||||
finding_id = UUIDFilter(field_name="id", lookup_expr="exact")
|
||||
@@ -1033,3 +1123,22 @@ class AttackSurfaceOverviewFilter(FilterSet):
|
||||
class Meta:
|
||||
model = AttackSurfaceOverview
|
||||
fields = {}
|
||||
|
||||
|
||||
class CategoryOverviewFilter(FilterSet):
|
||||
provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
|
||||
provider_id__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
|
||||
provider_type = ChoiceFilter(
|
||||
field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
|
||||
)
|
||||
provider_type__in = ChoiceInFilter(
|
||||
field_name="scan__provider__provider",
|
||||
choices=Provider.ProviderChoices.choices,
|
||||
lookup_expr="in",
|
||||
)
|
||||
category = CharFilter(field_name="category", lookup_expr="exact")
|
||||
category__in = CharInFilter(field_name="category", lookup_expr="in")
|
||||
|
||||
class Meta:
|
||||
model = ScanCategorySummary
|
||||
fields = {}
|
||||
|
||||
@@ -0,0 +1,96 @@
|
||||
# Generated by Django 5.1.14 on 2025-12-03 13:38
|
||||
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0060_attack_surface_overview"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="DailySeveritySummary",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("date", models.DateField()),
|
||||
("critical", models.IntegerField(default=0)),
|
||||
("high", models.IntegerField(default=0)),
|
||||
("medium", models.IntegerField(default=0)),
|
||||
("low", models.IntegerField(default=0)),
|
||||
("informational", models.IntegerField(default=0)),
|
||||
("muted", models.IntegerField(default=0)),
|
||||
(
|
||||
"provider",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="daily_severity_summaries",
|
||||
related_query_name="daily_severity_summary",
|
||||
to="api.provider",
|
||||
),
|
||||
),
|
||||
(
|
||||
"scan",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="daily_severity_summaries",
|
||||
related_query_name="daily_severity_summary",
|
||||
to="api.scan",
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="api.tenant",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "daily_severity_summaries",
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="dailyseveritysummary",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "id"],
|
||||
name="dss_tenant_id_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="dailyseveritysummary",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "provider_id"],
|
||||
name="dss_tenant_provider_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="dailyseveritysummary",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant_id", "provider", "date"),
|
||||
name="unique_daily_severity_summary",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="dailyseveritysummary",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
"tenant_id",
|
||||
name="rls_on_dailyseveritysummary",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,30 @@
|
||||
# Generated by Django 5.1.14 on 2025-12-10
|
||||
|
||||
from django.db import migrations
|
||||
from tasks.tasks import backfill_daily_severity_summaries_task
|
||||
|
||||
from api.db_router import MainRouter
|
||||
from api.rls import Tenant
|
||||
|
||||
|
||||
def trigger_backfill_task(apps, schema_editor):
|
||||
"""
|
||||
Trigger the backfill task for all tenants.
|
||||
|
||||
This dispatches backfill_daily_severity_summaries_task for each tenant
|
||||
in the system to populate DailySeveritySummary records from historical scans.
|
||||
"""
|
||||
tenant_ids = Tenant.objects.using(MainRouter.admin_db).values_list("id", flat=True)
|
||||
|
||||
for tenant_id in tenant_ids:
|
||||
backfill_daily_severity_summaries_task.delay(tenant_id=str(tenant_id), days=90)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0061_daily_severity_summary"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(trigger_backfill_task, migrations.RunPython.noop),
|
||||
]
|
||||
@@ -0,0 +1,111 @@
|
||||
import uuid
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
import api.db_utils
|
||||
import api.rls
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0062_backfill_daily_severity_summaries"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ScanCategorySummary",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"tenant",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="api.tenant",
|
||||
),
|
||||
),
|
||||
(
|
||||
"inserted_at",
|
||||
models.DateTimeField(auto_now_add=True),
|
||||
),
|
||||
(
|
||||
"scan",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="category_summaries",
|
||||
related_query_name="category_summary",
|
||||
to="api.scan",
|
||||
),
|
||||
),
|
||||
(
|
||||
"category",
|
||||
models.CharField(max_length=100),
|
||||
),
|
||||
(
|
||||
"severity",
|
||||
api.db_utils.SeverityEnumField(
|
||||
choices=[
|
||||
("critical", "Critical"),
|
||||
("high", "High"),
|
||||
("medium", "Medium"),
|
||||
("low", "Low"),
|
||||
("informational", "Informational"),
|
||||
],
|
||||
),
|
||||
),
|
||||
(
|
||||
"total_findings",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Non-muted findings (PASS + FAIL)"
|
||||
),
|
||||
),
|
||||
(
|
||||
"failed_findings",
|
||||
models.IntegerField(
|
||||
default=0,
|
||||
help_text="Non-muted FAIL findings (subset of total_findings)",
|
||||
),
|
||||
),
|
||||
(
|
||||
"new_failed_findings",
|
||||
models.IntegerField(
|
||||
default=0,
|
||||
help_text="Non-muted FAIL with delta='new' (subset of failed_findings)",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "scan_category_summaries",
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="scancategorysummary",
|
||||
index=models.Index(
|
||||
fields=["tenant_id", "scan"], name="scs_tenant_scan_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="scancategorysummary",
|
||||
constraint=models.UniqueConstraint(
|
||||
fields=("tenant_id", "scan_id", "category", "severity"),
|
||||
name="unique_category_severity_per_scan",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="scancategorysummary",
|
||||
constraint=api.rls.RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_scancategorysummary",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,22 @@
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0063_scan_category_summary"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="finding",
|
||||
name="categories",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
size=None,
|
||||
help_text="Categories from check metadata for efficient filtering",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,37 @@
|
||||
# Generated by Django migration for Alibaba Cloud provider support
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
import api.db_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("api", "0064_finding_categories"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="provider",
|
||||
name="provider",
|
||||
field=api.db_utils.ProviderEnumField(
|
||||
choices=[
|
||||
("aws", "AWS"),
|
||||
("azure", "Azure"),
|
||||
("gcp", "GCP"),
|
||||
("kubernetes", "Kubernetes"),
|
||||
("m365", "M365"),
|
||||
("github", "GitHub"),
|
||||
("mongodbatlas", "MongoDB Atlas"),
|
||||
("iac", "IaC"),
|
||||
("oraclecloud", "Oracle Cloud Infrastructure"),
|
||||
("alibabacloud", "Alibaba Cloud"),
|
||||
],
|
||||
default="aws",
|
||||
),
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"ALTER TYPE provider ADD VALUE IF NOT EXISTS 'alibabacloud';",
|
||||
reverse_sql=migrations.RunSQL.noop,
|
||||
),
|
||||
]
|
||||
@@ -287,6 +287,7 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
MONGODBATLAS = "mongodbatlas", _("MongoDB Atlas")
|
||||
IAC = "iac", _("IaC")
|
||||
ORACLECLOUD = "oraclecloud", _("Oracle Cloud Infrastructure")
|
||||
ALIBABACLOUD = "alibabacloud", _("Alibaba Cloud")
|
||||
|
||||
@staticmethod
|
||||
def validate_aws_uid(value):
|
||||
@@ -391,6 +392,15 @@ class Provider(RowLevelSecurityProtectedModel):
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def validate_alibabacloud_uid(value):
|
||||
if not re.match(r"^\d{16}$", value):
|
||||
raise ModelValidationError(
|
||||
detail="Alibaba Cloud account ID must be exactly 16 digits.",
|
||||
code="alibabacloud-uid",
|
||||
pointer="/data/attributes/uid",
|
||||
)
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
updated_at = models.DateTimeField(auto_now=True, editable=False)
|
||||
@@ -716,14 +726,19 @@ class Resource(RowLevelSecurityProtectedModel):
|
||||
self.clear_tags()
|
||||
return
|
||||
|
||||
# Add new relationships with the tenant_id field
|
||||
# Add new relationships with the tenant_id field; avoid touching the
|
||||
# Resource row unless a mapping is actually created to prevent noisy
|
||||
# updates during scans.
|
||||
mapping_created = False
|
||||
for tag in tags:
|
||||
ResourceTagMapping.objects.update_or_create(
|
||||
_, created = ResourceTagMapping.objects.update_or_create(
|
||||
tag=tag, resource=self, tenant_id=self.tenant_id
|
||||
)
|
||||
mapping_created = mapping_created or created
|
||||
|
||||
# Save the instance
|
||||
self.save()
|
||||
if mapping_created:
|
||||
# Only bump updated_at when the tag set truly changed
|
||||
self.save(update_fields=["updated_at"])
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "resources"
|
||||
@@ -868,6 +883,14 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
null=True,
|
||||
)
|
||||
|
||||
# Check metadata denormalization
|
||||
categories = ArrayField(
|
||||
models.CharField(max_length=100),
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text="Categories from check metadata for efficient filtering",
|
||||
)
|
||||
|
||||
# Relationships
|
||||
scan = models.ForeignKey(to=Scan, related_name="findings", on_delete=models.CASCADE)
|
||||
|
||||
@@ -1500,10 +1523,70 @@ class ScanSummary(RowLevelSecurityProtectedModel):
|
||||
resource_name = "scan-summaries"
|
||||
|
||||
|
||||
class DailySeveritySummary(RowLevelSecurityProtectedModel):
|
||||
"""
|
||||
Pre-aggregated daily severity counts per provider.
|
||||
Used by findings_severity/timeseries endpoint for efficient queries.
|
||||
"""
|
||||
|
||||
objects = ActiveProviderManager()
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
date = models.DateField()
|
||||
|
||||
provider = models.ForeignKey(
|
||||
Provider,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="daily_severity_summaries",
|
||||
related_query_name="daily_severity_summary",
|
||||
)
|
||||
scan = models.ForeignKey(
|
||||
Scan,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="daily_severity_summaries",
|
||||
related_query_name="daily_severity_summary",
|
||||
)
|
||||
|
||||
# Aggregated fail counts by severity
|
||||
critical = models.IntegerField(default=0)
|
||||
high = models.IntegerField(default=0)
|
||||
medium = models.IntegerField(default=0)
|
||||
low = models.IntegerField(default=0)
|
||||
informational = models.IntegerField(default=0)
|
||||
muted = models.IntegerField(default=0)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "daily_severity_summaries"
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "provider", "date"),
|
||||
name="unique_daily_severity_summary",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["tenant_id", "id"],
|
||||
name="dss_tenant_id_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["tenant_id", "provider_id"],
|
||||
name="dss_tenant_provider_idx",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
class Integration(RowLevelSecurityProtectedModel):
|
||||
class IntegrationChoices(models.TextChoices):
|
||||
AMAZON_S3 = "amazon_s3", _("Amazon S3")
|
||||
AWS_SECURITY_HUB = "aws_security_hub", _("AWS Security Hub")
|
||||
GITHUB = "github", _("GitHub")
|
||||
JIRA = "jira", _("JIRA")
|
||||
SLACK = "slack", _("Slack")
|
||||
|
||||
@@ -1892,6 +1975,64 @@ class ResourceScanSummary(RowLevelSecurityProtectedModel):
|
||||
]
|
||||
|
||||
|
||||
class ScanCategorySummary(RowLevelSecurityProtectedModel):
|
||||
"""
|
||||
Pre-aggregated category metrics per scan by severity.
|
||||
|
||||
Stores one row per (category, severity) combination per scan for efficient
|
||||
overview queries. Categories come from check_metadata.categories.
|
||||
|
||||
Count relationships (each is a subset of the previous):
|
||||
- total_findings >= failed_findings >= new_failed_findings
|
||||
"""
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
|
||||
inserted_at = models.DateTimeField(auto_now_add=True, editable=False)
|
||||
|
||||
scan = models.ForeignKey(
|
||||
Scan,
|
||||
on_delete=models.CASCADE,
|
||||
related_name="category_summaries",
|
||||
related_query_name="category_summary",
|
||||
)
|
||||
|
||||
category = models.CharField(max_length=100)
|
||||
severity = SeverityEnumField(choices=SeverityChoices)
|
||||
|
||||
total_findings = models.IntegerField(
|
||||
default=0, help_text="Non-muted findings (PASS + FAIL)"
|
||||
)
|
||||
failed_findings = models.IntegerField(
|
||||
default=0, help_text="Non-muted FAIL findings (subset of total_findings)"
|
||||
)
|
||||
new_failed_findings = models.IntegerField(
|
||||
default=0,
|
||||
help_text="Non-muted FAIL with delta='new' (subset of failed_findings)",
|
||||
)
|
||||
|
||||
class Meta(RowLevelSecurityProtectedModel.Meta):
|
||||
db_table = "scan_category_summaries"
|
||||
|
||||
indexes = [
|
||||
models.Index(fields=["tenant_id", "scan"], name="scs_tenant_scan_idx"),
|
||||
]
|
||||
|
||||
constraints = [
|
||||
models.UniqueConstraint(
|
||||
fields=("tenant_id", "scan_id", "category", "severity"),
|
||||
name="unique_category_severity_per_scan",
|
||||
),
|
||||
RowLevelSecurityConstraint(
|
||||
field="tenant_id",
|
||||
name="rls_on_%(class)s",
|
||||
statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
resource_name = "scan-category-summaries"
|
||||
|
||||
|
||||
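As a hedged illustration of how these pre-aggregated rows might be consumed by an overview query (the queryset below is a sketch, not code from this changeset, and `scan_id` is a placeholder):

```python
# Sketch: per-category totals for one scan, summed across severities.
from django.db.models import Sum

from api.models import ScanCategorySummary

per_category = (
    ScanCategorySummary.objects.filter(scan_id=scan_id)  # scan_id: placeholder UUID
    .values("category")
    .annotate(
        total=Sum("total_findings"),
        failed=Sum("failed_findings"),
        new_failed=Sum("new_failed_findings"),
    )
)
```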
class LighthouseConfiguration(RowLevelSecurityProtectedModel):
|
||||
"""
|
||||
Stores configuration and API keys for LLM services.
|
||||
|
||||
+827
-104
File diff suppressed because it is too large
@@ -2,9 +2,12 @@ import uuid
|
||||
from unittest.mock import call, patch
|
||||
|
||||
import pytest
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db import IntegrityError
|
||||
|
||||
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
|
||||
from api.decorators import set_tenant
|
||||
from api.decorators import handle_provider_deletion, set_tenant
|
||||
from api.exceptions import ProviderDeletedException
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -34,3 +37,142 @@ class TestSetTenantDecorator:
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
random_func("test_arg")
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestHandleProviderDeletionDecorator:
|
||||
def test_success_no_exception(self, tenants_fixture, providers_fixture):
|
||||
"""Decorated function runs normally when no exception is raised."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
|
||||
@handle_provider_deletion
|
||||
def task_func(**kwargs):
|
||||
return "success"
|
||||
|
||||
result = task_func(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
)
|
||||
assert result == "success"
|
||||
|
||||
@patch("api.decorators.rls_transaction")
|
||||
@patch("api.decorators.Provider.objects.filter")
|
||||
def test_provider_deleted_with_provider_id(
|
||||
self, mock_filter, mock_rls, tenants_fixture
|
||||
):
|
||||
"""Raises ProviderDeletedException when provider_id provided and provider deleted."""
|
||||
tenant = tenants_fixture[0]
|
||||
deleted_provider_id = str(uuid.uuid4())
|
||||
|
||||
mock_rls.return_value.__enter__ = lambda s: None
|
||||
mock_rls.return_value.__exit__ = lambda s, *args: None
|
||||
mock_filter.return_value.exists.return_value = False
|
||||
|
||||
@handle_provider_deletion
|
||||
def task_func(**kwargs):
|
||||
raise ObjectDoesNotExist("Some object not found")
|
||||
|
||||
with pytest.raises(ProviderDeletedException) as exc_info:
|
||||
task_func(tenant_id=str(tenant.id), provider_id=deleted_provider_id)
|
||||
|
||||
assert deleted_provider_id in str(exc_info.value)
|
||||
|
||||
@patch("api.decorators.rls_transaction")
|
||||
@patch("api.decorators.Provider.objects.filter")
|
||||
@patch("api.decorators.Scan.objects.filter")
|
||||
def test_provider_deleted_with_scan_id(
|
||||
self, mock_scan_filter, mock_provider_filter, mock_rls, tenants_fixture
|
||||
):
|
||||
"""Raises ProviderDeletedException when scan exists but provider deleted."""
|
||||
tenant = tenants_fixture[0]
|
||||
scan_id = str(uuid.uuid4())
|
||||
provider_id = str(uuid.uuid4())
|
||||
|
||||
mock_rls.return_value.__enter__ = lambda s: None
|
||||
mock_rls.return_value.__exit__ = lambda s, *args: None
|
||||
|
||||
mock_scan = type("MockScan", (), {"provider_id": provider_id})()
|
||||
mock_scan_filter.return_value.first.return_value = mock_scan
|
||||
mock_provider_filter.return_value.exists.return_value = False
|
||||
|
||||
@handle_provider_deletion
|
||||
def task_func(**kwargs):
|
||||
raise ObjectDoesNotExist("Some object not found")
|
||||
|
||||
with pytest.raises(ProviderDeletedException) as exc_info:
|
||||
task_func(tenant_id=str(tenant.id), scan_id=scan_id)
|
||||
|
||||
assert provider_id in str(exc_info.value)
|
||||
|
||||
@patch("api.decorators.rls_transaction")
|
||||
@patch("api.decorators.Scan.objects.filter")
|
||||
def test_scan_deleted_cascade(self, mock_scan_filter, mock_rls, tenants_fixture):
|
||||
"""Raises ProviderDeletedException when scan was deleted (CASCADE from provider)."""
|
||||
tenant = tenants_fixture[0]
|
||||
scan_id = str(uuid.uuid4())
|
||||
|
||||
mock_rls.return_value.__enter__ = lambda s: None
|
||||
mock_rls.return_value.__exit__ = lambda s, *args: None
|
||||
mock_scan_filter.return_value.first.return_value = None
|
||||
|
||||
@handle_provider_deletion
|
||||
def task_func(**kwargs):
|
||||
raise ObjectDoesNotExist("Some object not found")
|
||||
|
||||
with pytest.raises(ProviderDeletedException) as exc_info:
|
||||
task_func(tenant_id=str(tenant.id), scan_id=scan_id)
|
||||
|
||||
assert scan_id in str(exc_info.value)
|
||||
|
||||
@patch("api.decorators.rls_transaction")
|
||||
@patch("api.decorators.Provider.objects.filter")
|
||||
def test_provider_exists_reraises_original(
|
||||
self, mock_filter, mock_rls, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Re-raises original exception when provider still exists."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
|
||||
mock_rls.return_value.__enter__ = lambda s: None
|
||||
mock_rls.return_value.__exit__ = lambda s, *args: None
|
||||
mock_filter.return_value.exists.return_value = True
|
||||
|
||||
@handle_provider_deletion
|
||||
def task_func(**kwargs):
|
||||
raise ObjectDoesNotExist("Actual object missing")
|
||||
|
||||
with pytest.raises(ObjectDoesNotExist):
|
||||
task_func(tenant_id=str(tenant.id), provider_id=str(provider.id))
|
||||
|
||||
@patch("api.decorators.rls_transaction")
|
||||
@patch("api.decorators.Provider.objects.filter")
|
||||
def test_integrity_error_provider_deleted(
|
||||
self, mock_filter, mock_rls, tenants_fixture
|
||||
):
|
||||
"""Raises ProviderDeletedException on IntegrityError when provider deleted."""
|
||||
tenant = tenants_fixture[0]
|
||||
deleted_provider_id = str(uuid.uuid4())
|
||||
|
||||
mock_rls.return_value.__enter__ = lambda s: None
|
||||
mock_rls.return_value.__exit__ = lambda s, *args: None
|
||||
mock_filter.return_value.exists.return_value = False
|
||||
|
||||
@handle_provider_deletion
|
||||
def task_func(**kwargs):
|
||||
raise IntegrityError("FK constraint violation")
|
||||
|
||||
with pytest.raises(ProviderDeletedException):
|
||||
task_func(tenant_id=str(tenant.id), provider_id=deleted_provider_id)
|
||||
|
||||
def test_missing_provider_and_scan_raises_assertion(self, tenants_fixture):
|
||||
"""Raises AssertionError when neither provider_id nor scan_id in kwargs."""
|
||||
|
||||
@handle_provider_deletion
|
||||
def task_func(**kwargs):
|
||||
raise ObjectDoesNotExist("Some object not found")
|
||||
|
||||
with pytest.raises(AssertionError) as exc_info:
|
||||
task_func(tenant_id=str(tenants_fixture[0].id))
|
||||
|
||||
assert "provider or scan" in str(exc_info.value)
|
||||
|
||||
@@ -16,6 +16,7 @@ from api.utils import (
    return_prowler_provider,
    validate_invitation,
)
from prowler.providers.alibabacloud.alibabacloud_provider import AlibabacloudProvider
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHubConnection
from prowler.providers.azure.azure_provider import AzureProvider
@@ -116,6 +117,7 @@ class TestReturnProwlerProvider:
            (Provider.ProviderChoices.MONGODBATLAS.value, MongodbatlasProvider),
            (Provider.ProviderChoices.ORACLECLOUD.value, OraclecloudProvider),
            (Provider.ProviderChoices.IAC.value, IacProvider),
            (Provider.ProviderChoices.ALIBABACLOUD.value, AlibabacloudProvider),
        ],
    )
    def test_return_prowler_provider(self, provider_type, expected_provider):
+1295 -423 (file diff suppressed because it is too large)
@@ -11,6 +11,7 @@ from api.exceptions import InvitationTokenExpiredException
from api.models import Integration, Invitation, Processor, Provider, Resource
from api.v1.serializers import FindingMetadataSerializer
from prowler.lib.outputs.jira.jira import Jira, JiraBasicAuthError
from prowler.providers.alibabacloud.alibabacloud_provider import AlibabacloudProvider
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
@@ -63,8 +64,9 @@ def merge_dicts(default_dict: dict, replacement_dict: dict) -> dict:

def return_prowler_provider(
    provider: Provider,
) -> [
    AwsProvider
) -> (
    AlibabacloudProvider
    | AwsProvider
    | AzureProvider
    | GcpProvider
    | GithubProvider
@@ -73,14 +75,14 @@ def return_prowler_provider(
    | M365Provider
    | MongodbatlasProvider
    | OraclecloudProvider
]:
):
    """Return the Prowler provider class based on the given provider type.

    Args:
        provider (Provider): The provider object containing the provider type and associated secrets.

    Returns:
        AwsProvider | AzureProvider | GcpProvider | GithubProvider | IacProvider | KubernetesProvider | M365Provider | OraclecloudProvider | MongodbatlasProvider: The corresponding provider class.
        AlibabacloudProvider | AwsProvider | AzureProvider | GcpProvider | GithubProvider | IacProvider | KubernetesProvider | M365Provider | MongodbatlasProvider | OraclecloudProvider: The corresponding provider class.

    Raises:
        ValueError: If the provider type specified in `provider.provider` is not supported.
@@ -104,6 +106,8 @@ def return_prowler_provider(
            prowler_provider = IacProvider
        case Provider.ProviderChoices.ORACLECLOUD.value:
            prowler_provider = OraclecloudProvider
        case Provider.ProviderChoices.ALIBABACLOUD.value:
            prowler_provider = AlibabacloudProvider
        case _:
            raise ValueError(f"Provider type {provider.provider} not supported")
    return prowler_provider
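The dispatch above is a plain `match`/`case` over the provider type. A minimal standalone sketch of the same pattern, reduced to strings so it runs without the Prowler imports (names here are illustrative, not the repository's API):

def pick_provider_class(provider_type: str) -> str:
    # Illustrative only: map a provider type string to a class name, mirroring the
    # match/case dispatch in return_prowler_provider.
    match provider_type:
        case "aws":
            return "AwsProvider"
        case "alibabacloud":
            return "AlibabacloudProvider"
        case _:
            raise ValueError(f"Provider type {provider_type} not supported")


assert pick_provider_class("alibabacloud") == "AlibabacloudProvider"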
@@ -169,7 +173,8 @@ def initialize_prowler_provider(
    provider: Provider,
    mutelist_processor: Processor | None = None,
) -> (
    AwsProvider
    AlibabacloudProvider
    | AwsProvider
    | AzureProvider
    | GcpProvider
    | GithubProvider
@@ -186,9 +191,8 @@ def initialize_prowler_provider(
        mutelist_processor (Processor): The mutelist processor object containing the mutelist configuration.

    Returns:
        AwsProvider | AzureProvider | GcpProvider | GithubProvider | IacProvider | KubernetesProvider | M365Provider | OraclecloudProvider | MongodbatlasProvider: An instance of the corresponding provider class
        (`AwsProvider`, `AzureProvider`, `GcpProvider`, `GithubProvider`, `IacProvider`, `KubernetesProvider`, `M365Provider`, `OraclecloudProvider` or `MongodbatlasProvider`) initialized with the
        provider's secrets.
        AlibabacloudProvider | AwsProvider | AzureProvider | GcpProvider | GithubProvider | IacProvider | KubernetesProvider | M365Provider | MongodbatlasProvider | OraclecloudProvider: An instance of the corresponding provider class
        initialized with the provider's secrets.
    """
    prowler_provider = return_prowler_provider(provider)
    prowler_provider_kwargs = get_prowler_provider_kwargs(provider, mutelist_processor)
@@ -283,6 +287,20 @@ def prowler_integration_connection_test(integration: Integration) -> Connection:
        integration.save()

        return connection
    elif integration.integration_type == Integration.IntegrationChoices.GITHUB:
        from prowler.lib.outputs.github.github import GitHub

        github_connection = GitHub.test_connection(
            **integration.credentials,
            raise_on_exception=False,
        )
        repositories = (
            github_connection.repositories if github_connection.is_connected else {}
        )
        with rls_transaction(str(integration.tenant_id)):
            integration.configuration["repositories"] = repositories
            integration.save()
        return github_connection
    elif integration.integration_type == Integration.IntegrationChoices.JIRA:
        jira_connection = Jira.test_connection(
            **integration.credentials,
@@ -382,10 +400,18 @@ def get_findings_metadata_no_aggregations(tenant_id: str, filtered_queryset):
    regions = sorted({region for region in aggregation["regions"] or [] if region})
    resource_types = sorted(set(aggregation["resource_types"] or []))

    # Aggregate categories from findings
    categories_set = set()
    for categories_list in filtered_queryset.values_list("categories", flat=True):
        if categories_list:
            categories_set.update(categories_list)
    categories = sorted(categories_set)

    result = {
        "services": services,
        "regions": regions,
        "resource_types": resource_types,
        "categories": categories,
    }

    serializer = FindingMetadataSerializer(data=result)
@@ -394,9 +420,22 @@ def get_findings_metadata_no_aggregations(tenant_id: str, filtered_queryset):
    return serializer.data


def initialize_prowler_integration(integration: Integration) -> Jira:
def initialize_prowler_integration(integration: Integration):
    # TODO Refactor other integrations to use this function
    if integration.integration_type == Integration.IntegrationChoices.JIRA:
    if integration.integration_type == Integration.IntegrationChoices.GITHUB:
        from prowler.lib.outputs.github.exceptions import GitHubAuthenticationError
        from prowler.lib.outputs.github.github import GitHub

        try:
            return GitHub(**integration.credentials)
        except GitHubAuthenticationError as github_auth_error:
            with rls_transaction(str(integration.tenant_id)):
                integration.configuration["repositories"] = {}
                integration.connected = False
                integration.connection_last_checked_at = datetime.now(tz=timezone.utc)
                integration.save()
            raise github_auth_error
    elif integration.integration_type == Integration.IntegrationChoices.JIRA:
        try:
            return Jira(**integration.credentials)
        except JiraBasicAuthError as jira_auth_error:
@@ -67,6 +67,14 @@ class SecurityHubConfigSerializer(BaseValidateSerializer):
        resource_name = "integrations"


class GitHubConfigSerializer(BaseValidateSerializer):
    owner = serializers.CharField(read_only=True)
    repositories = serializers.DictField(read_only=True)

    class Meta:
        resource_name = "integrations"


class JiraConfigSerializer(BaseValidateSerializer):
    domain = serializers.CharField(read_only=True)
    issue_types = serializers.ListField(
@@ -93,6 +101,14 @@ class AWSCredentialSerializer(BaseValidateSerializer):
        resource_name = "integrations"


class GitHubCredentialSerializer(BaseValidateSerializer):
    token = serializers.CharField(required=True)
    owner = serializers.CharField(required=False)

    class Meta:
        resource_name = "integrations"


class JiraCredentialSerializer(BaseValidateSerializer):
    user_mail = serializers.EmailField(required=True)
    api_token = serializers.CharField(required=True)
@@ -153,6 +169,23 @@ class JiraCredentialSerializer(BaseValidateSerializer):
                },
            },
        },
        {
            "type": "object",
            "title": "GitHub Credentials",
            "properties": {
                "token": {
                    "type": "string",
                    "description": "GitHub Personal Access Token (PAT) with repo scope. Can be generated from "
                    "GitHub Settings > Developer settings > Personal access tokens.",
                },
                "owner": {
                    "type": "string",
                    "description": "Repository owner (username or organization name). Optional - if not provided, "
                    "all accessible repositories will be available.",
                },
            },
            "required": ["token"],
        },
        {
            "type": "object",
            "title": "JIRA Credentials",
@@ -221,6 +254,14 @@ class IntegrationCredentialField(serializers.JSONField):
                },
            },
        },
        {
            "type": "object",
            "title": "GitHub",
            "description": "GitHub integration does not accept any configuration in the payload. Leave it as an "
            "empty JSON object (`{}`).",
            "properties": {},
            "additionalProperties": False,
        },
        {
            "type": "object",
            "title": "JIRA",
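As a quick illustration of the schema documented above, a hypothetical GitHub integration payload would carry only the token (required) and optionally the owner, while the configuration object stays empty; the values below are placeholders, not real credentials:

github_credentials = {
    "token": "ghp_exampleexampleexampleexample",  # placeholder PAT, not a real token
    "owner": "my-organization",  # optional; omit to expose all accessible repositories
}
github_configuration = {}  # the API fills "repositories" and "owner" on its own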
@@ -40,11 +40,16 @@ class BedrockCredentialsSerializer(serializers.Serializer):
    """
    Serializer for AWS Bedrock credentials validation.

    Validates long-term AWS credentials (AKIA) and region format.
    Supports two authentication methods:
    1. AWS access key + secret key
    2. Bedrock API key (bearer token)

    In both cases, region is mandatory.
    """

    access_key_id = serializers.CharField()
    secret_access_key = serializers.CharField()
    access_key_id = serializers.CharField(required=False, allow_blank=False)
    secret_access_key = serializers.CharField(required=False, allow_blank=False)
    api_key = serializers.CharField(required=False, allow_blank=False)
    region = serializers.CharField()

    def validate_access_key_id(self, value: str) -> str:
@@ -65,6 +70,15 @@ class BedrockCredentialsSerializer(serializers.Serializer):
            )
        return value

    def validate_api_key(self, value: str) -> str:
        """
        Validate Bedrock API key (bearer token).
        """
        pattern = r"^ABSKQmVkcm9ja0FQSUtleS[A-Za-z0-9+/=]{110}$"
        if not re.match(pattern, value or ""):
            raise serializers.ValidationError("Invalid Bedrock API key format.")
        return value

    def validate_region(self, value: str) -> str:
        """Validate AWS region format."""
        pattern = r"^[a-z]{2}-[a-z]+-\d+$"
@@ -74,6 +88,50 @@ class BedrockCredentialsSerializer(serializers.Serializer):
            )
        return value

    def validate(self, attrs):
        """
        Enforce either:
        - access_key_id + secret_access_key + region
        OR
        - api_key + region
        """
        access_key_id = attrs.get("access_key_id")
        secret_access_key = attrs.get("secret_access_key")
        api_key = attrs.get("api_key")
        region = attrs.get("region")

        errors = {}

        if not region:
            errors["region"] = ["Region is required."]

        using_access_keys = bool(access_key_id or secret_access_key)
        using_api_key = api_key is not None and api_key != ""

        if using_access_keys and using_api_key:
            errors["non_field_errors"] = [
                "Provide either access key + secret key OR api key, not both."
            ]
        elif not using_access_keys and not using_api_key:
            errors["non_field_errors"] = [
                "You must provide either access key + secret key OR api key."
            ]
        elif using_access_keys:
            # Both access_key_id and secret_access_key must be present together
            if not access_key_id:
                errors.setdefault("access_key_id", []).append(
                    "AWS access key ID is required when using access key authentication."
                )
            if not secret_access_key:
                errors.setdefault("secret_access_key", []).append(
                    "AWS secret access key is required when using access key authentication."
                )

        if errors:
            raise serializers.ValidationError(errors)

        return attrs

    def to_internal_value(self, data):
        """Check for unknown fields before DRF filters them out."""
        if not isinstance(data, dict):
@@ -111,6 +169,15 @@ class BedrockCredentialsUpdateSerializer(BedrockCredentialsSerializer):
        for field in self.fields.values():
            field.required = False

    def validate(self, attrs):
        """
        For updates, this serializer only checks individual fields.
        It does NOT enforce the "either access keys OR api key" rule.
        That rule is applied later, after merging with existing stored
        credentials, in LighthouseProviderConfigUpdateSerializer.
        """
        return attrs


class OpenAICompatibleCredentialsSerializer(serializers.Serializer):
    """
@@ -168,27 +235,51 @@ class OpenAICompatibleCredentialsSerializer(serializers.Serializer):
                "required": ["api_key"],
            },
            {
                "type": "object",
                "title": "AWS Bedrock Credentials",
                "properties": {
                    "access_key_id": {
                        "type": "string",
                        "description": "AWS access key ID.",
                        "pattern": "^AKIA[0-9A-Z]{16}$",
                "oneOf": [
                    {
                        "title": "IAM Access Key Pair",
                        "type": "object",
                        "description": "Authenticate with AWS access key and secret key. Recommended when you manage IAM users or roles.",
                        "properties": {
                            "access_key_id": {
                                "type": "string",
                                "description": "AWS access key ID.",
                                "pattern": "^AKIA[0-9A-Z]{16}$",
                            },
                            "secret_access_key": {
                                "type": "string",
                                "description": "AWS secret access key.",
                                "pattern": "^[A-Za-z0-9/+=]{40}$",
                            },
                            "region": {
                                "type": "string",
                                "description": "AWS region identifier where Bedrock is available. Examples: us-east-1, "
                                "us-west-2, eu-west-1, ap-northeast-1.",
                                "pattern": "^[a-z]{2}-[a-z]+-\\d+$",
                            },
                        },
                        "required": ["access_key_id", "secret_access_key", "region"],
                    },
                    "secret_access_key": {
                        "type": "string",
                        "description": "AWS secret access key.",
                        "pattern": "^[A-Za-z0-9/+=]{40}$",
                    {
                        "title": "Amazon Bedrock API Key",
                        "type": "object",
                        "description": "Authenticate with an Amazon Bedrock API key (bearer token). Region is still required.",
                        "properties": {
                            "api_key": {
                                "type": "string",
                                "description": "Amazon Bedrock API key (bearer token).",
                            },
                            "region": {
                                "type": "string",
                                "description": "AWS region identifier where Bedrock is available. Examples: us-east-1, "
                                "us-west-2, eu-west-1, ap-northeast-1.",
                                "pattern": "^[a-z]{2}-[a-z]+-\\d+$",
                            },
                        },
                        "required": ["api_key", "region"],
                    },
                    "region": {
                        "type": "string",
                        "description": "AWS region identifier where Bedrock is available. Examples: us-east-1, "
                        "us-west-2, eu-west-1, ap-northeast-1.",
                        "pattern": "^[a-z]{2}-[a-z]+-\\d+$",
                    },
                },
                "required": ["access_key_id", "secret_access_key", "region"],
                ],
            },
            {
                "type": "object",
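To make the `validate()` rules above concrete, here is a sketch of the payload combinations they imply; all values are fabricated and only shaped to match the documented patterns:

valid_access_keys = {
    "access_key_id": "AKIAABCDEFGHIJKLMNOP",  # matches ^AKIA[0-9A-Z]{16}$
    "secret_access_key": "A" * 40,            # matches ^[A-Za-z0-9/+=]{40}$
    "region": "us-east-1",
}
valid_api_key = {
    "api_key": "ABSKQmVkcm9ja0FQSUtleS" + "A" * 110,  # matches the Bedrock API key pattern
    "region": "eu-west-1",
}
# Rejected: both authentication methods supplied at once.
invalid_mixed = {**valid_access_keys, "api_key": valid_api_key["api_key"]}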
@@ -304,6 +304,48 @@ from rest_framework_json_api import serializers
            },
            "required": ["atlas_public_key", "atlas_private_key"],
        },
        {
            "type": "object",
            "title": "Alibaba Cloud Static Credentials",
            "properties": {
                "access_key_id": {
                    "type": "string",
                    "description": "The Alibaba Cloud access key ID for authentication.",
                },
                "access_key_secret": {
                    "type": "string",
                    "description": "The Alibaba Cloud access key secret for authentication.",
                },
                "security_token": {
                    "type": "string",
                    "description": "The STS security token for temporary credentials (optional).",
                },
            },
            "required": ["access_key_id", "access_key_secret"],
        },
        {
            "type": "object",
            "title": "Alibaba Cloud RAM Role Assumption",
            "properties": {
                "role_arn": {
                    "type": "string",
                    "description": "The ARN of the RAM role to assume (e.g., acs:ram::1234567890123456:role/ProwlerRole).",
                },
                "access_key_id": {
                    "type": "string",
                    "description": "The Alibaba Cloud access key ID of the RAM user that will assume the role.",
                },
                "access_key_secret": {
                    "type": "string",
                    "description": "The Alibaba Cloud access key secret of the RAM user that will assume the role.",
                },
                "role_session_name": {
                    "type": "string",
                    "description": "An identifier for the role session (optional, defaults to 'ProwlerSession').",
                },
            },
            "required": ["role_arn", "access_key_id", "access_key_secret"],
        },
    ]
}
)
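For illustration, hypothetical secret payloads matching the two Alibaba Cloud schemas above; the key values are fake, only the role ARN format follows the example given in the schema description:

static_secret = {
    "access_key_id": "LTAI5tExampleExample",          # fake access key ID
    "access_key_secret": "exampleSecretExampleSecret",  # fake secret
    # "security_token": "...",  # only needed for temporary STS credentials
}
ram_role_secret = {
    "role_arn": "acs:ram::1234567890123456:role/ProwlerRole",
    "access_key_id": "LTAI5tExampleExample",
    "access_key_secret": "exampleSecretExampleSecret",
    "role_session_name": "ProwlerSession",  # optional
}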
@@ -54,6 +54,8 @@ from api.models import (
from api.rls import Tenant
from api.v1.serializer_utils.integrations import (
    AWSCredentialSerializer,
    GitHubConfigSerializer,
    GitHubCredentialSerializer,
    IntegrationConfigField,
    IntegrationCredentialField,
    JiraConfigSerializer,
@@ -1301,6 +1303,7 @@ class FindingSerializer(RLSSerializer):
            "severity",
            "check_id",
            "check_metadata",
            "categories",
            "raw_result",
            "inserted_at",
            "updated_at",
@@ -1356,6 +1359,7 @@ class FindingMetadataSerializer(BaseSerializerV1):
    resource_types = serializers.ListField(
        child=serializers.CharField(), allow_empty=True
    )
    categories = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    # Temporarily disabled until we implement tag filtering in the UI
    # tags = serializers.JSONField(help_text="Tags are described as key-value pairs.")

@@ -1388,12 +1392,23 @@ class BaseWriteProviderSecretSerializer(BaseWriteSerializer):
                serializer = OracleCloudProviderSecret(data=secret)
            elif provider_type == Provider.ProviderChoices.MONGODBATLAS.value:
                serializer = MongoDBAtlasProviderSecret(data=secret)
            elif provider_type == Provider.ProviderChoices.ALIBABACLOUD.value:
                serializer = AlibabaCloudProviderSecret(data=secret)
            else:
                raise serializers.ValidationError(
                    {"provider": f"Provider type not supported {provider_type}"}
                )
        elif secret_type == ProviderSecret.TypeChoices.ROLE:
            serializer = AWSRoleAssumptionProviderSecret(data=secret)
            if provider_type == Provider.ProviderChoices.AWS.value:
                serializer = AWSRoleAssumptionProviderSecret(data=secret)
            elif provider_type == Provider.ProviderChoices.ALIBABACLOUD.value:
                serializer = AlibabaCloudRoleAssumptionProviderSecret(data=secret)
            else:
                raise serializers.ValidationError(
                    {
                        "secret_type": f"Role assumption not supported for provider type: {provider_type}"
                    }
                )
        elif secret_type == ProviderSecret.TypeChoices.SERVICE_ACCOUNT:
            serializer = GCPServiceAccountProviderSecret(data=secret)
        else:
@@ -1530,6 +1545,34 @@ class OracleCloudProviderSecret(serializers.Serializer):
        resource_name = "provider-secrets"


class AlibabaCloudProviderSecret(serializers.Serializer):
    access_key_id = serializers.CharField()
    access_key_secret = serializers.CharField()
    security_token = serializers.CharField(required=False)

    class Meta:
        resource_name = "provider-secrets"


class AlibabaCloudRoleAssumptionProviderSecret(serializers.Serializer):
    role_arn = serializers.CharField(
        help_text="ARN of the RAM role that will be assumed"
    )
    access_key_id = serializers.CharField(
        help_text="Access Key ID of the RAM user that will assume the role"
    )
    access_key_secret = serializers.CharField(
        help_text="Access Key Secret of the RAM user that will assume the role"
    )
    role_session_name = serializers.CharField(
        required=False,
        help_text="Session name for the assumed role session (optional, defaults to 'ProwlerSession')",
    )

    class Meta:
        resource_name = "provider-secrets"


class AWSRoleAssumptionProviderSecret(serializers.Serializer):
    role_arn = serializers.CharField()
    external_id = serializers.CharField()
@@ -2204,6 +2247,22 @@ class OverviewSeveritySerializer(BaseSerializerV1):
        resource_name = "findings-severity-overview"


class FindingsSeverityOverTimeSerializer(BaseSerializerV1):
    """Serializer for daily findings severity trend data."""

    id = serializers.DateField(source="date")
    critical = serializers.IntegerField()
    high = serializers.IntegerField()
    medium = serializers.IntegerField()
    low = serializers.IntegerField()
    informational = serializers.IntegerField()
    muted = serializers.IntegerField()
    scan_ids = serializers.ListField(child=serializers.UUIDField())

    class JSONAPIMeta:
        resource_name = "findings-severity-over-time"


class OverviewServiceSerializer(BaseSerializerV1):
    id = serializers.CharField(source="service")
    total = serializers.IntegerField()
@@ -2226,14 +2285,26 @@ class AttackSurfaceOverviewSerializer(BaseSerializerV1):
    total_findings = serializers.IntegerField()
    failed_findings = serializers.IntegerField()
    muted_failed_findings = serializers.IntegerField()
    check_ids = serializers.ListField(
        child=serializers.CharField(), allow_empty=True, default=list, read_only=True
    )

    class JSONAPIMeta:
        resource_name = "attack-surface-overviews"


class CategoryOverviewSerializer(BaseSerializerV1):
    """Serializer for category overview aggregations."""

    id = serializers.CharField(source="category")
    total_findings = serializers.IntegerField()
    failed_findings = serializers.IntegerField()
    new_failed_findings = serializers.IntegerField()
    severity = serializers.JSONField(
        help_text="Severity breakdown: {informational, low, medium, high, critical}"
    )

    class JSONAPIMeta:
        resource_name = "category-overviews"


class OverviewRegionSerializer(serializers.Serializer):
    id = serializers.SerializerMethodField()
    provider_type = serializers.CharField()
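For orientation, a sketch of one serialized findings-severity-over-time item, shaped only by the fields declared above (date and UUID are placeholders):

timeseries_point = {
    "id": "2025-01-15",  # sourced from the "date" attribute of the daily summary
    "critical": 2,
    "high": 10,
    "medium": 25,
    "low": 40,
    "informational": 5,
    "muted": 3,
    "scan_ids": ["4b825dc6-4d4e-4b27-9a37-000000000000"],  # placeholder scan UUID
}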
@@ -2363,6 +2434,28 @@ class BaseWriteIntegrationSerializer(BaseWriteSerializer):
                )
            config_serializer = SecurityHubConfigSerializer
            credentials_serializers = [AWSCredentialSerializer]
        elif integration_type == Integration.IntegrationChoices.GITHUB:
            if providers:
                raise serializers.ValidationError(
                    {
                        "providers": "Relationship field is not accepted. This integration applies to all providers."
                    }
                )
            if configuration:
                raise serializers.ValidationError(
                    {
                        "configuration": "This integration does not support custom configuration."
                    }
                )
            config_serializer = GitHubConfigSerializer
            # Create non-editable configuration for GitHub integration
            configuration.update(
                {
                    "repositories": {},
                    "owner": credentials.get("owner", ""),
                }
            )
            credentials_serializers = [GitHubCredentialSerializer]
        elif integration_type == Integration.IntegrationChoices.JIRA:
            if providers:
                raise serializers.ValidationError(
@@ -2450,7 +2543,11 @@ class IntegrationSerializer(RLSSerializer):
                for provider in representation["providers"]
                if provider["id"] in allowed_provider_ids
            ]
        if instance.integration_type == Integration.IntegrationChoices.JIRA:
        if instance.integration_type == Integration.IntegrationChoices.GITHUB:
            representation["configuration"].update(
                {"owner": instance.credentials.get("owner", "")}
            )
        elif instance.integration_type == Integration.IntegrationChoices.JIRA:
            representation["configuration"].update(
                {"domain": instance.credentials.get("domain")}
            )
@@ -2597,6 +2694,51 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
        return representation


class IntegrationGitHubDispatchSerializer(BaseSerializerV1):
    """
    Serializer for dispatching findings to GitHub integration.
    """

    repository = serializers.CharField(required=True)
    labels = serializers.ListField(
        child=serializers.CharField(), required=False, default=list
    )

    class JSONAPIMeta:
        resource_name = "integrations-github-dispatches"

    def validate(self, attrs):
        validated_attrs = super().validate(attrs)
        integration_instance = Integration.objects.get(
            id=self.context.get("integration_id")
        )
        if (
            integration_instance.integration_type
            != Integration.IntegrationChoices.GITHUB
        ):
            raise ValidationError(
                {
                    "integration_type": "The given integration is not a GitHub integration"
                }
            )

        if not integration_instance.enabled:
            raise ValidationError(
                {"integration": "The given integration is not enabled"}
            )

        repository = attrs.get("repository")
        if repository not in integration_instance.configuration.get("repositories", {}):
            raise ValidationError(
                {
                    "repository": "The given repository is not available for this GitHub integration. Refresh the "
                    "connection if this is an error."
                }
            )

        return validated_attrs


class IntegrationJiraDispatchSerializer(BaseSerializerV1):
    """
    Serializer for dispatching findings to JIRA integration.
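A hypothetical dispatch payload accepted by the GitHub dispatch serializer above: "repository" must be one of the repositories recorded in the integration's configuration, and "labels" defaults to an empty list (the repository name and labels below are illustrative only):

github_dispatch_payload = {
    "repository": "my-organization/infra-repo",  # must appear in configuration["repositories"]
    "labels": ["prowler", "security-finding"],   # optional
}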
@@ -3299,6 +3441,19 @@ class LighthouseProviderConfigUpdateSerializer(BaseWriteSerializer):
            and provider_type
            == LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK
        ):
            # For updates, enforce that the authentication method (access keys vs API key)
            # is immutable. To switch methods, the UI must delete and recreate the provider.
            existing_credentials = (
                self.instance.credentials_decoded if self.instance else {}
            ) or {}

            existing_uses_api_key = "api_key" in existing_credentials
            existing_uses_access_keys = any(
                k in existing_credentials
                for k in ("access_key_id", "secret_access_key")
            )

            # First run field-level validation on the partial payload
            try:
                BedrockCredentialsUpdateSerializer(data=credentials).is_valid(
                    raise_exception=True
@@ -3309,6 +3464,31 @@ class LighthouseProviderConfigUpdateSerializer(BaseWriteSerializer):
                    e.detail[f"credentials/{key}"] = value
                    del e.detail[key]
                raise e

            # Then enforce invariants about not changing the auth method
            # If the existing config uses an API key, forbid introducing access keys.
            if existing_uses_api_key and any(
                k in credentials for k in ("access_key_id", "secret_access_key")
            ):
                raise ValidationError(
                    {
                        "credentials/non_field_errors": [
                            "Cannot change Bedrock authentication method from API key "
                            "to access key via update. Delete and recreate the provider instead."
                        ]
                    }
                )

            # If the existing config uses access keys, forbid introducing an API key.
            if existing_uses_access_keys and "api_key" in credentials:
                raise ValidationError(
                    {
                        "credentials/non_field_errors": [
                            "Cannot change Bedrock authentication method from access key "
                            "to API key via update. Delete and recreate the provider instead."
                        ]
                    }
                )
        elif (
            credentials is not None
            and provider_type
@@ -12,6 +12,7 @@ from api.v1.views import (
    FindingViewSet,
    GithubSocialLoginView,
    GoogleSocialLoginView,
    IntegrationGitHubViewSet,
    IntegrationJiraViewSet,
    IntegrationViewSet,
    InvitationAcceptViewSet,
@@ -94,6 +95,9 @@ users_router.register(r"memberships", MembershipViewSet, basename="user-membersh
integrations_router = routers.NestedSimpleRouter(
    router, r"integrations", lookup="integration"
)
integrations_router.register(
    r"github", IntegrationGitHubViewSet, basename="integration-github"
)
integrations_router.register(
    r"jira", IntegrationJiraViewSet, basename="integration-jira"
)
+579 -136
@@ -74,8 +74,8 @@ from rest_framework_json_api.views import RelationshipView, Response
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
from tasks.beat import schedule_provider_scan
from tasks.jobs.export import get_s3_client
from tasks.jobs.scan import _get_attack_surface_mapping_from_provider
from tasks.tasks import (
    backfill_compliance_summaries_task,
    backfill_scan_resource_summaries_task,
    check_integration_connection_task,
    check_lighthouse_connection_task,
@@ -83,6 +83,7 @@ from tasks.tasks import (
    check_provider_connection_task,
    delete_provider_task,
    delete_tenant_task,
    github_integration_task,
    jira_integration_task,
    mute_historical_findings_task,
    perform_scan_task,
@@ -99,10 +100,13 @@ from api.db_utils import rls_transaction
from api.exceptions import TaskFailedException
from api.filters import (
    AttackSurfaceOverviewFilter,
    CategoryOverviewFilter,
    ComplianceOverviewFilter,
    CustomDjangoFilterBackend,
    DailySeveritySummaryFilter,
    FindingFilter,
    IntegrationFilter,
    IntegrationGitHubFindingsFilter,
    IntegrationJiraFindingsFilter,
    InvitationFilter,
    LatestFindingFilter,
@@ -128,7 +132,9 @@ from api.filters import (
)
from api.models import (
    AttackSurfaceOverview,
    ComplianceOverviewSummary,
    ComplianceRequirementOverview,
    DailySeveritySummary,
    Finding,
    Integration,
    Invitation,
@@ -153,6 +159,7 @@ from api.models import (
    SAMLDomainIndex,
    SAMLToken,
    Scan,
    ScanCategorySummary,
    ScanSummary,
    SeverityChoices,
    StateChoices,
@@ -174,6 +181,7 @@ from api.uuid_utils import datetime_to_uuid7, uuid7_start
from api.v1.mixins import DisablePaginationMixin, PaginateByPkMixin, TaskManagementMixin
from api.v1.serializers import (
    AttackSurfaceOverviewSerializer,
    CategoryOverviewSerializer,
    ComplianceOverviewAttributesSerializer,
    ComplianceOverviewDetailSerializer,
    ComplianceOverviewDetailThreatscoreSerializer,
@@ -182,7 +190,9 @@ from api.v1.serializers import (
    FindingDynamicFilterSerializer,
    FindingMetadataSerializer,
    FindingSerializer,
    FindingsSeverityOverTimeSerializer,
    IntegrationCreateSerializer,
    IntegrationGitHubDispatchSerializer,
    IntegrationJiraDispatchSerializer,
    IntegrationSerializer,
    IntegrationUpdateSerializer,
@@ -352,7 +362,7 @@ class SchemaView(SpectacularAPIView):

    def get(self, request, *args, **kwargs):
        spectacular_settings.TITLE = "Prowler API"
        spectacular_settings.VERSION = "1.16.0"
        spectacular_settings.VERSION = "1.18.0"
        spectacular_settings.DESCRIPTION = (
            "Prowler API specification.\n\nThis file is auto-generated."
        )
@@ -2755,12 +2765,15 @@ class FindingViewSet(PaginateByPkMixin, BaseRLSViewSet):

        queryset = ResourceScanSummary.objects.filter(tenant_id=tenant_id)
        scan_based_filters = {}
        category_scan_filters = {}  # Filters for ScanCategorySummary

        if scans := query_params.get("filter[scan__in]") or query_params.get(
            "filter[scan]"
        ):
            queryset = queryset.filter(scan_id__in=scans.split(","))
            scan_based_filters = {"id__in": scans.split(",")}
            scan_ids_list = scans.split(",")
            queryset = queryset.filter(scan_id__in=scan_ids_list)
            scan_based_filters = {"id__in": scan_ids_list}
            category_scan_filters = {"scan_id__in": scan_ids_list}
        else:
            exact = query_params.get("filter[inserted_at]")
            gte = query_params.get("filter[inserted_at__gte]")
@@ -2804,6 +2817,7 @@ class FindingViewSet(PaginateByPkMixin, BaseRLSViewSet):
            scan_based_filters = {
                key.lstrip("scan_"): value for key, value in date_filters.items()
            }
            category_scan_filters = date_filters

        # ToRemove: Temporary fallback mechanism
        if not queryset.exists():
@@ -2850,10 +2864,31 @@ class FindingViewSet(PaginateByPkMixin, BaseRLSViewSet):
            .order_by("resource_type")
        )

        # Get categories from ScanCategorySummary using same scan filters
        categories = list(
            ScanCategorySummary.objects.filter(
                tenant_id=tenant_id, **category_scan_filters
            )
            .values_list("category", flat=True)
            .distinct()
            .order_by("category")
        )

        # Fallback to finding aggregation if no ScanCategorySummary exists
        if not categories:
            categories_set = set()
            for categories_list in filtered_queryset.values_list(
                "categories", flat=True
            ):
                if categories_list:
                    categories_set.update(categories_list)
            categories = sorted(categories_set)

        result = {
            "services": services,
            "regions": regions,
            "resource_types": resource_types,
            "categories": categories,
        }

        serializer = self.get_serializer(data=result)
@@ -2958,10 +2993,36 @@ class FindingViewSet(PaginateByPkMixin, BaseRLSViewSet):
            .order_by("resource_type")
        )

        # Get categories from ScanCategorySummary for latest scans
        categories = list(
            ScanCategorySummary.objects.filter(
                tenant_id=tenant_id,
                scan_id__in=latest_scans_queryset.values_list("id", flat=True),
            )
            .values_list("category", flat=True)
            .distinct()
            .order_by("category")
        )

        # Fallback to finding aggregation if no ScanCategorySummary exists
        if not categories:
            filtered_queryset = self.filter_queryset(self.get_queryset()).filter(
                tenant_id=tenant_id,
                scan_id__in=latest_scans_queryset.values_list("id", flat=True),
            )
            categories_set = set()
            for categories_list in filtered_queryset.values_list(
                "categories", flat=True
            ):
                if categories_list:
                    categories_set.update(categories_list)
            categories = sorted(categories_set)

        result = {
            "services": services,
            "regions": regions,
            "resource_types": resource_types,
            "categories": categories,
        }

        serializer = self.get_serializer(data=result)
@@ -3526,6 +3587,126 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
    def retrieve(self, request, *args, **kwargs):
        raise MethodNotAllowed(method="GET")

    def _compliance_summaries_queryset(self, scan_id):
        """Return pre-aggregated summaries constrained by RBAC visibility."""
        role = get_role(self.request.user)
        unlimited_visibility = getattr(
            role, Permissions.UNLIMITED_VISIBILITY.value, False
        )
        summaries = ComplianceOverviewSummary.objects.filter(
            tenant_id=self.request.tenant_id,
            scan_id=scan_id,
        )

        if not unlimited_visibility:
            providers = Provider.all_objects.filter(
                provider_groups__in=role.provider_groups.all()
            ).distinct()
            summaries = summaries.filter(scan__provider__in=providers)

        return summaries

    def _get_compliance_template(self, *, provider=None, scan_id=None):
        """Return the compliance template for the given provider or scan."""
        if provider is None and scan_id is not None:
            try:
                scan = Scan.all_objects.select_related("provider").get(pk=scan_id)
            except Scan.DoesNotExist:
                raise ValidationError(
                    [
                        {
                            "detail": "Scan not found",
                            "status": 404,
                            "source": {"pointer": "filter[scan_id]"},
                            "code": "not_found",
                        }
                    ]
                )
            provider = scan.provider

        if not provider:
            return {}

        return PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE.get(provider.provider, {})

    def _aggregate_compliance_overview(self, queryset, template_metadata=None):
        """
        Aggregate requirement rows into compliance overview dictionaries.

        Args:
            queryset: ComplianceRequirementOverview queryset already filtered.
            template_metadata: Optional dict mapping compliance_id -> metadata.
        """
        template_metadata = template_metadata or {}
        requirement_status_subquery = queryset.values(
            "compliance_id", "requirement_id"
        ).annotate(
            fail_count=Count("id", filter=Q(requirement_status="FAIL")),
            pass_count=Count("id", filter=Q(requirement_status="PASS")),
            total_count=Count("id"),
        )

        compliance_data = {}
        fallback_metadata = {
            item["compliance_id"]: {
                "framework": item["framework"],
                "version": item["version"],
            }
            for item in queryset.values(
                "compliance_id", "framework", "version"
            ).distinct()
        }

        for item in requirement_status_subquery:
            compliance_id = item["compliance_id"]

            if item["fail_count"] > 0:
                req_status = "FAIL"
            elif item["pass_count"] == item["total_count"]:
                req_status = "PASS"
            else:
                req_status = "MANUAL"

            compliance_status = compliance_data.setdefault(
                compliance_id,
                {
                    "total_requirements": 0,
                    "requirements_passed": 0,
                    "requirements_failed": 0,
                    "requirements_manual": 0,
                },
            )

            compliance_status["total_requirements"] += 1
            if req_status == "PASS":
                compliance_status["requirements_passed"] += 1
            elif req_status == "FAIL":
                compliance_status["requirements_failed"] += 1
            else:
                compliance_status["requirements_manual"] += 1

        response_data = []
        for compliance_id, data in compliance_data.items():
            template = template_metadata.get(compliance_id, {})
            fallback = fallback_metadata.get(compliance_id, {})

            response_data.append(
                {
                    "id": compliance_id,
                    "compliance_id": compliance_id,
                    "framework": template.get("framework")
                    or fallback.get("framework", ""),
                    "version": template.get("version") or fallback.get("version", ""),
                    "requirements_passed": data["requirements_passed"],
                    "requirements_failed": data["requirements_failed"],
                    "requirements_manual": data["requirements_manual"],
                    "total_requirements": data["total_requirements"],
                }
            )

        serializer = self.get_serializer(response_data, many=True)
        return serializer.data

    def _task_response_if_running(self, scan_id):
        """Check for an in-progress task only when no compliance data exists."""
        try:
@@ -3540,90 +3721,84 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

    def list(self, request, *args, **kwargs):
        scan_id = request.query_params.get("filter[scan_id]")
        if not scan_id:
            raise ValidationError(
                [
                    {
                        "detail": "This query parameter is required.",
                        "status": 400,
                        "source": {"pointer": "filter[scan_id]"},
                        "code": "required",
                    }
                ]
            )
        try:
            if task := self.get_task_response_if_running(
                task_name="scan-compliance-overviews",
                task_kwargs={"tenant_id": self.request.tenant_id, "scan_id": scan_id},
                raise_on_not_found=False,
            ):
                return task
        except TaskFailedException:
            return Response(
                {"detail": "Task failed to generate compliance overview data."},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
        queryset = self.filter_queryset(self.filter_queryset(self.get_queryset()))

        requirement_status_subquery = queryset.values(
            "compliance_id", "requirement_id"
        ).annotate(
            fail_count=Count("id", filter=Q(requirement_status="FAIL")),
            pass_count=Count("id", filter=Q(requirement_status="PASS")),
            total_count=Count("id"),
    def _list_with_region_filter(self, scan_id, region_filter):
        """
        Fall back to detailed ComplianceRequirementOverview query when region filter is applied.
        This uses the original aggregation logic across filtered regions.
        """
        regions = region_filter.split(",") if "," in region_filter else [region_filter]
        queryset = self.filter_queryset(self.get_queryset()).filter(
            scan_id=scan_id,
            region__in=regions,
        )

        compliance_data = {}
        framework_info = {}
        data = self._aggregate_compliance_overview(queryset)
        if data:
            return Response(data)

        for item in queryset.values("compliance_id", "framework", "version").distinct():
            framework_info[item["compliance_id"]] = {
                "framework": item["framework"],
                "version": item["version"],
            }
        task_response = self._task_response_if_running(scan_id)
        if task_response:
            return task_response

        for item in requirement_status_subquery:
            compliance_id = item["compliance_id"]
        return Response(data)

            if item["fail_count"] > 0:
                req_status = "FAIL"
            elif item["pass_count"] == item["total_count"]:
                req_status = "PASS"
            else:
                req_status = "MANUAL"
    def _list_without_region_aggregation(self, scan_id):
        """
        Fall back aggregation when compliance summaries don't exist yet.
        Aggregates ComplianceRequirementOverview data across ALL regions.
        """
        queryset = self.filter_queryset(self.get_queryset()).filter(scan_id=scan_id)
        compliance_template = self._get_compliance_template(scan_id=scan_id)
        data = self._aggregate_compliance_overview(
            queryset, template_metadata=compliance_template
        )
        if data:
            return Response(data)

            if compliance_id not in compliance_data:
                compliance_data[compliance_id] = {
                    "total_requirements": 0,
                    "requirements_passed": 0,
                    "requirements_failed": 0,
                    "requirements_manual": 0,
                }
        task_response = self._task_response_if_running(scan_id)
        if task_response:
            return task_response

            compliance_data[compliance_id]["total_requirements"] += 1
            if req_status == "PASS":
                compliance_data[compliance_id]["requirements_passed"] += 1
            elif req_status == "FAIL":
                compliance_data[compliance_id]["requirements_failed"] += 1
            else:
                compliance_data[compliance_id]["requirements_manual"] += 1
        return Response(data)

    def list(self, request, *args, **kwargs):
        scan_id = request.query_params.get("filter[scan_id]")

        # Specific scan requested - use optimized summaries with region support
        region_filter = request.query_params.get(
            "filter[region]"
        ) or request.query_params.get("filter[region__in]")

        if region_filter:
            # Fall back to detailed query with region filtering
            return self._list_with_region_filter(scan_id, region_filter)

        summaries = list(self._compliance_summaries_queryset(scan_id))
        if not summaries:
            # Trigger async backfill for next time
            backfill_compliance_summaries_task.delay(
                tenant_id=self.request.tenant_id, scan_id=scan_id
            )
            # Use fallback aggregation for this request
            return self._list_without_region_aggregation(scan_id)

        # Get compliance template for provider to enrich with framework/version
        compliance_template = self._get_compliance_template(scan_id=scan_id)

        # Convert to response format with framework/version enrichment
        response_data = []
        for compliance_id, data in compliance_data.items():
            framework = framework_info.get(compliance_id, {})

        for summary in summaries:
            compliance_metadata = compliance_template.get(summary.compliance_id, {})
            response_data.append(
                {
                    "id": compliance_id,
                    "compliance_id": compliance_id,
                    "framework": framework.get("framework", ""),
                    "version": framework.get("version", ""),
                    "requirements_passed": data["requirements_passed"],
                    "requirements_failed": data["requirements_failed"],
                    "requirements_manual": data["requirements_manual"],
                    "total_requirements": data["total_requirements"],
                    "id": summary.compliance_id,
                    "compliance_id": summary.compliance_id,
                    "framework": compliance_metadata.get("framework", ""),
                    "version": compliance_metadata.get("version", ""),
                    "requirements_passed": summary.requirements_passed,
                    "requirements_failed": summary.requirements_failed,
                    "requirements_manual": summary.requirements_manual,
                    "total_requirements": summary.total_requirements,
                }
            )
@@ -3893,36 +4068,33 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
        ),
        filters=True,
    ),
    findings_severity_timeseries=extend_schema(
        summary="Get findings severity data over time",
        description=(
            "Retrieve daily aggregated findings data grouped by severity levels over a date range. "
            "Returns one data point per day with counts of failed findings by severity (critical, high, "
            "medium, low, informational) and muted findings. Days without scans are filled forward with "
            "the most recent known values. Use date_from (required) and date_to filters to specify the range."
        ),
        filters=True,
    ),
    attack_surface=extend_schema(
        summary="Get attack surface overview",
        description="Retrieve aggregated attack surface metrics from latest completed scans per provider.",
        tags=["Overview"],
        parameters=[
            OpenApiParameter(
                name="filter[provider_id]",
                type=OpenApiTypes.UUID,
                location=OpenApiParameter.QUERY,
                description="Filter by specific provider ID",
            ),
            OpenApiParameter(
                name="filter[provider_id.in]",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                description="Filter by multiple provider IDs (comma-separated UUIDs)",
            ),
            OpenApiParameter(
                name="filter[provider_type]",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                description="Filter by provider type (aws, azure, gcp, etc.)",
            ),
            OpenApiParameter(
                name="filter[provider_type.in]",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                description="Filter by multiple provider types (comma-separated)",
            ),
        ],
        filters=True,
        responses={200: AttackSurfaceOverviewSerializer(many=True)},
    ),
    categories=extend_schema(
        summary="Get category overview",
        description=(
            "Retrieve aggregated category metrics from latest completed scans per provider. "
            "Returns one row per category with total, failed, and new failed findings counts, "
            "plus a severity breakdown showing failed findings per severity level. "
        ),
        tags=["Overview"],
        filters=True,
        responses={200: CategoryOverviewSerializer(many=True)},
    ),
)
@method_decorator(CACHE_DECORATOR, name="list")
@@ -3941,7 +4113,16 @@ class OverviewViewSet(BaseRLSViewSet):
        if not role.unlimited_visibility:
            self.allowed_providers = providers

        return ScanSummary.all_objects.filter(tenant_id=self.request.tenant_id)
        tenant_id = self.request.tenant_id

        # Return appropriate queryset per action
        if self.action == "findings_severity_timeseries":
            qs = DailySeveritySummary.objects.filter(tenant_id=tenant_id)
            if hasattr(self, "allowed_providers"):
                qs = qs.filter(provider_id__in=self.allowed_providers)
            return qs

        return ScanSummary.all_objects.filter(tenant_id=tenant_id)

    def get_serializer_class(self):
        if self.action == "providers":
@@ -3952,6 +4133,8 @@ class OverviewViewSet(BaseRLSViewSet):
            return OverviewFindingSerializer
        elif self.action == "findings_severity":
            return OverviewSeveritySerializer
        elif self.action == "findings_severity_timeseries":
            return FindingsSeverityOverTimeSerializer
        elif self.action == "services":
            return OverviewServiceSerializer
        elif self.action == "regions":
@@ -3960,6 +4143,8 @@ class OverviewViewSet(BaseRLSViewSet):
            return ThreatScoreSnapshotSerializer
        elif self.action == "attack_surface":
            return AttackSurfaceOverviewSerializer
        elif self.action == "categories":
            return CategoryOverviewSerializer
        return super().get_serializer_class()

    def get_filterset_class(self):
@@ -3969,8 +4154,22 @@ class OverviewViewSet(BaseRLSViewSet):
            return ScanSummaryFilter
        elif self.action == "findings_severity":
            return ScanSummarySeverityFilter
        elif self.action == "findings_severity_timeseries":
            return DailySeveritySummaryFilter
        elif self.action == "categories":
            return CategoryOverviewFilter
        elif self.action == "attack_surface":
            return AttackSurfaceOverviewFilter
        return None

    def filter_queryset(self, queryset):
        # Skip OrderingFilter for findings_severity_timeseries (no inserted_at field)
        if self.action == "findings_severity_timeseries":
            return CustomDjangoFilterBackend().filter_queryset(
                self.request, queryset, self
            )
        return super().filter_queryset(queryset)

    @extend_schema(exclude=True)
    def list(self, request, *args, **kwargs):
        raise MethodNotAllowed(method="GET")
@@ -4046,29 +4245,41 @@ class OverviewViewSet(BaseRLSViewSet):
        filterset = filterset_class(normalized_params, queryset=queryset)
        return filterset.qs

    def _latest_scan_ids_for_allowed_providers(self, tenant_id):
    def _latest_scan_ids_for_allowed_providers(self, tenant_id, provider_filters=None):
        provider_filter = self._get_provider_filter()
        queryset = Scan.all_objects.filter(
            tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
        )
        if provider_filters:
            queryset = queryset.filter(**provider_filters)
        return (
            Scan.all_objects.filter(
                tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
            )
            .order_by("provider_id", "-inserted_at")
            queryset.order_by("provider_id", "-inserted_at")
            .distinct("provider_id")
            .values_list("id", flat=True)
        )

    def _attack_surface_check_ids_by_provider_types(self, provider_types):
        check_ids_by_type = {
            attack_surface_type: set()
            for attack_surface_type in AttackSurfaceOverview.AttackSurfaceTypeChoices.values
        }
        for provider_type in provider_types:
            attack_surface_mapping = _get_attack_surface_mapping_from_provider(
                provider_type=provider_type
            )
            for attack_surface_type, check_ids in attack_surface_mapping.items():
                check_ids_by_type[attack_surface_type].update(check_ids)
        return check_ids_by_type
    def _extract_provider_filters_from_params(self):
        """Extract provider filters from query params to apply on Scan queryset."""
        params = self.request.query_params
        filters = {}

        provider_id = params.get("filter[provider_id]")
        if provider_id:
            filters["provider_id"] = provider_id

        provider_id_in = params.get("filter[provider_id__in]")
        if provider_id_in:
            filters["provider_id__in"] = provider_id_in.split(",")

        provider_type = params.get("filter[provider_type]")
        if provider_type:
            filters["provider__provider"] = provider_type

        provider_type_in = params.get("filter[provider_type__in]")
        if provider_type_in:
            filters["provider__provider__in"] = provider_type_in.split(",")

        return filters

    @action(detail=False, methods=["get"], url_name="providers")
    def providers(self, request):
@@ -4247,6 +4458,108 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
|
||||
        return Response(serializer.data, status=status.HTTP_200_OK)

    @action(
        detail=False,
        methods=["get"],
        url_path="findings_severity/timeseries",
        url_name="findings_severity_timeseries",
    )
    def findings_severity_timeseries(self, request):
        """
        Daily severity trends for charts. Uses DailySeveritySummary pre-aggregation.
        Requires date_from filter.
        """
        # Get queryset with RBAC, provider, and date filters applied
        # Date validation is handled by DailySeveritySummaryFilter
        daily_qs = self.filter_queryset(self.get_queryset())

        date_from = request._date_from
        date_to = request._date_to

        if not daily_qs.exists():
            # No data matches filters - return zeros
            result = self._generate_zero_result(date_from, date_to)
            serializer = self.get_serializer(result, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)

        # Fetch all data for fill-forward logic
        daily_summaries = list(
            daily_qs.order_by("provider_id", "-date").values(
                "provider_id",
                "scan_id",
                "date",
                "critical",
                "high",
                "medium",
                "low",
                "informational",
                "muted",
            )
        )

        if not daily_summaries:
            result = self._generate_zero_result(date_from, date_to)
            serializer = self.get_serializer(result, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)

        # Build provider_data: {provider_id: [(date, data), ...]} sorted by date desc
        provider_data = defaultdict(list)
        for summary in daily_summaries:
            provider_data[summary["provider_id"]].append(summary)

        # For each day, find the latest data per provider and sum values
        result = []
        current_date = date_from
        while current_date <= date_to:
            day_totals = {
                "critical": 0,
                "high": 0,
                "medium": 0,
                "low": 0,
                "informational": 0,
                "muted": 0,
            }
            day_scan_ids = []

            for provider_id, summaries in provider_data.items():
                # Find the latest data for this provider <= current_date
                for summary in summaries:  # Already sorted by date desc
                    if summary["date"] <= current_date:
                        day_totals["critical"] += summary["critical"] or 0
                        day_totals["high"] += summary["high"] or 0
                        day_totals["medium"] += summary["medium"] or 0
                        day_totals["low"] += summary["low"] or 0
                        day_totals["informational"] += summary["informational"] or 0
                        day_totals["muted"] += summary["muted"] or 0
                        day_scan_ids.append(summary["scan_id"])
                        break  # Found the latest data for this provider

            result.append(
                {"date": current_date, "scan_ids": day_scan_ids, **day_totals}
            )
            current_date += timedelta(days=1)

        serializer = self.get_serializer(result, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def _generate_zero_result(self, date_from, date_to):
        """Generate a list of zero-filled results for each date in range."""
        result = []
        current_date = date_from
        zero_values = {
            "critical": 0,
            "high": 0,
            "medium": 0,
            "low": 0,
            "informational": 0,
            "muted": 0,
            "scan_ids": [],
        }
        while current_date <= date_to:
            result.append({"date": current_date, **zero_values})
            current_date += timedelta(days=1)
        return result

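# Illustrative sketch (editor's addition, not from this changeset): the fill-forward
# rule above restated on plain dicts. Each day between date_from and date_to reuses a
# provider's most recent DailySeveritySummary on or before that day, so providers that
# were not scanned that day still contribute their last known counts. The helper name
# `fill_forward` and the sample data are hypothetical.
from collections import defaultdict
from datetime import date, timedelta


def fill_forward(summaries, date_from, date_to):
    severity_keys = ("critical", "high", "medium", "low", "informational", "muted")
    by_provider = defaultdict(list)
    # Newest first, mirroring order_by("provider_id", "-date") in the endpoint.
    for row in sorted(summaries, key=lambda r: r["date"], reverse=True):
        by_provider[row["provider_id"]].append(row)

    results, day = [], date_from
    while day <= date_to:
        totals = dict.fromkeys(severity_keys, 0)
        for rows in by_provider.values():
            latest = next((r for r in rows if r["date"] <= day), None)
            if latest:
                for key in severity_keys:
                    totals[key] += latest.get(key) or 0
        results.append({"date": day, **totals})
        day += timedelta(days=1)
    return results


# A provider scanned on Jan 1 keeps contributing on Jan 2 and Jan 3.
rows = [{"provider_id": "p1", "date": date(2024, 1, 1), "critical": 2}]
assert fill_forward(rows, date(2024, 1, 1), date(2024, 1, 3))[2]["critical"] == 2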
@extend_schema(
|
||||
summary="Get ThreatScore snapshots",
|
||||
description=(
|
||||
@@ -4573,22 +4886,13 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
tenant_id = request.tenant_id
|
||||
latest_scan_ids = self._latest_scan_ids_for_allowed_providers(tenant_id)
|
||||
|
||||
# Build base queryset and apply user filters via FilterSet
|
||||
base_queryset = AttackSurfaceOverview.objects.filter(
|
||||
tenant_id=tenant_id, scan_id__in=latest_scan_ids
|
||||
)
|
||||
filtered_queryset = self._apply_filterset(
|
||||
base_queryset, AttackSurfaceOverviewFilter
|
||||
)
|
||||
provider_types = list(
|
||||
filtered_queryset.values_list(
|
||||
"scan__provider__provider", flat=True
|
||||
).distinct()
|
||||
)
|
||||
attack_surface_check_ids = self._attack_surface_check_ids_by_provider_types(
|
||||
provider_types
|
||||
)
|
||||
# Aggregate attack surface data
|
||||
|
||||
aggregation = filtered_queryset.values("attack_surface_type").annotate(
|
||||
total_findings=Coalesce(Sum("total_findings"), 0),
|
||||
failed_findings=Coalesce(Sum("failed_findings"), 0),
|
||||
@@ -4611,12 +4915,71 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
}
|
||||
|
||||
        response_data = [
            {
                "attack_surface_type": key,
                **value,
                "check_ids": attack_surface_check_ids.get(key, []),
            }
            for key, value in results.items()
        ]
|
||||
|
||||
return Response(
|
||||
self.get_serializer(response_data, many=True).data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
@action(detail=False, methods=["get"], url_name="categories")
|
||||
def categories(self, request):
|
||||
tenant_id = request.tenant_id
|
||||
provider_filters = self._extract_provider_filters_from_params()
|
||||
latest_scan_ids = self._latest_scan_ids_for_allowed_providers(
|
||||
tenant_id, provider_filters
|
||||
)
|
||||
|
||||
base_queryset = ScanCategorySummary.objects.filter(
|
||||
tenant_id=tenant_id, scan_id__in=latest_scan_ids
|
||||
)
|
||||
provider_filter_keys = {
|
||||
"provider_id",
|
||||
"provider_id__in",
|
||||
"provider_type",
|
||||
"provider_type__in",
|
||||
}
|
||||
filtered_queryset = self._apply_filterset(
|
||||
base_queryset, CategoryOverviewFilter, exclude_keys=provider_filter_keys
|
||||
)
|
||||
|
||||
aggregation = (
|
||||
filtered_queryset.values("category", "severity")
|
||||
.annotate(
|
||||
total=Coalesce(Sum("total_findings"), 0),
|
||||
failed=Coalesce(Sum("failed_findings"), 0),
|
||||
new_failed=Coalesce(Sum("new_failed_findings"), 0),
|
||||
)
|
||||
.order_by("category", "severity")
|
||||
)
|
||||
|
||||
        category_data = defaultdict(
            lambda: {
                "total_findings": 0,
                "failed_findings": 0,
                "new_failed_findings": 0,
                "severity": {
                    "informational": 0,
                    "low": 0,
                    "medium": 0,
                    "high": 0,
                    "critical": 0,
                },
            }
        )
|
||||
|
||||
for row in aggregation:
|
||||
cat = row["category"]
|
||||
sev = row["severity"]
|
||||
category_data[cat]["total_findings"] += row["total"]
|
||||
category_data[cat]["failed_findings"] += row["failed"]
|
||||
category_data[cat]["new_failed_findings"] += row["new_failed"]
|
||||
if sev in category_data[cat]["severity"]:
|
||||
category_data[cat]["severity"][sev] = row["failed"]
|
||||
|
||||
response_data = [
|
||||
{"category": cat, **data} for cat, data in sorted(category_data.items())
|
||||
]
|
||||
|
||||
return Response(
|
||||
@@ -4771,6 +5134,86 @@ class IntegrationViewSet(BaseRLSViewSet):
|
||||
)
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
dispatches=extend_schema(
|
||||
tags=["Integration"],
|
||||
summary="Send findings to a GitHub integration",
|
||||
description="Send a set of filtered findings to the given GitHub integration as issues. At least one finding "
|
||||
"filter must be provided.",
|
||||
responses={202: OpenApiResponse(response=TaskSerializer)},
|
||||
filters=True,
|
||||
)
|
||||
)
|
||||
class IntegrationGitHubViewSet(BaseRLSViewSet):
|
||||
queryset = Finding.all_objects.all()
|
||||
serializer_class = IntegrationGitHubDispatchSerializer
|
||||
http_method_names = ["post"]
|
||||
filter_backends = [CustomDjangoFilterBackend]
|
||||
filterset_class = IntegrationGitHubFindingsFilter
|
||||
# RBAC required permissions
|
||||
required_permissions = [Permissions.MANAGE_INTEGRATIONS]
|
||||
|
||||
@extend_schema(exclude=True)
|
||||
def create(self, request, *args, **kwargs):
|
||||
raise MethodNotAllowed(method="POST")
|
||||
|
||||
def get_queryset(self):
|
||||
tenant_id = self.request.tenant_id
|
||||
user_roles = get_role(self.request.user)
|
||||
if user_roles.unlimited_visibility:
|
||||
# User has unlimited visibility, return all findings
|
||||
queryset = Finding.all_objects.filter(tenant_id=tenant_id)
|
||||
else:
|
||||
# User lacks permission, filter findings based on provider groups associated with the role
|
||||
queryset = Finding.all_objects.filter(
|
||||
scan__provider__in=get_providers(user_roles)
|
||||
)
|
||||
|
||||
return queryset
|
||||
|
||||
@action(detail=False, methods=["post"], url_name="dispatches")
|
||||
def dispatches(self, request, integration_pk=None):
|
||||
get_object_or_404(Integration, pk=integration_pk)
|
||||
serializer = self.get_serializer(
|
||||
data=request.data, context={"integration_id": integration_pk}
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
if self.filter_queryset(self.get_queryset()).count() == 0:
|
||||
raise ValidationError(
|
||||
{"findings": "No findings match the provided filters"}
|
||||
)
|
||||
|
||||
finding_ids = [
|
||||
str(finding_id)
|
||||
for finding_id in self.filter_queryset(self.get_queryset()).values_list(
|
||||
"id", flat=True
|
||||
)
|
||||
]
|
||||
repository = serializer.validated_data["repository"]
|
||||
labels = serializer.validated_data.get("labels", [])
|
||||
|
||||
with transaction.atomic():
|
||||
task = github_integration_task.delay(
|
||||
tenant_id=self.request.tenant_id,
|
||||
integration_id=integration_pk,
|
||||
repository=repository,
|
||||
labels=labels,
|
||||
finding_ids=finding_ids,
|
||||
)
|
||||
prowler_task = Task.objects.get(id=task.id)
|
||||
serializer = TaskSerializer(prowler_task)
|
||||
return Response(
|
||||
data=serializer.data,
|
||||
status=status.HTTP_202_ACCEPTED,
|
||||
headers={
|
||||
"Content-Location": reverse(
|
||||
"task-detail", kwargs={"pk": prowler_task.id}
|
||||
)
|
||||
},
|
||||
)
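# Illustrative sketch (editor's addition, not from this changeset): rough client-side
# view of the dispatch flow above. The URL path, JSON:API resource type, and filter
# name are assumptions; only `repository`, `labels`, the 202 status, and the
# Content-Location header come from the code.
import requests

API_BASE = "https://prowler-api.example.com/api/v1"  # assumed
INTEGRATION_ID = "00000000-0000-0000-0000-000000000000"  # assumed

response = requests.post(
    f"{API_BASE}/integrations/{INTEGRATION_ID}/github/dispatches"
    "?filter[severity]=critical",  # at least one finding filter must be supplied
    json={
        "data": {
            "type": "integration-github-dispatches",  # assumed type name
            "attributes": {"repository": "my-org/my-repo", "labels": ["prowler"]},
        }
    },
    headers={"Authorization": "Bearer <access token>"},
)
assert response.status_code == 202  # the GitHub task was queued
task_url = response.headers["Content-Location"]  # follow to poll the Task state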
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
dispatches=extend_schema(
|
||||
tags=["Integration"],
|
||||
|
||||
@@ -36,6 +36,14 @@ DATABASES = {
|
||||
"HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
|
||||
"PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
|
||||
},
|
||||
"admin_replica": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_REPLICA_DB", default=default_db_name),
|
||||
"USER": env("POSTGRES_ADMIN_USER", default="prowler"),
|
||||
"PASSWORD": env("POSTGRES_ADMIN_PASSWORD", default="S3cret"),
|
||||
"HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
|
||||
"PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
|
||||
},
|
||||
}
|
||||
|
||||
DATABASES["default"] = DATABASES["prowler_user"]
|
||||
|
||||
@@ -37,6 +37,14 @@ DATABASES = {
|
||||
"HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
|
||||
"PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
|
||||
},
|
||||
"admin_replica": {
|
||||
"ENGINE": "psqlextra.backend",
|
||||
"NAME": env("POSTGRES_REPLICA_DB", default=default_db_name),
|
||||
"USER": env("POSTGRES_ADMIN_USER"),
|
||||
"PASSWORD": env("POSTGRES_ADMIN_PASSWORD"),
|
||||
"HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
|
||||
"PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
|
||||
},
|
||||
}
|
||||
|
||||
DATABASES["default"] = DATABASES["prowler_user"]
|
||||
|
||||
@@ -5,6 +5,9 @@ IGNORED_EXCEPTIONS = [
|
||||
# Provider is not connected due to credentials errors
|
||||
"is not connected",
|
||||
"ProviderConnectionError",
|
||||
# Provider was deleted during a scan
|
||||
"ProviderDeletedException",
|
||||
"violates foreign key constraint",
|
||||
# Authentication Errors from AWS
|
||||
"InvalidToken",
|
||||
"AccessDeniedException",
|
||||
|
||||
@@ -11,7 +11,10 @@ from django.urls import reverse
|
||||
from django_celery_results.models import TaskResult
|
||||
from rest_framework import status
|
||||
from rest_framework.test import APIClient
|
||||
from tasks.jobs.backfill import backfill_resource_scan_summaries
|
||||
from tasks.jobs.backfill import (
|
||||
backfill_resource_scan_summaries,
|
||||
backfill_scan_category_summaries,
|
||||
)
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
@@ -36,6 +39,7 @@ from api.models import (
|
||||
SAMLConfiguration,
|
||||
SAMLDomainIndex,
|
||||
Scan,
|
||||
ScanCategorySummary,
|
||||
ScanSummary,
|
||||
StateChoices,
|
||||
StatusChoices,
|
||||
@@ -513,6 +517,12 @@ def providers_fixture(tenants_fixture):
|
||||
alias="mongodbatlas_testing",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
provider9 = Provider.objects.create(
|
||||
provider="alibabacloud",
|
||||
uid="1234567890123456",
|
||||
alias="alibabacloud_testing",
|
||||
tenant_id=tenant.id,
|
||||
)
|
||||
|
||||
return (
|
||||
provider1,
|
||||
@@ -523,6 +533,7 @@ def providers_fixture(tenants_fixture):
|
||||
provider6,
|
||||
provider7,
|
||||
provider8,
|
||||
provider9,
|
||||
)
|
||||
|
||||
|
||||
@@ -1271,6 +1282,113 @@ def latest_scan_finding(authenticated_client, providers_fixture, resources_fixtu
|
||||
return finding
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def findings_with_categories(scans_fixture, resources_fixture):
|
||||
scan = scans_fixture[0]
|
||||
resource = resources_fixture[0]
|
||||
|
||||
finding = Finding.objects.create(
|
||||
tenant_id=scan.tenant_id,
|
||||
uid="finding_with_categories_1",
|
||||
scan=scan,
|
||||
delta=None,
|
||||
status=Status.FAIL,
|
||||
status_extended="test status",
|
||||
impact=Severity.critical,
|
||||
impact_extended="test impact",
|
||||
severity=Severity.critical,
|
||||
raw_result={"status": Status.FAIL},
|
||||
check_id="genai_check",
|
||||
check_metadata={"CheckId": "genai_check"},
|
||||
categories=["gen-ai", "security"],
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
finding.add_resources([resource])
|
||||
backfill_resource_scan_summaries(str(scan.tenant_id), str(scan.id))
|
||||
return finding
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def findings_with_multiple_categories(scans_fixture, resources_fixture):
|
||||
scan = scans_fixture[0]
|
||||
resource1, resource2 = resources_fixture[:2]
|
||||
|
||||
finding1 = Finding.objects.create(
|
||||
tenant_id=scan.tenant_id,
|
||||
uid="finding_multi_cat_1",
|
||||
scan=scan,
|
||||
delta=None,
|
||||
status=Status.FAIL,
|
||||
status_extended="test status",
|
||||
impact=Severity.critical,
|
||||
impact_extended="test impact",
|
||||
severity=Severity.critical,
|
||||
raw_result={"status": Status.FAIL},
|
||||
check_id="genai_check",
|
||||
check_metadata={"CheckId": "genai_check"},
|
||||
categories=["gen-ai", "security"],
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
finding1.add_resources([resource1])
|
||||
|
||||
finding2 = Finding.objects.create(
|
||||
tenant_id=scan.tenant_id,
|
||||
uid="finding_multi_cat_2",
|
||||
scan=scan,
|
||||
delta=None,
|
||||
status=Status.FAIL,
|
||||
status_extended="test status 2",
|
||||
impact=Severity.high,
|
||||
impact_extended="test impact 2",
|
||||
severity=Severity.high,
|
||||
raw_result={"status": Status.FAIL},
|
||||
check_id="iam_check",
|
||||
check_metadata={"CheckId": "iam_check"},
|
||||
categories=["iam", "security"],
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
finding2.add_resources([resource2])
|
||||
|
||||
backfill_resource_scan_summaries(str(scan.tenant_id), str(scan.id))
|
||||
return finding1, finding2
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def latest_scan_finding_with_categories(
|
||||
authenticated_client, providers_fixture, resources_fixture
|
||||
):
|
||||
provider = providers_fixture[0]
|
||||
tenant_id = str(providers_fixture[0].tenant_id)
|
||||
resource = resources_fixture[0]
|
||||
scan = Scan.objects.create(
|
||||
name="latest completed scan with categories",
|
||||
provider=provider,
|
||||
trigger=Scan.TriggerChoices.MANUAL,
|
||||
state=StateChoices.COMPLETED,
|
||||
tenant_id=tenant_id,
|
||||
)
|
||||
finding = Finding.objects.create(
|
||||
tenant_id=tenant_id,
|
||||
uid="latest_finding_with_categories",
|
||||
scan=scan,
|
||||
delta="new",
|
||||
status=Status.FAIL,
|
||||
status_extended="test status",
|
||||
impact=Severity.critical,
|
||||
impact_extended="test impact",
|
||||
severity=Severity.critical,
|
||||
raw_result={"status": Status.FAIL},
|
||||
check_id="genai_iam_check",
|
||||
check_metadata={"CheckId": "genai_iam_check"},
|
||||
categories=["gen-ai", "iam"],
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
finding.add_resources([resource])
|
||||
backfill_resource_scan_summaries(tenant_id, str(scan.id))
|
||||
backfill_scan_category_summaries(tenant_id, str(scan.id))
|
||||
return finding
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def latest_scan_resource(authenticated_client, providers_fixture):
|
||||
provider = providers_fixture[0]
|
||||
@@ -1485,6 +1603,30 @@ def create_attack_surface_overview():
|
||||
return _create
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def create_scan_category_summary():
|
||||
def _create(
|
||||
tenant,
|
||||
scan,
|
||||
category,
|
||||
severity,
|
||||
total_findings=10,
|
||||
failed_findings=5,
|
||||
new_failed_findings=2,
|
||||
):
|
||||
return ScanCategorySummary.objects.create(
|
||||
tenant=tenant,
|
||||
scan=scan,
|
||||
category=category,
|
||||
severity=severity,
|
||||
total_findings=total_findings,
|
||||
failed_findings=failed_findings,
|
||||
new_failed_findings=new_failed_findings,
|
||||
)
|
||||
|
||||
return _create
|
||||
|
||||
|
||||
def get_authorization_header(access_token: str) -> dict:
|
||||
return {"Authorization": f"Bearer {access_token}"}
|
||||
|
||||
|
||||
@@ -61,4 +61,5 @@ def schedule_provider_scan(provider_instance: Provider):
|
||||
"tenant_id": str(provider_instance.tenant_id),
|
||||
"provider_id": provider_id,
|
||||
},
|
||||
countdown=5, # Avoid race conditions between the worker and the database
|
||||
)
|
||||
|
||||
@@ -1,14 +1,23 @@
|
||||
from collections import defaultdict
|
||||
from datetime import timedelta
|
||||
|
||||
from django.db.models import Sum
|
||||
from django.utils import timezone
|
||||
from tasks.jobs.scan import aggregate_category_counts
|
||||
|
||||
from api.db_router import READ_REPLICA_ALIAS
|
||||
from api.db_utils import rls_transaction
|
||||
from api.models import (
|
||||
ComplianceOverviewSummary,
|
||||
ComplianceRequirementOverview,
|
||||
DailySeveritySummary,
|
||||
Finding,
|
||||
Resource,
|
||||
ResourceFindingMapping,
|
||||
ResourceScanSummary,
|
||||
Scan,
|
||||
ScanCategorySummary,
|
||||
ScanSummary,
|
||||
StateChoices,
|
||||
)
|
||||
|
||||
@@ -175,3 +184,160 @@ def backfill_compliance_summaries(tenant_id: str, scan_id: str):
|
||||
)
|
||||
|
||||
return {"status": "backfilled", "inserted": len(summary_objects)}
|
||||
|
||||
|
||||
def backfill_daily_severity_summaries(tenant_id: str, days: int = None):
|
||||
"""
|
||||
Backfill DailySeveritySummary from completed scans.
|
||||
Groups by provider+date, keeps latest scan per day.
|
||||
"""
|
||||
created_count = 0
|
||||
updated_count = 0
|
||||
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
scan_filter = {
|
||||
"tenant_id": tenant_id,
|
||||
"state": StateChoices.COMPLETED,
|
||||
"completed_at__isnull": False,
|
||||
}
|
||||
|
||||
if days is not None:
|
||||
cutoff_date = timezone.now() - timedelta(days=days)
|
||||
scan_filter["completed_at__gte"] = cutoff_date
|
||||
|
||||
completed_scans = (
|
||||
Scan.objects.filter(**scan_filter)
|
||||
.order_by("provider_id", "-completed_at")
|
||||
.values("id", "provider_id", "completed_at")
|
||||
)
|
||||
|
||||
if not completed_scans:
|
||||
return {"status": "no scans to backfill"}
|
||||
|
||||
# Keep only latest scan per provider/day
|
||||
latest_scans_by_day = {}
|
||||
for scan in completed_scans:
|
||||
key = (scan["provider_id"], scan["completed_at"].date())
|
||||
if key not in latest_scans_by_day:
|
||||
latest_scans_by_day[key] = scan
|
||||
|
||||
# Process each provider/day
|
||||
for (provider_id, scan_date), scan in latest_scans_by_day.items():
|
||||
scan_id = scan["id"]
|
||||
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
severity_totals = (
|
||||
ScanSummary.objects.filter(
|
||||
tenant_id=tenant_id,
|
||||
scan_id=scan_id,
|
||||
)
|
||||
.values("severity")
|
||||
.annotate(total_fail=Sum("fail"), total_muted=Sum("muted"))
|
||||
)
|
||||
|
||||
severity_data = {
|
||||
"critical": 0,
|
||||
"high": 0,
|
||||
"medium": 0,
|
||||
"low": 0,
|
||||
"informational": 0,
|
||||
"muted": 0,
|
||||
}
|
||||
|
||||
for row in severity_totals:
|
||||
severity = row["severity"]
|
||||
if severity in severity_data:
|
||||
severity_data[severity] = row["total_fail"] or 0
|
||||
severity_data["muted"] += row["total_muted"] or 0
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
_, created = DailySeveritySummary.objects.update_or_create(
|
||||
tenant_id=tenant_id,
|
||||
provider_id=provider_id,
|
||||
date=scan_date,
|
||||
defaults={
|
||||
"scan_id": scan_id,
|
||||
"critical": severity_data["critical"],
|
||||
"high": severity_data["high"],
|
||||
"medium": severity_data["medium"],
|
||||
"low": severity_data["low"],
|
||||
"informational": severity_data["informational"],
|
||||
"muted": severity_data["muted"],
|
||||
},
|
||||
)
|
||||
|
||||
if created:
|
||||
created_count += 1
|
||||
else:
|
||||
updated_count += 1
|
||||
|
||||
return {
|
||||
"status": "backfilled",
|
||||
"created": created_count,
|
||||
"updated": updated_count,
|
||||
"total_days": len(latest_scans_by_day),
|
||||
}
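# Illustrative sketch (editor's addition, not from this changeset): the
# "latest scan per provider/day" dedup above depends on the queryset ordering
# ("provider_id", "-completed_at"), so the first row seen for a (provider, date)
# key is the newest one. Same idea on plain dicts with hypothetical data:
scans_newest_first = [
    {"id": "scan-late", "provider_id": "p1", "day": "2024-01-01"},
    {"id": "scan-early", "provider_id": "p1", "day": "2024-01-01"},
]
latest_per_day = {}
for row in scans_newest_first:
    latest_per_day.setdefault((row["provider_id"], row["day"]), row)
assert latest_per_day[("p1", "2024-01-01")]["id"] == "scan-late"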
|
||||
|
||||
|
||||
def backfill_scan_category_summaries(tenant_id: str, scan_id: str):
|
||||
"""
|
||||
Backfill ScanCategorySummary for a completed scan.
|
||||
|
||||
Aggregates category counts from all findings in the scan and creates
|
||||
one ScanCategorySummary row per (category, severity) combination.
|
||||
|
||||
Args:
|
||||
tenant_id: Target tenant UUID
|
||||
scan_id: Scan UUID to backfill
|
||||
|
||||
Returns:
|
||||
dict: Status indicating whether backfill was performed
|
||||
"""
|
||||
with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
|
||||
if ScanCategorySummary.objects.filter(
|
||||
tenant_id=tenant_id, scan_id=scan_id
|
||||
).exists():
|
||||
return {"status": "already backfilled"}
|
||||
|
||||
if not Scan.objects.filter(
|
||||
tenant_id=tenant_id,
|
||||
id=scan_id,
|
||||
state__in=(StateChoices.COMPLETED, StateChoices.FAILED),
|
||||
).exists():
|
||||
return {"status": "scan is not completed"}
|
||||
|
||||
category_counts: dict[tuple[str, str], dict[str, int]] = {}
|
||||
for finding in Finding.all_objects.filter(
|
||||
tenant_id=tenant_id, scan_id=scan_id
|
||||
).values("categories", "severity", "status", "delta", "muted"):
|
||||
aggregate_category_counts(
|
||||
categories=finding.get("categories") or [],
|
||||
severity=finding.get("severity"),
|
||||
status=finding.get("status"),
|
||||
delta=finding.get("delta"),
|
||||
muted=finding.get("muted", False),
|
||||
cache=category_counts,
|
||||
)
|
||||
|
||||
if not category_counts:
|
||||
return {"status": "no categories to backfill"}
|
||||
|
||||
category_summaries = [
|
||||
ScanCategorySummary(
|
||||
tenant_id=tenant_id,
|
||||
scan_id=scan_id,
|
||||
category=category,
|
||||
severity=severity,
|
||||
total_findings=counts["total"],
|
||||
failed_findings=counts["failed"],
|
||||
new_failed_findings=counts["new_failed"],
|
||||
)
|
||||
for (category, severity), counts in category_counts.items()
|
||||
]
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
ScanCategorySummary.objects.bulk_create(
|
||||
category_summaries, batch_size=500, ignore_conflicts=True
|
||||
)
|
||||
|
||||
return {"status": "backfilled", "categories_count": len(category_counts)}
|
||||
|
||||
@@ -27,6 +27,7 @@ from prowler.lib.outputs.compliance.c5.c5_gcp import GCPC5
|
||||
from prowler.lib.outputs.compliance.ccc.ccc_aws import CCC_AWS
|
||||
from prowler.lib.outputs.compliance.ccc.ccc_azure import CCC_Azure
|
||||
from prowler.lib.outputs.compliance.ccc.ccc_gcp import CCC_GCP
|
||||
from prowler.lib.outputs.compliance.cis.cis_alibabacloud import AlibabaCloudCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
|
||||
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
|
||||
@@ -50,6 +51,9 @@ from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
|
||||
AzureMitreAttack,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_gcp import GCPMitreAttack
|
||||
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_alibaba import (
|
||||
ProwlerThreatScoreAlibaba,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_aws import (
|
||||
ProwlerThreatScoreAWS,
|
||||
)
|
||||
@@ -128,6 +132,13 @@ COMPLIANCE_CLASS_MAP = {
|
||||
"oraclecloud": [
|
||||
(lambda name: name.startswith("cis_"), OracleCloudCIS),
|
||||
],
|
||||
"alibabacloud": [
|
||||
(lambda name: name.startswith("cis_"), AlibabaCloudCIS),
|
||||
(
|
||||
lambda name: name == "prowler_threatscore_alibabacloud",
|
||||
ProwlerThreatScoreAlibaba,
|
||||
),
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -17,6 +17,9 @@ from prowler.lib.outputs.html.html import HTML
|
||||
from prowler.lib.outputs.ocsf.ocsf import OCSF
|
||||
from prowler.providers.aws.aws_provider import AwsProvider
|
||||
from prowler.providers.aws.lib.s3.s3 import S3
|
||||
from prowler.providers.aws.lib.security_hub.exceptions.exceptions import (
|
||||
SecurityHubNoEnabledRegionsError,
|
||||
)
|
||||
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
|
||||
from prowler.providers.common.models import Connection
|
||||
|
||||
@@ -222,8 +225,9 @@ def get_security_hub_client_from_integration(
|
||||
)
|
||||
return True, security_hub
|
||||
else:
|
||||
# Reset regions information if connection fails
|
||||
# Reset regions information if connection fails and integration is not connected
|
||||
with rls_transaction(tenant_id, using=MainRouter.default_db):
|
||||
integration.connected = False
|
||||
integration.configuration["regions"] = {}
|
||||
integration.save()
|
||||
|
||||
@@ -330,15 +334,18 @@ def upload_security_hub_integration(
|
||||
)
|
||||
|
||||
if not connected:
|
||||
logger.error(
|
||||
f"Security Hub connection failed for integration {integration.id}: "
|
||||
f"{security_hub.error}"
|
||||
)
|
||||
with rls_transaction(
|
||||
tenant_id, using=MainRouter.default_db
|
||||
if isinstance(
|
||||
security_hub.error,
|
||||
SecurityHubNoEnabledRegionsError,
|
||||
):
|
||||
integration.connected = False
|
||||
integration.save()
|
||||
logger.warning(
|
||||
f"Security Hub integration {integration.id} has no enabled regions"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"Security Hub connection failed for integration {integration.id}: "
|
||||
f"{security_hub.error}"
|
||||
)
|
||||
break # Skip this integration
|
||||
|
||||
security_hub_client = security_hub
|
||||
@@ -409,22 +416,16 @@ def upload_security_hub_integration(
|
||||
logger.warning(
|
||||
f"Failed to archive previous findings: {str(archive_error)}"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Security Hub integration {integration.id} failed: {str(e)}"
|
||||
)
|
||||
continue
|
||||
|
||||
result = integration_executions == len(integrations)
|
||||
if result:
|
||||
logger.info(
|
||||
f"All Security Hub integrations completed successfully for provider {provider_id}"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"Some Security Hub integrations failed for provider {provider_id}"
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
@@ -435,6 +436,81 @@ def upload_security_hub_integration(
|
||||
return False
|
||||
|
||||
|
||||
def send_findings_to_github(
|
||||
tenant_id: str,
|
||||
integration_id: str,
|
||||
repository: str,
|
||||
labels: list[str],
|
||||
finding_ids: list[str],
|
||||
):
|
||||
with rls_transaction(tenant_id):
|
||||
integration = Integration.objects.get(id=integration_id)
|
||||
github_integration = initialize_prowler_integration(integration)
|
||||
|
||||
num_issues_created = 0
|
||||
for finding_id in finding_ids:
|
||||
with rls_transaction(tenant_id):
|
||||
finding_instance = (
|
||||
Finding.all_objects.select_related("scan__provider")
|
||||
.prefetch_related("resources")
|
||||
.get(id=finding_id)
|
||||
)
|
||||
|
||||
# Extract resource information
|
||||
resource = (
|
||||
finding_instance.resources.first()
|
||||
if finding_instance.resources.exists()
|
||||
else None
|
||||
)
|
||||
resource_uid = resource.uid if resource else ""
|
||||
resource_name = resource.name if resource else ""
|
||||
resource_tags = {}
|
||||
if resource and hasattr(resource, "tags"):
|
||||
resource_tags = resource.get_tags(tenant_id)
|
||||
|
||||
# Get region
|
||||
region = resource.region if resource and resource.region else ""
|
||||
|
||||
# Extract remediation information from check_metadata
|
||||
check_metadata = finding_instance.check_metadata
|
||||
remediation = check_metadata.get("remediation", {})
|
||||
recommendation = remediation.get("recommendation", {})
|
||||
remediation_code = remediation.get("code", {})
|
||||
|
||||
# Send the individual finding to GitHub
|
||||
result = github_integration.send_finding(
|
||||
check_id=finding_instance.check_id,
|
||||
check_title=check_metadata.get("checktitle", ""),
|
||||
severity=finding_instance.severity,
|
||||
status=finding_instance.status,
|
||||
status_extended=finding_instance.status_extended or "",
|
||||
provider=finding_instance.scan.provider.provider,
|
||||
region=region,
|
||||
resource_uid=resource_uid,
|
||||
resource_name=resource_name,
|
||||
risk=check_metadata.get("risk", ""),
|
||||
recommendation_text=recommendation.get("text", ""),
|
||||
recommendation_url=recommendation.get("url", ""),
|
||||
remediation_code_native_iac=remediation_code.get("nativeiac", ""),
|
||||
remediation_code_terraform=remediation_code.get("terraform", ""),
|
||||
remediation_code_cli=remediation_code.get("cli", ""),
|
||||
remediation_code_other=remediation_code.get("other", ""),
|
||||
resource_tags=resource_tags,
|
||||
compliance=finding_instance.compliance or {},
|
||||
repository=repository,
|
||||
issue_labels=labels,
|
||||
)
|
||||
if result:
|
||||
num_issues_created += 1
|
||||
else:
|
||||
logger.error(f"Failed to send finding {finding_id} to GitHub")
|
||||
|
||||
return {
|
||||
"created_count": num_issues_created,
|
||||
"failed_count": len(finding_ids) - num_issues_created,
|
||||
}
|
||||
|
||||
|
||||
def send_findings_to_jira(
|
||||
tenant_id: str,
|
||||
integration_id: str,
|
||||
|
||||
@@ -2,6 +2,8 @@ from typing import Dict
|
||||
|
||||
import boto3
|
||||
import openai
|
||||
from botocore import UNSIGNED
|
||||
from botocore.config import Config
|
||||
from botocore.exceptions import BotoCoreError, ClientError
|
||||
from celery.utils.log import get_task_logger
|
||||
|
||||
@@ -9,6 +11,74 @@ from api.models import LighthouseProviderConfiguration, LighthouseProviderModels
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
# OpenAI model prefixes to exclude from Lighthouse model selection.
|
||||
# These models don't support text chat completions and tool calling.
|
||||
EXCLUDED_OPENAI_MODEL_PREFIXES = (
|
||||
"dall-e", # Image generation
|
||||
"whisper", # Audio transcription
|
||||
"tts-", # Text-to-speech (tts-1, tts-1-hd, etc.)
|
||||
"sora", # Text-to-video (sora-2, sora-2-pro, etc.)
|
||||
"text-embedding", # Embeddings
|
||||
"embedding", # Embeddings (alternative naming)
|
||||
"text-moderation", # Content moderation
|
||||
"omni-moderation", # Content moderation
|
||||
"text-davinci", # Legacy completion models
|
||||
"text-curie", # Legacy completion models
|
||||
"text-babbage", # Legacy completion models
|
||||
"text-ada", # Legacy completion models
|
||||
"davinci", # Legacy completion models
|
||||
"curie", # Legacy completion models
|
||||
"babbage", # Legacy completion models
|
||||
"ada", # Legacy completion models
|
||||
"computer-use", # Computer control agent
|
||||
"gpt-image", # Image generation
|
||||
"gpt-audio", # Audio models
|
||||
"gpt-realtime", # Realtime voice API
|
||||
)
|
||||
|
||||
# OpenAI model substrings to exclude (patterns that can appear anywhere in model ID).
|
||||
# These patterns identify non-chat model variants.
|
||||
EXCLUDED_OPENAI_MODEL_SUBSTRINGS = (
|
||||
"-audio-", # Audio preview models (gpt-4o-audio-preview, etc.)
|
||||
"-realtime-", # Realtime preview models (gpt-4o-realtime-preview, etc.)
|
||||
"-transcribe", # Transcription models (gpt-4o-transcribe, etc.)
|
||||
"-tts", # TTS models (gpt-4o-mini-tts)
|
||||
"-instruct", # Legacy instruct models (gpt-3.5-turbo-instruct, etc.)
|
||||
)
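# Illustrative sketch (editor's addition, not from this changeset): the two tuples
# above act as a prefix blocklist plus a substring blocklist, and a model id is kept
# only if it matches neither. The helper name `_is_chat_capable` is hypothetical.
def _is_chat_capable(model_id: str) -> bool:
    if model_id.startswith(EXCLUDED_OPENAI_MODEL_PREFIXES):
        return False
    return not any(s in model_id for s in EXCLUDED_OPENAI_MODEL_SUBSTRINGS)


assert _is_chat_capable("gpt-4o")
assert not _is_chat_capable("whisper-1")              # excluded prefix
assert not _is_chat_capable("gpt-4o-audio-preview")   # excluded substring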
|
||||
|
||||
|
||||
def _extract_error_message(e: Exception) -> str:
|
||||
"""
|
||||
Extract a user-friendly error message from various exception types.
|
||||
|
||||
This function handles exceptions from different providers (OpenAI, AWS Bedrock)
|
||||
and extracts the most relevant error message for display to users.
|
||||
|
||||
Args:
|
||||
e: The exception to extract a message from.
|
||||
|
||||
Returns:
|
||||
str: A user-friendly error message.
|
||||
"""
|
||||
# For OpenAI SDK errors (>= v1.0)
|
||||
# OpenAI exceptions have a 'body' attribute with error details
|
||||
if hasattr(e, "body") and isinstance(e.body, dict):
|
||||
if "message" in e.body:
|
||||
return e.body["message"]
|
||||
# Sometimes nested under 'error' key
|
||||
if "error" in e.body and isinstance(e.body["error"], dict):
|
||||
return e.body["error"].get("message", str(e))
|
||||
|
||||
# For boto3 ClientError
|
||||
# Boto3 exceptions have a 'response' attribute with error details
|
||||
if hasattr(e, "response") and isinstance(e.response, dict):
|
||||
error_info = e.response.get("Error", {})
|
||||
if error_info.get("Message"):
|
||||
return error_info["Message"]
|
||||
|
||||
# Fallback to string representation for unknown error types
|
||||
return str(e)
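# Illustrative sketch (editor's addition, not from this changeset): hypothetical usage
# with a manually constructed boto3 ClientError; unknown exception types fall back to
# their str() representation.
from botocore.exceptions import ClientError

boto_error = ClientError(
    {"Error": {"Code": "AccessDeniedException", "Message": "Not authorized for Bedrock"}},
    "ListFoundationModels",
)
assert _extract_error_message(boto_error) == "Not authorized for Bedrock"
assert _extract_error_message(ValueError("boom")) == "boom"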
|
||||
|
||||
|
||||
def _extract_openai_api_key(
|
||||
provider_cfg: LighthouseProviderConfiguration,
|
||||
@@ -56,21 +126,39 @@ def _extract_bedrock_credentials(
|
||||
"""
|
||||
Safely extract AWS Bedrock credentials from a provider configuration.
|
||||
|
||||
Supports two authentication methods:
|
||||
1. AWS access key + secret key + region
|
||||
2. Bedrock API key (bearer token) + region
|
||||
|
||||
Args:
|
||||
provider_cfg (LighthouseProviderConfiguration): The provider configuration instance
|
||||
containing the credentials.
|
||||
|
||||
Returns:
|
||||
Dict[str, str] | None: Dictionary with 'access_key_id', 'secret_access_key', and
|
||||
'region' if present and valid, otherwise None.
|
||||
Dict[str, str] | None: Dictionary with either:
|
||||
- 'access_key_id', 'secret_access_key', and 'region' for access key auth
|
||||
- 'api_key' and 'region' for API key (bearer token) auth
|
||||
Returns None if credentials are invalid or missing.
|
||||
"""
|
||||
creds = provider_cfg.credentials_decoded
|
||||
if not isinstance(creds, dict):
|
||||
return None
|
||||
|
||||
region = creds.get("region")
|
||||
if not isinstance(region, str) or not region:
|
||||
return None
|
||||
|
||||
# Check for API key authentication first
|
||||
api_key = creds.get("api_key")
|
||||
if isinstance(api_key, str) and api_key:
|
||||
return {
|
||||
"api_key": api_key,
|
||||
"region": region,
|
||||
}
|
||||
|
||||
# Fall back to access key authentication
|
||||
access_key_id = creds.get("access_key_id")
|
||||
secret_access_key = creds.get("secret_access_key")
|
||||
region = creds.get("region")
|
||||
|
||||
# Validate all required fields are present and are strings
|
||||
if (
|
||||
@@ -78,8 +166,6 @@ def _extract_bedrock_credentials(
|
||||
or not access_key_id
|
||||
or not isinstance(secret_access_key, str)
|
||||
or not secret_access_key
|
||||
or not isinstance(region, str)
|
||||
or not region
|
||||
):
|
||||
return None
|
||||
|
||||
@@ -90,6 +176,51 @@ def _extract_bedrock_credentials(
|
||||
}
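# Illustrative sketch (editor's addition, not from this changeset): the two credential
# shapes _extract_bedrock_credentials can return, exercised with a SimpleNamespace
# standing in for a LighthouseProviderConfiguration (a hypothetical stub).
from types import SimpleNamespace

api_key_cfg = SimpleNamespace(
    credentials_decoded={"api_key": "bedrock-api-key", "region": "eu-west-1"}
)
assert _extract_bedrock_credentials(api_key_cfg) == {
    "api_key": "bedrock-api-key",
    "region": "eu-west-1",
}

access_key_cfg = SimpleNamespace(
    credentials_decoded={
        "access_key_id": "AKIAEXAMPLE",
        "secret_access_key": "secret",
        "region": "eu-west-1",
    }
)
assert _extract_bedrock_credentials(access_key_cfg) == {
    "access_key_id": "AKIAEXAMPLE",
    "secret_access_key": "secret",
    "region": "eu-west-1",
}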
|
||||
|
||||
|
||||
def _create_bedrock_client(
    bedrock_creds: Dict[str, str], service_name: str = "bedrock"
):
    """
    Create a boto3 Bedrock client with the appropriate authentication method.

    Supports two authentication methods:
    1. API key (bearer token) - uses unsigned requests with Authorization header
    2. AWS access key + secret key - uses standard SigV4 signing

    Args:
        bedrock_creds: Dictionary with either:
            - 'api_key' and 'region' for API key (bearer token) auth
            - 'access_key_id', 'secret_access_key', and 'region' for access key auth
        service_name: The Bedrock service name. Use 'bedrock' for control plane
            operations (list_foundation_models, etc.) or 'bedrock-runtime' for
            inference operations.

    Returns:
        boto3 client configured for the specified Bedrock service.
    """
    region = bedrock_creds["region"]

    if "api_key" in bedrock_creds:
        bearer_token = bedrock_creds["api_key"]
        client = boto3.client(
            service_name=service_name,
            region_name=region,
            config=Config(signature_version=UNSIGNED),
        )

        def inject_bearer_token(request, **kwargs):
            request.headers["Authorization"] = f"Bearer {bearer_token}"

        client.meta.events.register("before-send.*.*", inject_bearer_token)
        return client

    return boto3.client(
        service_name=service_name,
        region_name=region,
        aws_access_key_id=bedrock_creds["access_key_id"],
        aws_secret_access_key=bedrock_creds["secret_access_key"],
    )
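# Illustrative sketch (editor's addition, not from this changeset): hypothetical usage
# of the helper above. The same credentials dict drives either auth path, and
# service_name switches between the control-plane API and the runtime API. The
# placeholder keys are fake; real calls need valid credentials and entitlements.
control_plane = _create_bedrock_client(
    {"api_key": "bedrock-api-key", "region": "us-east-1"}
)
runtime = _create_bedrock_client(
    {"access_key_id": "AKIAEXAMPLE", "secret_access_key": "secret", "region": "us-east-1"},
    service_name="bedrock-runtime",
)
# models = control_plane.list_foundation_models()  # control-plane clients only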
|
||||
|
||||
|
||||
def check_lighthouse_provider_connection(provider_config_id: str) -> Dict:
|
||||
"""
|
||||
Validate a Lighthouse provider configuration by calling the provider API and
|
||||
@@ -141,12 +272,7 @@ def check_lighthouse_provider_connection(provider_config_id: str) -> Dict:
|
||||
}
|
||||
|
||||
# Test connection by listing foundation models
|
||||
bedrock_client = boto3.client(
|
||||
"bedrock",
|
||||
aws_access_key_id=bedrock_creds["access_key_id"],
|
||||
aws_secret_access_key=bedrock_creds["secret_access_key"],
|
||||
region_name=bedrock_creds["region"],
|
||||
)
|
||||
bedrock_client = _create_bedrock_client(bedrock_creds)
|
||||
_ = bedrock_client.list_foundation_models()
|
||||
|
||||
elif (
|
||||
@@ -179,32 +305,54 @@ def check_lighthouse_provider_connection(provider_config_id: str) -> Dict:
|
||||
return {"connected": True, "error": None}
|
||||
|
||||
    except Exception as e:
        error_message = _extract_error_message(e)
        logger.warning(
            "%s connection check failed: %s", provider_cfg.provider_type, error_message
        )
        provider_cfg.is_active = False
        provider_cfg.save()
        return {"connected": False, "error": error_message}
|
||||
|
||||
|
||||
def _fetch_openai_models(api_key: str) -> Dict[str, str]:
|
||||
"""
|
||||
Fetch available models from OpenAI API.
|
||||
|
||||
Filters out models that don't support text input/output and tool calling,
|
||||
such as image generation (DALL-E), audio transcription (Whisper),
|
||||
text-to-speech (TTS), embeddings, and moderation models.
|
||||
|
||||
Args:
|
||||
api_key: OpenAI API key for authentication.
|
||||
|
||||
Returns:
|
||||
Dict mapping model_id to model_name. For OpenAI, both are the same
|
||||
as the API doesn't provide separate display names.
|
||||
as the API doesn't provide separate display names. Only includes
|
||||
models that support text input, text output or tool calling.
|
||||
|
||||
Raises:
|
||||
Exception: If the API call fails.
|
||||
"""
|
||||
client = openai.OpenAI(api_key=api_key)
|
||||
models = client.models.list()
|
||||
# OpenAI uses model.id for both ID and display name
|
||||
return {m.id: m.id for m in getattr(models, "data", [])}
|
||||
|
||||
# Filter models to only include those supporting chat completions + tool calling
|
||||
filtered_models = {}
|
||||
for model in getattr(models, "data", []):
|
||||
model_id = model.id
|
||||
|
||||
# Skip if model ID starts with excluded prefixes
|
||||
if model_id.startswith(EXCLUDED_OPENAI_MODEL_PREFIXES):
|
||||
continue
|
||||
|
||||
# Skip if model ID contains excluded substrings
|
||||
if any(substring in model_id for substring in EXCLUDED_OPENAI_MODEL_SUBSTRINGS):
|
||||
continue
|
||||
|
||||
# Include model (supports chat completions + tool calling)
|
||||
filtered_models[model_id] = model_id
|
||||
|
||||
return filtered_models
|
||||
|
||||
|
||||
def _fetch_openai_compatible_models(base_url: str, api_key: str) -> Dict[str, str]:
|
||||
@@ -232,105 +380,219 @@ def _fetch_openai_compatible_models(base_url: str, api_key: str) -> Dict[str, st
|
||||
return available_models
|
||||
|
||||
|
||||
def _fetch_bedrock_models(bedrock_creds: Dict[str, str]) -> Dict[str, str]:
|
||||
def _get_region_prefix(region: str) -> str:
|
||||
"""
|
||||
Fetch available models from AWS Bedrock with entitlement verification.
|
||||
Determine geographic prefix for AWS region.
|
||||
|
||||
This function:
|
||||
1. Lists foundation models with TEXT modality support
|
||||
2. Lists inference profiles with TEXT modality support
|
||||
3. Verifies user has entitlement access to each model
|
||||
Examples: ap-south-1 -> apac, us-east-1 -> us, eu-west-1 -> eu
|
||||
"""
|
||||
if region.startswith(("us-", "ca-", "sa-")):
|
||||
return "us"
|
||||
elif region.startswith("eu-"):
|
||||
return "eu"
|
||||
elif region.startswith("ap-"):
|
||||
return "apac"
|
||||
return "global"
|
||||
|
||||
Args:
|
||||
bedrock_creds: Dictionary with 'access_key_id', 'secret_access_key', and 'region'.
|
||||
|
||||
def _clean_inference_profile_name(profile_name: str) -> str:
|
||||
"""
|
||||
Remove geographic prefix from inference profile name.
|
||||
|
||||
AWS includes geographic prefixes in profile names which are redundant
|
||||
since the profile ID already contains this information.
|
||||
|
||||
Examples:
|
||||
"APAC Anthropic Claude 3.5 Sonnet" -> "Anthropic Claude 3.5 Sonnet"
|
||||
"GLOBAL Claude Sonnet 4.5" -> "Claude Sonnet 4.5"
|
||||
"US Anthropic Claude 3 Haiku" -> "Anthropic Claude 3 Haiku"
|
||||
"""
|
||||
prefixes = ["APAC ", "GLOBAL ", "US ", "EU ", "APAC-", "GLOBAL-", "US-", "EU-"]
|
||||
|
||||
for prefix in prefixes:
|
||||
if profile_name.upper().startswith(prefix.upper()):
|
||||
return profile_name[len(prefix) :].strip()
|
||||
|
||||
return profile_name
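# Illustrative sketch (editor's addition, not from this changeset): expected behaviour
# of the two helpers above, taken from their docstring examples.
assert _get_region_prefix("ap-south-1") == "apac"
assert _get_region_prefix("us-east-1") == "us"
assert _get_region_prefix("me-central-1") == "global"  # no recognised prefix
assert (
    _clean_inference_profile_name("APAC Anthropic Claude 3.5 Sonnet")
    == "Anthropic Claude 3.5 Sonnet"
)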
|
||||
|
||||
|
||||
def _supports_text_modality(input_modalities: list, output_modalities: list) -> bool:
|
||||
"""Check if model supports TEXT for both input and output."""
|
||||
return "TEXT" in input_modalities and "TEXT" in output_modalities
|
||||
|
||||
|
||||
def _get_foundation_model_modalities(
|
||||
bedrock_client, model_id: str
|
||||
) -> tuple[list, list] | None:
|
||||
"""
|
||||
Fetch input and output modalities for a foundation model.
|
||||
|
||||
Returns:
|
||||
Dict mapping model_id to model_name for all accessible models.
|
||||
|
||||
Raises:
|
||||
BotoCoreError, ClientError: If AWS API calls fail.
|
||||
(input_modalities, output_modalities) or None if fetch fails
|
||||
"""
|
||||
bedrock_client = boto3.client(
|
||||
"bedrock",
|
||||
aws_access_key_id=bedrock_creds["access_key_id"],
|
||||
aws_secret_access_key=bedrock_creds["secret_access_key"],
|
||||
region_name=bedrock_creds["region"],
|
||||
)
|
||||
try:
|
||||
model_info = bedrock_client.get_foundation_model(modelIdentifier=model_id)
|
||||
model_details = model_info.get("modelDetails", {})
|
||||
input_mods = model_details.get("inputModalities", [])
|
||||
output_mods = model_details.get("outputModalities", [])
|
||||
return (input_mods, output_mods)
|
||||
except (BotoCoreError, ClientError) as e:
|
||||
logger.debug("Could not fetch model details for %s: %s", model_id, str(e))
|
||||
return None
|
||||
|
||||
models_to_check: Dict[str, str] = {}
|
||||
|
||||
# Step 1: Get foundation models with TEXT modality
|
||||
def _extract_foundation_model_ids(profile_models: list) -> list[str]:
|
||||
"""
|
||||
Extract foundation model IDs from inference profile model ARNs.
|
||||
|
||||
Args:
|
||||
profile_models: List of model references from inference profile
|
||||
|
||||
Returns:
|
||||
List of foundation model IDs extracted from ARNs
|
||||
"""
|
||||
model_ids = []
|
||||
for model_ref in profile_models:
|
||||
model_arn = model_ref.get("modelArn", "")
|
||||
if "foundation-model/" in model_arn:
|
||||
model_id = model_arn.split("foundation-model/")[1]
|
||||
model_ids.append(model_id)
|
||||
return model_ids
|
||||
|
||||
|
||||
def _build_inference_profile_map(
|
||||
bedrock_client, region: str
|
||||
) -> Dict[str, tuple[str, str]]:
|
||||
"""
|
||||
Build map of foundation_model_id -> best inference profile.
|
||||
|
||||
Returns:
|
||||
Dict mapping foundation_model_id to (profile_id, profile_name)
|
||||
Only includes profiles with TEXT modality support
|
||||
Prefers region-matched profiles over others
|
||||
"""
|
||||
region_prefix = _get_region_prefix(region)
|
||||
model_to_profile: Dict[str, tuple[str, str]] = {}
|
||||
|
||||
try:
|
||||
response = bedrock_client.list_inference_profiles()
|
||||
profiles = response.get("inferenceProfileSummaries", [])
|
||||
|
||||
for profile in profiles:
|
||||
profile_id = profile.get("inferenceProfileId")
|
||||
profile_name = profile.get("inferenceProfileName")
|
||||
|
||||
if not profile_id or not profile_name:
|
||||
continue
|
||||
|
||||
profile_models = profile.get("models", [])
|
||||
if not profile_models:
|
||||
continue
|
||||
|
||||
foundation_model_ids = _extract_foundation_model_ids(profile_models)
|
||||
if not foundation_model_ids:
|
||||
continue
|
||||
|
||||
modalities = _get_foundation_model_modalities(
|
||||
bedrock_client, foundation_model_ids[0]
|
||||
)
|
||||
if not modalities:
|
||||
continue
|
||||
|
||||
input_mods, output_mods = modalities
|
||||
if not _supports_text_modality(input_mods, output_mods):
|
||||
continue
|
||||
|
||||
is_preferred = profile_id.startswith(f"{region_prefix}.")
|
||||
clean_name = _clean_inference_profile_name(profile_name)
|
||||
|
||||
for foundation_model_id in foundation_model_ids:
|
||||
if foundation_model_id not in model_to_profile:
|
||||
model_to_profile[foundation_model_id] = (profile_id, clean_name)
|
||||
elif is_preferred and not model_to_profile[foundation_model_id][
|
||||
0
|
||||
].startswith(f"{region_prefix}."):
|
||||
model_to_profile[foundation_model_id] = (profile_id, clean_name)
|
||||
|
||||
except (BotoCoreError, ClientError) as e:
|
||||
logger.info("Could not fetch inference profiles in %s: %s", region, str(e))
|
||||
|
||||
return model_to_profile
|
||||
|
||||
|
||||
def _check_on_demand_availability(bedrock_client, model_id: str) -> bool:
|
||||
"""Check if an ON_DEMAND foundation model is entitled and available."""
|
||||
try:
|
||||
availability = bedrock_client.get_foundation_model_availability(
|
||||
modelId=model_id
|
||||
)
|
||||
entitlement = availability.get("entitlementAvailability")
|
||||
return entitlement == "AVAILABLE"
|
||||
except (BotoCoreError, ClientError) as e:
|
||||
logger.debug("Could not check availability for %s: %s", model_id, str(e))
|
||||
return False
|
||||
|
||||
|
||||
def _fetch_bedrock_models(bedrock_creds: Dict[str, str]) -> Dict[str, str]:
|
||||
"""
|
||||
Fetch available models from AWS Bedrock, preferring inference profiles over ON_DEMAND.
|
||||
|
||||
Strategy:
|
||||
1. Build map of foundation_model -> best_inference_profile (with TEXT validation)
|
||||
2. For each TEXT-capable foundation model:
|
||||
- Use inference profile ID if available (preferred - better throughput)
|
||||
- Fallback to foundation model ID if only ON_DEMAND available
|
||||
3. Verify entitlement for ON_DEMAND models
|
||||
|
||||
Args:
|
||||
bedrock_creds: Dict with 'region' and auth credentials
|
||||
|
||||
Returns:
|
||||
Dict mapping model_id to model_name. IDs can be:
|
||||
- Inference profile IDs (e.g., "apac.anthropic.claude-3-5-sonnet-20240620-v1:0")
|
||||
- Foundation model IDs (e.g., "anthropic.claude-3-5-sonnet-20240620-v1:0")
|
||||
"""
|
||||
bedrock_client = _create_bedrock_client(bedrock_creds)
|
||||
region = bedrock_creds["region"]
|
||||
|
||||
model_to_profile = _build_inference_profile_map(bedrock_client, region)
|
||||
|
||||
foundation_response = bedrock_client.list_foundation_models()
|
||||
model_summaries = foundation_response.get("modelSummaries", [])
|
||||
|
||||
for model in model_summaries:
|
||||
# Check if model supports TEXT input and output modality
|
||||
input_modalities = model.get("inputModalities", [])
|
||||
output_modalities = model.get("outputModalities", [])
|
||||
models_to_return: Dict[str, str] = {}
|
||||
on_demand_models: set[str] = set()
|
||||
|
||||
if "TEXT" not in input_modalities or "TEXT" not in output_modalities:
|
||||
for model in model_summaries:
|
||||
input_mods = model.get("inputModalities", [])
|
||||
output_mods = model.get("outputModalities", [])
|
||||
|
||||
if not _supports_text_modality(input_mods, output_mods):
|
||||
continue
|
||||
|
||||
model_id = model.get("modelId")
|
||||
if not model_id:
|
||||
model_name = model.get("modelName")
|
||||
|
||||
if not model_id or not model_name:
|
||||
continue
|
||||
|
||||
inference_types = model.get("inferenceTypesSupported", [])
|
||||
if model_id in model_to_profile:
|
||||
profile_id, profile_name = model_to_profile[model_id]
|
||||
models_to_return[profile_id] = profile_name
|
||||
else:
|
||||
inference_types = model.get("inferenceTypesSupported", [])
|
||||
if "ON_DEMAND" in inference_types:
|
||||
models_to_return[model_id] = model_name
|
||||
on_demand_models.add(model_id)
|
||||
|
||||
# Only include models with ON_DEMAND inference support
|
||||
if "ON_DEMAND" in inference_types:
|
||||
models_to_check[model_id] = model["modelName"]
|
||||
|
||||
# Step 2: Get inference profiles
|
||||
try:
|
||||
inference_profiles_response = bedrock_client.list_inference_profiles()
|
||||
inference_profiles = inference_profiles_response.get(
|
||||
"inferenceProfileSummaries", []
|
||||
)
|
||||
|
||||
for profile in inference_profiles:
|
||||
# Check if profile supports TEXT modality
|
||||
input_modalities = profile.get("inputModalities", [])
|
||||
output_modalities = profile.get("outputModalities", [])
|
||||
|
||||
if "TEXT" not in input_modalities or "TEXT" not in output_modalities:
|
||||
continue
|
||||
|
||||
profile_id = profile.get("inferenceProfileId")
|
||||
if profile_id:
|
||||
models_to_check[profile_id] = profile["inferenceProfileName"]
|
||||
|
||||
except (BotoCoreError, ClientError) as e:
|
||||
logger.info(
|
||||
"Could not fetch inference profiles in %s: %s",
|
||||
bedrock_creds["region"],
|
||||
str(e),
|
||||
)
|
||||
|
||||
# Step 3: Verify entitlement availability for each model
|
||||
available_models: Dict[str, str] = {}
|
||||
|
||||
for model_id, model_name in models_to_check.items():
|
||||
try:
|
||||
availability = bedrock_client.get_foundation_model_availability(
|
||||
modelId=model_id
|
||||
)
|
||||
|
||||
entitlement = availability.get("entitlementAvailability")
|
||||
|
||||
# Only include models user has access to
|
||||
if entitlement == "AVAILABLE":
|
||||
for model_id, model_name in models_to_return.items():
|
||||
if model_id in on_demand_models:
|
||||
if _check_on_demand_availability(bedrock_client, model_id):
|
||||
available_models[model_id] = model_name
|
||||
else:
|
||||
logger.debug(
|
||||
"Skipping model %s - entitlement status: %s", model_id, entitlement
|
||||
)
|
||||
|
||||
except (BotoCoreError, ClientError) as e:
|
||||
logger.debug(
|
||||
"Could not check availability for model %s: %s", model_id, str(e)
|
||||
)
|
||||
continue
|
||||
else:
|
||||
available_models[model_id] = model_name
|
||||
|
||||
return available_models
|
||||
|
||||
@@ -359,7 +621,6 @@ def refresh_lighthouse_provider_models(provider_config_id: str) -> Dict:
|
||||
provider_cfg = LighthouseProviderConfiguration.objects.get(pk=provider_config_id)
|
||||
fetched_models: Dict[str, str] = {}
|
||||
|
||||
# Fetch models from the appropriate provider
|
||||
try:
|
||||
if (
|
||||
provider_cfg.provider_type
|
||||
@@ -414,12 +675,13 @@ def refresh_lighthouse_provider_models(provider_config_id: str) -> Dict:
|
||||
}
|
||||
|
||||
    except Exception as e:
        error_message = _extract_error_message(e)
        logger.warning(
            "Unexpected error refreshing %s models: %s",
            provider_cfg.provider_type,
            error_message,
        )
        return {"created": 0, "updated": 0, "deleted": 0, "error": error_message}
|
||||
|
||||
# Upsert models into the catalog
|
||||
created = 0
|
||||
|
||||
@@ -243,15 +243,28 @@ def _safe_getattr(obj, attr: str, default: str = "N/A") -> str:
|
||||
|
||||
|
||||
def _create_info_table_style() -> TableStyle:
|
||||
"""Create a reusable table style for information/metadata tables."""
|
||||
"""Create a reusable table style for information/metadata tables.
|
||||
|
||||
ReportLab TableStyle coordinate system:
|
||||
- Format: (COMMAND, (start_col, start_row), (end_col, end_row), value)
|
||||
- Coordinates use (column, row) format, starting at (0, 0) for top-left cell
|
||||
- Negative indices work like Python slicing: -1 means "last row/column"
|
||||
- (0, 0) to (0, -1) = entire first column (all rows)
|
||||
- (0, 0) to (-1, 0) = entire first row (all columns)
|
||||
- (0, 0) to (-1, -1) = entire table
|
||||
- Styles are applied in order; later rules override earlier ones
|
||||
"""
|
||||
return TableStyle(
|
||||
[
|
||||
# Column 0 (labels): blue background with white text
|
||||
("BACKGROUND", (0, 0), (0, -1), COLOR_BLUE),
|
||||
("TEXTCOLOR", (0, 0), (0, -1), COLOR_WHITE),
|
||||
("FONTNAME", (0, 0), (0, -1), "FiraCode"),
|
||||
# Column 1 (values): light blue background with gray text
|
||||
("BACKGROUND", (1, 0), (1, -1), COLOR_BG_BLUE),
|
||||
("TEXTCOLOR", (1, 0), (1, -1), COLOR_GRAY),
|
||||
("FONTNAME", (1, 0), (1, -1), "PlusJakartaSans"),
|
||||
# Apply to entire table
|
||||
("ALIGN", (0, 0), (-1, -1), "LEFT"),
|
||||
("VALIGN", (0, 0), (-1, -1), "TOP"),
|
||||
("FONTSIZE", (0, 0), (-1, -1), 11),
|
||||
@@ -265,19 +278,30 @@ def _create_info_table_style() -> TableStyle:
|
||||
|
||||
|
||||
def _create_header_table_style(header_color: colors.Color = None) -> TableStyle:
|
||||
"""Create a reusable table style for tables with headers."""
|
||||
"""Create a reusable table style for tables with headers.
|
||||
|
||||
ReportLab TableStyle coordinate system:
|
||||
- Format: (COMMAND, (start_col, start_row), (end_col, end_row), value)
|
||||
- (0, 0) to (-1, 0) = entire first row (header row)
|
||||
- (1, 1) to (-1, -1) = all data cells (excludes header row and first column)
|
||||
- See _create_info_table_style() for full coordinate system documentation
|
||||
"""
|
||||
if header_color is None:
|
||||
header_color = COLOR_BLUE
|
||||
|
||||
return TableStyle(
|
||||
[
|
||||
# Header row (row 0): colored background with white text
|
||||
("BACKGROUND", (0, 0), (-1, 0), header_color),
|
||||
("TEXTCOLOR", (0, 0), (-1, 0), COLOR_WHITE),
|
||||
("FONTNAME", (0, 0), (-1, 0), "FiraCode"),
|
||||
("FONTSIZE", (0, 0), (-1, 0), 10),
|
||||
# Apply to entire table
|
||||
("ALIGN", (0, 0), (-1, -1), "CENTER"),
|
||||
("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
|
||||
# Data cells (excluding header): smaller font
|
||||
("FONTSIZE", (1, 1), (-1, -1), 9),
|
||||
# Apply to entire table
|
||||
("GRID", (0, 0), (-1, -1), 1, COLOR_GRID_GRAY),
|
||||
("LEFTPADDING", (0, 0), (-1, -1), PADDING_MEDIUM),
|
||||
("RIGHTPADDING", (0, 0), (-1, -1), PADDING_MEDIUM),
|
||||
@@ -288,18 +312,30 @@ def _create_header_table_style(header_color: colors.Color = None) -> TableStyle:
|
||||
|
||||
|
||||
def _create_findings_table_style() -> TableStyle:
|
||||
"""Create a reusable table style for findings tables."""
|
||||
"""Create a reusable table style for findings tables.
|
||||
|
||||
ReportLab TableStyle coordinate system:
|
||||
- Format: (COMMAND, (start_col, start_row), (end_col, end_row), value)
|
||||
- (0, 0) to (-1, 0) = entire first row (header row)
|
||||
- (0, 0) to (0, 0) = only the top-left cell
|
||||
- See _create_info_table_style() for full coordinate system documentation
|
||||
"""
|
||||
return TableStyle(
|
||||
[
|
||||
# Header row (row 0): colored background with white text
|
||||
("BACKGROUND", (0, 0), (-1, 0), COLOR_BLUE),
|
||||
("TEXTCOLOR", (0, 0), (-1, 0), COLOR_WHITE),
|
||||
("FONTNAME", (0, 0), (-1, 0), "FiraCode"),
|
||||
# Only top-left cell centered (for index/number column)
|
||||
("ALIGN", (0, 0), (0, 0), "CENTER"),
|
||||
# Apply to entire table
|
||||
("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
|
||||
("FONTSIZE", (0, 0), (-1, -1), 9),
|
||||
("GRID", (0, 0), (-1, -1), 0.1, COLOR_BORDER_GRAY),
|
||||
# Remove padding only from top-left cell
|
||||
("LEFTPADDING", (0, 0), (0, 0), 0),
|
||||
("RIGHTPADDING", (0, 0), (0, 0), 0),
|
||||
# Apply to entire table
|
||||
("TOPPADDING", (0, 0), (-1, -1), PADDING_SMALL),
|
||||
("BOTTOMPADDING", (0, 0), (-1, -1), PADDING_SMALL),
|
||||
]
|
||||
@@ -1103,11 +1139,15 @@ def generate_threatscore_report(
|
||||
elements.append(Spacer(1, 0.5 * inch))
|
||||
|
||||
# Add compliance information table
|
||||
provider_alias = provider_obj.alias or "N/A"
|
||||
info_data = [
|
||||
["Framework:", compliance_framework],
|
||||
["ID:", compliance_id],
|
||||
["Name:", Paragraph(compliance_name, normal_center)],
|
||||
["Version:", compliance_version],
|
||||
["Provider:", provider_type.upper()],
|
||||
["Account ID:", provider_obj.uid],
|
||||
["Alias:", provider_alias],
|
||||
["Scan ID:", scan_id],
|
||||
["Description:", Paragraph(compliance_description, normal_center)],
|
||||
]
|
||||
@@ -2059,12 +2099,15 @@ def generate_ens_report(
|
||||
elements.append(Spacer(1, 0.5 * inch))
|
||||
|
||||
# Add compliance information table
|
||||
provider_alias = provider_obj.alias or "N/A"
|
||||
info_data = [
|
||||
["Framework:", compliance_framework],
|
||||
["ID:", compliance_id],
|
||||
["Nombre:", Paragraph(compliance_name, normal_center)],
|
||||
["Versión:", compliance_version],
|
||||
["Proveedor:", provider_type.upper()],
|
||||
["Account ID:", provider_obj.uid],
|
||||
["Alias:", provider_alias],
|
||||
["Scan ID:", scan_id],
|
||||
["Descripción:", Paragraph(compliance_description, normal_center)],
|
||||
]
|
||||
@@ -2072,12 +2115,12 @@ def generate_ens_report(
|
||||
info_table.setStyle(
|
||||
TableStyle(
|
||||
[
|
||||
("BACKGROUND", (0, 0), (0, 6), colors.Color(0.2, 0.4, 0.6)),
|
||||
("TEXTCOLOR", (0, 0), (0, 6), colors.white),
|
||||
("FONTNAME", (0, 0), (0, 6), "FiraCode"),
|
||||
("BACKGROUND", (1, 0), (1, 6), colors.Color(0.95, 0.97, 1.0)),
|
||||
("TEXTCOLOR", (1, 0), (1, 6), colors.Color(0.2, 0.2, 0.2)),
|
||||
("FONTNAME", (1, 0), (1, 6), "PlusJakartaSans"),
|
||||
("BACKGROUND", (0, 0), (0, -1), colors.Color(0.2, 0.4, 0.6)),
|
||||
("TEXTCOLOR", (0, 0), (0, -1), colors.white),
|
||||
("FONTNAME", (0, 0), (0, -1), "FiraCode"),
|
||||
("BACKGROUND", (1, 0), (1, -1), colors.Color(0.95, 0.97, 1.0)),
|
||||
("TEXTCOLOR", (1, 0), (1, -1), colors.Color(0.2, 0.2, 0.2)),
|
||||
("FONTNAME", (1, 0), (1, -1), "PlusJakartaSans"),
|
||||
("ALIGN", (0, 0), (-1, -1), "LEFT"),
|
||||
("VALIGN", (0, 0), (-1, -1), "TOP"),
|
||||
("FONTSIZE", (0, 0), (-1, -1), 11),
|
||||
@@ -2238,12 +2281,20 @@ def generate_ens_report(
        [
            "CUMPLE",
            str(passed_requirements),
            f"{(passed_requirements / total_requirements * 100):.1f}%",
            (
                f"{(passed_requirements / total_requirements * 100):.1f}%"
                if total_requirements > 0
                else "0.0%"
            ),
        ],
        [
            "NO CUMPLE",
            str(failed_requirements),
            f"{(failed_requirements / total_requirements * 100):.1f}%",
            (
                f"{(failed_requirements / total_requirements * 100):.1f}%"
                if total_requirements > 0
                else "0.0%"
            ),
        ],
        ["TOTAL", str(total_requirements), "100%"],
    ]
@@ -2989,11 +3040,14 @@ def generate_nis2_report(
    elements.append(Spacer(1, 0.3 * inch))

    # Compliance metadata table
    provider_alias = provider_obj.alias or "N/A"
    metadata_data = [
        ["Framework:", compliance_framework],
        ["Name:", Paragraph(compliance_name, normal_center)],
        ["Version:", compliance_version or "N/A"],
        ["Provider:", provider_type.upper()],
        ["Account ID:", provider_obj.uid],
        ["Alias:", provider_alias],
        ["Scan ID:", scan_id],
        ["Description:", Paragraph(compliance_description, normal_center)],
    ]
@@ -3477,6 +3531,7 @@ def generate_compliance_reports(
        "gcp",
        "m365",
        "kubernetes",
        "alibabacloud",
    ]:
        logger.info(
            f"Provider {provider_id} ({provider_type}) is not supported for ThreatScore report"

@@ -8,6 +8,7 @@ from collections import defaultdict
from datetime import datetime, timezone
from typing import Any

import sentry_sdk
from celery.utils.log import get_task_logger
from config.env import env
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
@@ -29,6 +30,7 @@ from api.models import (
    AttackSurfaceOverview,
    ComplianceOverviewSummary,
    ComplianceRequirementOverview,
    DailySeveritySummary,
    Finding,
    MuteRule,
    Processor,
@@ -38,6 +40,7 @@ from api.models (
    ResourceScanSummary,
    ResourceTag,
    Scan,
    ScanCategorySummary,
    ScanSummary,
    StateChoices,
)
@@ -76,7 +79,6 @@ FINDINGS_MICRO_BATCH_SIZE = env.int("DJANGO_FINDINGS_MICRO_BATCH_SIZE", default=
# Controls how many rows each ORM bulk_create/bulk_update call sends to Postgres
SCAN_DB_BATCH_SIZE = env.int("DJANGO_SCAN_DB_BATCH_SIZE", default=500)


ATTACK_SURFACE_PROVIDER_COMPATIBILITY = {
    "internet-exposed": None,  # Compatible with all providers
    "secrets": None,  # Compatible with all providers
@@ -87,6 +89,40 @@ ATTACK_SURFACE_PROVIDER_COMPATIBILITY = {
_ATTACK_SURFACE_MAPPING_CACHE: dict[str, dict] = {}


def aggregate_category_counts(
    categories: list[str],
    severity: str,
    status: str,
    delta: str | None,
    muted: bool,
    cache: dict[tuple[str, str], dict[str, int]],
) -> None:
    """
    Increment category counters in-place for a finding.

    Args:
        categories: List of categories from finding metadata.
        severity: Severity level (e.g., "high", "medium").
        status: Finding status as string ("FAIL", "PASS").
        delta: Delta value as string ("new", "changed") or None.
        muted: Whether the finding is muted.
        cache: Dict {(category, severity): {"total", "failed", "new_failed"}} to update.
    """
    is_failed = status == "FAIL" and not muted
    is_new_failed = is_failed and delta == "new"

    for cat in categories:
        key = (cat, severity)
        if key not in cache:
            cache[key] = {"total": 0, "failed": 0, "new_failed": 0}
        if not muted:
            cache[key]["total"] += 1
        if is_failed:
            cache[key]["failed"] += 1
        if is_new_failed:
            cache[key]["new_failed"] += 1


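As a quick illustration of the cache shape this helper maintains, here is a small usage sketch (the category and severity values are made up):

```python
cache: dict[tuple[str, str], dict[str, int]] = {}

# A passing finding and a new failing finding that share the "security" category.
aggregate_category_counts(
    categories=["security"], severity="high", status="PASS",
    delta=None, muted=False, cache=cache,
)
aggregate_category_counts(
    categories=["security"], severity="high", status="FAIL",
    delta="new", muted=False, cache=cache,
)

assert cache == {("security", "high"): {"total": 2, "failed": 1, "new_failed": 1}}
```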
def _get_attack_surface_mapping_from_provider(provider_type: str) -> dict:
    global _ATTACK_SURFACE_MAPPING_CACHE

@@ -370,7 +406,7 @@ def _create_compliance_summaries(
    if summary_objects:
        with rls_transaction(tenant_id):
            ComplianceOverviewSummary.objects.bulk_create(
                summary_objects, batch_size=500
                summary_objects, batch_size=500, ignore_conflicts=True
            )


@@ -397,6 +433,7 @@ def _process_finding_micro_batch(
    unique_resources: set,
    scan_resource_cache: set,
    mute_rules_cache: dict,
    scan_categories_cache: dict[tuple[str, str], dict[str, int]],
) -> None:
    """
    Process a micro-batch of findings and persist them using bulk operations.
@@ -417,6 +454,7 @@ def _process_finding_micro_batch(
        unique_resources: Set tracking (uid, region) pairs seen in the scan.
        scan_resource_cache: Set of tuples used to create `ResourceScanSummary` rows.
        mute_rules_cache: Map of finding UID -> mute reason gathered before the scan.
        scan_categories_cache: Dict tracking category counts {(category, severity): {"total", "failed", "new_failed"}}.
    """
    # Accumulate objects for bulk operations
    findings_to_create = []
@@ -572,11 +610,12 @@ def _process_finding_micro_batch(
            resource_failed_findings_cache[resource_uid] += 1

        # Create finding object (don't save yet)
        check_metadata = finding.get_metadata()
        finding_instance = Finding(
            tenant_id=tenant_id,
            uid=finding_uid,
            delta=delta,
            check_metadata=finding.get_metadata(),
            check_metadata=check_metadata,
            status=status,
            status_extended=finding.status_extended,
            severity=finding.severity,
@@ -589,6 +628,7 @@ def _process_finding_micro_batch(
            muted_at=datetime.now(tz=timezone.utc) if is_muted else None,
            muted_reason=muted_reason,
            compliance=finding.compliance,
            categories=check_metadata.get("categories", []) or [],
        )
        findings_to_create.append(finding_instance)
        resource_denormalized_data.append((finding_instance, resource_instance))
@@ -603,6 +643,16 @@ def _process_finding_micro_batch(
            )
        )

        # Track categories with counts for ScanCategorySummary by (category, severity)
        aggregate_category_counts(
            categories=check_metadata.get("categories", []) or [],
            severity=finding.severity.value,
            status=status.value,
            delta=delta.value if delta else None,
            muted=is_muted,
            cache=scan_categories_cache,
        )

    # Bulk operations within single transaction
    with rls_transaction(tenant_id):
        # Bulk create findings
@@ -702,6 +752,7 @@ def perform_prowler_scan(
    exception = None
    unique_resources = set()
    scan_resource_cache: set[tuple[str, str, str, str]] = set()
    scan_categories_cache: dict[tuple[str, str], dict[str, int]] = {}
    start_time = time.time()
    exc = None

@@ -791,6 +842,7 @@ def perform_prowler_scan(
                unique_resources=unique_resources,
                scan_resource_cache=scan_resource_cache,
                mute_rules_cache=mute_rules_cache,
                scan_categories_cache=scan_categories_cache,
            )

            # Update scan progress
@@ -850,13 +902,33 @@ def perform_prowler_scan(
                resource_scan_summaries, batch_size=500, ignore_conflicts=True
            )
    except Exception as filter_exception:
        import sentry_sdk

        sentry_sdk.capture_exception(filter_exception)
        logger.error(
            f"Error storing filter values for scan {scan_id}: {filter_exception}"
        )

    try:
        if scan_categories_cache:
            category_summaries = [
                ScanCategorySummary(
                    tenant_id=tenant_id,
                    scan_id=scan_id,
                    category=category,
                    severity=severity,
                    total_findings=counts["total"],
                    failed_findings=counts["failed"],
                    new_failed_findings=counts["new_failed"],
                )
                for (category, severity), counts in scan_categories_cache.items()
            ]
            with rls_transaction(tenant_id):
                ScanCategorySummary.objects.bulk_create(
                    category_summaries, batch_size=500, ignore_conflicts=True
                )
    except Exception as cat_exception:
        sentry_sdk.capture_exception(cat_exception)
        logger.error(f"Error storing categories for scan {scan_id}: {cat_exception}")

    serializer = ScanTaskSerializer(instance=scan_instance)
    return serializer.data

@@ -1348,3 +1420,72 @@ def aggregate_attack_surface(tenant_id: str, scan_id: str):
        )
    else:
        logger.info(f"No attack surface overview records created for scan {scan_id}")


def aggregate_daily_severity(tenant_id: str, scan_id: str):
    """Aggregate scan severity counts into DailySeveritySummary (one record per provider/day)."""
    with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
        scan = Scan.objects.filter(
            tenant_id=tenant_id,
            id=scan_id,
            state=StateChoices.COMPLETED,
        ).first()

        if not scan:
            logger.warning(f"Scan {scan_id} not found or not completed")
            return {"status": "scan is not completed"}

        provider_id = scan.provider_id
        scan_date = scan.completed_at.date()

        severity_totals = (
            ScanSummary.objects.filter(
                tenant_id=tenant_id,
                scan_id=scan_id,
            )
            .values("severity")
            .annotate(total_fail=Sum("fail"), total_muted=Sum("muted"))
        )

        severity_data = {
            "critical": 0,
            "high": 0,
            "medium": 0,
            "low": 0,
            "informational": 0,
            "muted": 0,
        }

        for row in severity_totals:
            severity = row["severity"]
            if severity in severity_data:
                severity_data[severity] = row["total_fail"] or 0
                severity_data["muted"] += row["total_muted"] or 0

    with rls_transaction(tenant_id):
        summary, created = DailySeveritySummary.objects.update_or_create(
            tenant_id=tenant_id,
            provider_id=provider_id,
            date=scan_date,
            defaults={
                "scan_id": scan_id,
                "critical": severity_data["critical"],
                "high": severity_data["high"],
                "medium": severity_data["medium"],
                "low": severity_data["low"],
                "informational": severity_data["informational"],
                "muted": severity_data["muted"],
            },
        )

    action = "created" if created else "updated"
    logger.info(
        f"Daily severity summary {action} for provider {provider_id} on {scan_date}"
    )

    return {
        "status": action,
        "provider_id": str(provider_id),
        "date": str(scan_date),
        "severity_data": severity_data,
    }

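For reference, the `values("severity").annotate(...)` queryset above yields one dict per severity present in the scan summary; a hypothetical result, and the `severity_data` it folds into, might look like this:

```python
# Hypothetical aggregated rows returned by the ScanSummary query above.
severity_totals = [
    {"severity": "critical", "total_fail": 2, "total_muted": 0},
    {"severity": "high", "total_fail": 5, "total_muted": 1},
    {"severity": "low", "total_fail": 0, "total_muted": 3},
]

# After the loop, severity_data would read:
# {"critical": 2, "high": 5, "medium": 0, "low": 0, "informational": 0, "muted": 4}
```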
@@ -10,7 +10,9 @@ from config.django.base import DJANGO_FINDINGS_BATCH_SIZE, DJANGO_TMP_OUTPUT_DIR
from django_celery_beat.models import PeriodicTask
from tasks.jobs.backfill import (
    backfill_compliance_summaries,
    backfill_daily_severity_summaries,
    backfill_resource_scan_summaries,
    backfill_scan_category_summaries,
)
from tasks.jobs.connection import (
    check_integration_connection,
@@ -26,6 +28,7 @@ from tasks.jobs.export import (
    _upload_to_s3,
)
from tasks.jobs.integrations import (
    send_findings_to_github,
    send_findings_to_jira,
    upload_s3_integration,
    upload_security_hub_integration,
@@ -38,6 +41,7 @@ from tasks.jobs.muting import mute_historical_findings
from tasks.jobs.report import generate_compliance_reports_job
from tasks.jobs.scan import (
    aggregate_attack_surface,
    aggregate_daily_severity,
    aggregate_findings,
    create_compliance_requirements,
    perform_prowler_scan,
@@ -47,7 +51,7 @@ from tasks.utils import batched, get_next_execution_datetime
from api.compliance import get_compliance_frameworks
from api.db_router import READ_REPLICA_ALIAS
from api.db_utils import rls_transaction
from api.decorators import set_tenant
from api.decorators import handle_provider_deletion, set_tenant
from api.models import Finding, Integration, Provider, Scan, ScanSummary, StateChoices
from api.utils import initialize_prowler_provider
from api.v1.serializers import ScanTaskSerializer
@@ -58,6 +62,58 @@ from prowler.lib.outputs.finding import Finding as FindingOutput
logger = get_task_logger(__name__)


def _cleanup_orphan_scheduled_scans(
    tenant_id: str,
    provider_id: str,
    scheduler_task_id: int,
) -> int:
    """
    TEMPORARY WORKAROUND: Clean up orphan AVAILABLE scans.

    Detects and removes AVAILABLE scans that were never used due to an
    issue during the first scheduled scan setup.

    An AVAILABLE scan is considered orphan if there's also a SCHEDULED scan for
    the same provider with the same scheduler_task_id. This situation indicates
    that the first scan execution didn't find the AVAILABLE scan (most likely
    because it had not been committed yet) and created a new one, leaving the
    AVAILABLE scan orphaned.

    Args:
        tenant_id: The tenant ID.
        provider_id: The provider ID.
        scheduler_task_id: The PeriodicTask ID that triggers these scans.

    Returns:
        Number of orphan scans deleted (0 if none found).
    """
    orphan_available_scans = Scan.objects.filter(
        tenant_id=tenant_id,
        provider_id=provider_id,
        trigger=Scan.TriggerChoices.SCHEDULED,
        state=StateChoices.AVAILABLE,
        scheduler_task_id=scheduler_task_id,
    )

    scheduled_scan_exists = Scan.objects.filter(
        tenant_id=tenant_id,
        provider_id=provider_id,
        trigger=Scan.TriggerChoices.SCHEDULED,
        state=StateChoices.SCHEDULED,
        scheduler_task_id=scheduler_task_id,
    ).exists()

    if scheduled_scan_exists and orphan_available_scans.exists():
        orphan_count = orphan_available_scans.count()
        logger.warning(
            f"[WORKAROUND] Found {orphan_count} orphan AVAILABLE scan(s) for "
            f"provider {provider_id} alongside a SCHEDULED scan. Cleaning up orphans..."
        )
        orphan_available_scans.delete()
        return orphan_count

    return 0


def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str):
    """
    Helper function to perform tasks after a scan is completed.
@@ -75,8 +131,11 @@ def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str)
    )
    chain(
        perform_scan_summary_task.si(tenant_id=tenant_id, scan_id=scan_id),
        generate_outputs_task.si(
            scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
        group(
            aggregate_daily_severity_task.si(tenant_id=tenant_id, scan_id=scan_id),
            generate_outputs_task.si(
                scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
            ),
        ),
        group(
            # Use optimized task that generates both reports with shared queries
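To make the restructured workflow above easier to follow, here is a stripped-down sketch of how Celery composes these primitives: each step of a `chain` waits for the previous one, while the members of a `group` fan out in parallel (the task bodies here are placeholders, not the real Prowler tasks):

```python
from celery import chain, group, shared_task

@shared_task
def summarize(scan_id):  # stand-in for perform_scan_summary_task
    ...

@shared_task
def daily_severity(scan_id):  # stand-in for aggregate_daily_severity_task
    ...

@shared_task
def outputs(scan_id):  # stand-in for generate_outputs_task
    ...

# The summary runs first; once it finishes, the severity aggregation and the
# output generation run in parallel as one group.
workflow = chain(
    summarize.si(scan_id="..."),
    group(
        daily_severity.si(scan_id="..."),
        outputs.si(scan_id="..."),
    ),
)
workflow.apply_async()
```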
@@ -144,6 +203,7 @@ def delete_provider_task(provider_id: str, tenant_id: str):


@shared_task(base=RLSTask, name="scan-perform", queue="scans")
@handle_provider_deletion
def perform_scan_task(
    tenant_id: str, scan_id: str, provider_id: str, checks_to_execute: list[str] = None
):
@@ -176,6 +236,7 @@ def perform_scan_task(


@shared_task(base=RLSTask, bind=True, name="scan-perform-scheduled", queue="scans")
@handle_provider_deletion
def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
    """
    Task to perform a scheduled Prowler scan on a given provider.
@@ -239,6 +300,14 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
            return serializer.data

    next_scan_datetime = get_next_execution_datetime(task_id, provider_id)

    # TEMPORARY WORKAROUND: Clean up orphan scans from transaction isolation issue
    _cleanup_orphan_scheduled_scans(
        tenant_id=tenant_id,
        provider_id=provider_id,
        scheduler_task_id=periodic_task_instance.id,
    )

    scan_instance, _ = Scan.objects.get_or_create(
        tenant_id=tenant_id,
        provider_id=provider_id,
@@ -281,6 +350,7 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):


@shared_task(name="scan-summary", queue="overview")
@handle_provider_deletion
def perform_scan_summary_task(tenant_id: str, scan_id: str):
    return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)

@@ -296,6 +366,7 @@ def delete_tenant_task(tenant_id: str):
    queue="scan-reports",
)
@set_tenant(keep_tenant=True)
@handle_provider_deletion
def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
    """
    Process findings in batches and generate output files in multiple formats.
@@ -491,6 +562,7 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):


@shared_task(name="backfill-scan-resource-summaries", queue="backfill")
@handle_provider_deletion
def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):
    """
    Tries to backfill the resource scan summaries table for a given scan.
@@ -503,6 +575,7 @@ def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):


@shared_task(name="backfill-compliance-summaries", queue="backfill")
@handle_provider_deletion
def backfill_compliance_summaries_task(tenant_id: str, scan_id: str):
    """
    Tries to backfill compliance overview summaries for a completed scan.
@@ -517,7 +590,29 @@ def backfill_compliance_summaries_task(tenant_id: str, scan_id: str):
    return backfill_compliance_summaries(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(name="backfill-daily-severity-summaries", queue="backfill")
def backfill_daily_severity_summaries_task(tenant_id: str, days: int = None):
    """Backfill DailySeveritySummary from historical scans. Use days param to limit scope."""
    return backfill_daily_severity_summaries(tenant_id=tenant_id, days=days)


@shared_task(name="backfill-scan-category-summaries", queue="backfill")
@handle_provider_deletion
def backfill_scan_category_summaries_task(tenant_id: str, scan_id: str):
    """
    Backfill ScanCategorySummary for a completed scan.

    Aggregates unique categories from findings and creates a summary row.

    Args:
        tenant_id (str): The tenant identifier.
        scan_id (str): The scan identifier.
    """
    return backfill_scan_category_summaries(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(base=RLSTask, name="scan-compliance-overviews", queue="compliance")
@handle_provider_deletion
def create_compliance_requirements_task(tenant_id: str, scan_id: str):
    """
    Creates detailed compliance requirement records for a scan.
@@ -534,6 +629,7 @@ def create_compliance_requirements_task(tenant_id: str, scan_id: str):


@shared_task(name="scan-attack-surface-overviews", queue="overview")
@handle_provider_deletion
def aggregate_attack_surface_task(tenant_id: str, scan_id: str):
    """
    Creates attack surface overview records for a scan.
@@ -548,6 +644,13 @@ def aggregate_attack_surface_task(tenant_id: str, scan_id: str):
    return aggregate_attack_surface(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(name="scan-daily-severity", queue="overview")
@handle_provider_deletion
def aggregate_daily_severity_task(tenant_id: str, scan_id: str):
    """Aggregate scan severity into DailySeveritySummary for findings_severity/timeseries endpoint."""
    return aggregate_daily_severity(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(base=RLSTask, name="lighthouse-connection-check")
@set_tenant
def check_lighthouse_connection_task(lighthouse_config_id: str, tenant_id: str = None):
@@ -586,6 +689,7 @@ def refresh_lighthouse_provider_models_task(


@shared_task(name="integration-check")
@handle_provider_deletion
def check_integrations_task(tenant_id: str, provider_id: str, scan_id: str = None):
    """
    Check and execute all configured integrations for a provider.
@@ -650,6 +754,7 @@ def check_integrations_task(tenant_id: str, provider_id: str, scan_id: str = Non
    name="integration-s3",
    queue="integrations",
)
@handle_provider_deletion
def s3_integration_task(
    tenant_id: str,
    provider_id: str,
@@ -687,6 +792,23 @@ def security_hub_integration_task(
    return upload_security_hub_integration(tenant_id, provider_id, scan_id)


@shared_task(
    base=RLSTask,
    name="integration-github",
    queue="integrations",
)
def github_integration_task(
    tenant_id: str,
    integration_id: str,
    repository: str,
    labels: list[str],
    finding_ids: list[str],
):
    return send_findings_to_github(
        tenant_id, integration_id, repository, labels, finding_ids
    )


@shared_task(
    base=RLSTask,
    name="integration-jira",
@@ -709,6 +831,7 @@ def jira_integration_task(
    name="scan-compliance-reports",
    queue="scan-reports",
)
@handle_provider_deletion
def generate_compliance_reports_task(tenant_id: str, scan_id: str, provider_id: str):
    """
    Optimized task to generate ThreatScore, ENS, and NIS2 reports with shared queries.

@@ -4,14 +4,19 @@ import pytest
|
||||
from tasks.jobs.backfill import (
|
||||
backfill_compliance_summaries,
|
||||
backfill_resource_scan_summaries,
|
||||
backfill_scan_category_summaries,
|
||||
)
|
||||
|
||||
from api.models import (
|
||||
ComplianceOverviewSummary,
|
||||
Finding,
|
||||
ResourceScanSummary,
|
||||
Scan,
|
||||
ScanCategorySummary,
|
||||
StateChoices,
|
||||
)
|
||||
from prowler.lib.check.models import Severity
|
||||
from prowler.lib.outputs.finding import Status
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
@@ -46,6 +51,45 @@ def get_not_completed_scans(providers_fixture):
|
||||
return scan_1, scan_2
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def findings_with_categories_fixture(scans_fixture, resources_fixture):
|
||||
scan = scans_fixture[0]
|
||||
resource = resources_fixture[0]
|
||||
|
||||
finding = Finding.objects.create(
|
||||
tenant_id=scan.tenant_id,
|
||||
uid="finding_with_categories",
|
||||
scan=scan,
|
||||
delta="new",
|
||||
status=Status.FAIL,
|
||||
status_extended="test status",
|
||||
impact=Severity.critical,
|
||||
impact_extended="test impact",
|
||||
severity=Severity.critical,
|
||||
raw_result={"status": Status.FAIL},
|
||||
check_id="test_check",
|
||||
check_metadata={"CheckId": "test_check"},
|
||||
categories=["gen-ai", "security"],
|
||||
first_seen_at="2024-01-02T00:00:00Z",
|
||||
)
|
||||
finding.add_resources([resource])
|
||||
return finding
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def scan_category_summary_fixture(scans_fixture):
|
||||
scan = scans_fixture[0]
|
||||
return ScanCategorySummary.objects.create(
|
||||
tenant_id=scan.tenant_id,
|
||||
scan=scan,
|
||||
category="existing-category",
|
||||
severity=Severity.critical,
|
||||
total_findings=1,
|
||||
failed_findings=0,
|
||||
new_failed_findings=0,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestBackfillResourceScanSummaries:
|
||||
def test_already_backfilled(self, resource_scan_summary_data):
|
||||
@@ -172,3 +216,47 @@ class TestBackfillComplianceSummaries:
|
||||
assert summary.requirements_failed == expected_counts["requirements_failed"]
|
||||
assert summary.requirements_manual == expected_counts["requirements_manual"]
|
||||
assert summary.total_requirements == expected_counts["total_requirements"]
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestBackfillScanCategorySummaries:
|
||||
def test_already_backfilled(self, scan_category_summary_fixture):
|
||||
tenant_id = scan_category_summary_fixture.tenant_id
|
||||
scan_id = scan_category_summary_fixture.scan_id
|
||||
|
||||
result = backfill_scan_category_summaries(str(tenant_id), str(scan_id))
|
||||
|
||||
assert result == {"status": "already backfilled"}
|
||||
|
||||
def test_not_completed_scan(self, get_not_completed_scans):
|
||||
for scan in get_not_completed_scans:
|
||||
result = backfill_scan_category_summaries(str(scan.tenant_id), str(scan.id))
|
||||
assert result == {"status": "scan is not completed"}
|
||||
|
||||
def test_no_categories_to_backfill(self, scans_fixture):
|
||||
scan = scans_fixture[1] # Failed scan with no findings
|
||||
result = backfill_scan_category_summaries(str(scan.tenant_id), str(scan.id))
|
||||
assert result == {"status": "no categories to backfill"}
|
||||
|
||||
def test_successful_backfill(self, findings_with_categories_fixture):
|
||||
finding = findings_with_categories_fixture
|
||||
tenant_id = str(finding.tenant_id)
|
||||
scan_id = str(finding.scan_id)
|
||||
|
||||
result = backfill_scan_category_summaries(tenant_id, scan_id)
|
||||
|
||||
# 2 categories × 1 severity = 2 rows
|
||||
assert result == {"status": "backfilled", "categories_count": 2}
|
||||
|
||||
summaries = ScanCategorySummary.objects.filter(
|
||||
tenant_id=tenant_id, scan_id=scan_id
|
||||
)
|
||||
assert summaries.count() == 2
|
||||
categories = set(summaries.values_list("category", flat=True))
|
||||
assert categories == {"gen-ai", "security"}
|
||||
|
||||
for summary in summaries:
|
||||
assert summary.severity == Severity.critical
|
||||
assert summary.total_findings == 1
|
||||
assert summary.failed_findings == 1
|
||||
assert summary.new_failed_findings == 1
|
||||
|
||||
@@ -28,6 +28,7 @@ class TestScheduleProviderScan:
|
||||
"tenant_id": str(provider_instance.tenant_id),
|
||||
"provider_id": str(provider_instance.id),
|
||||
},
|
||||
countdown=5,
|
||||
)
|
||||
|
||||
task_name = f"scan-perform-scheduled-{provider_instance.id}"
|
||||
|
||||
@@ -1199,9 +1199,6 @@ class TestSecurityHubIntegrationUploads:
|
||||
)
|
||||
|
||||
assert result is False
|
||||
# Integration should be marked as disconnected
|
||||
integration.save.assert_called_once()
|
||||
assert integration.connected is False
|
||||
|
||||
@patch("tasks.jobs.integrations.ASFF")
|
||||
@patch("tasks.jobs.integrations.FindingOutput")
|
||||
|
||||
@@ -20,6 +20,7 @@ from tasks.jobs.scan import (
|
||||
_process_finding_micro_batch,
|
||||
_store_resources,
|
||||
aggregate_attack_surface,
|
||||
aggregate_category_counts,
|
||||
aggregate_findings,
|
||||
create_compliance_requirements,
|
||||
perform_prowler_scan,
|
||||
@@ -1377,6 +1378,7 @@ class TestProcessFindingMicroBatch:
|
||||
unique_resources: set[tuple[str, str]] = set()
|
||||
scan_resource_cache: set[tuple[str, str, str, str]] = set()
|
||||
mute_rules_cache = {}
|
||||
scan_categories_cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
|
||||
with (
|
||||
patch("tasks.jobs.scan.rls_transaction", new=noop_rls_transaction),
|
||||
@@ -1394,6 +1396,7 @@ class TestProcessFindingMicroBatch:
|
||||
unique_resources,
|
||||
scan_resource_cache,
|
||||
mute_rules_cache,
|
||||
scan_categories_cache,
|
||||
)
|
||||
|
||||
created_finding = Finding.objects.get(uid=finding.uid)
|
||||
@@ -1486,6 +1489,7 @@ class TestProcessFindingMicroBatch:
|
||||
unique_resources: set[tuple[str, str]] = set()
|
||||
scan_resource_cache: set[tuple[str, str, str, str]] = set()
|
||||
mute_rules_cache = {finding.uid: "Muted via rule"}
|
||||
scan_categories_cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
|
||||
with (
|
||||
patch("tasks.jobs.scan.rls_transaction", new=noop_rls_transaction),
|
||||
@@ -1503,6 +1507,7 @@ class TestProcessFindingMicroBatch:
|
||||
unique_resources,
|
||||
scan_resource_cache,
|
||||
mute_rules_cache,
|
||||
scan_categories_cache,
|
||||
)
|
||||
|
||||
existing_resource.refresh_from_db()
|
||||
@@ -1610,6 +1615,7 @@ class TestProcessFindingMicroBatch:
|
||||
unique_resources: set[tuple[str, str]] = set()
|
||||
scan_resource_cache: set[tuple[str, str, str, str]] = set()
|
||||
mute_rules_cache = {}
|
||||
scan_categories_cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
|
||||
with (
|
||||
patch("tasks.jobs.scan.rls_transaction", new=noop_rls_transaction),
|
||||
@@ -1628,6 +1634,7 @@ class TestProcessFindingMicroBatch:
|
||||
unique_resources,
|
||||
scan_resource_cache,
|
||||
mute_rules_cache,
|
||||
scan_categories_cache,
|
||||
)
|
||||
|
||||
# Verify the long UID finding was NOT created
|
||||
@@ -1648,6 +1655,118 @@ class TestProcessFindingMicroBatch:
|
||||
for call in warning_calls
|
||||
)
|
||||
|
||||
def test_process_finding_micro_batch_tracks_categories(
|
||||
self, tenants_fixture, scans_fixture
|
||||
):
|
||||
tenant = tenants_fixture[0]
|
||||
scan = scans_fixture[0]
|
||||
provider = scan.provider
|
||||
|
||||
finding1 = FakeFinding(
|
||||
uid="finding-cat-1",
|
||||
status=StatusChoices.PASS,
|
||||
status_extended="all good",
|
||||
severity=Severity.low,
|
||||
check_id="genai_check",
|
||||
resource_uid="arn:aws:bedrock:::model/test",
|
||||
resource_name="test-model",
|
||||
region="us-east-1",
|
||||
service_name="bedrock",
|
||||
resource_type="model",
|
||||
resource_tags={},
|
||||
resource_metadata={},
|
||||
resource_details={},
|
||||
partition="aws",
|
||||
raw={},
|
||||
compliance={},
|
||||
metadata={"categories": ["gen-ai", "security"]},
|
||||
muted=False,
|
||||
)
|
||||
|
||||
finding2 = FakeFinding(
|
||||
uid="finding-cat-2",
|
||||
status=StatusChoices.FAIL,
|
||||
status_extended="bad",
|
||||
severity=Severity.high,
|
||||
check_id="iam_check",
|
||||
resource_uid="arn:aws:iam:::user/test",
|
||||
resource_name="test-user",
|
||||
region="us-east-1",
|
||||
service_name="iam",
|
||||
resource_type="user",
|
||||
resource_tags={},
|
||||
resource_metadata={},
|
||||
resource_details={},
|
||||
partition="aws",
|
||||
raw={},
|
||||
compliance={},
|
||||
metadata={"categories": ["security", "iam"]},
|
||||
muted=False,
|
||||
)
|
||||
|
||||
resource_cache = {}
|
||||
tag_cache = {}
|
||||
last_status_cache = {}
|
||||
resource_failed_findings_cache = {}
|
||||
unique_resources: set[tuple[str, str]] = set()
|
||||
scan_resource_cache: set[tuple[str, str, str, str]] = set()
|
||||
mute_rules_cache = {}
|
||||
scan_categories_cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
|
||||
with (
|
||||
patch("tasks.jobs.scan.rls_transaction", new=noop_rls_transaction),
|
||||
patch("api.db_utils.rls_transaction", new=noop_rls_transaction),
|
||||
):
|
||||
_process_finding_micro_batch(
|
||||
str(tenant.id),
|
||||
[finding1, finding2],
|
||||
scan,
|
||||
provider,
|
||||
resource_cache,
|
||||
tag_cache,
|
||||
last_status_cache,
|
||||
resource_failed_findings_cache,
|
||||
unique_resources,
|
||||
scan_resource_cache,
|
||||
mute_rules_cache,
|
||||
scan_categories_cache,
|
||||
)
|
||||
|
||||
# finding1: PASS, severity=low, categories=["gen-ai", "security"]
|
||||
# finding2: FAIL, severity=high, categories=["security", "iam"]
|
||||
# Keys are (category, severity) tuples
|
||||
assert set(scan_categories_cache.keys()) == {
|
||||
("gen-ai", "low"),
|
||||
("security", "low"),
|
||||
("security", "high"),
|
||||
("iam", "high"),
|
||||
}
|
||||
assert scan_categories_cache[("gen-ai", "low")] == {
|
||||
"total": 1,
|
||||
"failed": 0,
|
||||
"new_failed": 0,
|
||||
}
|
||||
assert scan_categories_cache[("security", "low")] == {
|
||||
"total": 1,
|
||||
"failed": 0,
|
||||
"new_failed": 0,
|
||||
}
|
||||
assert scan_categories_cache[("security", "high")] == {
|
||||
"total": 1,
|
||||
"failed": 1,
|
||||
"new_failed": 1,
|
||||
}
|
||||
assert scan_categories_cache[("iam", "high")] == {
|
||||
"total": 1,
|
||||
"failed": 1,
|
||||
"new_failed": 1,
|
||||
}
|
||||
|
||||
created_finding1 = Finding.objects.get(uid="finding-cat-1")
|
||||
created_finding2 = Finding.objects.get(uid="finding-cat-2")
|
||||
assert set(created_finding1.categories) == {"gen-ai", "security"}
|
||||
assert set(created_finding2.categories) == {"security", "iam"}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestCreateComplianceRequirements:
|
||||
@@ -3756,3 +3875,150 @@ class TestAggregateAttackSurface:
|
||||
aggregate_attack_surface(str(tenant.id), str(scan.id))
|
||||
|
||||
mock_select_related.assert_called_once_with("provider")
|
||||
|
||||
|
||||
class TestAggregateCategoryCounts:
|
||||
"""Test aggregate_category_counts helper function."""
|
||||
|
||||
def test_aggregate_category_counts_basic(self):
|
||||
"""Test basic category counting for a non-muted PASS finding."""
|
||||
cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
aggregate_category_counts(
|
||||
categories=["security", "iam"],
|
||||
severity="high",
|
||||
status="PASS",
|
||||
delta=None,
|
||||
muted=False,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
assert ("security", "high") in cache
|
||||
assert ("iam", "high") in cache
|
||||
assert cache[("security", "high")] == {"total": 1, "failed": 0, "new_failed": 0}
|
||||
assert cache[("iam", "high")] == {"total": 1, "failed": 0, "new_failed": 0}
|
||||
|
||||
def test_aggregate_category_counts_fail_not_muted(self):
|
||||
"""Test category counting for a non-muted FAIL finding."""
|
||||
cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
aggregate_category_counts(
|
||||
categories=["security"],
|
||||
severity="critical",
|
||||
status="FAIL",
|
||||
delta=None,
|
||||
muted=False,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
assert cache[("security", "critical")] == {
|
||||
"total": 1,
|
||||
"failed": 1,
|
||||
"new_failed": 0,
|
||||
}
|
||||
|
||||
def test_aggregate_category_counts_new_fail(self):
|
||||
"""Test category counting for a new FAIL finding (delta='new')."""
|
||||
cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
aggregate_category_counts(
|
||||
categories=["gen-ai"],
|
||||
severity="high",
|
||||
status="FAIL",
|
||||
delta="new",
|
||||
muted=False,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
assert cache[("gen-ai", "high")] == {"total": 1, "failed": 1, "new_failed": 1}
|
||||
|
||||
def test_aggregate_category_counts_muted_finding(self):
|
||||
"""Test that muted findings are excluded from all counts."""
|
||||
cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
aggregate_category_counts(
|
||||
categories=["security"],
|
||||
severity="high",
|
||||
status="FAIL",
|
||||
delta="new",
|
||||
muted=True,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
assert cache[("security", "high")] == {"total": 0, "failed": 0, "new_failed": 0}
|
||||
|
||||
def test_aggregate_category_counts_accumulates(self):
|
||||
"""Test that multiple calls accumulate counts."""
|
||||
cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
|
||||
# First finding: PASS
|
||||
aggregate_category_counts(
|
||||
categories=["security"],
|
||||
severity="high",
|
||||
status="PASS",
|
||||
delta=None,
|
||||
muted=False,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
# Second finding: FAIL (new)
|
||||
aggregate_category_counts(
|
||||
categories=["security"],
|
||||
severity="high",
|
||||
status="FAIL",
|
||||
delta="new",
|
||||
muted=False,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
# Third finding: FAIL (changed)
|
||||
aggregate_category_counts(
|
||||
categories=["security"],
|
||||
severity="high",
|
||||
status="FAIL",
|
||||
delta="changed",
|
||||
muted=False,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
assert cache[("security", "high")] == {"total": 3, "failed": 2, "new_failed": 1}
|
||||
|
||||
def test_aggregate_category_counts_empty_categories(self):
|
||||
"""Test with empty categories list."""
|
||||
cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
aggregate_category_counts(
|
||||
categories=[],
|
||||
severity="high",
|
||||
status="FAIL",
|
||||
delta="new",
|
||||
muted=False,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
assert cache == {}
|
||||
|
||||
def test_aggregate_category_counts_changed_delta(self):
|
||||
"""Test that changed delta increments failed but not new_failed."""
|
||||
cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
aggregate_category_counts(
|
||||
categories=["iam"],
|
||||
severity="medium",
|
||||
status="FAIL",
|
||||
delta="changed",
|
||||
muted=False,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
assert cache[("iam", "medium")] == {"total": 1, "failed": 1, "new_failed": 0}
|
||||
|
||||
def test_aggregate_category_counts_multiple_categories_single_finding(self):
|
||||
"""Test single finding with multiple categories."""
|
||||
cache: dict[tuple[str, str], dict[str, int]] = {}
|
||||
aggregate_category_counts(
|
||||
categories=["security", "compliance", "data-protection"],
|
||||
severity="low",
|
||||
status="FAIL",
|
||||
delta="new",
|
||||
muted=False,
|
||||
cache=cache,
|
||||
)
|
||||
|
||||
assert len(cache) == 3
|
||||
for cat in ["security", "compliance", "data-protection"]:
|
||||
assert cache[(cat, "low")] == {"total": 1, "failed": 1, "new_failed": 1}
|
||||
|
||||
@@ -4,7 +4,13 @@ from unittest.mock import MagicMock, patch
|
||||
import openai
|
||||
import pytest
|
||||
from botocore.exceptions import ClientError
|
||||
from django_celery_beat.models import IntervalSchedule, PeriodicTask
|
||||
from tasks.jobs.lighthouse_providers import (
|
||||
_create_bedrock_client,
|
||||
_extract_bedrock_credentials,
|
||||
)
|
||||
from tasks.tasks import (
|
||||
_cleanup_orphan_scheduled_scans,
|
||||
_perform_scan_complete_tasks,
|
||||
check_integrations_task,
|
||||
check_lighthouse_provider_connection_task,
|
||||
@@ -18,9 +24,203 @@ from api.models import (
|
||||
Integration,
|
||||
LighthouseProviderConfiguration,
|
||||
LighthouseProviderModels,
|
||||
Scan,
|
||||
StateChoices,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestExtractBedrockCredentials:
|
||||
"""Unit tests for _extract_bedrock_credentials helper function."""
|
||||
|
||||
def test_extract_access_key_credentials(self, tenants_fixture):
|
||||
"""Test extraction of access key + secret key credentials."""
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
provider_cfg.credentials_decoded = {
|
||||
"access_key_id": "AKIAIOSFODNN7EXAMPLE",
|
||||
"secret_access_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
|
||||
"region": "us-east-1",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is not None
|
||||
assert result["access_key_id"] == "AKIAIOSFODNN7EXAMPLE"
|
||||
assert result["secret_access_key"] == "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
|
||||
assert result["region"] == "us-east-1"
|
||||
assert "api_key" not in result
|
||||
|
||||
def test_extract_api_key_credentials(self, tenants_fixture):
|
||||
"""Test extraction of API key (bearer token) credentials."""
|
||||
valid_api_key = "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110)
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
provider_cfg.credentials_decoded = {
|
||||
"api_key": valid_api_key,
|
||||
"region": "us-west-2",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is not None
|
||||
assert result["api_key"] == valid_api_key
|
||||
assert result["region"] == "us-west-2"
|
||||
assert "access_key_id" not in result
|
||||
assert "secret_access_key" not in result
|
||||
|
||||
def test_api_key_takes_precedence_over_access_keys(self, tenants_fixture):
|
||||
"""Test that API key is preferred when both auth methods are present."""
|
||||
valid_api_key = "ABSKQmVkcm9ja0FQSUtleS" + ("B" * 110)
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
provider_cfg.credentials_decoded = {
|
||||
"api_key": valid_api_key,
|
||||
"access_key_id": "AKIAIOSFODNN7EXAMPLE",
|
||||
"secret_access_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
|
||||
"region": "eu-west-1",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is not None
|
||||
assert result["api_key"] == valid_api_key
|
||||
assert result["region"] == "eu-west-1"
|
||||
assert "access_key_id" not in result
|
||||
|
||||
def test_missing_region_returns_none(self, tenants_fixture):
|
||||
"""Test that missing region returns None."""
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
provider_cfg.credentials_decoded = {
|
||||
"api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110),
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_empty_credentials_returns_none(self, tenants_fixture):
|
||||
"""Test that empty credentials dict returns None (region only is not enough)."""
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
# Only region, no auth credentials - should return None
|
||||
provider_cfg.credentials_decoded = {
|
||||
"region": "us-east-1",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_non_dict_credentials_returns_none(self, tenants_fixture):
|
||||
"""Test that non-dict credentials returns None."""
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
# Store valid credentials first to pass model validation
|
||||
provider_cfg.credentials_decoded = {
|
||||
"api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110),
|
||||
"region": "us-east-1",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
# Mock the credentials_decoded property to return a non-dict value
|
||||
# This simulates corrupted/invalid stored data
|
||||
with patch.object(
|
||||
type(provider_cfg),
|
||||
"credentials_decoded",
|
||||
new_callable=lambda: property(lambda self: "invalid"),
|
||||
):
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is None
|
||||
|
||||
|
||||
class TestCreateBedrockClient:
|
||||
"""Unit tests for _create_bedrock_client helper function."""
|
||||
|
||||
@patch("tasks.jobs.lighthouse_providers.boto3.client")
|
||||
def test_create_client_with_access_keys(self, mock_boto_client):
|
||||
"""Test creating client with access key authentication."""
|
||||
mock_client = MagicMock()
|
||||
mock_boto_client.return_value = mock_client
|
||||
|
||||
creds = {
|
||||
"access_key_id": "AKIAIOSFODNN7EXAMPLE",
|
||||
"secret_access_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
|
||||
"region": "us-east-1",
|
||||
}
|
||||
|
||||
result = _create_bedrock_client(creds)
|
||||
|
||||
assert result == mock_client
|
||||
mock_boto_client.assert_called_once_with(
|
||||
service_name="bedrock",
|
||||
region_name="us-east-1",
|
||||
aws_access_key_id="AKIAIOSFODNN7EXAMPLE",
|
||||
aws_secret_access_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
|
||||
)
|
||||
|
||||
@patch("tasks.jobs.lighthouse_providers.Config")
|
||||
@patch("tasks.jobs.lighthouse_providers.boto3.client")
|
||||
def test_create_client_with_api_key(self, mock_boto_client, mock_config):
|
||||
"""Test creating client with API key authentication."""
|
||||
mock_client = MagicMock()
|
||||
mock_events = MagicMock()
|
||||
mock_client.meta.events = mock_events
|
||||
mock_boto_client.return_value = mock_client
|
||||
mock_config_instance = MagicMock()
|
||||
mock_config.return_value = mock_config_instance
|
||||
valid_api_key = "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110)
|
||||
|
||||
creds = {
|
||||
"api_key": valid_api_key,
|
||||
"region": "us-west-2",
|
||||
}
|
||||
|
||||
result = _create_bedrock_client(creds)
|
||||
|
||||
assert result == mock_client
|
||||
mock_boto_client.assert_called_once_with(
|
||||
service_name="bedrock",
|
||||
region_name="us-west-2",
|
||||
config=mock_config_instance,
|
||||
)
|
||||
mock_events.register.assert_called_once()
|
||||
call_args = mock_events.register.call_args
|
||||
assert call_args[0][0] == "before-send.*.*"
|
||||
|
||||
# Verify handler injects bearer token
|
||||
handler_fn = call_args[0][1]
|
||||
mock_request = MagicMock()
|
||||
mock_request.headers = {}
|
||||
handler_fn(mock_request)
|
||||
assert mock_request.headers["Authorization"] == f"Bearer {valid_api_key}"
|
||||
|
||||
|
||||
# TODO Move this to outputs/reports jobs
|
||||
@pytest.mark.django_db
|
||||
class TestGenerateOutputs:
|
||||
@@ -1152,6 +1352,16 @@ class TestCheckLighthouseProviderConnectionTask:
|
||||
None,
|
||||
{"connected": True, "error": None},
|
||||
),
|
||||
# Bedrock API key authentication
|
||||
(
|
||||
LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
{
|
||||
"api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110),
|
||||
"region": "us-east-1",
|
||||
},
|
||||
None,
|
||||
{"connected": True, "error": None},
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_check_connection_success_all_providers(
|
||||
@@ -1220,6 +1430,24 @@ class TestCheckLighthouseProviderConnectionTask:
|
||||
"list_foundation_models",
|
||||
),
|
||||
),
|
||||
# Bedrock API key authentication failure
|
||||
(
|
||||
LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
{
|
||||
"api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("X" * 110),
|
||||
"region": "us-east-1",
|
||||
},
|
||||
None,
|
||||
ClientError(
|
||||
{
|
||||
"Error": {
|
||||
"Code": "UnrecognizedClientException",
|
||||
"Message": "Invalid API key",
|
||||
}
|
||||
},
|
||||
"list_foundation_models",
|
||||
),
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_check_connection_api_failure(
|
||||
@@ -1344,6 +1572,17 @@ class TestRefreshLighthouseProviderModelsTask:
|
||||
{"openai.gpt-oss-120b-1:0": "gpt-oss-120b"},
|
||||
1,
|
||||
),
|
||||
# Bedrock API key authentication
|
||||
(
|
||||
LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
{
|
||||
"api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110),
|
||||
"region": "us-east-1",
|
||||
},
|
||||
None,
|
||||
{"anthropic.claude-v3": "Claude 3"},
|
||||
1,
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_refresh_models_create_new(
|
||||
@@ -1480,3 +1719,343 @@ class TestRefreshLighthouseProviderModelsTask:
|
||||
assert result["deleted"] == 0
|
||||
assert "error" in result
|
||||
assert result["error"] is not None
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestCleanupOrphanScheduledScans:
|
||||
"""Unit tests for _cleanup_orphan_scheduled_scans helper function."""
|
||||
|
||||
def _create_periodic_task(self, provider_id, tenant_id):
|
||||
"""Helper to create a PeriodicTask for testing."""
|
||||
interval, _ = IntervalSchedule.objects.get_or_create(every=24, period="hours")
|
||||
return PeriodicTask.objects.create(
|
||||
name=f"scan-perform-scheduled-{provider_id}",
|
||||
task="scan-perform-scheduled",
|
||||
interval=interval,
|
||||
kwargs=f'{{"tenant_id": "{tenant_id}", "provider_id": "{provider_id}"}}',
|
||||
enabled=True,
|
||||
)
|
||||
|
||||
def test_cleanup_deletes_orphan_when_both_available_and_scheduled_exist(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that AVAILABLE scan is deleted when SCHEDULED also exists."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Create orphan AVAILABLE scan
|
||||
orphan_scan = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Create SCHEDULED scan (next execution)
|
||||
scheduled_scan = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.SCHEDULED,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Execute cleanup
|
||||
deleted_count = _cleanup_orphan_scheduled_scans(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Verify orphan was deleted
|
||||
assert deleted_count == 1
|
||||
assert not Scan.objects.filter(id=orphan_scan.id).exists()
|
||||
assert Scan.objects.filter(id=scheduled_scan.id).exists()
|
||||
|
||||
def test_cleanup_does_not_delete_when_only_available_exists(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that AVAILABLE scan is NOT deleted when no SCHEDULED exists."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Create only AVAILABLE scan (normal first scan scenario)
|
||||
available_scan = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Execute cleanup
|
||||
deleted_count = _cleanup_orphan_scheduled_scans(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Verify nothing was deleted
|
||||
assert deleted_count == 0
|
||||
assert Scan.objects.filter(id=available_scan.id).exists()
|
||||
|
||||
def test_cleanup_does_not_delete_when_only_scheduled_exists(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that nothing is deleted when only SCHEDULED exists."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Create only SCHEDULED scan (normal subsequent scan scenario)
|
||||
scheduled_scan = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.SCHEDULED,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Execute cleanup
|
||||
deleted_count = _cleanup_orphan_scheduled_scans(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
# Verify nothing was deleted
|
||||
assert deleted_count == 0
|
||||
assert Scan.objects.filter(id=scheduled_scan.id).exists()
|
||||
|
||||
def test_cleanup_returns_zero_when_no_scans_exist(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that cleanup returns 0 when no scans exist."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Execute cleanup with no scans
|
||||
deleted_count = _cleanup_orphan_scheduled_scans(
|
||||
tenant_id=str(tenant.id),
|
||||
provider_id=str(provider.id),
|
||||
scheduler_task_id=periodic_task.id,
|
||||
)
|
||||
|
||||
assert deleted_count == 0
|
||||
|
||||
def test_cleanup_deletes_multiple_orphan_available_scans(
|
||||
self, tenants_fixture, providers_fixture
|
||||
):
|
||||
"""Test that multiple AVAILABLE orphan scans are all deleted."""
|
||||
tenant = tenants_fixture[0]
|
||||
provider = providers_fixture[0]
|
||||
periodic_task = self._create_periodic_task(provider.id, tenant.id)
|
||||
|
||||
# Create multiple orphan AVAILABLE scans
|
||||
orphan_scan_1 = Scan.objects.create(
|
||||
tenant_id=tenant.id,
|
||||
provider=provider,
|
||||
name="Daily scheduled scan",
|
||||
trigger=Scan.TriggerChoices.SCHEDULED,
|
||||
state=StateChoices.AVAILABLE,
|
||||
scheduler_task_id=periodic_task.id,
|
||||
        )
        orphan_scan_2 = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduler_task_id=periodic_task.id,
        )

        # Create SCHEDULED scan
        scheduled_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.SCHEDULED,
            scheduler_task_id=periodic_task.id,
        )

        # Execute cleanup
        deleted_count = _cleanup_orphan_scheduled_scans(
            tenant_id=str(tenant.id),
            provider_id=str(provider.id),
            scheduler_task_id=periodic_task.id,
        )

        # Verify all orphans were deleted
        assert deleted_count == 2
        assert not Scan.objects.filter(id=orphan_scan_1.id).exists()
        assert not Scan.objects.filter(id=orphan_scan_2.id).exists()
        assert Scan.objects.filter(id=scheduled_scan.id).exists()

    def test_cleanup_does_not_affect_different_provider(
        self, tenants_fixture, providers_fixture
    ):
        """Test that cleanup only affects scans for the specified provider."""
        tenant = tenants_fixture[0]
        provider1 = providers_fixture[0]
        provider2 = providers_fixture[1]
        periodic_task1 = self._create_periodic_task(provider1.id, tenant.id)
        periodic_task2 = self._create_periodic_task(provider2.id, tenant.id)

        # Create orphan scenario for provider1
        orphan_scan_p1 = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider1,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduler_task_id=periodic_task1.id,
        )
        scheduled_scan_p1 = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider1,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.SCHEDULED,
            scheduler_task_id=periodic_task1.id,
        )

        # Create AVAILABLE scan for provider2 (should not be affected)
        available_scan_p2 = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider2,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduler_task_id=periodic_task2.id,
        )

        # Execute cleanup for provider1 only
        deleted_count = _cleanup_orphan_scheduled_scans(
            tenant_id=str(tenant.id),
            provider_id=str(provider1.id),
            scheduler_task_id=periodic_task1.id,
        )

        # Verify only provider1's orphan was deleted
        assert deleted_count == 1
        assert not Scan.objects.filter(id=orphan_scan_p1.id).exists()
        assert Scan.objects.filter(id=scheduled_scan_p1.id).exists()
        assert Scan.objects.filter(id=available_scan_p2.id).exists()

    def test_cleanup_does_not_affect_manual_scans(
        self, tenants_fixture, providers_fixture
    ):
        """Test that cleanup only affects SCHEDULED trigger scans, not MANUAL."""
        tenant = tenants_fixture[0]
        provider = providers_fixture[0]
        periodic_task = self._create_periodic_task(provider.id, tenant.id)

        # Create orphan AVAILABLE scheduled scan
        orphan_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduler_task_id=periodic_task.id,
        )

        # Create SCHEDULED scan
        scheduled_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.SCHEDULED,
            scheduler_task_id=periodic_task.id,
        )

        # Create AVAILABLE manual scan (should not be affected)
        manual_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Manual scan",
            trigger=Scan.TriggerChoices.MANUAL,
            state=StateChoices.AVAILABLE,
        )

        # Execute cleanup
        deleted_count = _cleanup_orphan_scheduled_scans(
            tenant_id=str(tenant.id),
            provider_id=str(provider.id),
            scheduler_task_id=periodic_task.id,
        )

        # Verify only scheduled orphan was deleted
        assert deleted_count == 1
        assert not Scan.objects.filter(id=orphan_scan.id).exists()
        assert Scan.objects.filter(id=scheduled_scan.id).exists()
        assert Scan.objects.filter(id=manual_scan.id).exists()

    def test_cleanup_does_not_affect_different_scheduler_task(
        self, tenants_fixture, providers_fixture
    ):
        """Test that cleanup only affects scans with the specified scheduler_task_id."""
        tenant = tenants_fixture[0]
        provider = providers_fixture[0]
        periodic_task1 = self._create_periodic_task(provider.id, tenant.id)

        # Create another periodic task
        interval, _ = IntervalSchedule.objects.get_or_create(every=24, period="hours")
        periodic_task2 = PeriodicTask.objects.create(
            name=f"scan-perform-scheduled-other-{provider.id}",
            task="scan-perform-scheduled",
            interval=interval,
            kwargs=f'{{"tenant_id": "{tenant.id}", "provider_id": "{provider.id}"}}',
            enabled=True,
        )

        # Create orphan scenario for periodic_task1
        orphan_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduler_task_id=periodic_task1.id,
        )
        scheduled_scan = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.SCHEDULED,
            scheduler_task_id=periodic_task1.id,
        )

        # Create AVAILABLE scan for periodic_task2 (should not be affected)
        available_scan_other_task = Scan.objects.create(
            tenant_id=tenant.id,
            provider=provider,
            name="Daily scheduled scan",
            trigger=Scan.TriggerChoices.SCHEDULED,
            state=StateChoices.AVAILABLE,
            scheduler_task_id=periodic_task2.id,
        )

        # Execute cleanup for periodic_task1 only
        deleted_count = _cleanup_orphan_scheduled_scans(
            tenant_id=str(tenant.id),
            provider_id=str(provider.id),
            scheduler_task_id=periodic_task1.id,
        )

        # Verify only periodic_task1's orphan was deleted
        assert deleted_count == 1
        assert not Scan.objects.filter(id=orphan_scan.id).exists()
        assert Scan.objects.filter(id=scheduled_scan.id).exists()
        assert Scan.objects.filter(id=available_scan_other_task.id).exists()
Binary file not shown (new image added, 90 KiB).
@@ -0,0 +1,24 @@
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
@@ -0,0 +1,28 @@
import warnings

from dashboard.common_methods import get_section_containers_threatscore

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_ATTRIBUTES_SUBSECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_threatscore(
        aux,
        "REQUIREMENTS_ATTRIBUTES_SECTION",
        "REQUIREMENTS_ATTRIBUTES_SUBSECTION",
        "REQUIREMENTS_ID",
    )
@@ -312,3 +312,28 @@ def create_table_row_dropdown(table_rows: list) -> html.Div:
            ),
        ],
    )


def create_category_dropdown(categories: list) -> html.Div:
    """
    Dropdown to select the category.
    Args:
        categories (list): List of categories.
    Returns:
        html.Div: Dropdown to select the category.
    """
    return html.Div(
        [
            html.Label(
                "Category:", className="text-prowler-stone-900 font-bold text-sm"
            ),
            dcc.Dropdown(
                id="category-filter",
                options=[{"label": i, "value": i} for i in categories],
                value=["All"],
                clearable=False,
                multi=True,
                style={"color": "#000000"},
            ),
        ],
    )
@@ -12,6 +12,7 @@ def create_layout_overview(
    provider_dropdown: html.Div,
    table_row_dropdown: html.Div,
    status_dropdown: html.Div,
    category_dropdown: html.Div,
    table_div_header: html.Div,
    amount_providers: int,
) -> html.Div:
@@ -51,8 +52,9 @@ def create_layout_overview(
                html.Div([service_dropdown], className=""),
                html.Div([provider_dropdown], className=""),
                html.Div([status_dropdown], className=""),
                html.Div([category_dropdown], className=""),
            ],
            className="grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-4",
            className="grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-5",
        ),
        html.Div(
            [
@@ -61,6 +63,7 @@ def create_layout_overview(
                html.Div(className="flex", id="gcp_card", n_clicks=0),
                html.Div(className="flex", id="k8s_card", n_clicks=0),
                html.Div(className="flex", id="m365_card", n_clicks=0),
                html.Div(className="flex", id="alibabacloud_card", n_clicks=0),
            ],
            className=f"grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-{amount_providers}",
        ),
@@ -76,8 +76,10 @@ def load_csv_files(csv_files):
            result = result.replace("_AZURE", " - AZURE")
        if "KUBERNETES" in result:
            result = result.replace("_KUBERNETES", " - KUBERNETES")
        if "M365" in result:
            result = result.replace("_M365", " - M365")
        if "M65" in result:
            result = result.replace("_M65", " - M65")
        if "ALIBABACLOUD" in result:
            result = result.replace("_ALIBABACLOUD", " - ALIBABACLOUD")
        results.append(result)

    unique_results = set(results)
@@ -278,6 +280,10 @@ def display_data(
        data["REQUIREMENTS_ATTRIBUTES_PROFILE"] = data[
            "REQUIREMENTS_ATTRIBUTES_PROFILE"
        ].apply(lambda x: x.split(" - ")[0])

    # Rename the column LOCATION to REGION for Alibaba Cloud
    if "alibabacloud" in analytics_input:
        data = data.rename(columns={"LOCATION": "REGION"})
    # Filter the chosen level of the CIS
    if is_level_1:
        data = data[data["REQUIREMENTS_ATTRIBUTES_PROFILE"].str.contains("Level 1")]
@@ -401,9 +407,11 @@ def display_data(
        compliance_module = importlib.import_module(
            f"dashboard.compliance.{current}"
        )
        data = data.drop_duplicates(
            subset=["CHECKID", "STATUS", "MUTED", "RESOURCEID", "STATUSEXTENDED"]
        )
        # Build subset list based on available columns
        dedup_columns = ["CHECKID", "STATUS", "RESOURCEID", "STATUSEXTENDED"]
        if "MUTED" in data.columns:
            dedup_columns.insert(2, "MUTED")
        data = data.drop_duplicates(subset=dedup_columns)

        if "threatscore" in analytics_input:
            data = get_threatscore_mean_by_pillar(data)
@@ -646,6 +654,7 @@ def get_table(current_compliance, table):
def get_threatscore_mean_by_pillar(df):
    score_per_pillar = {}
    max_score_per_pillar = {}
    counted_findings_per_pillar = {}

    for _, row in df.iterrows():
        pillar = (
@@ -657,6 +666,18 @@ def get_threatscore_mean_by_pillar(df):
        if pillar not in score_per_pillar:
            score_per_pillar[pillar] = 0
            max_score_per_pillar[pillar] = 0
            counted_findings_per_pillar[pillar] = set()

        # Skip muted findings for score calculation
        is_muted = "MUTED" in df.columns and row.get("MUTED") == "True"
        if is_muted:
            continue

        # Create unique finding identifier to avoid counting duplicates
        finding_id = f"{row.get('CHECKID', '')}_{row.get('RESOURCEID', '')}"
        if finding_id in counted_findings_per_pillar[pillar]:
            continue
        counted_findings_per_pillar[pillar].add(finding_id)

        level_of_risk = pd.to_numeric(
            row["REQUIREMENTS_ATTRIBUTES_LEVELOFRISK"], errors="coerce"
@@ -700,6 +721,10 @@ def get_table_prowler_threatscore(df):
    score_per_pillar = {}
    max_score_per_pillar = {}
    pillars = {}
    counted_findings_per_pillar = {}
    counted_pass = set()
    counted_fail = set()
    counted_muted = set()

    df_copy = df.copy()

@@ -714,6 +739,24 @@ def get_table_prowler_threatscore(df):
            pillars[pillar] = {"FAIL": 0, "PASS": 0, "MUTED": 0}
            score_per_pillar[pillar] = 0
            max_score_per_pillar[pillar] = 0
            counted_findings_per_pillar[pillar] = set()

        # Create unique finding identifier
        finding_id = f"{row.get('CHECKID', '')}_{row.get('RESOURCEID', '')}"

        # Check if muted
        is_muted = "MUTED" in df_copy.columns and row.get("MUTED") == "True"

        # Count muted findings (separate from score calculation)
        if is_muted and finding_id not in counted_muted:
            counted_muted.add(finding_id)
            pillars[pillar]["MUTED"] += 1
            continue  # Skip muted findings for score calculation

        # Skip if already counted for this pillar
        if finding_id in counted_findings_per_pillar[pillar]:
            continue
        counted_findings_per_pillar[pillar].add(finding_id)

        level_of_risk = pd.to_numeric(
            row["REQUIREMENTS_ATTRIBUTES_LEVELOFRISK"], errors="coerce"
@@ -732,13 +775,14 @@ def get_table_prowler_threatscore(df):
            max_score_per_pillar[pillar] += level_of_risk * weight

        if row["STATUS"] == "PASS":
            pillars[pillar]["PASS"] += 1
            if finding_id not in counted_pass:
                counted_pass.add(finding_id)
                pillars[pillar]["PASS"] += 1
                score_per_pillar[pillar] += level_of_risk * weight
        elif row["STATUS"] == "FAIL":
            pillars[pillar]["FAIL"] += 1

            if "MUTED" in row and row["MUTED"] == "True":
                pillars[pillar]["MUTED"] += 1
            if finding_id not in counted_fail:
                counted_fail.add(finding_id)
                pillars[pillar]["FAIL"] += 1

    result_df = []
@@ -35,6 +35,7 @@ from dashboard.config import (
|
||||
from dashboard.lib.cards import create_provider_card
|
||||
from dashboard.lib.dropdowns import (
|
||||
create_account_dropdown,
|
||||
create_category_dropdown,
|
||||
create_date_dropdown,
|
||||
create_provider_dropdown,
|
||||
create_region_dropdown,
|
||||
@@ -79,6 +80,9 @@ ks8_provider_logo = html.Img(
|
||||
m365_provider_logo = html.Img(
|
||||
src="assets/images/providers/m365_provider.png", alt="m365 provider"
|
||||
)
|
||||
alibabacloud_provider_logo = html.Img(
|
||||
src="assets/images/providers/alibabacloud_provider.png", alt="alibabacloud provider"
|
||||
)
|
||||
|
||||
|
||||
def load_csv_files(csv_files):
|
||||
@@ -253,6 +257,8 @@ else:
|
||||
accounts.append(account + " - AWS")
|
||||
if "kubernetes" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
|
||||
accounts.append(account + " - K8S")
|
||||
if "alibabacloud" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
|
||||
accounts.append(account + " - ALIBABACLOUD")
|
||||
|
||||
account_dropdown = create_account_dropdown(accounts)
|
||||
|
||||
@@ -298,6 +304,8 @@ else:
|
||||
services.append(service + " - GCP")
|
||||
if "m365" in list(data[data["SERVICE_NAME"] == service]["PROVIDER"]):
|
||||
services.append(service + " - M365")
|
||||
if "alibabacloud" in list(data[data["SERVICE_NAME"] == service]["PROVIDER"]):
|
||||
services.append(service + " - ALIBABACLOUD")
|
||||
|
||||
services = ["All"] + services
|
||||
services = [
|
||||
@@ -336,6 +344,18 @@ else:
|
||||
status = [x for x in status if str(x) != "nan" and x.__class__.__name__ == "str"]
|
||||
|
||||
status_dropdown = create_status_dropdown(status)
|
||||
|
||||
# Create the category dropdown
|
||||
categories = []
|
||||
if "CATEGORIES" in data.columns:
|
||||
for cat_list in data["CATEGORIES"].dropna().unique():
|
||||
if cat_list and str(cat_list) != "nan":
|
||||
for cat in str(cat_list).split(","):
|
||||
cat = cat.strip()
|
||||
if cat and cat not in categories:
|
||||
categories.append(cat)
|
||||
categories = ["All"] + sorted(categories)
|
||||
category_dropdown = create_category_dropdown(categories)
|
||||
table_div_header = []
|
||||
table_div_header.append(
|
||||
html.Div(
|
||||
@@ -497,6 +517,7 @@ else:
|
||||
provider_dropdown,
|
||||
table_row_dropdown,
|
||||
status_dropdown,
|
||||
category_dropdown,
|
||||
table_div_header,
|
||||
len(data["PROVIDER"].unique()),
|
||||
)
|
||||
@@ -520,6 +541,7 @@ else:
|
||||
Output("gcp_card", "children"),
|
||||
Output("k8s_card", "children"),
|
||||
Output("m365_card", "children"),
|
||||
Output("alibabacloud_card", "children"),
|
||||
Output("subscribe_card", "children"),
|
||||
Output("info-file-over", "title"),
|
||||
Output("severity-filter", "value"),
|
||||
@@ -532,11 +554,14 @@ else:
|
||||
Output("table-rows", "options"),
|
||||
Output("status-filter", "value"),
|
||||
Output("status-filter", "options"),
|
||||
Output("category-filter", "value"),
|
||||
Output("category-filter", "options"),
|
||||
Output("aws_card", "n_clicks"),
|
||||
Output("azure_card", "n_clicks"),
|
||||
Output("gcp_card", "n_clicks"),
|
||||
Output("k8s_card", "n_clicks"),
|
||||
Output("m365_card", "n_clicks"),
|
||||
Output("alibabacloud_card", "n_clicks"),
|
||||
],
|
||||
Input("cloud-account-filter", "value"),
|
||||
Input("region-filter", "value"),
|
||||
@@ -548,6 +573,7 @@ else:
|
||||
Input("provider-filter", "value"),
|
||||
Input("table-rows", "value"),
|
||||
Input("status-filter", "value"),
|
||||
Input("category-filter", "value"),
|
||||
Input("search-input", "value"),
|
||||
Input("aws_card", "n_clicks"),
|
||||
Input("azure_card", "n_clicks"),
|
||||
@@ -560,6 +586,7 @@ else:
|
||||
Input("sort_button_region", "n_clicks"),
|
||||
Input("sort_button_service", "n_clicks"),
|
||||
Input("sort_button_account", "n_clicks"),
|
||||
Input("alibabacloud_card", "n_clicks"),
|
||||
)
|
||||
def filter_data(
|
||||
cloud_account_values,
|
||||
@@ -572,6 +599,7 @@ def filter_data(
|
||||
provider_values,
|
||||
table_row_values,
|
||||
status_values,
|
||||
category_values,
|
||||
search_value,
|
||||
aws_clicks,
|
||||
azure_clicks,
|
||||
@@ -584,6 +612,7 @@ def filter_data(
|
||||
sort_button_region,
|
||||
sort_button_service,
|
||||
sort_button_account,
|
||||
alibabacloud_clicks,
|
||||
):
|
||||
# Use n_clicks for vulture
|
||||
n_clicks_csv = n_clicks_csv
|
||||
@@ -599,6 +628,7 @@ def filter_data(
|
||||
gcp_clicks = 0
|
||||
k8s_clicks = 0
|
||||
m365_clicks = 0
|
||||
alibabacloud_clicks = 0
|
||||
if azure_clicks > 0:
|
||||
filtered_data = data.copy()
|
||||
if azure_clicks % 2 != 0 and "azure" in list(data["PROVIDER"]):
|
||||
@@ -607,6 +637,7 @@ def filter_data(
|
||||
gcp_clicks = 0
|
||||
k8s_clicks = 0
|
||||
m365_clicks = 0
|
||||
alibabacloud_clicks = 0
|
||||
if gcp_clicks > 0:
|
||||
filtered_data = data.copy()
|
||||
if gcp_clicks % 2 != 0 and "gcp" in list(data["PROVIDER"]):
|
||||
@@ -615,6 +646,7 @@ def filter_data(
|
||||
azure_clicks = 0
|
||||
k8s_clicks = 0
|
||||
m365_clicks = 0
|
||||
alibabacloud_clicks = 0
|
||||
if k8s_clicks > 0:
|
||||
filtered_data = data.copy()
|
||||
if k8s_clicks % 2 != 0 and "kubernetes" in list(data["PROVIDER"]):
|
||||
@@ -623,6 +655,7 @@ def filter_data(
|
||||
azure_clicks = 0
|
||||
gcp_clicks = 0
|
||||
m365_clicks = 0
|
||||
alibabacloud_clicks = 0
|
||||
if m365_clicks > 0:
|
||||
filtered_data = data.copy()
|
||||
if m365_clicks % 2 != 0 and "m365" in list(data["PROVIDER"]):
|
||||
@@ -631,7 +664,16 @@ def filter_data(
|
||||
azure_clicks = 0
|
||||
gcp_clicks = 0
|
||||
k8s_clicks = 0
|
||||
|
||||
alibabacloud_clicks = 0
|
||||
if alibabacloud_clicks > 0:
|
||||
filtered_data = data.copy()
|
||||
if alibabacloud_clicks % 2 != 0 and "alibabacloud" in list(data["PROVIDER"]):
|
||||
filtered_data = filtered_data[filtered_data["PROVIDER"] == "alibabacloud"]
|
||||
aws_clicks = 0
|
||||
azure_clicks = 0
|
||||
gcp_clicks = 0
|
||||
k8s_clicks = 0
|
||||
m365_clicks = 0
|
||||
# For all the data, we will add to the status column the value 'MUTED (FAIL)' and 'MUTED (PASS)' depending on the value of the column 'STATUS' and 'MUTED'
|
||||
if "MUTED" in filtered_data.columns:
|
||||
filtered_data["STATUS"] = filtered_data.apply(
|
||||
@@ -723,6 +765,8 @@ def filter_data(
|
||||
all_account_ids.append(account)
|
||||
if "kubernetes" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
|
||||
all_account_ids.append(account)
|
||||
if "alibabacloud" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
|
||||
all_account_ids.append(account)
|
||||
|
||||
all_account_names = []
|
||||
if "ACCOUNT_NAME" in filtered_data.columns:
|
||||
@@ -745,6 +789,10 @@ def filter_data(
|
||||
cloud_accounts_options.append(item + " - AWS")
|
||||
if "kubernetes" in list(data[data["ACCOUNT_UID"] == item]["PROVIDER"]):
|
||||
cloud_accounts_options.append(item + " - K8S")
|
||||
if "alibabacloud" in list(
|
||||
data[data["ACCOUNT_UID"] == item]["PROVIDER"]
|
||||
):
|
||||
cloud_accounts_options.append(item + " - ALIBABACLOUD")
|
||||
if "ACCOUNT_NAME" in filtered_data.columns:
|
||||
if "azure" in list(data[data["ACCOUNT_NAME"] == item]["PROVIDER"]):
|
||||
cloud_accounts_options.append(item + " - AZURE")
|
||||
@@ -873,6 +921,10 @@ def filter_data(
|
||||
filtered_data[filtered_data["SERVICE_NAME"] == item]["PROVIDER"]
|
||||
):
|
||||
service_filter_options.append(item + " - M365")
|
||||
if "alibabacloud" in list(
|
||||
filtered_data[filtered_data["SERVICE_NAME"] == item]["PROVIDER"]
|
||||
):
|
||||
service_filter_options.append(item + " - ALIBABACLOUD")
|
||||
|
||||
# Filter Service
|
||||
if service_values == ["All"]:
|
||||
@@ -931,6 +983,41 @@ def filter_data(
|
||||
|
||||
status_filter_options = ["All"] + list(filtered_data["STATUS"].unique())
|
||||
|
||||
# Filter Category
|
||||
if "CATEGORIES" in filtered_data.columns:
|
||||
if category_values == ["All"]:
|
||||
updated_category_values = None
|
||||
elif "All" in category_values and len(category_values) > 1:
|
||||
category_values.remove("All")
|
||||
updated_category_values = category_values
|
||||
elif len(category_values) == 0:
|
||||
updated_category_values = None
|
||||
category_values = ["All"]
|
||||
else:
|
||||
updated_category_values = category_values
|
||||
|
||||
if updated_category_values:
|
||||
filtered_data = filtered_data[
|
||||
filtered_data["CATEGORIES"].apply(
|
||||
lambda x: any(
|
||||
cat.strip() in updated_category_values
|
||||
for cat in str(x).split(",")
|
||||
if str(x) != "nan"
|
||||
)
|
||||
)
|
||||
]
|
||||
|
||||
category_filter_options = ["All"]
|
||||
for cat_list in filtered_data["CATEGORIES"].dropna().unique():
|
||||
if cat_list and str(cat_list) != "nan":
|
||||
for cat in str(cat_list).split(","):
|
||||
cat = cat.strip()
|
||||
if cat and cat not in category_filter_options:
|
||||
category_filter_options.append(cat)
|
||||
category_filter_options = sorted(category_filter_options)
|
||||
else:
|
||||
category_filter_options = ["All"]
|
||||
|
||||
if len(filtered_data_sp) == 0:
|
||||
fig = px.pie()
|
||||
fig.update_layout(
|
||||
@@ -1324,6 +1411,12 @@ def filter_data(
|
||||
filtered_data.loc[
|
||||
filtered_data["ACCOUNT_UID"] == account, "ACCOUNT_UID"
|
||||
] = (account + " - M365")
|
||||
if "alibabacloud" in list(
|
||||
data[data["ACCOUNT_UID"] == account]["PROVIDER"]
|
||||
):
|
||||
filtered_data.loc[
|
||||
filtered_data["ACCOUNT_UID"] == account, "ACCOUNT_UID"
|
||||
] = (account + " - ALIBABACLOUD")
|
||||
|
||||
table_collapsible = []
|
||||
for item in filtered_data.to_dict("records"):
|
||||
@@ -1410,6 +1503,13 @@ def filter_data(
|
||||
else:
|
||||
m365_card = None
|
||||
|
||||
if "alibabacloud" in list(data["PROVIDER"].unique()):
|
||||
alibabacloud_card = create_provider_card(
|
||||
"alibabacloud", alibabacloud_provider_logo, "Accounts", full_filtered_data
|
||||
)
|
||||
else:
|
||||
alibabacloud_card = None
|
||||
|
||||
# Subscribe to Prowler Cloud card
|
||||
subscribe_card = [
|
||||
html.Div(
|
||||
@@ -1454,6 +1554,7 @@ def filter_data(
|
||||
gcp_card,
|
||||
k8s_card,
|
||||
m365_card,
|
||||
alibabacloud_card,
|
||||
subscribe_card,
|
||||
list_files,
|
||||
severity_values,
|
||||
@@ -1464,11 +1565,14 @@ def filter_data(
|
||||
table_row_options,
|
||||
status_values,
|
||||
status_filter_options,
|
||||
category_values,
|
||||
category_filter_options,
|
||||
aws_clicks,
|
||||
azure_clicks,
|
||||
gcp_clicks,
|
||||
k8s_clicks,
|
||||
m365_clicks,
|
||||
alibabacloud_clicks,
|
||||
)
|
||||
else:
|
||||
return (
|
||||
@@ -1487,6 +1591,7 @@ def filter_data(
|
||||
gcp_card,
|
||||
k8s_card,
|
||||
m365_card,
|
||||
alibabacloud_card,
|
||||
subscribe_card,
|
||||
list_files,
|
||||
severity_values,
|
||||
@@ -1499,11 +1604,14 @@ def filter_data(
|
||||
table_row_options,
|
||||
status_values,
|
||||
status_filter_options,
|
||||
category_values,
|
||||
category_filter_options,
|
||||
aws_clicks,
|
||||
azure_clicks,
|
||||
gcp_clicks,
|
||||
k8s_clicks,
|
||||
m365_clicks,
|
||||
alibabacloud_clicks,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -41,6 +41,9 @@ services:
    volumes:
      - "./ui:/app"
      - "/app/node_modules"
    depends_on:
      mcp-server:
        condition: service_healthy

  postgres:
    image: postgres:16.3-alpine3.20
@@ -57,7 +60,11 @@ services:
    ports:
      - "${POSTGRES_PORT:-5432}:${POSTGRES_PORT:-5432}"
    healthcheck:
      test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER} -d ${POSTGRES_DB}'"]
      test:
        [
          "CMD-SHELL",
          "sh -c 'pg_isready -U ${POSTGRES_ADMIN_USER} -d ${POSTGRES_DB}'",
        ]
      interval: 5s
      timeout: 5s
      retries: 5
@@ -118,6 +125,32 @@ services:
      - "../docker-entrypoint.sh"
      - "beat"

  mcp-server:
    build:
      context: ./mcp_server
      dockerfile: Dockerfile
    environment:
      - PROWLER_MCP_TRANSPORT_MODE=http
    env_file:
      - path: .env
        required: false
    ports:
      - "8000:8000"
    volumes:
      - ./mcp_server/prowler_mcp_server:/app/prowler_mcp_server
      - ./mcp_server/pyproject.toml:/app/pyproject.toml
      - ./mcp_server/entrypoint.sh:/app/entrypoint.sh
    command: ["uvicorn", "--host", "0.0.0.0", "--port", "8000"]
    healthcheck:
      test:
        [
          "CMD-SHELL",
          "wget -q -O /dev/null http://127.0.0.1:8000/health || exit 1",
        ]
      interval: 10s
      timeout: 5s
      retries: 3

volumes:
  outputs:
    driver: local
@@ -1,3 +1,9 @@
# Production Docker Compose configuration
# Uses pre-built images from Docker Hub (prowlercloud/*)
#
# For development with local builds and hot-reload, use docker-compose-dev.yml instead:
#   docker compose -f docker-compose-dev.yml up
#
services:
  api:
    hostname: "prowler-api"
@@ -26,6 +32,9 @@ services:
        required: false
    ports:
      - ${UI_PORT:-3000}:${UI_PORT:-3000}
    depends_on:
      mcp-server:
        condition: service_healthy

  postgres:
    image: postgres:16.3-alpine3.20
@@ -93,6 +102,22 @@ services:
      - "../docker-entrypoint.sh"
      - "beat"

  mcp-server:
    image: prowlercloud/prowler-mcp:${PROWLER_MCP_VERSION:-stable}
    environment:
      - PROWLER_MCP_TRANSPORT_MODE=http
    env_file:
      - path: .env
        required: false
    ports:
      - "8000:8000"
    command: ["uvicorn", "--host", "0.0.0.0", "--port", "8000"]
    healthcheck:
      test: ["CMD-SHELL", "wget -q -O /dev/null http://127.0.0.1:8000/health || exit 1"]
      interval: 10s
      timeout: 5s
      retries: 3

volumes:
  output:
    driver: local
@@ -479,6 +479,66 @@ Effective headers and section titles enhance document readability and structure,

---

## Version Badge for Feature Documentation

The Version Badge component indicates when a specific feature or functionality was introduced in Prowler. This component is located at `docs/snippets/version-badge.mdx` and should be used consistently across the documentation.
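As a purely illustrative sketch, such a snippet could be as small as the following; the actual contents and styling of `docs/snippets/version-badge.mdx` are not shown here and may differ.

```mdx
{/* Hypothetical sketch only: the real docs/snippets/version-badge.mdx may differ. */}
export const VersionBadge = ({ version }) => (
  <span className="version-badge">Available since Prowler {version}</span>
);
```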
### When to Use the Version Badge

Use the Version Badge when documenting:

* New features added in a specific version.
* New CLI options or flags.
* New API endpoints or SDK methods.
* New compliance frameworks or security checks.
* Breaking changes or deprecated features (with appropriate context).

### How to Use the Version Badge

1. **Import the Component**

   At the top of the MDX file, import the snippet:

   ```mdx
   import { VersionBadge } from "/snippets/version-badge.mdx"
   ```

2. **Place the Badge**

   Insert the badge immediately after the section header or feature title:

   ```mdx
   ## New Feature Name

   <VersionBadge version="4.5.0" />

   Description of the feature...
   ```

3. **Version Format**

   Use semantic versioning format (e.g., `4.5.0`, `5.0.0`). Do not include the "v" prefix.

### Placement Guidelines

* Place the Version Badge on its own line, directly below the header.
* Leave a blank line after the badge before continuing with the content.
* For subsections, place the badge only if the subsection introduces something new independently from the parent section.

**Example:**

```mdx
## Tag-Based Scanning

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="4.3.0" />

Tag-Based Scanning allows filtering resources by AWS tags during security assessments...
```

---

## Avoid Assumptions Regarding Audience’s Expertise

### Understand Your Audience’s Expertise

@@ -0,0 +1,212 @@
---
title: 'Alibaba Cloud Provider'
---

This page details the [Alibaba Cloud](https://www.alibabacloud.com/) provider implementation in Prowler.

By default, Prowler will audit all the Alibaba Cloud regions that are available. To configure it, follow the [Alibaba Cloud getting started guide](/user-guide/providers/alibabacloud/getting-started-alibabacloud).

## Alibaba Cloud Provider Classes Architecture

The Alibaba Cloud provider implementation follows the general [Provider structure](/developer-guide/provider). This section focuses on the Alibaba Cloud-specific implementation, highlighting how the generic provider concepts are realized for Alibaba Cloud in Prowler. For a full overview of the provider pattern, base classes, and extension guidelines, see the [Provider documentation](/developer-guide/provider).

### Main Class

- **Location:** [`prowler/providers/alibabacloud/alibabacloud_provider.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/alibabacloud/alibabacloud_provider.py)
- **Base Class:** Inherits from `Provider` (see [base class details](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/common/provider.py)).
- **Purpose:** Central orchestrator for Alibaba Cloud-specific logic, session management, credential validation, and configuration.
- **Key Alibaba Cloud Responsibilities:**
    - Initializes and manages Alibaba Cloud sessions (supports Access Keys, STS Temporary Credentials, RAM Role Assumption, ECS RAM Role, OIDC Authentication, and Credentials URI).
    - Validates credentials using STS GetCallerIdentity.
    - Loads and manages configuration, mutelist, and fixer settings.
    - Discovers and manages Alibaba Cloud regions.
    - Provides properties and methods for downstream Alibaba Cloud service classes to access session, identity, and configuration data.

### Data Models

- **Location:** [`prowler/providers/alibabacloud/models.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/alibabacloud/models.py)
- **Purpose:** Define structured data for Alibaba Cloud identity, session, credentials, and region info.
- **Key Alibaba Cloud Models:**
    - `AlibabaCloudCallerIdentity`: Stores caller identity information from STS GetCallerIdentity (account_id, principal_id, arn, identity_type).
    - `AlibabaCloudIdentityInfo`: Holds Alibaba Cloud identity metadata including account ID, user info, profile, and audited regions.
    - `AlibabaCloudCredentials`: Stores credentials (access_key_id, access_key_secret, security_token).
    - `AlibabaCloudRegion`: Represents an Alibaba Cloud region with region_id and region_name.
    - `AlibabaCloudSession`: Manages the session and provides methods to create service clients (a minimal sketch of these models follows this list).
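To make the shape of these models concrete, here is a minimal sketch based only on the fields listed above; the concrete types, defaults, and whether the real `models.py` uses dataclasses or Pydantic models are assumptions and may differ.

```python
# Illustrative sketch of the models described above; field names follow the
# bullet list, but the real definitions in models.py may differ.
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class AlibabaCloudCredentials:
    access_key_id: str
    access_key_secret: str
    security_token: Optional[str] = None


@dataclass
class AlibabaCloudCallerIdentity:
    account_id: str
    principal_id: str
    arn: str
    identity_type: str


@dataclass
class AlibabaCloudRegion:
    region_id: str
    region_name: str


@dataclass
class AlibabaCloudIdentityInfo:
    account_id: str
    user_name: str
    profile: Optional[str] = None
    audited_regions: list[AlibabaCloudRegion] = field(default_factory=list)
```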
### `AlibabaCloudService` (Service Base Class)

- **Location:** [`prowler/providers/alibabacloud/lib/service/service.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/alibabacloud/lib/service/service.py)
- **Purpose:** Abstract base class that all Alibaba Cloud service-specific classes inherit from. This implements the generic service pattern (described in the [service page](/developer-guide/services#service-base-class)) specifically for Alibaba Cloud.
- **Key Alibaba Cloud Responsibilities:**
    - Receives an `AlibabacloudProvider` instance to access session, identity, and configuration.
    - Manages regional clients for services that are region-specific.
    - Provides the `__threading_call__` method to make API calls in parallel by region or resource.
    - Exposes common audit context (`audited_account`, `audited_account_name`, `audit_resources`, `audit_config`) to subclasses.

### Exception Handling

- **Location:** [`prowler/providers/alibabacloud/exceptions/exceptions.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/alibabacloud/exceptions/exceptions.py)
- **Purpose:** Custom exception classes for Alibaba Cloud-specific error handling.
- **Key Alibaba Cloud Exceptions:**
    - `AlibabaCloudClientError`: General client errors
    - `AlibabaCloudNoCredentialsError`: No credentials found
    - `AlibabaCloudInvalidCredentialsError`: Invalid credentials provided
    - `AlibabaCloudSetUpSessionError`: Session setup failures
    - `AlibabaCloudAssumeRoleError`: RAM role assumption failures
    - `AlibabaCloudInvalidRegionError`: Invalid region specified
    - `AlibabaCloudHTTPError`: HTTP/API errors

### Session and Utility Helpers

- **Location:** [`prowler/providers/alibabacloud/lib/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/alibabacloud/lib/)
- **Purpose:** Helpers for argument parsing, mutelist management, and other cross-cutting concerns.

## Specific Patterns in Alibaba Cloud Services

The generic service pattern is described in the [service page](/developer-guide/services#service-structure-and-initialisation). You can find all the currently implemented services in the following locations:

- Directly in the code, in [`prowler/providers/alibabacloud/services/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/alibabacloud/services)
- In the [Prowler Hub](https://hub.prowler.com/) for a more human-readable view.

The best way to understand how to implement a new service is to follow the [service implementation documentation](/developer-guide/services#adding-a-new-service) and take other already implemented services as a reference. The next subsection lists common patterns used across all Alibaba Cloud services.

### Alibaba Cloud Service Common Patterns

- Services communicate with Alibaba Cloud using the official Alibaba Cloud Python SDKs. Documentation for individual services can be found in the [Alibaba Cloud SDK documentation](https://www.alibabacloud.com/help/en/sdk).
- Every Alibaba Cloud service class inherits from `AlibabaCloudService`, ensuring access to session, identity, configuration, and client utilities.
- The constructor (`__init__`) always calls `super().__init__` with the service name, provider, and optionally `global_service=True` for services that are not regional (e.g., RAM).
- Resource containers **must** be initialized in the constructor. For regional services, resources are typically stored in dictionaries keyed by region and resource ID.
- All Alibaba Cloud resources are represented as Pydantic `BaseModel` classes, providing type safety and structured access to resource attributes.
- Alibaba Cloud SDK functions are wrapped in try/except blocks, with specific handling for errors, always logging errors.
- Regional services use `self.regional_clients` to maintain clients for each audited region.
- The `__threading_call__` method is used for parallel execution across regions or resources.

### Example Service Implementation

```python
from prowler.lib.logger import logger
from prowler.providers.alibabacloud.lib.service.service import AlibabaCloudService


class MyService(AlibabaCloudService):
    def __init__(self, provider):
        # Initialize parent class with service name
        super().__init__("myservice", provider)

        # Initialize resource containers
        self.resources = {}

        # Discover resources using threading
        self.__threading_call__(self._describe_resources)

    def _describe_resources(self, regional_client):
        try:
            region = regional_client.region
            response = regional_client.describe_resources()

            for resource in response.body.resources:
                self.resources[resource.id] = MyResource(
                    id=resource.id,
                    name=resource.name,
                    region=region,
                    # ... other attributes
                )
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
```

## Specific Patterns in Alibaba Cloud Checks

The Alibaba Cloud checks pattern is described in the [checks page](/developer-guide/checks). You can find all the currently implemented checks:

- Directly in the code, within each service folder, where each check has its own folder named after the check (e.g. [`prowler/providers/alibabacloud/services/ram/ram_no_root_access_key/`](https://github.com/prowler-cloud/prowler/tree/master/prowler/providers/alibabacloud/services/ram/ram_no_root_access_key))
- In the [Prowler Hub](https://hub.prowler.com/) for a more human-readable view.

The best way to understand how to implement a new check is to follow the [check implementation documentation](/developer-guide/checks#creating-a-check) and take other similar checks as a reference.

### Check Report Class

The `CheckReportAlibabaCloud` class models a single finding for an Alibaba Cloud resource in a check report. It is defined in [`prowler/lib/check/models.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/lib/check/models.py) and inherits from the generic `Check_Report` base class.

#### Purpose

`CheckReportAlibabaCloud` extends the base report structure with Alibaba Cloud-specific fields, enabling detailed tracking of the resource, resource ID, ARN, and region associated with each finding.

#### Constructor and Attribute Population

When you instantiate `CheckReportAlibabaCloud`, you must provide the check metadata and a resource object. The class will attempt to automatically populate its Alibaba Cloud-specific attributes from the resource, using the following logic:

- **`resource_id`**:
    - Uses `resource.id` if present.
    - Otherwise, uses `resource.name` if present.
    - Defaults to an empty string if not available.

- **`resource_arn`**:
    - Uses `resource.arn` if present.
    - Defaults to an empty string if not available.

- **`region`**:
    - Uses `resource.region` if present.
    - Defaults to an empty string if not available.

If the resource object does not contain the required attributes, you must set them manually in the check logic.

Other attributes are inherited from the `Check_Report` class; you **always** have to set the `status` and `status_extended` attributes in the check logic.

#### Example Usage

```python
from prowler.lib.check.models import Check, CheckReportAlibabaCloud
from prowler.providers.alibabacloud.services.myservice.myservice_client import myservice_client


class myservice_example_check(Check):
    def execute(self) -> list[CheckReportAlibabaCloud]:
        findings = []

        for resource in myservice_client.resources.values():
            report = CheckReportAlibabaCloud(
                metadata=self.metadata(),
                resource=resource
            )
            report.region = resource.region
            report.resource_id = resource.id
            report.resource_arn = f"acs:myservice::{myservice_client.audited_account}:resource/{resource.id}"

            if resource.is_compliant:
                report.status = "PASS"
                report.status_extended = f"Resource {resource.name} is compliant."
            else:
                report.status = "FAIL"
                report.status_extended = f"Resource {resource.name} is not compliant."

            findings.append(report)

        return findings
```

## Authentication Methods

The Alibaba Cloud provider supports multiple authentication methods, prioritized in the following order:

1. **Credentials URI** - Retrieve credentials from an external URI endpoint
2. **OIDC Role Authentication** - For applications running in ACK with RRSA enabled
3. **ECS RAM Role** - For ECS instances with attached RAM roles
4. **RAM Role Assumption** - Cross-account access with role assumption
5. **STS Temporary Credentials** - Pre-obtained temporary credentials
6. **Permanent Access Keys** - Static access key credentials
7. **Default Credential Chain** - Automatic credential discovery

For detailed authentication configuration, see the [Authentication documentation](/user-guide/providers/alibabacloud/authentication).

## Regions

Alibaba Cloud has multiple regions across the globe. By default, Prowler audits all available regions. You can specify particular regions using the `--regions` CLI argument:

```bash
prowler alibabacloud --regions cn-hangzhou cn-shanghai
```

The list of supported regions is maintained in [`prowler/providers/alibabacloud/config.py`](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/alibabacloud/config.py).
@@ -213,3 +213,5 @@ Also is important to keep all code examples as short as possible, including the
| software-supply-chain | Detects or prevents tampering, unauthorized packages, or third-party risks in software supply chain |
| e3 | M365-specific controls enabled by or dependent on an E3 license (e.g., baseline security policies, conditional access) |
| e5 | M365-specific controls enabled by or dependent on an E5 license (e.g., advanced threat protection, audit, DLP, and eDiscovery) |
| privilege-escalation | Detects IAM policies or permissions that allow identities to elevate their privileges beyond their intended scope, potentially gaining administrator or higher-level access through specific action combinations |
| ec2-imdsv1 | Identifies EC2 instances using Instance Metadata Service version 1 (IMDSv1), which is vulnerable to SSRF attacks and should be replaced with IMDSv2 for enhanced security |
@@ -237,6 +237,7 @@ Below is a generic example of a check metadata file. **Do not include comments i
  "ResourceIdTemplate": "",
  "Severity": "medium",
  "ResourceType": "Other",
  "ResourceGroup": "security",
  "Description": "This check verifies that the service resource has the required **security setting** enabled to protect against potential vulnerabilities.\n\nIt ensures that the resource follows security best practices and maintains proper access controls. The check evaluates whether the security configuration is properly implemented and active.",
  "Risk": "Without proper security settings, the resource may be vulnerable to:\n\n- **Unauthorized access** - Malicious actors could gain entry\n- **Data breaches** - Sensitive information could be compromised\n- **Security threats** - Various attack vectors could be exploited\n\nThis could result in compliance violations and potential financial or reputational damage.",
  "RelatedUrl": "",
@@ -312,7 +313,33 @@ The type of resource being audited. This field helps categorize and organize fin
- **Azure**: Use types from [Azure Resource Graph](https://learn.microsoft.com/en-us/azure/governance/resource-graph/reference/supported-tables-resources), for example: `Microsoft.Storage/storageAccounts`.
- **Google Cloud**: Use [Cloud Asset Inventory asset types](https://cloud.google.com/asset-inventory/docs/asset-types), for example: `compute.googleapis.com/Instance`.
- **Kubernetes**: Use types shown under `KIND` from `kubectl api-resources`.
- **M365 / GitHub**: Leave empty due to lack of standardized types.
- **Oracle Cloud Infrastructure**: Use types from [Oracle Cloud Infrastructure documentation](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Search/Tasks/queryingresources_topic-Listing_Supported_Resource_Types.htm).
- **M365 / GitHub / MongoDB Atlas**: Leave empty due to lack of standardized types.

#### ResourceGroup

A high-level classification that groups checks by the type of cloud resource they audit. This field enables filtering and organizing findings by resource category across all providers. The value must be one of the following predefined groups:

| Group | Description |
|-------|-------------|
| `compute` | Virtual machines, instances, auto-scaling groups, workspaces, streaming |
| `container` | Container orchestration, Kubernetes, registries, pods |
| `serverless` | Functions, step functions, event-driven compute |
| `database` | Relational, NoSQL, caches, search engines, data warehouses, graph databases |
| `storage` | Object storage, block storage, file systems, backups, archives |
| `network` | VPCs, subnets, load balancers, DNS, VPN, firewalls, CDN |
| `IAM` | IAM users, roles, policies, access keys, service accounts, directories |
| `messaging` | Queues, topics, event buses, streaming, email services |
| `security` | WAF, secrets, KMS, certificates, security tools, defenders, DDoS protection |
| `monitoring` | Logs, metrics, alerts, audit trails, observability, config tracking |
| `api_gateway` | API management, REST APIs, GraphQL endpoints |
| `ai_ml` | Machine learning, AI services, notebooks, training, LLM |
| `governance` | Accounts, organizations, projects, policies, settings, compliance tools |
| `collaboration` | Productivity SaaS apps (Exchange, Teams, SharePoint) |
| `devops` | CI/CD, infrastructure as code, automation, code repositories, version control |
| `analytics` | Data warehouses, query engines, ETL pipelines, BI tools, data lakes |

The group is determined by the resource type being audited, not the service. For example, an EC2 security group check would use `network` (not `compute`), while an EC2 instance check would use `compute`.

#### Description

@@ -0,0 +1,327 @@
---
title: 'End-2-End Tests for Prowler App'
---

End-to-end (E2E) tests validate complete user flows in Prowler App (UI + API). These tests are implemented with [Playwright](https://playwright.dev/) under the `ui/tests` folder and are designed to run against a Prowler App environment.

## General Recommendations

When adding or maintaining E2E tests for Prowler App, follow these guidelines:

1. **Test real user journeys**

   Focus on full workflows (for example, sign-up → login → add provider → launch scan) instead of low-level UI details already covered by unit or integration tests.

2. **Group tests by entity or feature area**
   - Organize E2E tests by entity or feature area (for example, `providers.spec.ts`, `scans.spec.ts`, `invitations.spec.ts`, `sign-up.spec.ts`).
   - Each entity should have its own test file and corresponding page model class (for example, `ProvidersPage`, `ScansPage`, `InvitationsPage`).
   - Related tests for the same entity should be grouped together in the same test file to improve maintainability and make it easier to find and update tests for a specific feature.

3. **Use a Page Model (Page Object Model)**
   - Encapsulate selectors and common actions in page classes instead of repeating them in each test.
   - Leverage and extend the existing Playwright page models in `ui/tests`—such as `ProvidersPage`, `ScansPage`, and others—which are all based on the shared `BasePage`.
   - Page models for Prowler App pages should be placed in their respective entity folders (for example, `ui/tests/providers/providers-page.ts`).
   - Page models for external pages (not part of Prowler App) should be grouped in the `external` folder (for example, `ui/tests/external/github-page.ts`).
   - This approach improves readability, reduces duplication, and makes refactors safer (a minimal sketch follows this list).
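   As an illustration only, a page model might look like the sketch below; the selectors, the `/providers` route, and the method names are assumptions, and the real classes under `ui/tests` extend the shared `BasePage` and may differ.

   ```typescript
   // Illustrative sketch of a page model; selectors, route, and method names are assumptions.
   import { type Locator, type Page } from "@playwright/test";

   export class ProvidersPage {
     readonly page: Page;
     readonly addProviderButton: Locator;

     constructor(page: Page) {
       this.page = page;
       this.addProviderButton = page.getByRole("button", { name: "Add Provider" });
     }

     async goto(): Promise<void> {
       await this.page.goto("/providers");
     }

     async startAddingProvider(): Promise<void> {
       await this.addProviderButton.click();
     }
   }
   ```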
4. **Reuse authentication states (StorageState)**
   - Multiple authentication setup projects are available that generate pre-authenticated state files stored in `playwright/.auth/`. Each project requires specific environment variables:
     - `admin.auth.setup` – Admin users with full system permissions (requires `E2E_ADMIN_USER` / `E2E_ADMIN_PASSWORD`)
     - `manage-scans.auth.setup` – Users with scan management permissions (requires `E2E_MANAGE_SCANS_USER` / `E2E_MANAGE_SCANS_PASSWORD`)
     - `manage-integrations.auth.setup` – Users with integration management permissions (requires `E2E_MANAGE_INTEGRATIONS_USER` / `E2E_MANAGE_INTEGRATIONS_PASSWORD`)
     - `manage-account.auth.setup` – Users with account management permissions (requires `E2E_MANAGE_ACCOUNT_USER` / `E2E_MANAGE_ACCOUNT_PASSWORD`)
     - `manage-cloud-providers.auth.setup` – Users with cloud provider management permissions (requires `E2E_MANAGE_CLOUD_PROVIDERS_USER` / `E2E_MANAGE_CLOUD_PROVIDERS_PASSWORD`)
     - `unlimited-visibility.auth.setup` – Users with unlimited visibility permissions (requires `E2E_UNLIMITED_VISIBILITY_USER` / `E2E_UNLIMITED_VISIBILITY_PASSWORD`)
     - `invite-and-manage-users.auth.setup` – Users with user invitation and management permissions (requires `E2E_INVITE_AND_MANAGE_USERS_USER` / `E2E_INVITE_AND_MANAGE_USERS_PASSWORD`)

   <Note>
   If fixtures have been applied (fixtures are used to populate the database with initial development data), you can use the user `e2e@prowler.com` with password `Thisisapassword123@` to configure the Admin credentials by setting `E2E_ADMIN_USER=e2e@prowler.com` and `E2E_ADMIN_PASSWORD=Thisisapassword123@`.
   </Note>

   - Within test files, use `test.use({ storageState: "playwright/.auth/admin_user.json" })` to load the pre-authenticated state, avoiding redundant authentication steps in each test. This must be placed at the test level (not inside the test function) to apply the authentication state to all tests in that scope. This approach is preferred over declaring dependencies in `playwright.config.ts` because it provides more control over which authentication states are used in specific tests.

   **Example:**

   ```typescript
   // Use admin authentication state for all tests in this scope
   test.use({ storageState: "playwright/.auth/admin_user.json" });

   test("should perform admin action", async ({ page }) => {
     // Test implementation
   });
   ```

5. **Tag and document scenarios**
   - Follow the existing naming convention for suites and test cases (for example, `SCANS-E2E-001`, `PROVIDER-E2E-003`) and use tags such as `@e2e`, `@serial`, and feature tags (for example, `@providers`, `@scans`, `@aws`) to filter and organize tests.

   **Example:**

   ```typescript
   test(
     "should add a new AWS provider with static credentials",
     {
       tag: [
         "@critical",
         "@e2e",
         "@providers",
         "@aws",
         "@serial",
         "@PROVIDER-E2E-001",
       ],
     },
     async ({ page }) => {
       // Test implementation
     }
   );
   ```

   - Document each one in the Markdown files under `ui/tests`, including **Priority**, **Tags**, **Description**, **Preconditions**, **Flow steps**, **Expected results**, **Key verification points**, and **Notes**.

   **Example:**

   ```Markdown
   ## Test Case: `SCANS-E2E-001` - Execute On-Demand Scan

   **Priority:** `critical`

   **Tags:**

   - type → @e2e, @serial
   - feature → @scans

   **Description/Objective:** Validates the complete flow to execute an on-demand scan selecting a provider by UID and confirming success on the Scans page.

   **Preconditions:**

   - Admin user authentication required (admin.auth.setup setup)
   - Environment variables configured for: E2E_AWS_PROVIDER_ACCOUNT_ID, E2E_AWS_PROVIDER_ACCESS_KEY, and E2E_AWS_PROVIDER_SECRET_KEY
   - Remove any existing AWS provider with the same Account ID before starting the test
   - This test must be run serially and never in parallel with other tests, as it requires the Account ID Provider to be already registered.

   ### Flow Steps:

   1. Navigate to Scans page
   2. Open provider selector and choose the entry whose text contains E2E_AWS_PROVIDER_ACCOUNT_ID
   3. Optionally fill scan label (alias)
   4. Click "Start now" to launch the scan
   5. Verify the success toast appears
   6. Verify a row in the Scans table contains the provided scan label (or shows the new scan entry)

   ### Expected Result:

   - Scan is launched successfully
   - Success toast is displayed to the user
   - Scans table displays the new scan entry (including the alias when provided)

   ### Key verification points:

   - Scans page loads correctly
   - Provider select is available and lists the configured provider UID
   - "Start now" button is rendered and enabled when form is valid
   - Success toast message: "The scan was launched successfully."
   - Table contains a row with the scan label or new scan state (queued/available/executing)

   ### Notes:

   - The table may take a short time to reflect the new scan; assertions look for a row containing the alias.
   - Provider cleanup performed before each test to ensure clean state
   - Tests should run serially to avoid state conflicts.
   ```

6. **Use environment variables for secrets and dynamic data**

   Credentials, provider identifiers, secrets, and tokens must come from environment variables (for example, `E2E_AWS_PROVIDER_ACCOUNT_ID`, `E2E_AWS_PROVIDER_ACCESS_KEY`, `E2E_AWS_PROVIDER_SECRET_KEY`, `E2E_GCP_PROJECT_ID`); a small sketch of this pattern follows below.

   <Warning>
   Never commit real secrets, tokens, or account IDs to the repository.
   </Warning>
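   For instance, a test can read these values and fail fast when one is missing; this is only a sketch, and the `requiredEnv` helper name is hypothetical.

   ```typescript
   // Hypothetical helper: read a required E2E variable or fail fast with a clear error.
   function requiredEnv(name: string): string {
     const value = process.env[name];
     if (!value) {
       throw new Error(`Missing required environment variable: ${name}`);
     }
     return value;
   }

   const awsAccountId = requiredEnv("E2E_AWS_PROVIDER_ACCOUNT_ID");
   ```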
7. **Keep tests deterministic and isolated**
   - Use Playwright's `test.beforeEach()` and `test.afterEach()` hooks to manage test state:
     - **`test.beforeEach()`**: Execute cleanup or setup logic before each test runs (for example, delete existing providers with a specific account ID to ensure a clean state).
     - **`test.afterEach()`**: Execute cleanup logic after each test completes (for example, remove test data created during the test execution to prevent interference with subsequent tests).
   - Define tests as serial using `test.describe.serial()` when they share state or resources that could interfere with parallel execution (for example, tests that use the same provider account ID or create dependent resources). This ensures tests within the serial group run sequentially, preventing race conditions and data conflicts.
   - Use unique identifiers (for example, random suffixes for emails or labels) to prevent data collisions (a short sketch follows this list).
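   The sketch below combines serial grouping with both hooks; the cleanup helpers are hypothetical, but `test.describe.serial()`, `test.beforeEach()`, and `test.afterEach()` are the Playwright APIs described above.

   ```typescript
   import { test } from "@playwright/test";

   test.describe.serial("providers lifecycle", () => {
     test.beforeEach(async ({ page }) => {
       // e.g., remove any provider left over from a previous run (helper is hypothetical)
       // await deleteProviderIfExists(page, process.env.E2E_AWS_PROVIDER_ACCOUNT_ID);
     });

     test.afterEach(async ({ page }) => {
       // e.g., clean up data created by the test (helper is hypothetical)
     });

     test("adds a provider", async ({ page }) => {
       // Test implementation
     });

     test("launches a scan for that provider", async ({ page }) => {
       // Test implementation
     });
   });
   ```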
8. **Use explicit waiting strategies**
|
||||
- Avoid using `waitForLoadState('networkidle')` as it is unreliable and can lead to flaky tests or unnecessary delays.
|
||||
- Leverage Playwright's auto-waiting capabilities by waiting for specific elements to be actionable (for example, `locator.click()`, `locator.fill()`, `locator.waitFor()`).
|
||||
- **Prioritize selector strategies**: Prefer `page.getByRole()` over other approaches like `page.getByText()`. `getByRole()` is more resilient to UI changes, aligns with accessibility best practices, and better reflects how users interact with the application (by role and accessible name rather than implementation details).
|
||||
- For dynamic content, wait for specific UI elements that indicate the page is ready (for example, button becoming enabled, a specific text appearing, etc).
|
||||
- This approach makes tests more reliable, faster, and aligned with how users actually interact with the application.
|
||||
|
||||
**Common waiting patterns used in Prowler E2E tests:**
|
||||
|
||||
- **Element visibility assertions**: Use `expect(locator).toBeVisible()` or `expect(locator).not.toBeVisible()` to wait for elements to appear or disappear (Playwright automatically waits for these conditions).
|
||||
|
||||
- **URL changes**: Use `expect(page).toHaveURL(url)` or `page.waitForURL(url)` to wait for navigation to complete.
|
||||
|
||||
- **Element states**: Use `locator.waitFor({ state: "visible" })` or `locator.waitFor({ state: "hidden" })` when you need explicit state control.
|
||||
|
||||
- **Text content**: Use `expect(locator).toHaveText(text)` or `expect(locator).toContainText(text)` to wait for specific text to appear.
|
||||
|
||||
- **Element attributes**: Use `expect(locator).toHaveAttribute(name, value)` to wait for attributes like `aria-disabled="false"` indicating a button is enabled.
|
||||
|
||||
- **Custom conditions**: Use `page.waitForFunction(() => condition)` for complex conditions that cannot be expressed with locators (for example, checking DOM element dimensions or computed styles).
|
||||
|
||||
- **Retryable assertions**: Use `expect(async () => { ... }).toPass({ timeout })` for conditions that may take time to stabilize (for example, waiting for table rows to filter after a server request).
|
||||
|
||||
- **Scroll into view**: Use `locator.scrollIntoViewIfNeeded()` before interacting with elements that may be outside the viewport.
|
||||
|
||||
**Example from Prowler tests:**
|
||||
|
||||
```typescript
|
||||
// Wait for page to load by checking main content is visible
|
||||
await expect(page.locator("main")).toBeVisible();
|
||||
|
||||
// Wait for URL change after form submission
|
||||
await expect(page).toHaveURL("/providers");
|
||||
|
||||
// Wait for button to become enabled
|
||||
await expect(submitButton).toHaveAttribute("aria-disabled", "false");
|
||||
|
||||
// Wait for loading spinner to disappear
|
||||
await expect(page.getByText("Loading")).not.toBeVisible();
|
||||
|
||||
// Wait for custom condition
|
||||
await page.waitForFunction(() => {
|
||||
const main = document.querySelector("main");
|
||||
return main && main.offsetHeight > 0;
|
||||
});
|
||||
|
||||
// Wait for retryable condition (e.g., table filtering)
|
||||
await expect(async () => {
|
||||
const rowCount = await tableRows.count();
|
||||
expect(rowCount).toBeLessThanOrEqual(1);
|
||||
}).toPass({ timeout: 20000 });
|
||||
```
|
||||
|
||||
## Running Prowler Tests
|
||||
|
||||
E2E tests for Prowler App run from the `ui` project using Playwright. The Playwright configuration lives in `ui/playwright.config.ts` and defines:
|
||||
|
||||
- `testDir: "./tests"` – location of E2E test files (relative to the `ui` project root, so `ui/tests`).
|
||||
- `webServer` – how to start the Next.js development server and connect to Prowler API.
|
||||
- `use.baseURL` – base URL for browser interactions (defaults to `http://localhost:3000` or `AUTH_URL` if set).
|
||||
- `reporter: [["list"]]` – uses the list reporter to display test results in a concise format in the terminal. Other reporter options are available (for example, `html`, `json`, `junit`, `github`), and multiple reporters can be configured simultaneously. See the [Playwright reporter documentation](https://playwright.dev/docs/test-reporters) for all available options.
|
||||
- `expect.timeout: 20000` – timeout for assertions (20 seconds). This is the maximum time Playwright will wait for an assertion to pass before considering it failed.
|
||||
- **Test artifacts** (in `use` configuration): By default, `trace`, `screenshot`, and `video` are set to `"off"` to minimize resource usage. To review test failures or debug issues, these can be enabled in `playwright.config.ts` by changing them to `"on"`, `"on-first-retry"`, or `"retain-on-failure"` depending on your needs.
|
||||
- `outputDir: "/tmp/playwright-tests"` – directory where Playwright stores test artifacts (screenshots, videos, traces) during test execution.
|
||||
- **CI-specific configuration**: The configuration uses different settings when running in CI environments (detected via `process.env.CI`):
|
||||
- **Retries**: `2` retries in CI (to handle flaky tests), `0` retries locally (for faster feedback during development).
|
||||
- **Workers**: `1` worker in CI (sequential execution for stability), `undefined` locally (parallel execution by default for faster test runs).
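
Putting these options together, a configuration in this spirit could look like the following abridged sketch. The values mirror the bullets above; any field or default not mentioned there is an assumption, and the real `ui/playwright.config.ts` may differ:

```typescript
// Abridged sketch of ui/playwright.config.ts, not the exact file
import { defineConfig } from "@playwright/test";

export default defineConfig({
  testDir: "./tests",
  retries: process.env.CI ? 2 : 0, // 2 retries in CI, 0 locally
  workers: process.env.CI ? 1 : undefined, // sequential in CI, parallel locally
  reporter: [["list"]],
  expect: { timeout: 20000 },
  outputDir: "/tmp/playwright-tests",
  use: {
    baseURL: process.env.AUTH_URL ?? "http://localhost:3000",
    trace: "off", // switch to "retain-on-failure" when debugging
    screenshot: "off",
    video: "off",
  },
  webServer: {
    command: "pnpm run dev",
    url: "http://localhost:3000",
    reuseExistingServer: true, // assumption; the real condition may differ
    env: {
      NEXT_PUBLIC_API_BASE_URL:
        process.env.NEXT_PUBLIC_API_BASE_URL ?? "http://localhost:8080/api/v1",
    },
  },
});
```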
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Before running E2E tests:
|
||||
|
||||
- **Install root and UI dependencies**
|
||||
- Follow the [developer guide introduction](/developer-guide/introduction#getting-the-code-and-installing-all-dependencies) to clone the repository and install core dependencies.
|
||||
- From the `ui` directory, install frontend dependencies:
|
||||
|
||||
```bash
|
||||
cd ui
|
||||
pnpm install
|
||||
pnpm run test:e2e:install # Install Playwright browsers
|
||||
```
|
||||
|
||||
- **Ensure Prowler API is available**
|
||||
- By default, Playwright uses `NEXT_PUBLIC_API_BASE_URL=http://localhost:8080/api/v1` (configured in `playwright.config.ts`).
|
||||
- Start Prowler API so it is reachable on that URL (for example, via `docker-compose-dev.yml` or the development orchestration used locally).
|
||||
- If a different API URL is required, set `NEXT_PUBLIC_API_BASE_URL` accordingly before running the tests.
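
  For example, from the `ui` directory (the URL below is only a placeholder):

  ```bash
  # Point the UI at a non-default Prowler API before running the tests
  export NEXT_PUBLIC_API_BASE_URL="https://prowler-api.internal.example.com/api/v1"
  pnpm run test:e2e
  ```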
|
||||
|
||||
- **Ensure Prowler App UI is available**
|
||||
- Playwright automatically starts the Next.js server through the `webServer` block in `playwright.config.ts` (`pnpm run dev` by default).
|
||||
- If the UI is already running on `http://localhost:3000`, Playwright will reuse the existing server when `reuseExistingServer` is `true`.
|
||||
|
||||
- **Configure E2E environment variables**
|
||||
- Suite-specific variables (for example, provider account IDs, credentials, and E2E user data) must be provided before running tests.
|
||||
- They can be defined either:
|
||||
- As exported environment variables in the shell before executing the Playwright commands, or
|
||||
- In a `.env.local` or `.env` file under `ui/`, and then loaded into the shell before running tests, for example:
|
||||
|
||||
```bash
|
||||
cd ui
|
||||
set -a
|
||||
source .env.local # or .env
|
||||
set +a
|
||||
```
|
||||
- Refer to the Markdown documentation files in `ui/tests` for each E2E suite (for example, the `*.md` files that describe sign-up, providers, scans, invitations, and other flows) to see the exact list of required variables and their meaning.
|
||||
- Each E2E test suite explicitly checks that its required environment variables are defined at runtime and will fail with a clear error message if any mandatory variable is missing, making misconfiguration easy to detect.
|
||||
|
||||
### Executing Tests
|
||||
|
||||
To execute E2E tests for Prowler App:
|
||||
|
||||
1. **Run the full E2E suite (headless)**
|
||||
|
||||
From the `ui` directory:
|
||||
|
||||
```bash
|
||||
pnpm run test:e2e
|
||||
```
|
||||
|
||||
This command runs Playwright with the configured projects.
|
||||
|
||||
2. **Run E2E tests with the Playwright UI runner**
|
||||
|
||||
```bash
|
||||
pnpm run test:e2e:ui
|
||||
```
|
||||
|
||||
This opens the Playwright test runner UI to inspect, debug, and rerun specific tests or projects.
|
||||
|
||||
3. **Debug E2E tests interactively**
|
||||
|
||||
```bash
|
||||
pnpm run test:e2e:debug
|
||||
```
|
||||
|
||||
Use this mode to step through flows, inspect selectors, and adjust timings. It runs tests in headed mode with debugging tools enabled.
|
||||
|
||||
4. **Run tests in headed mode without debugger**
|
||||
|
||||
```bash
|
||||
pnpm run test:e2e:headed
|
||||
```
|
||||
|
||||
This is useful to visually confirm flows while still running the full suite.
|
||||
|
||||
5. **View previous test reports**
|
||||
|
||||
```bash
|
||||
pnpm run test:e2e:report
|
||||
```
|
||||
|
||||
This opens the latest Playwright HTML report, including traces and screenshots when enabled.
|
||||
|
||||
6. **Run specific tests or subsets**
|
||||
|
||||
In addition to the predefined scripts, Playwright allows filtering which tests run. These examples use the Playwright CLI directly through `pnpm`:
|
||||
|
||||
- **By test ID (`@ID` in the test metadata or description)**
|
||||
|
||||
To run a single test case identified by its ID (for example, `@PROVIDER-E2E-001` or `@SCANS-E2E-001`):
|
||||
|
||||
```bash
|
||||
pnpm playwright test --grep @PROVIDER-E2E-001
|
||||
```
|
||||
|
||||
- **By tags**
|
||||
|
||||
To run all tests that share a common tag (for example, all provider E2E tests tagged with `@providers`):
|
||||
|
||||
```bash
|
||||
pnpm playwright test --grep @providers
|
||||
```
|
||||
|
||||
This is useful to focus on a specific feature area such as providers, scans, invitations, or sign-up.
|
||||
|
||||
- **By Playwright project**
|
||||
|
||||
To run only the tests associated with a given project defined in `playwright.config.ts` (for example, `providers` or `scans`):
|
||||
|
||||
```bash
|
||||
pnpm playwright test --project=providers
|
||||
```
|
||||
|
||||
Combining project and grep filters is also supported, enabling very narrow runs (for example, a single test ID within the `providers` project). For additional CLI options and combinations, see the [Playwright command line documentation](https://playwright.dev/docs/test-cli).
|
||||
|
||||
<Note>
|
||||
For detailed flows, preconditions, and environment variable requirements per feature, always refer to the Markdown files in `ui/tests`. Those documents are the single source of truth for business expectations and validation points in each E2E suite.
|
||||
</Note>
|
||||
@@ -0,0 +1,447 @@
|
||||
---
|
||||
title: 'Extending the MCP Server'
|
||||
---
|
||||
|
||||
This guide explains how to extend the Prowler MCP Server with new tools and features.
|
||||
|
||||
<Info>
|
||||
**New to Prowler MCP Server?** Start with the user documentation:
|
||||
- [Overview](/getting-started/products/prowler-mcp) - Key capabilities, use cases, and deployment options
|
||||
- [Installation](/getting-started/installation/prowler-mcp) - Install locally or use the managed server
|
||||
- [Configuration](/getting-started/basic-usage/prowler-mcp) - Configure Claude Desktop, Cursor, and other MCP hosts
|
||||
- [Tools Reference](/getting-started/basic-usage/prowler-mcp-tools) - Complete list of all available tools
|
||||
</Info>
|
||||
|
||||
## Introduction
|
||||
|
||||
The Prowler MCP Server brings the entire Prowler ecosystem to AI assistants through the [Model Context Protocol (MCP)](https://modelcontextprotocol.io). It enables seamless integration with AI tools like Claude Desktop, Cursor, and other MCP clients.
|
||||
|
||||
The server follows a modular architecture with three independent sub-servers:
|
||||
|
||||
| Sub-Server | Auth Required | Description |
|
||||
|------------|---------------|-------------|
|
||||
| Prowler App | Yes | Full access to Prowler Cloud and Self-Managed features |
|
||||
| Prowler Hub | No | Security checks catalog with **over 1000 checks**, fixers, and **70+ compliance frameworks** |
|
||||
| Prowler Documentation | No | Full-text search and retrieval of official documentation |
|
||||
|
||||
<Note>
|
||||
For a complete list of tools and their descriptions, see the [Tools Reference](/getting-started/basic-usage/prowler-mcp-tools).
|
||||
</Note>
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
The MCP Server architecture is illustrated in the [Overview documentation](/getting-started/products/prowler-mcp#mcp-server-architecture). AI assistants connect through the MCP protocol to access Prowler's three main components.
|
||||
|
||||
### Server Structure
|
||||
|
||||
The main server orchestrates three sub-servers with prefixed namespacing:
|
||||
|
||||
```
|
||||
mcp_server/prowler_mcp_server/
|
||||
├── server.py # Main orchestrator
|
||||
├── main.py # CLI entry point
|
||||
├── prowler_hub/
|
||||
├── prowler_app/
|
||||
│ ├── tools/ # Tool implementations
|
||||
│ ├── models/ # Pydantic models
|
||||
│ └── utils/ # API client, auth, loader
|
||||
└── prowler_documentation/
|
||||
```
|
||||
|
||||
### Tool Registration Patterns
|
||||
|
||||
The MCP Server uses two patterns for tool registration:
|
||||
|
||||
1. **Direct Decorators** (Prowler Hub/Docs): Tools are registered using `@mcp.tool()` decorators
|
||||
2. **Auto-Discovery** (Prowler App): All public methods of `BaseTool` subclasses are auto-registered
|
||||
|
||||
## Adding Tools to Prowler App
|
||||
|
||||
### Step 1: Create the Tool Class
|
||||
|
||||
Create a new file or add to an existing file in `prowler_app/tools/`:
|
||||
|
||||
```python
|
||||
# prowler_app/tools/new_feature.py
|
||||
from typing import Any
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from prowler_mcp_server.prowler_app.models.new_feature import (
|
||||
FeatureListResponse,
|
||||
DetailedFeature,
|
||||
)
|
||||
from prowler_mcp_server.prowler_app.tools.base import BaseTool
|
||||
|
||||
|
||||
class NewFeatureTools(BaseTool):
|
||||
"""Tools for managing new features."""
|
||||
|
||||
async def list_features(
|
||||
self,
|
||||
status: str | None = Field(
|
||||
default=None,
|
||||
description="Filter by status (active, inactive, pending)"
|
||||
),
|
||||
page_size: int = Field(
|
||||
default=50,
|
||||
description="Number of results per page (1-100)"
|
||||
),
|
||||
) -> dict[str, Any]:
|
||||
"""List all features with optional filtering.
|
||||
|
||||
Returns a lightweight list of features optimized for LLM consumption.
|
||||
Use get_feature for complete information about a specific feature.
|
||||
"""
|
||||
# Validate parameters
|
||||
self.api_client.validate_page_size(page_size)
|
||||
|
||||
# Build query parameters
|
||||
params: dict[str, Any] = {"page[size]": page_size}
|
||||
if status:
|
||||
params["filter[status]"] = status
|
||||
|
||||
# Make API request
|
||||
clean_params = self.api_client.build_filter_params(params)
|
||||
response = await self.api_client.get("/api/v1/features", params=clean_params)
|
||||
|
||||
# Transform to LLM-friendly format
|
||||
return FeatureListResponse.from_api_response(response).model_dump()
|
||||
|
||||
async def get_feature(
|
||||
self,
|
||||
feature_id: str = Field(description="The UUID of the feature"),
|
||||
) -> dict[str, Any]:
|
||||
"""Get detailed information about a specific feature.
|
||||
|
||||
Returns complete feature details including configuration and metadata.
|
||||
"""
|
||||
try:
|
||||
response = await self.api_client.get(f"/api/v1/features/{feature_id}")
|
||||
return DetailedFeature.from_api_response(response["data"]).model_dump()
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to get feature {feature_id}: {e}")
|
||||
return {"error": str(e), "status": "failed"}
|
||||
```
|
||||
|
||||
### Step 2: Create the Models
|
||||
|
||||
Create corresponding models in `prowler_app/models/`:
|
||||
|
||||
```python
|
||||
# prowler_app/models/new_feature.py
|
||||
from typing import Any
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from prowler_mcp_server.prowler_app.models.base import MinimalSerializerMixin
|
||||
|
||||
|
||||
class SimplifiedFeature(MinimalSerializerMixin):
|
||||
"""Lightweight feature for list operations."""
|
||||
|
||||
id: str = Field(description="Unique feature identifier")
|
||||
name: str = Field(description="Feature name")
|
||||
status: str = Field(description="Current status")
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, data: dict[str, Any]) -> "SimplifiedFeature":
|
||||
"""Transform API response to simplified format."""
|
||||
attributes = data.get("attributes", {})
|
||||
return cls(
|
||||
id=data["id"],
|
||||
name=attributes["name"],
|
||||
status=attributes["status"],
|
||||
)
|
||||
|
||||
|
||||
class DetailedFeature(SimplifiedFeature):
|
||||
"""Extended feature with complete details."""
|
||||
|
||||
description: str | None = Field(default=None, description="Feature description")
|
||||
configuration: dict[str, Any] | None = Field(default=None, description="Configuration")
|
||||
created_at: str = Field(description="Creation timestamp")
|
||||
updated_at: str = Field(description="Last update timestamp")
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, data: dict[str, Any]) -> "DetailedFeature":
|
||||
"""Transform API response to detailed format."""
|
||||
attributes = data.get("attributes", {})
|
||||
return cls(
|
||||
id=data["id"],
|
||||
name=attributes["name"],
|
||||
status=attributes["status"],
|
||||
description=attributes.get("description"),
|
||||
configuration=attributes.get("configuration"),
|
||||
created_at=attributes["created_at"],
|
||||
updated_at=attributes["updated_at"],
|
||||
)
|
||||
|
||||
|
||||
class FeatureListResponse(MinimalSerializerMixin):
|
||||
"""Response wrapper for feature list operations."""
|
||||
|
||||
count: int = Field(description="Total number of features")
|
||||
features: list[SimplifiedFeature] = Field(description="List of features")
|
||||
|
||||
@classmethod
|
||||
def from_api_response(cls, response: dict[str, Any]) -> "FeatureListResponse":
|
||||
"""Transform API response to list format."""
|
||||
data = response.get("data", [])
|
||||
features = [SimplifiedFeature.from_api_response(item) for item in data]
|
||||
return cls(count=len(features), features=features)
|
||||
```
|
||||
|
||||
### Step 3: Verify Auto-Discovery
|
||||
|
||||
No manual registration is needed. The `tool_loader.py` automatically discovers and registers all `BaseTool` subclasses. Verify your tool is loaded by checking the server logs:
|
||||
|
||||
```
|
||||
INFO - Auto-registered 2 tools from NewFeatureTools
|
||||
INFO - Loaded and registered: NewFeatureTools
|
||||
```
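
Conceptually, the loader iterates over `BaseTool` subclasses and registers each public coroutine method as a tool. The following is a simplified sketch of that idea only; it is not the actual `tool_loader.py` implementation, and it assumes a no-argument constructor and a FastMCP-style `tool()` decorator:

```python
import inspect

from prowler_mcp_server.prowler_app.tools.base import BaseTool


def auto_register_tools(mcp_server) -> None:
    """Illustrative sketch: register public async methods of BaseTool subclasses."""
    for tool_class in BaseTool.__subclasses__():
        instance = tool_class()  # assumes a no-argument constructor
        for name, method in inspect.getmembers(instance, inspect.iscoroutinefunction):
            if not name.startswith("_"):
                # Register the bound method as an MCP tool with the app prefix
                mcp_server.tool(name=f"prowler_app_{name}")(method)
```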
|
||||
|
||||
## Adding Tools to Prowler Hub/Docs
|
||||
|
||||
For Prowler Hub or Documentation tools, use the `@mcp.tool()` decorator directly:
|
||||
|
||||
```python
|
||||
# prowler_hub/server.py
|
||||
from fastmcp import FastMCP
|
||||
|
||||
hub_mcp_server = FastMCP("prowler-hub")
|
||||
|
||||
@hub_mcp_server.tool()
|
||||
async def get_new_artifact(
|
||||
artifact_id: str,
|
||||
) -> dict:
|
||||
"""Fetch a specific artifact from Prowler Hub.
|
||||
|
||||
Args:
|
||||
artifact_id: The unique identifier of the artifact
|
||||
|
||||
Returns:
|
||||
Dictionary containing artifact details
|
||||
"""
|
||||
response = prowler_hub_client.get(f"/artifact/{artifact_id}")
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
```
|
||||
|
||||
## Model Design Patterns
|
||||
|
||||
### MinimalSerializerMixin
|
||||
|
||||
All models should use `MinimalSerializerMixin` to optimize responses for LLM consumption:
|
||||
|
||||
```python
|
||||
from prowler_mcp_server.prowler_app.models.base import MinimalSerializerMixin
|
||||
|
||||
class MyModel(MinimalSerializerMixin):
|
||||
"""Model that excludes empty values from serialization."""
|
||||
required_field: str
|
||||
optional_field: str | None = None # Excluded if None
|
||||
empty_list: list = [] # Excluded if empty
|
||||
```
|
||||
|
||||
This mixin automatically excludes:
|
||||
- `None` values
|
||||
- Empty strings
|
||||
- Empty lists
|
||||
- Empty dictionaries
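
A minimal sketch of how such a mixin could be implemented on top of Pydantic is shown below. The real `MinimalSerializerMixin` in `prowler_app/models/base.py` may be implemented differently:

```python
from typing import Any

from pydantic import BaseModel


class MinimalSerializerMixin(BaseModel):
    """Illustrative sketch: drop None and empty containers when serializing."""

    def model_dump(self, **kwargs: Any) -> dict[str, Any]:
        data = super().model_dump(**kwargs)
        # Keep only fields that carry information for the LLM
        return {
            key: value
            for key, value in data.items()
            if value is not None and value != "" and value != [] and value != {}
        }
```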
|
||||
|
||||
### Two-Tier Model Pattern
|
||||
|
||||
Use two-tier models for efficient responses:
|
||||
|
||||
- **Simplified**: Lightweight models for list operations
|
||||
- **Detailed**: Extended models for single-item retrieval
|
||||
|
||||
```python
|
||||
class SimplifiedItem(MinimalSerializerMixin):
|
||||
"""Use for list operations - minimal fields."""
|
||||
id: str
|
||||
name: str
|
||||
status: str
|
||||
|
||||
class DetailedItem(SimplifiedItem):
|
||||
"""Use for get operations - extends simplified with details."""
|
||||
description: str | None = None
|
||||
configuration: dict | None = None
|
||||
created_at: str
|
||||
updated_at: str
|
||||
```
|
||||
|
||||
### Factory Method Pattern
|
||||
|
||||
Always implement `from_api_response()` for API transformation:
|
||||
|
||||
```python
|
||||
@classmethod
|
||||
def from_api_response(cls, data: dict[str, Any]) -> "MyModel":
|
||||
"""Transform API response to model.
|
||||
|
||||
This method handles the JSON:API format used by Prowler API,
|
||||
extracting attributes and relationships as needed.
|
||||
"""
|
||||
attributes = data.get("attributes", {})
|
||||
return cls(
|
||||
id=data["id"],
|
||||
name=attributes["name"],
|
||||
# ... map other fields
|
||||
)
|
||||
```
|
||||
|
||||
## API Client Usage
|
||||
|
||||
The `ProwlerAPIClient` is a singleton that handles authentication and HTTP requests:
|
||||
|
||||
```python
|
||||
class MyTools(BaseTool):
|
||||
async def my_tool(self) -> dict:
|
||||
# GET request
|
||||
response = await self.api_client.get("/api/v1/endpoint", params={"key": "value"})
|
||||
|
||||
# POST request
|
||||
response = await self.api_client.post(
|
||||
"/api/v1/endpoint",
|
||||
json_data={"data": {"type": "items", "attributes": {...}}}
|
||||
)
|
||||
|
||||
# PATCH request
|
||||
response = await self.api_client.patch(
|
||||
f"/api/v1/endpoint/{id}",
|
||||
json_data={"data": {"attributes": {...}}}
|
||||
)
|
||||
|
||||
# DELETE request
|
||||
response = await self.api_client.delete(f"/api/v1/endpoint/{id}")
|
||||
```
|
||||
|
||||
### Helper Methods
|
||||
|
||||
The API client provides useful helper methods:
|
||||
|
||||
```python
|
||||
# Validate page size (1-1000)
|
||||
self.api_client.validate_page_size(page_size)
|
||||
|
||||
# Normalize date range with max days limit
|
||||
date_range = self.api_client.normalize_date_range(date_from, date_to, max_days=2)
|
||||
|
||||
# Build filter parameters (handles type conversion)
|
||||
clean_params = self.api_client.build_filter_params({
|
||||
"filter[status]": "active",
|
||||
"filter[severity__in]": ["high", "critical"], # Converts to comma-separated
|
||||
"filter[muted]": True, # Converts to "true"
|
||||
})
|
||||
|
||||
# Poll async task until completion
|
||||
result = await self.api_client.poll_task_until_complete(
|
||||
task_id=task_id,
|
||||
timeout=60,
|
||||
poll_interval=1.0
|
||||
)
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Tool Docstrings
|
||||
|
||||
Tool docstrings become the tool descriptions that the LLM reads. Provide clear usage instructions and common workflows:
|
||||
|
||||
```python
|
||||
async def search_items(self, status: str = Field(...)) -> dict:
|
||||
"""Search items with advanced filtering.
|
||||
|
||||
Returns a lightweight list optimized for LLM consumption.
|
||||
Use get_item for complete details about a specific item.
|
||||
|
||||
Common workflows:
|
||||
- Find critical items: status="critical"
|
||||
- Find recent items: Use date_from parameter
|
||||
"""
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
Return structured error responses instead of raising exceptions:
|
||||
|
||||
```python
|
||||
async def get_item(self, item_id: str) -> dict:
|
||||
try:
|
||||
response = await self.api_client.get(f"/api/v1/items/{item_id}")
|
||||
return DetailedItem.from_api_response(response["data"]).model_dump()
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to get item {item_id}: {e}")
|
||||
return {"error": str(e), "status": "failed"}
|
||||
```
|
||||
|
||||
### Parameter Descriptions
|
||||
|
||||
Use Pydantic `Field()` with clear descriptions. This also helps LLMs understand
|
||||
the purpose of each parameter, so be as descriptive as possible:
|
||||
|
||||
```python
|
||||
async def list_items(
|
||||
self,
|
||||
severity: list[str] = Field(
|
||||
default=[],
|
||||
description="Filter by severity levels (critical, high, medium, low)"
|
||||
),
|
||||
status: str | None = Field(
|
||||
default=None,
|
||||
description="Filter by status (PASS, FAIL, MANUAL)"
|
||||
),
|
||||
page_size: int = Field(
|
||||
default=50,
|
||||
description="Results per page"
|
||||
),
|
||||
) -> dict:
|
||||
```
|
||||
|
||||
## Development Commands
|
||||
|
||||
```bash
|
||||
# Navigate to MCP server directory
|
||||
cd mcp_server
|
||||
|
||||
# Run in STDIO mode (default)
|
||||
uv run prowler-mcp
|
||||
|
||||
# Run in HTTP mode
|
||||
uv run prowler-mcp --transport http --host 0.0.0.0 --port 8000
|
||||
|
||||
# Run with environment variables
|
||||
PROWLER_APP_API_KEY="pk_xxx" uv run prowler-mcp
|
||||
```
|
||||
|
||||
For complete installation and deployment options, see:
|
||||
- [Installation Guide](/getting-started/installation/prowler-mcp#from-source-development) - Development setup instructions
|
||||
- [Configuration Guide](/getting-started/basic-usage/prowler-mcp) - MCP client configuration
|
||||
|
||||
For development, we recommend using the [Model Context Protocol Inspector](https://github.com/modelcontextprotocol/inspector) as an MCP client to test and debug your tools.
|
||||
|
||||
## Related Documentation
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card title="MCP Server Overview" icon="circle-info" href="/getting-started/products/prowler-mcp">
|
||||
Key capabilities, use cases, and deployment options
|
||||
</Card>
|
||||
<Card title="Tools Reference" icon="wrench" href="/getting-started/basic-usage/prowler-mcp-tools">
|
||||
Complete reference of all available tools
|
||||
</Card>
|
||||
<Card title="Prowler Hub" icon="database" href="/getting-started/products/prowler-hub">
|
||||
Security checks and compliance frameworks catalog
|
||||
</Card>
|
||||
<Card title="Lighthouse AI" icon="robot" href="/getting-started/products/prowler-lighthouse-ai">
|
||||
AI-powered security analyst
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [MCP Protocol Specification](https://modelcontextprotocol.io) - Model Context Protocol details
|
||||
- [Prowler API Documentation](https://api.prowler.com/api/v1/docs) - API reference
|
||||
- [Prowler Hub API](https://hub.prowler.com/api/docs) - Hub API reference
|
||||
- [GitHub Repository](https://github.com/prowler-cloud/prowler) - Source code
|
||||
@@ -220,6 +220,7 @@ The function returns a JSON file containing the list of regions for the provider
|
||||
"sa-east-1", "us-east-1", "us-east-2", "us-west-1", "us-west-2"
|
||||
],
|
||||
"aws-cn": ["cn-north-1", "cn-northwest-1"],
|
||||
"aws-eusc": ["eusc-de-east-1"],
|
||||
"aws-us-gov": ["us-gov-east-1", "us-gov-west-1"]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,6 +63,82 @@ Other Commands for Running Tests
|
||||
Refer to the [pytest documentation](https://docs.pytest.org/en/7.1.x/getting-started.html) for more details.
|
||||
|
||||
</Note>
|
||||
|
||||
## AWS Service Dependency Table (CI Optimization)
|
||||
|
||||
To optimize CI pipeline execution time, the GitHub Actions workflow for AWS tests uses a **service dependency table** that determines which tests to run based on changed files. This ensures that when a service is modified, all dependent services are also tested.
|
||||
|
||||
### How It Works
|
||||
|
||||
The dependency table is defined in `.github/workflows/sdk-tests.yml` within the "Resolve AWS services under test" step. When files in a specific AWS service are changed:
|
||||
|
||||
1. Tests for the changed service are run
|
||||
2. Tests for all services that **depend on** the changed service are also run
|
||||
|
||||
For example, if you modify the `ec2` service, tests will also run for `dlm`, `dms`, `elbv2`, `emr`, `inspector2`, `rds`, `redshift`, `route53`, `shield`, `ssm`, and `workspaces` because these services use the EC2 client.
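
In pseudocode, the workflow expands the set of changed services with their dependents before selecting the test paths to run. The following is a simplified sketch of that idea, not the actual workflow step; the entries shown are copied from the dependency table below:

```python
# Simplified sketch of how dependent services are resolved in CI
dependents = {
    "acm": ["elb"],
    "ssm": ["ec2"],
    # ... remaining entries from the dependency table below ...
}


def services_under_test(changed_services: set[str]) -> set[str]:
    """Return the changed services plus every service that depends on them."""
    result = set(changed_services)
    for service in changed_services:
        result.update(dependents.get(service, []))
    return result


# Example: a change in acm also triggers the elb tests
print(sorted(services_under_test({"acm"})))  # ['acm', 'elb']
```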
|
||||
|
||||
### Current Dependency Table
|
||||
|
||||
The table maps a service (key) to the list of services that depend on it (values):
|
||||
|
||||
| Service | Dependent Services |
|
||||
|---------|-------------------|
|
||||
| `acm` | `elb` |
|
||||
| `autoscaling` | `dynamodb` |
|
||||
| `awslambda` | `ec2`, `inspector2` |
|
||||
| `backup` | `dynamodb`, `ec2`, `rds` |
|
||||
| `cloudfront` | `shield` |
|
||||
| `cloudtrail` | `awslambda`, `cloudwatch` |
|
||||
| `cloudwatch` | `bedrock` |
|
||||
| `ec2` | `dlm`, `dms`, `elbv2`, `emr`, `inspector2`, `rds`, `redshift`, `route53`, `shield`, `ssm` |
|
||||
| `ecr` | `inspector2` |
|
||||
| `elb` | `shield` |
|
||||
| `elbv2` | `shield` |
|
||||
| `globalaccelerator` | `shield` |
|
||||
| `iam` | `bedrock`, `cloudtrail`, `cloudwatch`, `codebuild` |
|
||||
| `kafka` | `firehose` |
|
||||
| `kinesis` | `firehose` |
|
||||
| `kms` | `kafka` |
|
||||
| `organizations` | `iam`, `servicecatalog` |
|
||||
| `route53` | `shield` |
|
||||
| `s3` | `bedrock`, `cloudfront`, `cloudtrail`, `macie` |
|
||||
| `ssm` | `ec2` |
|
||||
| `vpc` | `awslambda`, `ec2`, `efs`, `elasticache`, `neptune`, `networkfirewall`, `rds`, `redshift`, `workspaces` |
|
||||
| `waf` | `elbv2` |
|
||||
| `wafv2` | `cognito`, `elbv2` |
|
||||
|
||||
### When to Update the Table
|
||||
|
||||
You must update the dependency table when:
|
||||
|
||||
1. **A new check or service uses another service's client**: If your check imports a client from another service (e.g., `from prowler.providers.aws.services.ec2.ec2_client import ec2_client` in a non-ec2 check), add your service to the dependent services list of that client's service.
|
||||
|
||||
2. **A service relationship changes**: If you remove or add a service client dependency in an existing check, update the table accordingly.
|
||||
|
||||
### How to Update the Table
|
||||
|
||||
1. Open `.github/workflows/sdk-tests.yml`
|
||||
2. Find the `dependents` dictionary in the "Resolve AWS services under test" step
|
||||
3. Add or modify entries as needed
|
||||
4. **Update this documentation page** (`docs/developer-guide/unit-testing.mdx`) to reflect the changes in the [Current Dependency Table](#current-dependency-table) section above
|
||||
|
||||
```python
|
||||
dependents = {
|
||||
# ... existing entries ...
|
||||
"service_being_used": ["service_that_uses_it"],
|
||||
}
|
||||
```
|
||||
|
||||
**Example**: If you create a new check in the `newservice` service that imports `ec2_client`, add `newservice` to the `ec2` entry:
|
||||
|
||||
```python
|
||||
"ec2": ["dlm", "dms", "elbv2", "emr", "inspector2", "newservice", "rds", "redshift", "route53", "shield", "ssm"],
|
||||
```
|
||||
|
||||
<Warning>
|
||||
Failing to update this table when adding cross-service dependencies may result in CI tests passing even when related functionality is broken, as the dependent service tests won't be triggered.
|
||||
</Warning>
|
||||
|
||||
## AWS Testing Approaches
|
||||
|
||||
For the AWS provider, different testing approaches apply depending on API coverage and several other criteria.
|
||||
|
||||
@@ -99,7 +99,14 @@
|
||||
},
|
||||
"user-guide/tutorials/prowler-app-rbac",
|
||||
"user-guide/tutorials/prowler-app-api-keys",
|
||||
"user-guide/tutorials/prowler-app-mute-findings",
|
||||
{
|
||||
"group": "Mutelist",
|
||||
"expanded": true,
|
||||
"pages": [
|
||||
"user-guide/tutorials/prowler-app-simple-mutelist",
|
||||
"user-guide/tutorials/prowler-app-mute-findings"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Integrations",
|
||||
"expanded": true,
|
||||
@@ -198,6 +205,13 @@
|
||||
"user-guide/providers/gcp/retry-configuration"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Alibaba Cloud",
|
||||
"pages": [
|
||||
"user-guide/providers/alibabacloud/getting-started-alibabacloud",
|
||||
"user-guide/providers/alibabacloud/authentication"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Kubernetes",
|
||||
"pages": [
|
||||
@@ -270,7 +284,8 @@
|
||||
"developer-guide/outputs",
|
||||
"developer-guide/integrations",
|
||||
"developer-guide/security-compliance-framework",
|
||||
"developer-guide/lighthouse"
|
||||
"developer-guide/lighthouse",
|
||||
"developer-guide/mcp-server"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -279,6 +294,7 @@
|
||||
"developer-guide/aws-details",
|
||||
"developer-guide/azure-details",
|
||||
"developer-guide/gcp-details",
|
||||
"developer-guide/alibabacloud-details",
|
||||
"developer-guide/kubernetes-details",
|
||||
"developer-guide/m365-details",
|
||||
"developer-guide/github-details",
|
||||
@@ -293,7 +309,8 @@
|
||||
"group": "Testing",
|
||||
"pages": [
|
||||
"developer-guide/unit-testing",
|
||||
"developer-guide/integration-testing"
|
||||
"developer-guide/integration-testing",
|
||||
"developer-guide/end2end-testing"
|
||||
]
|
||||
},
|
||||
"developer-guide/debugging",
|
||||
|
||||
@@ -10,7 +10,7 @@ Complete reference guide for all tools available in the Prowler MCP Server. Tool
|
||||
|----------|------------|------------------------|
|
||||
| Prowler Hub | 10 tools | No |
|
||||
| Prowler Documentation | 2 tools | No |
|
||||
| Prowler Cloud/App | 28 tools | Yes |
|
||||
| Prowler Cloud/App | 24 tools | Yes |
|
||||
|
||||
## Tool Naming Convention
|
||||
|
||||
@@ -20,39 +20,6 @@ All tools follow a consistent naming pattern with prefixes:
|
||||
- `prowler_docs_*` - Prowler documentation search and retrieval
|
||||
- `prowler_app_*` - Prowler Cloud and App (Self-Managed) management tools
|
||||
|
||||
## Prowler Hub Tools
|
||||
|
||||
Access Prowler's security check catalog and compliance frameworks. **No authentication required.**
|
||||
|
||||
### Check Discovery
|
||||
|
||||
- **`prowler_hub_get_checks`** - List security checks with advanced filtering options
|
||||
- **`prowler_hub_get_check_filters`** - Return available filter values for checks (providers, services, severities, categories, compliances)
|
||||
- **`prowler_hub_search_checks`** - Full-text search across check metadata
|
||||
- **`prowler_hub_get_check_raw_metadata`** - Fetch raw check metadata in JSON format
|
||||
|
||||
### Check Code
|
||||
|
||||
- **`prowler_hub_get_check_code`** - Fetch the Python implementation code for a security check
|
||||
- **`prowler_hub_get_check_fixer`** - Fetch the automated fixer code for a check (if available)
|
||||
|
||||
### Compliance Frameworks
|
||||
|
||||
- **`prowler_hub_get_compliance_frameworks`** - List and filter compliance frameworks
|
||||
- **`prowler_hub_search_compliance_frameworks`** - Full-text search across compliance frameworks
|
||||
|
||||
### Provider Information
|
||||
|
||||
- **`prowler_hub_list_providers`** - List Prowler official providers and their services
|
||||
- **`prowler_hub_get_artifacts_count`** - Get total count of checks and frameworks in Prowler Hub
|
||||
|
||||
## Prowler Documentation Tools
|
||||
|
||||
Search and access official Prowler documentation. **No authentication required.**
|
||||
|
||||
- **`prowler_docs_search`** - Search the official Prowler documentation using full-text search
|
||||
- **`prowler_docs_get_document`** - Retrieve the full markdown content of a specific documentation file
|
||||
|
||||
## Prowler Cloud/App Tools
|
||||
|
||||
Manage Prowler Cloud or Prowler App (Self-Managed) features. **Requires authentication.**
|
||||
@@ -63,49 +30,97 @@ These tools require a valid API key. See the [Configuration Guide](/getting-star
|
||||
|
||||
### Findings Management
|
||||
|
||||
- **`prowler_app_list_findings`** - List security findings with advanced filtering
|
||||
- **`prowler_app_get_finding`** - Get detailed information about a specific finding
|
||||
- **`prowler_app_get_latest_findings`** - Retrieve latest findings from the most recent scans
|
||||
- **`prowler_app_get_findings_metadata`** - Get unique metadata values from filtered findings
|
||||
- **`prowler_app_get_latest_findings_metadata`** - Get metadata from latest findings across all providers
|
||||
Tools for searching, viewing, and analyzing security findings across all cloud providers.
|
||||
|
||||
- **`prowler_app_search_security_findings`** - Search and filter security findings with advanced filtering options (severity, status, provider, region, service, check ID, date range, muted status)
|
||||
- **`prowler_app_get_finding_details`** - Get comprehensive details about a specific finding including remediation guidance, check metadata, and resource relationships
|
||||
- **`prowler_app_get_findings_overview`** - Get aggregate statistics and trends about security findings as a markdown report
|
||||
|
||||
### Provider Management
|
||||
|
||||
- **`prowler_app_list_providers`** - List all providers with filtering options
|
||||
- **`prowler_app_create_provider`** - Create a new provider in the current tenant
|
||||
- **`prowler_app_get_provider`** - Get detailed information about a specific provider
|
||||
- **`prowler_app_update_provider`** - Update provider details (alias, etc.)
|
||||
- **`prowler_app_delete_provider`** - Delete a specific provider
|
||||
- **`prowler_app_test_provider_connection`** - Test provider connection status
|
||||
Tools for managing cloud provider connections in Prowler.
|
||||
|
||||
### Provider Secrets Management
|
||||
|
||||
- **`prowler_app_list_provider_secrets`** - List all provider secrets with filtering
|
||||
- **`prowler_app_add_provider_secret`** - Add or update credentials for a provider
|
||||
- **`prowler_app_get_provider_secret`** - Get detailed information about a provider secret
|
||||
- **`prowler_app_update_provider_secret`** - Update provider secret details
|
||||
- **`prowler_app_delete_provider_secret`** - Delete a provider secret
|
||||
- **`prowler_app_search_providers`** - Search and view configured providers with their connection status
|
||||
- **`prowler_app_connect_provider`** - Register and connect a provider with credentials for security scanning
|
||||
- **`prowler_app_delete_provider`** - Permanently remove a provider from Prowler
|
||||
|
||||
### Scan Management
|
||||
|
||||
- **`prowler_app_list_scans`** - List all scans with filtering options
|
||||
- **`prowler_app_create_scan`** - Trigger a manual scan for a specific provider
|
||||
- **`prowler_app_get_scan`** - Get detailed information about a specific scan
|
||||
- **`prowler_app_update_scan`** - Update scan details
|
||||
- **`prowler_app_get_scan_compliance_report`** - Download compliance report as CSV
|
||||
- **`prowler_app_get_scan_report`** - Download ZIP file containing complete scan report
|
||||
Tools for managing and monitoring security scans.
|
||||
|
||||
### Schedule Management
|
||||
- **`prowler_app_list_scans`** - List and filter security scans across all providers
|
||||
- **`prowler_app_get_scan`** - Get comprehensive details about a specific scan (progress, duration, resource counts)
|
||||
- **`prowler_app_trigger_scan`** - Trigger a manual security scan for a provider
|
||||
- **`prowler_app_schedule_daily_scan`** - Schedule automated daily scans for continuous monitoring
|
||||
- **`prowler_app_update_scan`** - Update scan name for better organization
|
||||
|
||||
- **`prowler_app_schedules_daily_scan`** - Create a daily scheduled scan for a provider
|
||||
### Resources Management
|
||||
|
||||
### Processor Management
|
||||
Tools for searching, viewing, and analyzing cloud resources discovered by Prowler.
|
||||
|
||||
- **`prowler_app_processors_list`** - List all processors with filtering
|
||||
- **`prowler_app_processors_create`** - Create a new processor (currently only mute lists supported)
|
||||
- **`prowler_app_processors_retrieve`** - Get processor details by ID
|
||||
- **`prowler_app_processors_partial_update`** - Update processor configuration
|
||||
- **`prowler_app_processors_destroy`** - Delete a processor
|
||||
- **`prowler_app_list_resources`** - List and filter cloud resources with advanced filtering options (provider, region, service, resource type, tags)
|
||||
- **`prowler_app_get_resource`** - Get comprehensive details about a specific resource including configuration, metadata, and finding relationships
|
||||
- **`prowler_app_get_resources_overview`** - Get aggregate statistics about cloud resources as a markdown report
|
||||
|
||||
### Muting Management
|
||||
|
||||
Tools for managing finding muting, including pattern-based bulk muting (mutelist) and finding-specific mute rules.
|
||||
|
||||
#### Mutelist (Pattern-Based Muting)
|
||||
|
||||
- **`prowler_app_get_mutelist`** - Retrieve the current mutelist configuration for the tenant
|
||||
- **`prowler_app_set_mutelist`** - Create or update the mutelist configuration for pattern-based bulk muting
|
||||
- **`prowler_app_delete_mutelist`** - Remove the mutelist configuration from the tenant
|
||||
|
||||
#### Mute Rules (Finding-Specific Muting)
|
||||
|
||||
- **`prowler_app_list_mute_rules`** - Search and filter mute rules with pagination support
|
||||
- **`prowler_app_get_mute_rule`** - Retrieve comprehensive details about a specific mute rule
|
||||
- **`prowler_app_create_mute_rule`** - Create a new mute rule to mute specific findings with documentation and audit trail
|
||||
- **`prowler_app_update_mute_rule`** - Update a mute rule's name, reason, or enabled status
|
||||
- **`prowler_app_delete_mute_rule`** - Delete a mute rule from the system
|
||||
|
||||
### Compliance Management
|
||||
|
||||
Tools for viewing compliance status and framework details across all cloud providers.
|
||||
|
||||
- **`prowler_app_get_compliance_overview`** - Get high-level compliance status across all frameworks for a specific scan or provider, including pass/fail statistics per framework
|
||||
- **`prowler_app_get_compliance_framework_state_details`** - Get detailed requirement-level breakdown for a specific compliance framework, including failed requirements and associated finding IDs
|
||||
|
||||
## Prowler Hub Tools
|
||||
|
||||
Access Prowler's security check catalog and compliance frameworks. **No authentication required.**
|
||||
|
||||
Tools follow a **two-tier pattern**: lightweight listing for browsing + detailed retrieval for complete information.
|
||||
|
||||
### Check Discovery and Details
|
||||
|
||||
- **`prowler_hub_list_checks`** - List security checks with lightweight data (id, title, severity, provider) and advanced filtering options
|
||||
- **`prowler_hub_semantic_search_checks`** - Full-text search across check metadata with lightweight results
|
||||
- **`prowler_hub_get_check_details`** - Get comprehensive details for a specific check including risk, remediation guidance, and compliance mappings
|
||||
|
||||
### Check Code
|
||||
|
||||
- **`prowler_hub_get_check_code`** - Fetch the Python implementation code for a security check
|
||||
- **`prowler_hub_get_check_fixer`** - Fetch the automated fixer code for a check (if available)
|
||||
|
||||
### Compliance Frameworks
|
||||
|
||||
- **`prowler_hub_list_compliances`** - List compliance frameworks with lightweight data (id, name, provider) and filtering options
|
||||
- **`prowler_hub_semantic_search_compliances`** - Full-text search across compliance frameworks with lightweight results
|
||||
- **`prowler_hub_get_compliance_details`** - Get comprehensive compliance details including requirements and mapped checks
|
||||
|
||||
### Providers Information
|
||||
|
||||
- **`prowler_hub_list_providers`** - List Prowler official providers
|
||||
- **`prowler_hub_get_provider_services`** - Get available services for a specific provider
|
||||
|
||||
## Prowler Documentation Tools
|
||||
|
||||
Search and access official Prowler documentation. **No authentication required.**
|
||||
|
||||
- **`prowler_docs_search`** - Search the official Prowler documentation using full-text search with the `term` parameter
|
||||
- **`prowler_docs_get_document`** - Retrieve the full markdown content of a specific documentation file using the path from search results
|
||||
|
||||
## Usage Tips
|
||||
|
||||
|
||||