Compare commits
88 Commits
| SHA1 |
|---|
| f03d83872e |
| bb620022f5 |
| 27a81defec |
| a81293d2ea |
| 80427dd127 |
| 14e9506b87 |
| 3e72d575d4 |
| 79825d35fc |
| 6215c1ba46 |
| faaa172b86 |
| 219ce0ba89 |
| 2170e5fe12 |
| e9efb12aa8 |
| 74d72dd56b |
| 06d1d214fd |
| 902bc9ad57 |
| 3616c0a8c0 |
| 7288585fec |
| 6400dc1059 |
| 379c1dc7dd |
| eb247360c3 |
| 7f12832808 |
| 9c387d5742 |
| 4a5801c519 |
| 85cb39af28 |
| c7abd77a1c |
| a622b9d965 |
| 8bd95a04ce |
| 340454ba68 |
| 6dff4bfd8b |
| 22c88e66a1 |
| 3b711f6143 |
| dbdce98cf2 |
| 53404dfa62 |
| c8872dd6ac |
| 26fd7d3adc |
| cb84bd0f94 |
| cb3f3ab35d |
| f58c1fddfb |
| c1bb51cf1a |
| a4e12a94f9 |
| 7b1915e489 |
| 56d092c87e |
| 29a1034658 |
| f5c2146d19 |
| 069f0d106c |
| 803ada7b16 |
| 5e033321e8 |
| 175d7f95f5 |
| 07e82bde56 |
| 4661e01c26 |
| dda0a2567d |
| 56ea498cca |
| f9e1e29631 |
| 3dadb264cc |
| 495aee015e |
| d3a000cbc4 |
| b2abdbeb60 |
| dc852b4595 |
| 1250f582a5 |
| bb43e924ee |
| 0225627a98 |
| 3097513525 |
| 6af9ff4b4b |
| 06fa57a949 |
| dc9e91ac4e |
| 59f8dfe5ae |
| 7e0c5540bb |
| 79ec53bfc5 |
| ed5f6b3af6 |
| 6e135abaa0 |
| 65b054f798 |
| 28d5b2bb6c |
| c8d9f37e70 |
| 9d7b9c3327 |
| 127b8d8e56 |
| 4e9dd46a5e |
| 880345bebe |
| 1259713fd6 |
| 26088868a2 |
| e58574e2a4 |
| a07e599cfc |
| e020b3f74b |
| 8e7e376e4f |
| a63a3d3f68 |
| 10838de636 |
| 5ebf455e04 |
| 0d59441c5f |

````
@@ -87,7 +87,7 @@ runs:
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: always()
with:
name: trivy-scan-report-${{ inputs.image-name }}
name: trivy-scan-report-${{ inputs.image-name }}-${{ inputs.image-tag }}
path: trivy-report.json
retention-days: ${{ inputs.artifact-retention-days }}
````

````
@@ -1,5 +1,6 @@
{
"channel": "${{ env.SLACK_CHANNEL_ID }}",
"ts": "${{ env.MESSAGE_TS }}",
"attachments": [
{
"color": "${{ env.STATUS_COLOR }}",
````

````
@@ -7,10 +7,16 @@ on:
paths:
- 'api/**'
- 'prowler/**'
- '.github/workflows/api-build-lint-push-containers.yml'
- '.github/workflows/api-container-build-push.yml'
release:
types:
- 'published'
workflow_dispatch:
inputs:
release_tag:
description: 'Release tag (e.g., 5.14.0)'
required: true
type: string

permissions:
contents: read

@@ -22,7 +28,7 @@ concurrency:
env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
RELEASE_TAG: ${{ github.event.release.tag_name || inputs.release_tag }}
STABLE_TAG: stable
WORKING_DIRECTORY: ./api

@@ -72,20 +78,9 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build and push API container for ${{ matrix.arch }}
if: github.event_name == 'push' || github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
platforms: ${{ matrix.platform }}
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Notify container push started
if: github.event_name == 'release'
id: slack-notification-started
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: ./.github/actions/slack-notification
env:
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}

@@ -98,11 +93,25 @@ jobs:
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"

- name: Build and push API container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
platforms: ${{ matrix.platform }}
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Notify container push completed
if: github.event_name == 'release' && always()
if: (github.event_name == 'release' || github.event_name == 'workflow_dispatch') && always()
uses: ./.github/actions/slack-notification
env:
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
MESSAGE_TS: ${{ steps.slack-notification-started.outputs.ts }}
COMPONENT: API
RELEASE_TAG: ${{ env.RELEASE_TAG }}
GITHUB_SERVER_URL: ${{ github.server_url }}

@@ -112,11 +121,12 @@ jobs:
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
step-outcome: ${{ steps.container-push.outcome }}
update-ts: ${{ steps.slack-notification-started.outputs.ts }}

# Create and push multi-architecture manifest
create-manifest:
needs: [setup, container-build-push]
if: github.event_name == 'push' || github.event_name == 'release'
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest

steps:

@@ -139,7 +149,7 @@ jobs:
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-arm64

- name: Create and push manifests for release event
if: github.event_name == 'release'
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
run: |
docker buildx imagetools create \
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }} \
````

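The `RELEASE_TAG: ${{ github.event.release.tag_name || inputs.release_tag }}` change is what lets one workflow serve both triggers: on a `release` event the tag name is populated and wins, while on `workflow_dispatch` it is empty and the expression falls through to the manually supplied input. As a rough shell analogy (hypothetical variable names, not part of the workflow):

```bash
#!/usr/bin/env bash
# Rough analogy for the GitHub Actions `a || b` expression:
# the left-hand side wins unless it is empty/falsy.
EVENT_RELEASE_TAG=""          # empty on workflow_dispatch runs
INPUT_RELEASE_TAG="5.14.0"    # the manually supplied release_tag input

RELEASE_TAG="${EVENT_RELEASE_TAG:-$INPUT_RELEASE_TAG}"
echo "RELEASE_TAG=${RELEASE_TAG}"   # -> 5.14.0
```
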
````
@@ -43,7 +43,16 @@ jobs:
ignore: DL3013

api-container-build-and-scan:
runs-on: ubuntu-latest
runs-on: ${{ matrix.runner }}
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
arch: amd64
- platform: linux/arm64
runner: ubuntu-24.04-arm
arch: arm64
timeout-minutes: 30
permissions:
contents: read

@@ -68,22 +77,23 @@ jobs:
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build container
- name: Build container for ${{ matrix.arch }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.API_WORKING_DIR }}
push: false
load: true
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: ${{ matrix.platform }}
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Scan container with Trivy
- name: Scan container with Trivy for ${{ matrix.arch }}
if: github.repository == 'prowler-cloud/prowler' && steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}
image-tag: ${{ github.sha }}-${{ matrix.arch }}
fail-on-critical: 'false'
severity: 'CRITICAL'
````

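Splitting this job into a per-architecture matrix matters because `load: true` can only import a single-platform image into the local Docker daemon, and building natively on an arm64 runner avoids QEMU emulation. A hedged sketch of what one matrix leg effectively runs (image name and SHA are placeholders, not values from the workflow):

```bash
#!/usr/bin/env bash
set -euo pipefail

ARCH="arm64"                 # matrix.arch
PLATFORM="linux/${ARCH}"     # matrix.platform
IMAGE="prowler-api-test"     # placeholder for env.IMAGE_NAME
SHA="deadbeef"               # placeholder commit SHA

# `--load` only accepts a single platform, so each architecture
# gets its own tag suffix to keep the scan artifacts distinct.
docker buildx build \
  --platform "${PLATFORM}" \
  --load \
  -t "${IMAGE}:${SHA}-${ARCH}" \
  ./api
```
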
````
@@ -10,6 +10,12 @@ on:
release:
types:
- 'published'
workflow_dispatch:
inputs:
release_tag:
description: 'Release tag (e.g., 5.14.0)'
required: true
type: string

permissions:
contents: read

@@ -21,7 +27,7 @@ concurrency:
env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
RELEASE_TAG: ${{ github.event.release.tag_name || inputs.release_tag }}
STABLE_TAG: stable
WORKING_DIRECTORY: ./mcp_server

@@ -43,7 +49,7 @@ jobs:

container-build-push:
needs: setup
runs-on: ${{ matrix.runner }}
runs-on: ${{ matrix.runner }}
strategy:
matrix:
include:

@@ -70,8 +76,24 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Notify container push started
id: slack-notification-started
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: ./.github/actions/slack-notification
env:
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
COMPONENT: MCP
RELEASE_TAG: ${{ env.RELEASE_TAG }}
GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_REPOSITORY: ${{ github.repository }}
GITHUB_RUN_ID: ${{ github.run_id }}
with:
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"

- name: Build and push MCP container for ${{ matrix.arch }}
if: github.event_name == 'push' || github.event_name == 'release'
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.WORKING_DIRECTORY }}

@@ -90,25 +112,12 @@ jobs:
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Notify container push started
if: github.event_name == 'release'
uses: ./.github/actions/slack-notification
env:
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
COMPONENT: MCP
RELEASE_TAG: ${{ env.RELEASE_TAG }}
GITHUB_SERVER_URL: ${{ github.server_url }}
GITHUB_REPOSITORY: ${{ github.repository }}
GITHUB_RUN_ID: ${{ github.run_id }}
with:
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"

- name: Notify container push completed
if: github.event_name == 'release' && always()
if: (github.event_name == 'release' || github.event_name == 'workflow_dispatch') && always()
uses: ./.github/actions/slack-notification
env:
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
MESSAGE_TS: ${{ steps.slack-notification-started.outputs.ts }}
COMPONENT: MCP
RELEASE_TAG: ${{ env.RELEASE_TAG }}
GITHUB_SERVER_URL: ${{ github.server_url }}

@@ -118,11 +127,12 @@ jobs:
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
step-outcome: ${{ steps.container-push.outcome }}
update-ts: ${{ steps.slack-notification-started.outputs.ts }}

# Create and push multi-architecture manifest
create-manifest:
needs: [setup, container-build-push]
if: github.event_name == 'push' || github.event_name == 'release'
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest

steps:

@@ -145,14 +155,14 @@ jobs:
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-arm64

- name: Create and push manifests for release event
if: github.event_name == 'release'
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
run: |
docker buildx imagetools create \
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }} \
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.STABLE_TAG }} \
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-amd64 \
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-arm64

- name: Install regctl
if: always()
uses: regclient/actions/regctl-installer@main
````

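The `create-manifest` job is what reunifies the per-architecture pushes: `docker buildx imagetools create` builds a multi-arch manifest list pointing at the two single-arch images, so a tag like `:5.14.0` or `:stable` resolves to the right architecture at pull time. A minimal standalone sketch with placeholder names:

```bash
#!/usr/bin/env bash
set -euo pipefail

REPO="example/prowler-mcp"   # placeholder repository
SHORT_SHA="abc1234"          # per-arch images pushed by the matrix job

# Stitch the amd64 and arm64 images into one multi-arch tag.
docker buildx imagetools create \
  -t "${REPO}:5.14.0" \
  -t "${REPO}:stable" \
  "${REPO}:${SHORT_SHA}-amd64" \
  "${REPO}:${SHORT_SHA}-arm64"

# Inspect the resulting manifest list.
docker buildx imagetools inspect "${REPO}:5.14.0"
```
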
````
@@ -42,7 +42,16 @@ jobs:
dockerfile: mcp_server/Dockerfile

mcp-container-build-and-scan:
runs-on: ubuntu-latest
runs-on: ${{ matrix.runner }}
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
arch: amd64
- platform: linux/arm64
runner: ubuntu-24.04-arm
arch: arm64
timeout-minutes: 30
permissions:
contents: read

@@ -66,22 +75,23 @@ jobs:
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build MCP container
- name: Build MCP container for ${{ matrix.arch }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.MCP_WORKING_DIR }}
push: false
load: true
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: ${{ matrix.platform }}
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Scan MCP container with Trivy
- name: Scan MCP container with Trivy for ${{ matrix.arch }}
if: github.repository == 'prowler-cloud/prowler' && steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}
image-tag: ${{ github.sha }}-${{ matrix.arch }}
fail-on-critical: 'false'
severity: 'CRITICAL'
````

````
@@ -88,59 +88,56 @@ jobs:

- name: Read changelog versions from release branch
run: |
# Function to extract the latest version from changelog
extract_latest_version() {
# Function to extract the version for a specific Prowler release from changelog
# This looks for entries with "(Prowler X.Y.Z)" to find the released version
extract_version_for_release() {
local changelog_file="$1"
local prowler_version="$2"
if [ -f "$changelog_file" ]; then
# Extract the first version entry (most recent) from changelog
# Format: ## [version] (1.2.3) or ## [vversion] (v1.2.3)
local version=$(grep -m 1 '^## \[' "$changelog_file" | sed 's/^## \[\(.*\)\].*/\1/' | sed 's/^v//' | tr -d '[:space:]')
# Extract version that matches this Prowler release
# Format: ## [version] (Prowler X.Y.Z) or ## [vversion] (Prowler vX.Y.Z)
local version=$(grep '^## \[' "$changelog_file" | grep "(Prowler v\?${prowler_version})" | head -1 | sed 's/^## \[\(.*\)\].*/\1/' | sed 's/^v//' | tr -d '[:space:]')
echo "$version"
else
echo ""
fi
}

# Read actual versions from changelogs (source of truth)
UI_VERSION=$(extract_latest_version "ui/CHANGELOG.md")
API_VERSION=$(extract_latest_version "api/CHANGELOG.md")
SDK_VERSION=$(extract_latest_version "prowler/CHANGELOG.md")
MCP_VERSION=$(extract_latest_version "mcp_server/CHANGELOG.md")
# Read versions from changelogs for this specific Prowler release
SDK_VERSION=$(extract_version_for_release "prowler/CHANGELOG.md" "$PROWLER_VERSION")
API_VERSION=$(extract_version_for_release "api/CHANGELOG.md" "$PROWLER_VERSION")
UI_VERSION=$(extract_version_for_release "ui/CHANGELOG.md" "$PROWLER_VERSION")
MCP_VERSION=$(extract_version_for_release "mcp_server/CHANGELOG.md" "$PROWLER_VERSION")

echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
echo "SDK_VERSION=${SDK_VERSION}" >> "${GITHUB_ENV}"
echo "API_VERSION=${API_VERSION}" >> "${GITHUB_ENV}"
echo "UI_VERSION=${UI_VERSION}" >> "${GITHUB_ENV}"
echo "MCP_VERSION=${MCP_VERSION}" >> "${GITHUB_ENV}"

if [ -n "$UI_VERSION" ]; then
echo "Read UI version from changelog: $UI_VERSION"
if [ -n "$SDK_VERSION" ]; then
echo "✓ SDK version for Prowler $PROWLER_VERSION: $SDK_VERSION"
else
echo "Warning: No UI version found in ui/CHANGELOG.md"
echo "ℹ No SDK version found for Prowler $PROWLER_VERSION in prowler/CHANGELOG.md"
fi

if [ -n "$API_VERSION" ]; then
echo "Read API version from changelog: $API_VERSION"
echo "✓ API version for Prowler $PROWLER_VERSION: $API_VERSION"
else
echo "Warning: No API version found in api/CHANGELOG.md"
echo "ℹ No API version found for Prowler $PROWLER_VERSION in api/CHANGELOG.md"
fi

if [ -n "$SDK_VERSION" ]; then
echo "Read SDK version from changelog: $SDK_VERSION"
if [ -n "$UI_VERSION" ]; then
echo "✓ UI version for Prowler $PROWLER_VERSION: $UI_VERSION"
else
echo "Warning: No SDK version found in prowler/CHANGELOG.md"
echo "ℹ No UI version found for Prowler $PROWLER_VERSION in ui/CHANGELOG.md"
fi

if [ -n "$MCP_VERSION" ]; then
echo "Read MCP version from changelog: $MCP_VERSION"
echo "✓ MCP version for Prowler $PROWLER_VERSION: $MCP_VERSION"
else
echo "Warning: No MCP version found in mcp_server/CHANGELOG.md"
echo "ℹ No MCP version found for Prowler $PROWLER_VERSION in mcp_server/CHANGELOG.md"
fi

echo "UI version: $UI_VERSION"
echo "API version: $API_VERSION"
echo "SDK version: $SDK_VERSION"
echo "MCP version: $MCP_VERSION"

- name: Extract and combine changelog entries
run: |
set -e

@@ -166,70 +163,54 @@ jobs:

# Remove --- separators
sed -i '/^---$/d' "$output_file"

# Remove only trailing empty lines (not all empty lines)
sed -i -e :a -e '/^\s*$/d;N;ba' "$output_file"
}

# Calculate expected versions for this release
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
EXPECTED_UI_VERSION="1.${BASH_REMATCH[2]}.${BASH_REMATCH[3]}"
EXPECTED_API_VERSION="1.$((${BASH_REMATCH[2]} + 1)).${BASH_REMATCH[3]}"

echo "Expected UI version for this release: $EXPECTED_UI_VERSION"
echo "Expected API version for this release: $EXPECTED_API_VERSION"
fi

# Determine if components have changes for this specific release
# UI has changes if its current version matches what we expect for this release
if [ -n "$UI_VERSION" ] && [ "$UI_VERSION" = "$EXPECTED_UI_VERSION" ]; then
echo "HAS_UI_CHANGES=true" >> $GITHUB_ENV
echo "✓ UI changes detected - version matches expected: $UI_VERSION"
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
else
echo "HAS_UI_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No UI changes for this release (current: $UI_VERSION, expected: $EXPECTED_UI_VERSION)"
touch "ui_changelog.md"
fi

# API has changes if its current version matches what we expect for this release
if [ -n "$API_VERSION" ] && [ "$API_VERSION" = "$EXPECTED_API_VERSION" ]; then
echo "HAS_API_CHANGES=true" >> $GITHUB_ENV
echo "✓ API changes detected - version matches expected: $API_VERSION"
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
else
echo "HAS_API_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No API changes for this release (current: $API_VERSION, expected: $EXPECTED_API_VERSION)"
touch "api_changelog.md"
fi

# SDK has changes if its current version matches the input version
if [ -n "$SDK_VERSION" ] && [ "$SDK_VERSION" = "$PROWLER_VERSION" ]; then
if [ -n "$SDK_VERSION" ]; then
echo "HAS_SDK_CHANGES=true" >> $GITHUB_ENV
echo "✓ SDK changes detected - version matches input: $SDK_VERSION"
extract_changelog "prowler/CHANGELOG.md" "$PROWLER_VERSION" "prowler_changelog.md"
HAS_SDK_CHANGES="true"
echo "✓ SDK changes detected - version: $SDK_VERSION"
extract_changelog "prowler/CHANGELOG.md" "$SDK_VERSION" "prowler_changelog.md"
else
echo "HAS_SDK_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No SDK changes for this release (current: $SDK_VERSION, input: $PROWLER_VERSION)"
HAS_SDK_CHANGES="false"
echo "ℹ No SDK changes for this release"
touch "prowler_changelog.md"
fi

# MCP has changes if the changelog references this Prowler version
# Check if the changelog contains "(Prowler X.Y.Z)" or "(Prowler UNRELEASED)"
if [ -f "mcp_server/CHANGELOG.md" ]; then
MCP_PROWLER_REF=$(grep -m 1 "^## \[.*\] (Prowler" mcp_server/CHANGELOG.md | sed -E 's/.*\(Prowler ([^)]+)\).*/\1/' | tr -d '[:space:]')
if [ "$MCP_PROWLER_REF" = "$PROWLER_VERSION" ] || [ "$MCP_PROWLER_REF" = "UNRELEASED" ]; then
echo "HAS_MCP_CHANGES=true" >> $GITHUB_ENV
echo "✓ MCP changes detected - Prowler reference: $MCP_PROWLER_REF (version: $MCP_VERSION)"
extract_changelog "mcp_server/CHANGELOG.md" "$MCP_VERSION" "mcp_changelog.md"
else
echo "HAS_MCP_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No MCP changes for this release (Prowler reference: $MCP_PROWLER_REF, input: $PROWLER_VERSION)"
touch "mcp_changelog.md"
fi
if [ -n "$API_VERSION" ]; then
echo "HAS_API_CHANGES=true" >> $GITHUB_ENV
HAS_API_CHANGES="true"
echo "✓ API changes detected - version: $API_VERSION"
extract_changelog "api/CHANGELOG.md" "$API_VERSION" "api_changelog.md"
else
echo "HAS_API_CHANGES=false" >> $GITHUB_ENV
HAS_API_CHANGES="false"
echo "ℹ No API changes for this release"
touch "api_changelog.md"
fi

if [ -n "$UI_VERSION" ]; then
echo "HAS_UI_CHANGES=true" >> $GITHUB_ENV
HAS_UI_CHANGES="true"
echo "✓ UI changes detected - version: $UI_VERSION"
extract_changelog "ui/CHANGELOG.md" "$UI_VERSION" "ui_changelog.md"
else
echo "HAS_UI_CHANGES=false" >> $GITHUB_ENV
HAS_UI_CHANGES="false"
echo "ℹ No UI changes for this release"
touch "ui_changelog.md"
fi

if [ -n "$MCP_VERSION" ]; then
echo "HAS_MCP_CHANGES=true" >> $GITHUB_ENV
HAS_MCP_CHANGES="true"
echo "✓ MCP changes detected - version: $MCP_VERSION"
extract_changelog "mcp_server/CHANGELOG.md" "$MCP_VERSION" "mcp_changelog.md"
else
echo "HAS_MCP_CHANGES=false" >> $GITHUB_ENV
echo "ℹ No MCP changelog found"
HAS_MCP_CHANGES="false"
echo "ℹ No MCP changes for this release"
touch "mcp_changelog.md"
fi

@@ -325,6 +306,17 @@ jobs:
fi
echo "✓ api/src/backend/api/v1/views.py version: $CURRENT_API_VERSION"

- name: Verify API version in api/src/backend/api/specs/v1.yaml
if: ${{ env.HAS_API_CHANGES == 'true' }}
run: |
CURRENT_API_VERSION=$(grep '^ version: ' api/src/backend/api/specs/v1.yaml | sed -E 's/ version: ([0-9]+\.[0-9]+\.[0-9]+)/\1/' | tr -d '[:space:]')
API_VERSION_TRIMMED=$(echo "$API_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_API_VERSION" != "$API_VERSION_TRIMMED" ]; then
echo "ERROR: API version mismatch in api/src/backend/api/specs/v1.yaml (expected: '$API_VERSION_TRIMMED', found: '$CURRENT_API_VERSION')"
exit 1
fi
echo "✓ api/src/backend/api/specs/v1.yaml version: $CURRENT_API_VERSION"

- name: Update API prowler dependency for minor release
if: ${{ env.PATCH_VERSION == '0' }}
run: |
````

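The switch from `extract_latest_version` to `extract_version_for_release` is the behavioral core of this hunk: instead of trusting whatever entry happens to sit at the top of a component changelog, the script now keys each component's version off the `(Prowler X.Y.Z)` suffix in the heading. A self-contained check of that pipeline (sample changelog invented for illustration; assumes GNU grep and sed, as on the Ubuntu runners):

```bash
#!/usr/bin/env bash
set -euo pipefail

sample="$(mktemp)"
cat > "$sample" <<'EOF'
## [1.16.0] (Unreleased)
## [1.15.2] (Prowler v5.14.2)
## [1.15.1] (Prowler v5.14.1)
EOF

# Same pipeline as the new function in the diff: filter headings,
# keep the one tagged with this Prowler release, strip brackets and `v`.
extract_version_for_release() {
  local changelog_file="$1" prowler_version="$2"
  grep '^## \[' "$changelog_file" \
    | grep "(Prowler v\?${prowler_version})" \
    | head -1 \
    | sed 's/^## \[\(.*\)\].*/\1/' \
    | sed 's/^v//' \
    | tr -d '[:space:]'
}

extract_version_for_release "$sample" "5.14.2"   # prints 1.15.2
echo
rm -f "$sample"
```
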
````
@@ -16,6 +16,12 @@ on:
release:
types:
- 'published'
workflow_dispatch:
inputs:
release_tag:
description: 'Release tag (e.g., 5.14.0)'
required: true
type: string

permissions:
contents: read

@@ -141,21 +147,9 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build and push SDK container for ${{ matrix.arch }}
if: github.event_name == 'push' || github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
file: ${{ env.DOCKERFILE_PATH }}
push: true
platforms: ${{ matrix.platform }}
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Notify container push started
if: github.event_name == 'release'
id: slack-notification-started
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: ./.github/actions/slack-notification
env:
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}

@@ -168,11 +162,26 @@ jobs:
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"

- name: Build and push SDK container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
file: ${{ env.DOCKERFILE_PATH }}
push: true
platforms: ${{ matrix.platform }}
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.LATEST_TAG }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Notify container push completed
if: github.event_name == 'release' && always()
if: (github.event_name == 'release' || github.event_name == 'workflow_dispatch') && always()
uses: ./.github/actions/slack-notification
env:
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
MESSAGE_TS: ${{ steps.slack-notification-started.outputs.ts }}
COMPONENT: SDK
RELEASE_TAG: ${{ env.PROWLER_VERSION }}
GITHUB_SERVER_URL: ${{ github.server_url }}

@@ -182,11 +191,12 @@ jobs:
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
step-outcome: ${{ steps.container-push.outcome }}
update-ts: ${{ steps.slack-notification-started.outputs.ts }}

# Create and push multi-architecture manifest
create-manifest:
needs: [container-build-push]
if: github.event_name == 'push' || github.event_name == 'release'
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest

steps:

@@ -219,7 +229,7 @@ jobs:
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.container-build-push.outputs.latest_tag }}-arm64

- name: Create and push manifests for release event
if: github.event_name == 'release'
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
run: |
docker buildx imagetools create \
-t ${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ needs.container-build-push.outputs.prowler_version }} \
````

````
@@ -44,7 +44,16 @@ jobs:

sdk-container-build-and-scan:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
runs-on: ${{ matrix.runner }}
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
arch: amd64
- platform: linux/arm64
runner: ubuntu-24.04-arm
arch: arm64
timeout-minutes: 30
permissions:
contents: read

@@ -82,22 +91,23 @@ jobs:
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build SDK container
- name: Build SDK container for ${{ matrix.arch }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
push: false
load: true
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: ${{ matrix.platform }}
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Scan SDK container with Trivy
if: steps.check-changes.outputs.any_changed == 'true'
- name: Scan SDK container with Trivy for ${{ matrix.arch }}
if: github.repository == 'prowler-cloud/prowler' && steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}
image-tag: ${{ github.sha }}-${{ matrix.arch }}
fail-on-critical: 'false'
severity: 'CRITICAL'
````

````
@@ -10,6 +10,12 @@ on:
release:
types:
- 'published'
workflow_dispatch:
inputs:
release_tag:
description: 'Release tag (e.g., 5.14.0)'
required: true
type: string

permissions:
contents: read

@@ -21,7 +27,7 @@ concurrency:
env:
# Tags
LATEST_TAG: latest
RELEASE_TAG: ${{ github.event.release.tag_name }}
RELEASE_TAG: ${{ github.event.release.tag_name || inputs.release_tag }}
STABLE_TAG: stable
WORKING_DIRECTORY: ./ui

@@ -46,7 +52,7 @@ jobs:

container-build-push:
needs: setup
runs-on: ${{ matrix.runner }}
runs-on: ${{ matrix.runner }}
strategy:
matrix:
include:

@@ -74,37 +80,9 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build and push UI container for ${{ matrix.arch }}
if: github.event_name == 'push'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ needs.setup.outputs.short-sha }}
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
push: true
platforms: ${{ matrix.platform }}
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Build and push UI container for ${{ matrix.arch }}
if: github.event_name == 'push' || github.event_name == 'release'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ github.event_name == 'release' && format('v{0}', env.RELEASE_TAG) || needs.setup.outputs.short_sha }}
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
push: true
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}
cache-from: type=gha
cache-to: type=gha,mode=max

- name: Notify container push started
if: github.event_name == 'release'
id: slack-notification-started
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: ./.github/actions/slack-notification
env:
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}

@@ -117,11 +95,28 @@ jobs:
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
payload-file-path: "./.github/scripts/slack-messages/container-release-started.json"

- name: Build and push UI container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=${{ (github.event_name == 'release' || github.event_name == 'workflow_dispatch') && format('v{0}', env.RELEASE_TAG) || needs.setup.outputs.short-sha }}
NEXT_PUBLIC_API_BASE_URL=${{ env.NEXT_PUBLIC_API_BASE_URL }}
push: true
platforms: ${{ matrix.platform }}
tags: |
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}

- name: Notify container push completed
if: github.event_name == 'release' && always()
if: (github.event_name == 'release' || github.event_name == 'workflow_dispatch') && always()
uses: ./.github/actions/slack-notification
env:
SLACK_CHANNEL_ID: ${{ secrets.SLACK_PLATFORM_DEPLOYMENTS }}
MESSAGE_TS: ${{ steps.slack-notification-started.outputs.ts }}
COMPONENT: UI
RELEASE_TAG: ${{ env.RELEASE_TAG }}
GITHUB_SERVER_URL: ${{ github.server_url }}

@@ -131,11 +126,12 @@ jobs:
slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }}
payload-file-path: "./.github/scripts/slack-messages/container-release-completed.json"
step-outcome: ${{ steps.container-push.outcome }}
update-ts: ${{ steps.slack-notification-started.outputs.ts }}

# Create and push multi-architecture manifest
create-manifest:
needs: [setup, container-build-push]
if: github.event_name == 'push' || github.event_name == 'release'
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest

steps:

@@ -158,7 +154,7 @@ jobs:
${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ needs.setup.outputs.short-sha }}-arm64

- name: Create and push manifests for release event
if: github.event_name == 'release'
if: github.event_name == 'release' || github.event_name == 'workflow_dispatch'
run: |
docker buildx imagetools create \
-t ${{ env.PROWLERCLOUD_DOCKERHUB_REPOSITORY }}/${{ env.PROWLERCLOUD_DOCKERHUB_IMAGE }}:${{ env.RELEASE_TAG }} \
````

````
@@ -43,7 +43,16 @@ jobs:
ignore: DL3018

ui-container-build-and-scan:
runs-on: ubuntu-latest
runs-on: ${{ matrix.runner }}
strategy:
matrix:
include:
- platform: linux/amd64
runner: ubuntu-latest
arch: amd64
- platform: linux/arm64
runner: ubuntu-24.04-arm
arch: arm64
timeout-minutes: 30
permissions:
contents: read

@@ -67,7 +76,7 @@ jobs:
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1

- name: Build UI container
- name: Build UI container for ${{ matrix.arch }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:

@@ -75,17 +84,18 @@ jobs:
target: prod
push: false
load: true
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}
cache-from: type=gha
cache-to: type=gha,mode=max
platforms: ${{ matrix.platform }}
tags: ${{ env.IMAGE_NAME }}:${{ github.sha }}-${{ matrix.arch }}
cache-from: type=gha,scope=${{ matrix.arch }}
cache-to: type=gha,mode=max,scope=${{ matrix.arch }}
build-args: |
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_51LwpXXXX

- name: Scan UI container with Trivy
- name: Scan UI container with Trivy for ${{ matrix.arch }}
if: github.repository == 'prowler-cloud/prowler' && steps.check-changes.outputs.any_changed == 'true'
uses: ./.github/actions/trivy-scan
with:
image-name: ${{ env.IMAGE_NAME }}
image-tag: ${{ github.sha }}
image-tag: ${{ github.sha }}-${{ matrix.arch }}
fail-on-critical: 'false'
severity: 'CRITICAL'
````

````
@@ -10,6 +10,7 @@ on:
- 'ui/**'

jobs:

e2e-tests:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest

@@ -33,12 +34,50 @@ jobs:
E2E_M365_SECRET_ID: ${{ secrets.E2E_M365_SECRET_ID }}
E2E_M365_TENANT_ID: ${{ secrets.E2E_M365_TENANT_ID }}
E2E_M365_CERTIFICATE_CONTENT: ${{ secrets.E2E_M365_CERTIFICATE_CONTENT }}
E2E_NEW_PASSWORD: ${{ secrets.E2E_NEW_PASSWORD }}
E2E_KUBERNETES_CONTEXT: 'kind-kind'
E2E_KUBERNETES_KUBECONFIG_PATH: /home/runner/.kube/config
E2E_GCP_BASE64_SERVICE_ACCOUNT_KEY: ${{ secrets.E2E_GCP_BASE64_SERVICE_ACCOUNT_KEY }}
E2E_GCP_PROJECT_ID: ${{ secrets.E2E_GCP_PROJECT_ID }}
E2E_GITHUB_APP_ID: ${{ secrets.E2E_GITHUB_APP_ID }}
E2E_GITHUB_BASE64_APP_PRIVATE_KEY: ${{ secrets.E2E_GITHUB_BASE64_APP_PRIVATE_KEY }}
E2E_GITHUB_USERNAME: ${{ secrets.E2E_GITHUB_USERNAME }}
E2E_GITHUB_PERSONAL_ACCESS_TOKEN: ${{ secrets.E2E_GITHUB_PERSONAL_ACCESS_TOKEN }}
E2E_GITHUB_ORGANIZATION: ${{ secrets.E2E_GITHUB_ORGANIZATION }}
E2E_GITHUB_ORGANIZATION_ACCESS_TOKEN: ${{ secrets.E2E_GITHUB_ORGANIZATION_ACCESS_TOKEN }}
E2E_ORGANIZATION_ID: ${{ secrets.E2E_ORGANIZATION_ID }}
E2E_OCI_TENANCY_ID: ${{ secrets.E2E_OCI_TENANCY_ID }}
E2E_OCI_USER_ID: ${{ secrets.E2E_OCI_USER_ID }}
E2E_OCI_FINGERPRINT: ${{ secrets.E2E_OCI_FINGERPRINT }}
E2E_OCI_KEY_CONTENT: ${{ secrets.E2E_OCI_KEY_CONTENT }}
E2E_OCI_REGION: ${{ secrets.E2E_OCI_REGION }}
E2E_NEW_USER_PASSWORD: ${{ secrets.E2E_NEW_USER_PASSWORD }}

steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1
with:
cluster_name: kind
- name: Modify kubeconfig
run: |
# Modify the kubeconfig to use the kind cluster server to https://kind-control-plane:6443
# from worker service into docker-compose.yml
kubectl config set-cluster kind-kind --server=https://kind-control-plane:6443
kubectl config view
- name: Add network kind to docker compose
run: |
# Add the network kind to the docker compose to interconnect to kind cluster
yq -i '.networks.kind.external = true' docker-compose.yml
# Add network kind to worker service and default network too
yq -i '.services.worker.networks = ["kind","default"]' docker-compose.yml
- name: Fix API data directory permissions
run: docker run --rm -v $(pwd)/_data/api:/data alpine chown -R 1000:1000 /data
- name: Add AWS credentials for testing AWS SDK Default Adding Provider
run: |
echo "Adding AWS credentials for testing AWS SDK Default Adding Provider..."
echo "AWS_ACCESS_KEY_ID=${{ secrets.E2E_AWS_PROVIDER_ACCESS_KEY }}" >> .env
echo "AWS_SECRET_ACCESS_KEY=${{ secrets.E2E_AWS_PROVIDER_SECRET_KEY }}" >> .env
- name: Start API services
run: |
# Override docker-compose image tag to use latest instead of stable

@@ -78,29 +117,42 @@ jobs:
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: '20.x'
cache: 'npm'
cache-dependency-path: './ui/package-lock.json'
- name: Setup pnpm
uses: pnpm/action-setup@v4
with:
version: 10
run_install: false
- name: Get pnpm store directory
shell: bash
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV
- name: Setup pnpm cache
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('ui/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install UI dependencies
working-directory: ./ui
run: npm ci
run: pnpm install --frozen-lockfile
- name: Build UI application
working-directory: ./ui
run: npm run build
run: pnpm run build
- name: Cache Playwright browsers
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
id: playwright-cache
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/package-lock.json') }}
key: ${{ runner.os }}-playwright-${{ hashFiles('ui/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-playwright-
- name: Install Playwright browsers
working-directory: ./ui
if: steps.playwright-cache.outputs.cache-hit != 'true'
run: npm run test:e2e:install
run: pnpm run test:e2e:install
- name: Run E2E tests
working-directory: ./ui
run: npm run test:e2e
run: pnpm run test:e2e
- name: Upload test reports
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
if: failure()
````

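Wiring the compose stack into the Kind cluster takes two small edits: declare the externally created `kind` network in `docker-compose.yml`, then attach the `worker` service to it alongside its default network so the worker can reach `kind-control-plane:6443`. A standalone demonstration of the two `yq` edits (minimal invented compose file; assumes mikefarah yq v4, the variant preinstalled on GitHub runners):

```bash
#!/usr/bin/env bash
set -euo pipefail

cat > docker-compose.yml <<'EOF'
services:
  worker:
    image: alpine
EOF

# Declare the pre-existing `kind` network as external ...
yq -i '.networks.kind.external = true' docker-compose.yml
# ... and attach the worker to both the kind and default networks.
yq -i '.services.worker.networks = ["kind","default"]' docker-compose.yml

cat docker-compose.yml
```
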
````
@@ -48,17 +48,36 @@ jobs:
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
with:
node-version: ${{ env.NODE_VERSION }}
cache: 'npm'
cache-dependency-path: './ui/package-lock.json'

- name: Setup pnpm
if: steps.check-changes.outputs.any_changed == 'true'
uses: pnpm/action-setup@v4
with:
version: 10
run_install: false

- name: Get pnpm store directory
if: steps.check-changes.outputs.any_changed == 'true'
shell: bash
run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV

- name: Setup pnpm cache
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ${{ env.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('ui/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-

- name: Install dependencies
if: steps.check-changes.outputs.any_changed == 'true'
run: npm ci
run: pnpm install --frozen-lockfile

- name: Run healthcheck
if: steps.check-changes.outputs.any_changed == 'true'
run: npm run healthcheck
run: pnpm run healthcheck

- name: Build application
if: steps.check-changes.outputs.any_changed == 'true'
run: npm run build
run: pnpm run build
````

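The npm-to-pnpm migration repeats the same recipe in each workflow: install pnpm, resolve its content-addressable store path, cache that store keyed on `pnpm-lock.yaml`, then run `pnpm install --frozen-lockfile`, the CI-safe analogue of `npm ci`. Locally, the equivalent sequence looks like this (sketch; assumes pnpm 10 is installed):

```bash
#!/usr/bin/env bash
set -euo pipefail
cd ui

# Where pnpm keeps its content-addressable store -- this is the
# directory the workflows cache between runs.
STORE_PATH="$(pnpm store path --silent)"
echo "pnpm store: ${STORE_PATH}"

# Fails instead of updating the lockfile, like `npm ci`.
pnpm install --frozen-lockfile
pnpm run build
```
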
````
@@ -150,9 +150,5 @@ _data/
# Claude
CLAUDE.md

# MCP Server
mcp_server/prowler_mcp_server/prowler_app/server.py
mcp_server/prowler_mcp_server/prowler_app/utils/schema.yaml

# Compliance report
*.pdf
````

````
@@ -12,6 +12,7 @@ ENV TRIVY_VERSION=${TRIVY_VERSION}
# hadolint ignore=DL3008
RUN apt-get update && apt-get install -y --no-install-recommends \
wget libicu72 libunwind8 libssl3 libcurl4 ca-certificates apt-transport-https gnupg \
build-essential pkg-config libzstd-dev zlib1g-dev \
&& rm -rf /var/lib/apt/lists/*

# Install PowerShell
````

````
@@ -89,8 +89,9 @@ prowler dashboard
| GitHub | 17 | 2 | 1 | 0 | Official | Stable | UI, API, CLI |
| M365 | 70 | 7 | 3 | 2 | Official | UI, API, CLI |
| OCI | 51 | 13 | 1 | 10 | Official | UI, API, CLI |
| Alibaba Cloud | 61 | 9 | 1 | 9 | Official | CLI |
| IaC | [See `trivy` docs.](https://trivy.dev/latest/docs/coverage/iac/) | N/A | N/A | N/A | Official | UI, API, CLI |
| MongoDB Atlas | 10 | 3 | 0 | 0 | Official | CLI, API |
| MongoDB Atlas | 10 | 3 | 0 | 0 | Official | UI, API, CLI |
| LLM | [See `promptfoo` docs.](https://www.promptfoo.dev/docs/red-team/plugins/) | N/A | N/A | N/A | Official | CLI |
| NHN | 6 | 2 | 1 | 0 | Unofficial | CLI |

@@ -153,7 +154,7 @@ You can find more information in the [Troubleshooting](./docs/troubleshooting.md

* `git` installed.
* `poetry` v2 installed: [poetry installation](https://python-poetry.org/docs/#installation).
* `npm` installed: [npm installation](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm).
* `pnpm` installed: [pnpm installation](https://pnpm.io/installation).
* `Docker Compose` installed: https://docs.docker.com/compose/install/.

**Commands to run the API**

@@ -209,9 +210,9 @@ python -m celery -A config.celery beat -l info --scheduler django_celery_beat.sc
``` console
git clone https://github.com/prowler-cloud/prowler
cd prowler/ui
npm install
npm run build
npm start
pnpm install
pnpm run build
pnpm start
```

> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
````

````
@@ -2,6 +2,39 @@

All notable changes to the **Prowler API** are documented in this file.

## [1.16.0] (Unreleased)

### Added
- New endpoint to retrieve an overview of the attack surfaces [(#9309)](https://github.com/prowler-cloud/prowler/pull/9309)
- New endpoint `GET /api/v1/overviews/findings_severity/timeseries` to retrieve daily aggregated findings by severity level [(#9363)](https://github.com/prowler-cloud/prowler/pull/9363)
- Lighthouse AI support for Amazon Bedrock API key [(#9343)](https://github.com/prowler-cloud/prowler/pull/9343)
- Exception handler for provider deletions during scans [(#9414)](https://github.com/prowler-cloud/prowler/pull/9414)
- Support to use admin credentials through the read replica database [(#9440)](https://github.com/prowler-cloud/prowler/pull/9440)

### Changed

- Error messages from Lighthouse celery tasks [(#9165)](https://github.com/prowler-cloud/prowler/pull/9165)
- Restore the compliance overview endpoint's mandatory filters [(#9338)](https://github.com/prowler-cloud/prowler/pull/9338)

---

## [1.15.2] (Prowler v5.14.2)

### Fixed
- Unique constraint violation during compliance overviews task [(#9436)](https://github.com/prowler-cloud/prowler/pull/9436)
- Division by zero error in ENS PDF report when all requirements are manual [(#9443)](https://github.com/prowler-cloud/prowler/pull/9443)

---

## [1.15.1] (Prowler v5.14.1)

### Fixed
- Fix typo in PDF reporting [(#9345)](https://github.com/prowler-cloud/prowler/pull/9345)
- Fix IaC provider initialization failure when mutelist processor is configured [(#9331)](https://github.com/prowler-cloud/prowler/pull/9331)
- Match logic for ThreatScore when counting findings [(#9348)](https://github.com/prowler-cloud/prowler/pull/9348)

---

## [1.15.0] (Prowler v5.14.0)

### Added
````

@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "about-time"
|
||||
@@ -2468,6 +2468,72 @@ files = [
|
||||
{file = "frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gevent"
|
||||
version = "25.9.1"
|
||||
description = "Coroutine-based network library"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "gevent-25.9.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:856b990be5590e44c3a3dc6c8d48a40eaccbb42e99d2b791d11d1e7711a4297e"},
|
||||
{file = "gevent-25.9.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:fe1599d0b30e6093eb3213551751b24feeb43db79f07e89d98dd2f3330c9063e"},
|
||||
{file = "gevent-25.9.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:f0d8b64057b4bf1529b9ef9bd2259495747fba93d1f836c77bfeaacfec373fd0"},
|
||||
{file = "gevent-25.9.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b56cbc820e3136ba52cd690bdf77e47a4c239964d5f80dc657c1068e0fe9521c"},
|
||||
{file = "gevent-25.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c5fa9ce5122c085983e33e0dc058f81f5264cebe746de5c401654ab96dddfca8"},
|
||||
{file = "gevent-25.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:03c74fec58eda4b4edc043311fca8ba4f8744ad1632eb0a41d5ec25413581975"},
|
||||
{file = "gevent-25.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a8ae9f895e8651d10b0a8328a61c9c53da11ea51b666388aa99b0ce90f9fdc27"},
|
||||
{file = "gevent-25.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5aff9e8342dc954adb9c9c524db56c2f3557999463445ba3d9cbe3dada7b7"},
|
||||
{file = "gevent-25.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1cdf6db28f050ee103441caa8b0448ace545364f775059d5e2de089da975c457"},
|
||||
{file = "gevent-25.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:812debe235a8295be3b2a63b136c2474241fa5c58af55e6a0f8cfc29d4936235"},
|
||||
{file = "gevent-25.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b28b61ff9216a3d73fe8f35669eefcafa957f143ac534faf77e8a19eb9e6883a"},
|
||||
{file = "gevent-25.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5e4b6278b37373306fc6b1e5f0f1cf56339a1377f67c35972775143d8d7776ff"},
|
||||
{file = "gevent-25.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d99f0cb2ce43c2e8305bf75bee61a8bde06619d21b9d0316ea190fc7a0620a56"},
|
||||
{file = "gevent-25.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:72152517ecf548e2f838c61b4be76637d99279dbaa7e01b3924df040aa996586"},
|
||||
{file = "gevent-25.9.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:46b188248c84ffdec18a686fcac5dbb32365d76912e14fda350db5dc0bfd4f86"},
|
||||
{file = "gevent-25.9.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f2b54ea3ca6f0c763281cd3f96010ac7e98c2e267feb1221b5a26e2ca0b9a692"},
|
||||
{file = "gevent-25.9.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7a834804ac00ed8a92a69d3826342c677be651b1c3cd66cc35df8bc711057aa2"},
|
||||
{file = "gevent-25.9.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:323a27192ec4da6b22a9e51c3d9d896ff20bc53fdc9e45e56eaab76d1c39dd74"},
|
||||
{file = "gevent-25.9.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6ea78b39a2c51d47ff0f130f4c755a9a4bbb2dd9721149420ad4712743911a51"},
|
||||
{file = "gevent-25.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:dc45cd3e1cc07514a419960af932a62eb8515552ed004e56755e4bf20bad30c5"},
|
||||
{file = "gevent-25.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34e01e50c71eaf67e92c186ee0196a039d6e4f4b35670396baed4a2d8f1b347f"},
|
||||
{file = "gevent-25.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acd6bcd5feabf22c7c5174bd3b9535ee9f088d2bbce789f740ad8d6554b18f3"},
|
||||
{file = "gevent-25.9.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:4f84591d13845ee31c13f44bdf6bd6c3dbf385b5af98b2f25ec328213775f2ed"},
|
||||
{file = "gevent-25.9.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9cdbb24c276a2d0110ad5c978e49daf620b153719ac8a548ce1250a7eb1b9245"},
|
||||
{file = "gevent-25.9.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:88b6c07169468af631dcf0fdd3658f9246d6822cc51461d43f7c44f28b0abb82"},
|
||||
{file = "gevent-25.9.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b7bb0e29a7b3e6ca9bed2394aa820244069982c36dc30b70eb1004dd67851a48"},
|
||||
{file = "gevent-25.9.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2951bb070c0ee37b632ac9134e4fdaad70d2e660c931bb792983a0837fe5b7d7"},
|
||||
{file = "gevent-25.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e4e17c2d57e9a42e25f2a73d297b22b60b2470a74be5a515b36c984e1a246d47"},
|
||||
{file = "gevent-25.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d94936f8f8b23d9de2251798fcb603b84f083fdf0d7f427183c1828fb64f117"},
|
||||
{file = "gevent-25.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:eb51c5f9537b07da673258b4832f6635014fee31690c3f0944d34741b69f92fa"},
|
||||
{file = "gevent-25.9.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:1a3fe4ea1c312dbf6b375b416925036fe79a40054e6bf6248ee46526ea628be1"},
|
||||
{file = "gevent-25.9.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0adb937f13e5fb90cca2edf66d8d7e99d62a299687400ce2edee3f3504009356"},
|
||||
{file = "gevent-25.9.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:427f869a2050a4202d93cf7fd6ab5cffb06d3e9113c10c967b6e2a0d45237cb8"},
|
||||
{file = "gevent-25.9.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c049880175e8c93124188f9d926af0a62826a3b81aa6d3074928345f8238279e"},
|
||||
{file = "gevent-25.9.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b5a67a0974ad9f24721034d1e008856111e0535f1541499f72a733a73d658d1c"},
|
||||
{file = "gevent-25.9.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1d0f5d8d73f97e24ea8d24d8be0f51e0cf7c54b8021c1fddb580bf239474690f"},
|
||||
{file = "gevent-25.9.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ddd3ff26e5c4240d3fbf5516c2d9d5f2a998ef87cfb73e1429cfaeaaec860fa6"},
|
||||
{file = "gevent-25.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:bb63c0d6cb9950cc94036a4995b9cc4667b8915366613449236970f4394f94d7"},
|
||||
{file = "gevent-25.9.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f18f80aef6b1f6907219affe15b36677904f7cfeed1f6a6bc198616e507ae2d7"},
|
||||
{file = "gevent-25.9.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b274a53e818124a281540ebb4e7a2c524778f745b7a99b01bdecf0ca3ac0ddb0"},
|
||||
{file = "gevent-25.9.1-cp39-cp39-win32.whl", hash = "sha256:c6c91f7e33c7f01237755884316110ee7ea076f5bdb9aa0982b6dc63243c0a38"},
|
||||
{file = "gevent-25.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:012a44b0121f3d7c800740ff80351c897e85e76a7e4764690f35c5ad9ec17de5"},
|
||||
{file = "gevent-25.9.1.tar.gz", hash = "sha256:adf9cd552de44a4e6754c51ff2e78d9193b7fa6eab123db9578a210e657235dd"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cffi = {version = ">=1.17.1", markers = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""}
|
||||
greenlet = {version = ">=3.2.2", markers = "platform_python_implementation == \"CPython\""}
|
||||
"zope.event" = "*"
|
||||
"zope.interface" = "*"
|
||||
|
||||
[package.extras]
|
||||
dnspython = ["dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\""]
|
||||
docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"]
|
||||
monitor = ["psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""]
|
||||
recommended = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\""]
|
||||
test = ["cffi (>=1.17.1) ; platform_python_implementation == \"CPython\"", "coverage (>=5.0) ; sys_platform != \"win32\"", "dnspython (>=1.16.0,<2.0) ; python_version < \"3.10\"", "idna ; python_version < \"3.10\"", "objgraph", "psutil (>=5.7.0) ; sys_platform != \"win32\" or platform_python_implementation == \"CPython\"", "requests"]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-core"
|
||||
version = "2.25.1"
|
||||
@@ -2601,6 +2667,87 @@ files = [
|
||||
dev = ["pytest"]
|
||||
docs = ["sphinx", "sphinx-autobuild"]
|
||||
|
||||
[[package]]
name = "greenlet"
version = "3.2.4"
description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=3.9"
groups = ["main"]
markers = "platform_python_implementation == \"CPython\""
files = [
    {file = "greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c"},
    {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590"},
    {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c"},
    {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b"},
    {file = "greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31"},
    {file = "greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d"},
    {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5"},
    {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f"},
    {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f47617f698838ba98f4ff4189aef02e7343952df3a615f847bb575c3feb177a7"},
    {file = "greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8"},
    {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"},
    {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"},
    {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"},
    {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3"},
    {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633"},
    {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079"},
    {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"},
    {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"},
    {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"},
    {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c"},
    {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5"},
    {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"},
    {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"},
    {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"},
    {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968"},
    {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9"},
    {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6"},
    {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"},
    {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"},
    {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"},
    {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0"},
    {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d"},
    {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"},
    {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"},
    {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"},
    {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc"},
    {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a"},
    {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504"},
    {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"},
    {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"},
    {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"},
    {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b"},
    {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929"},
    {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"},
    {file = "greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"},
    {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"},
    {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5"},
    {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"},
    {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"},
    {file = "greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"},
    {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269"},
    {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681"},
    {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"},
    {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"},
    {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"},
    {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:18d9260df2b5fbf41ae5139e1be4e796d99655f023a636cd0e11e6406cca7d58"},
    {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:671df96c1f23c4a0d4077a325483c1503c96a1b7d9db26592ae770daa41233d4"},
    {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16458c245a38991aa19676900d48bd1a6f2ce3e16595051a4db9d012154e8433"},
    {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"},
    {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"},
    {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"},
    {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10"},
    {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be"},
    {file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"},
    {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"},
    {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"},
]

[package.extras]
docs = ["Sphinx", "furo"]
test = ["objgraph", "psutil", "setuptools"]

[[package]]
name = "gunicorn"
version = "23.0.0"
@@ -5918,7 +6065,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -5927,7 +6073,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -5936,7 +6081,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -5945,7 +6089,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -5954,7 +6097,6 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
    {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -6857,7 +6999,69 @@ enabler = ["pytest-enabler (>=2.2)"]
test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
type = ["pytest-mypy"]

[[package]]
name = "zope-event"
version = "6.1"
description = "Very basic event publishing system"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
    {file = "zope_event-6.1-py3-none-any.whl", hash = "sha256:0ca78b6391b694272b23ec1335c0294cc471065ed10f7f606858fc54566c25a0"},
    {file = "zope_event-6.1.tar.gz", hash = "sha256:6052a3e0cb8565d3d4ef1a3a7809336ac519bc4fe38398cb8d466db09adef4f0"},
]

[package.extras]
docs = ["Sphinx"]
test = ["zope.testrunner (>=6.4)"]

[[package]]
name = "zope-interface"
version = "8.1.1"
description = "Interfaces for Python"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
    {file = "zope_interface-8.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c6b12b656c7d7e3d79cad8e2afc4a37eae6b6076e2c209a33345143148e435e"},
    {file = "zope_interface-8.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:557c0f1363c300db406e9eeaae8ab6d1ba429d4fed60d8ab7dadab5ca66ccd35"},
    {file = "zope_interface-8.1.1-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:127b0e4c873752b777721543cf8525b3db5e76b88bd33bab807f03c568e9003f"},
    {file = "zope_interface-8.1.1-cp310-cp310-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e0892c9d2dd47b45f62d1861bcae8b427fcc49b4a04fff67f12c5c55e56654d7"},
    {file = "zope_interface-8.1.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ff8a92dc8c8a2c605074e464984e25b9b5a8ac9b2a0238dd73a0f374df59a77e"},
    {file = "zope_interface-8.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:54627ddf6034aab1f506ba750dd093f67d353be6249467d720e9f278a578efe5"},
    {file = "zope_interface-8.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e8a0fdd5048c1bb733e4693eae9bc4145a19419ea6a1c95299318a93fe9f3d72"},
    {file = "zope_interface-8.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4cb0ea75a26b606f5bc8524fbce7b7d8628161b6da002c80e6417ce5ec757c0"},
    {file = "zope_interface-8.1.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:c267b00b5a49a12743f5e1d3b4beef45479d696dab090f11fe3faded078a5133"},
    {file = "zope_interface-8.1.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e25d3e2b9299e7ec54b626573673bdf0d740cf628c22aef0a3afef85b438aa54"},
    {file = "zope_interface-8.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:63db1241804417aff95ac229c13376c8c12752b83cc06964d62581b493e6551b"},
    {file = "zope_interface-8.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:9639bf4ed07b5277fb231e54109117c30d608254685e48a7104a34618bcbfc83"},
    {file = "zope_interface-8.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a16715808408db7252b8c1597ed9008bdad7bf378ed48eb9b0595fad4170e49d"},
    {file = "zope_interface-8.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce6b58752acc3352c4aa0b55bbeae2a941d61537e6afdad2467a624219025aae"},
    {file = "zope_interface-8.1.1-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:807778883d07177713136479de7fd566f9056a13aef63b686f0ab4807c6be259"},
    {file = "zope_interface-8.1.1-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50e5eb3b504a7d63dc25211b9298071d5b10a3eb754d6bf2f8ef06cb49f807ab"},
    {file = "zope_interface-8.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:eee6f93b2512ec9466cf30c37548fd3ed7bc4436ab29cd5943d7a0b561f14f0f"},
    {file = "zope_interface-8.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:80edee6116d569883c58ff8efcecac3b737733d646802036dc337aa839a5f06b"},
    {file = "zope_interface-8.1.1-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:84f9be6d959640de9da5d14ac1f6a89148b16da766e88db37ed17e936160b0b1"},
    {file = "zope_interface-8.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:531fba91dcb97538f70cf4642a19d6574269460274e3f6004bba6fe684449c51"},
    {file = "zope_interface-8.1.1-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:fc65f5633d5a9583ee8d88d1f5de6b46cd42c62e47757cfe86be36fb7c8c4c9b"},
    {file = "zope_interface-8.1.1-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:efef80ddec4d7d99618ef71bc93b88859248075ca2e1ae1c78636654d3d55533"},
    {file = "zope_interface-8.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:49aad83525eca3b4747ef51117d302e891f0042b06f32aa1c7023c62642f962b"},
    {file = "zope_interface-8.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:71cf329a21f98cb2bd9077340a589e316ac8a415cac900575a32544b3dffcb98"},
    {file = "zope_interface-8.1.1-cp314-cp314-macosx_10_9_x86_64.whl", hash = "sha256:da311e9d253991ca327601f47c4644d72359bac6950fbb22f971b24cd7850f8c"},
    {file = "zope_interface-8.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3fb25fca0442c7fb93c4ee40b42e3e033fef2f648730c4b7ae6d43222a3e8946"},
    {file = "zope_interface-8.1.1-cp314-cp314-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:bac588d0742b4e35efb7c7df1dacc0397b51ed37a17d4169a38019a1cebacf0a"},
    {file = "zope_interface-8.1.1-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3d1f053d2d5e2b393e619bce1e55954885c2e63969159aa521839e719442db49"},
    {file = "zope_interface-8.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:64a1ad7f4cb17d948c6bdc525a1d60c0e567b2526feb4fa38b38f249961306b8"},
    {file = "zope_interface-8.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:169214da1b82b7695d1a36f92d70b11166d66b6b09d03df35d150cc62ac52276"},
    {file = "zope_interface-8.1.1.tar.gz", hash = "sha256:51b10e6e8e238d719636a401f44f1e366146912407b58453936b781a19be19ec"},
]

[package.extras]
docs = ["Sphinx", "furo", "repoze.sphinx.autointerface"]
test = ["coverage[toml]", "zope.event", "zope.testing"]
testing = ["coverage[toml]", "zope.event", "zope.testing"]

[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "943e2cd6b87229704550d4e140b36509fb9f58896ebb5834b9fbabe28a9ee92f"
content-hash = "77ef098291cb8631565a1ab5027ce33e7fcb5a04883dc7160bf373eac9e1fb49"

@@ -35,7 +35,8 @@ dependencies = [
    "markdown (>=3.9,<4.0)",
    "drf-simple-apikey (==2.2.1)",
    "matplotlib (>=3.10.6,<4.0.0)",
    "reportlab (>=4.4.4,<5.0.0)"
    "reportlab (>=4.4.4,<5.0.0)",
    "gevent (>=25.9.1,<26.0.0)"
]
description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"

@@ -43,7 +44,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.15.0"
version = "1.16.0"

[project.scripts]
celery = "src.backend.config.settings.celery"

@@ -40,6 +40,7 @@ class ApiConfig(AppConfig):
        self._ensure_crypto_keys()

        load_prowler_compliance()
        self._initialize_attack_surface_mapping()

    def _ensure_crypto_keys(self):
        """

@@ -167,3 +168,13 @@ class ApiConfig(AppConfig):
                f"Error generating JWT keys: {e}. Please set '{SIGNING_KEY_ENV}' and '{VERIFYING_KEY_ENV}' manually."
            )
            raise e

    def _initialize_attack_surface_mapping(self):
        from tasks.jobs.scan import (  # noqa: F401
            _get_attack_surface_mapping_from_provider,
        )

        from api.models import Provider  # noqa: F401

        for provider_type, _label in Provider.ProviderChoices.choices:
            _get_attack_surface_mapping_from_provider(provider_type)

@@ -26,6 +26,7 @@ class MainRouter:
    default_db = "default"
    admin_db = "admin"
    replica_db = "replica"
    admin_replica_db = "admin_replica"

    def db_for_read(self, model, **hints):  # noqa: F841
        model_table_name = model._meta.db_table

@@ -49,7 +50,12 @@ class MainRouter:

    def allow_relation(self, obj1, obj2, **hints):  # noqa: F841
        # Allow relations when both objects originate from allowed connectors
        allowed_dbs = {self.default_db, self.admin_db, self.replica_db}
        allowed_dbs = {
            self.default_db,
            self.admin_db,
            self.replica_db,
            self.admin_replica_db,
        }
        if {obj1._state.db, obj2._state.db} <= allowed_dbs:
            return True
        return None

@@ -1,10 +1,14 @@
import uuid
from functools import wraps

from django.db import connection, transaction
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError, connection, transaction
from rest_framework_json_api.serializers import ValidationError

from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
from api.db_router import READ_REPLICA_ALIAS
from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY, rls_transaction
from api.exceptions import ProviderDeletedException
from api.models import Provider, Scan


def set_tenant(func=None, *, keep_tenant=False):

@@ -66,3 +70,49 @@ def set_tenant(func=None, *, keep_tenant=False):
        return decorator
    else:
        return decorator(func)


def handle_provider_deletion(func):
    """
    Decorator that raises ProviderDeletedException if provider was deleted during execution.

    Catches ObjectDoesNotExist and IntegrityError, checks if provider still exists,
    and raises ProviderDeletedException if not. Otherwise, re-raises original exception.

    Requires tenant_id and provider_id in kwargs.

    Example:
        @shared_task
        @handle_provider_deletion
        def scan_task(scan_id, tenant_id, provider_id):
            ...
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except (ObjectDoesNotExist, IntegrityError):
            tenant_id = kwargs.get("tenant_id")
            provider_id = kwargs.get("provider_id")

            with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
                if provider_id is None:
                    scan_id = kwargs.get("scan_id")
                    if scan_id is None:
                        raise AssertionError(
                            "This task does not have provider or scan in the kwargs"
                        )
                    scan = Scan.objects.filter(pk=scan_id).first()
                    if scan is None:
                        raise ProviderDeletedException(
                            f"Provider for scan '{scan_id}' was deleted during the scan"
                        ) from None
                    provider_id = str(scan.provider_id)
                if not Provider.objects.filter(pk=provider_id).exists():
                    raise ProviderDeletedException(
                        f"Provider '{provider_id}' was deleted during the scan"
                    ) from None
                raise

    return wrapper

@@ -66,6 +66,10 @@ class ProviderConnectionError(Exception):
    """Base exception for provider connection errors."""


class ProviderDeletedException(Exception):
    """Raised when a provider has been deleted during scan/task execution."""


def custom_exception_handler(exc, context):
    if isinstance(exc, django_validation_error):
        if hasattr(exc, "error_dict"):

@@ -23,7 +23,9 @@ from api.db_utils import (
    StatusEnumField,
)
from api.models import (
    AttackSurfaceOverview,
    ComplianceRequirementOverview,
    DailySeveritySummary,
    Finding,
    Integration,
    Invitation,
@@ -761,14 +763,6 @@ class RoleFilter(FilterSet):
class ComplianceOverviewFilter(FilterSet):
    inserted_at = DateFilter(field_name="inserted_at", lookup_expr="date")
    scan_id = UUIDFilter(field_name="scan_id")
    provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider_id__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
    provider_type = ChoiceFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    provider_type__in = ChoiceInFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    region = CharFilter(field_name="region")

    class Meta:
@@ -802,6 +796,68 @@ class ScanSummaryFilter(FilterSet):
    }


class DailySeveritySummaryFilter(FilterSet):
    """Filter for findings_severity/timeseries endpoint."""

    MAX_DATE_RANGE_DAYS = 365

    provider_id = UUIDFilter(field_name="provider_id", lookup_expr="exact")
    provider_id__in = UUIDInFilter(field_name="provider_id", lookup_expr="in")
    provider_type = ChoiceFilter(
        field_name="provider__provider", choices=Provider.ProviderChoices.choices
    )
    provider_type__in = ChoiceInFilter(
        field_name="provider__provider", choices=Provider.ProviderChoices.choices
    )
    date_from = DateFilter(method="filter_noop")
    date_to = DateFilter(method="filter_noop")

    class Meta:
        model = DailySeveritySummary
        fields = ["provider_id"]

    def filter_noop(self, queryset, name, value):
        return queryset

    def filter_queryset(self, queryset):
        if not self.data.get("date_from"):
            raise ValidationError(
                [
                    {
                        "detail": "This query parameter is required.",
                        "status": "400",
                        "source": {"pointer": "filter[date_from]"},
                        "code": "required",
                    }
                ]
            )

        today = date.today()
        date_from = self.form.cleaned_data.get("date_from")
        date_to = min(self.form.cleaned_data.get("date_to") or today, today)

        if (date_to - date_from).days > self.MAX_DATE_RANGE_DAYS:
            raise ValidationError(
                [
                    {
                        "detail": f"Date range cannot exceed {self.MAX_DATE_RANGE_DAYS} days.",
                        "status": "400",
                        "source": {"pointer": "filter[date_from]"},
                        "code": "invalid",
                    }
                ]
            )

        # View access
        self.request._date_from = date_from
        self.request._date_to = date_to

        # Apply date filter (only lte for fill-forward logic)
        queryset = queryset.filter(date__lte=date_to)

        return super().filter_queryset(queryset)


class ScanSummarySeverityFilter(ScanSummaryFilter):
    """Filter for findings_severity ScanSummary endpoint - includes status filters"""

@@ -1021,3 +1077,22 @@ class ThreatScoreSnapshotFilter(FilterSet):
        "inserted_at": ["date", "gte", "lte"],
        "overall_score": ["exact", "gte", "lte"],
    }


class AttackSurfaceOverviewFilter(FilterSet):
    """Filter for attack surface overview aggregations by provider."""

    provider_id = UUIDFilter(field_name="scan__provider__id", lookup_expr="exact")
    provider_id__in = UUIDInFilter(field_name="scan__provider__id", lookup_expr="in")
    provider_type = ChoiceFilter(
        field_name="scan__provider__provider", choices=Provider.ProviderChoices.choices
    )
    provider_type__in = ChoiceInFilter(
        field_name="scan__provider__provider",
        choices=Provider.ProviderChoices.choices,
        lookup_expr="in",
    )

    class Meta:
        model = AttackSurfaceOverview
        fields = {}

@@ -22,7 +22,7 @@ class Migration(migrations.Migration):
                    ("kubernetes", "Kubernetes"),
                    ("m365", "M365"),
                    ("github", "GitHub"),
                    ("oci", "Oracle Cloud Infrastructure"),
                    ("oraclecloud", "Oracle Cloud Infrastructure"),
                    ("iac", "IaC"),
                ],
                default="aws",

@@ -29,4 +29,8 @@ class Migration(migrations.Migration):
                default="aws",
            ),
        ),
        migrations.RunSQL(
            "ALTER TYPE provider ADD VALUE IF NOT EXISTS 'mongodbatlas';",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]

@@ -0,0 +1,89 @@
# Generated by Django 5.1.14 on 2025-11-19 13:03

import uuid

import django.db.models.deletion
from django.db import migrations, models

import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0059_compliance_overview_summary"),
    ]

    operations = [
        migrations.CreateModel(
            name="AttackSurfaceOverview",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("inserted_at", models.DateTimeField(auto_now_add=True)),
                (
                    "attack_surface_type",
                    models.CharField(
                        choices=[
                            ("internet-exposed", "Internet Exposed"),
                            ("secrets", "Exposed Secrets"),
                            ("privilege-escalation", "Privilege Escalation"),
                            ("ec2-imdsv1", "EC2 IMDSv1 Enabled"),
                        ],
                        max_length=50,
                    ),
                ),
                ("total_findings", models.IntegerField(default=0)),
                ("failed_findings", models.IntegerField(default=0)),
                ("muted_failed_findings", models.IntegerField(default=0)),
            ],
            options={
                "db_table": "attack_surface_overviews",
                "abstract": False,
            },
        ),
        migrations.AddField(
            model_name="attacksurfaceoverview",
            name="scan",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="attack_surface_overviews",
                related_query_name="attack_surface_overview",
                to="api.scan",
            ),
        ),
        migrations.AddField(
            model_name="attacksurfaceoverview",
            name="tenant",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE, to="api.tenant"
            ),
        ),
        migrations.AddIndex(
            model_name="attacksurfaceoverview",
            index=models.Index(
                fields=["tenant_id", "scan_id"], name="attack_surf_tenant_scan_idx"
            ),
        ),
        migrations.AddConstraint(
            model_name="attacksurfaceoverview",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "scan_id", "attack_surface_type"),
                name="unique_attack_surface_per_scan",
            ),
        ),
        migrations.AddConstraint(
            model_name="attacksurfaceoverview",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_attacksurfaceoverview",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
    ]
@@ -0,0 +1,96 @@
# Generated by Django 5.1.14 on 2025-12-03 13:38

import uuid

import django.db.models.deletion
from django.db import migrations, models

import api.rls


class Migration(migrations.Migration):
    dependencies = [
        ("api", "0060_attack_surface_overview"),
    ]

    operations = [
        migrations.CreateModel(
            name="DailySeveritySummary",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("date", models.DateField()),
                ("critical", models.IntegerField(default=0)),
                ("high", models.IntegerField(default=0)),
                ("medium", models.IntegerField(default=0)),
                ("low", models.IntegerField(default=0)),
                ("informational", models.IntegerField(default=0)),
                ("muted", models.IntegerField(default=0)),
                (
                    "provider",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="daily_severity_summaries",
                        related_query_name="daily_severity_summary",
                        to="api.provider",
                    ),
                ),
                (
                    "scan",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="daily_severity_summaries",
                        related_query_name="daily_severity_summary",
                        to="api.scan",
                    ),
                ),
                (
                    "tenant",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="api.tenant",
                    ),
                ),
            ],
            options={
                "db_table": "daily_severity_summaries",
                "abstract": False,
            },
        ),
        migrations.AddIndex(
            model_name="dailyseveritysummary",
            index=models.Index(
                fields=["tenant_id", "id"],
                name="dss_tenant_id_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="dailyseveritysummary",
            index=models.Index(
                fields=["tenant_id", "provider_id"],
                name="dss_tenant_provider_idx",
            ),
        ),
        migrations.AddConstraint(
            model_name="dailyseveritysummary",
            constraint=models.UniqueConstraint(
                fields=("tenant_id", "provider", "date"),
                name="unique_daily_severity_summary",
            ),
        ),
        migrations.AddConstraint(
            model_name="dailyseveritysummary",
            constraint=api.rls.RowLevelSecurityConstraint(
                "tenant_id",
                name="rls_on_dailyseveritysummary",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ),
    ]
@@ -1500,6 +1500,65 @@ class ScanSummary(RowLevelSecurityProtectedModel):
        resource_name = "scan-summaries"


class DailySeveritySummary(RowLevelSecurityProtectedModel):
    """
    Pre-aggregated daily severity counts per provider.
    Used by findings_severity/timeseries endpoint for efficient queries.
    """

    objects = ActiveProviderManager()

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    date = models.DateField()

    provider = models.ForeignKey(
        Provider,
        on_delete=models.CASCADE,
        related_name="daily_severity_summaries",
        related_query_name="daily_severity_summary",
    )
    scan = models.ForeignKey(
        Scan,
        on_delete=models.CASCADE,
        related_name="daily_severity_summaries",
        related_query_name="daily_severity_summary",
    )

    # Aggregated fail counts by severity
    critical = models.IntegerField(default=0)
    high = models.IntegerField(default=0)
    medium = models.IntegerField(default=0)
    low = models.IntegerField(default=0)
    informational = models.IntegerField(default=0)
    muted = models.IntegerField(default=0)

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "daily_severity_summaries"

        constraints = [
            models.UniqueConstraint(
                fields=("tenant_id", "provider", "date"),
                name="unique_daily_severity_summary",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

        indexes = [
            models.Index(
                fields=["tenant_id", "id"],
                name="dss_tenant_id_idx",
            ),
            models.Index(
                fields=["tenant_id", "provider_id"],
                name="dss_tenant_provider_idx",
            ),
        ]


class Integration(RowLevelSecurityProtectedModel):
    class IntegrationChoices(models.TextChoices):
        AMAZON_S3 = "amazon_s3", _("Amazon S3")
@@ -2405,3 +2464,63 @@ class ThreatScoreSnapshot(RowLevelSecurityProtectedModel):
    class JSONAPIMeta:
        resource_name = "threatscore-snapshots"


class AttackSurfaceOverview(RowLevelSecurityProtectedModel):
    """
    Pre-aggregated attack surface metrics per scan.

    Stores counts for each attack surface type (internet-exposed, secrets,
    privilege-escalation, ec2-imdsv1) to enable fast overview queries.
    """

    class AttackSurfaceTypeChoices(models.TextChoices):
        INTERNET_EXPOSED = "internet-exposed", _("Internet Exposed")
        SECRETS = "secrets", _("Exposed Secrets")
        PRIVILEGE_ESCALATION = "privilege-escalation", _("Privilege Escalation")
        EC2_IMDSV1 = "ec2-imdsv1", _("EC2 IMDSv1 Enabled")

    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    inserted_at = models.DateTimeField(auto_now_add=True, editable=False)

    scan = models.ForeignKey(
        Scan,
        on_delete=models.CASCADE,
        related_name="attack_surface_overviews",
        related_query_name="attack_surface_overview",
    )

    attack_surface_type = models.CharField(
        max_length=50,
        choices=AttackSurfaceTypeChoices.choices,
    )

    # Finding counts
    total_findings = models.IntegerField(default=0)  # All findings (PASS + FAIL)
    failed_findings = models.IntegerField(default=0)  # Non-muted failed findings
    muted_failed_findings = models.IntegerField(default=0)  # Muted failed findings

    class Meta(RowLevelSecurityProtectedModel.Meta):
        db_table = "attack_surface_overviews"

        constraints = [
            models.UniqueConstraint(
                fields=("tenant_id", "scan_id", "attack_surface_type"),
                name="unique_attack_surface_per_scan",
            ),
            RowLevelSecurityConstraint(
                field="tenant_id",
                name="rls_on_%(class)s",
                statements=["SELECT", "INSERT", "UPDATE", "DELETE"],
            ),
        ]

        indexes = [
            models.Index(
                fields=["tenant_id", "scan_id"],
                name="attack_surf_tenant_scan_idx",
            ),
        ]

    class JSONAPIMeta:
        resource_name = "attack-surface-overviews"

@@ -65,11 +65,11 @@ def get_providers(role: Role) -> QuerySet[Provider]:
        A QuerySet of Provider objects filtered by the role's provider groups.
        If the role has no provider groups, returns an empty queryset.
    """
    tenant = role.tenant
    tenant_id = role.tenant_id
    provider_groups = role.provider_groups.all()
    if not provider_groups.exists():
        return Provider.objects.none()

    return Provider.objects.filter(
        tenant=tenant, provider_groups__in=provider_groups
        tenant_id=tenant_id, provider_groups__in=provider_groups
    ).distinct()

@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
  title: Prowler API
  version: 1.15.0
  version: 1.16.0
  description: |-
    Prowler API specification.

@@ -283,11 +283,8 @@ paths:
  /api/v1/compliance-overviews:
    get:
      operationId: compliance_overviews_list
      description: Retrieve an overview of all compliance frameworks. If scan_id is
        provided, returns compliance data for that specific scan. If scan_id is omitted,
        returns compliance data aggregated from the latest completed scan of each
        provider.
      summary: List compliance overviews
      description: Retrieve an overview of all the compliance in a given scan.
      summary: List compliance overviews for a scan
      parameters:
      - in: query
        name: fields[compliance-overviews]
@@ -346,32 +343,6 @@ paths:
        schema:
          type: string
          format: date-time
      - in: query
        name: filter[provider_id]
        schema:
          type: string
          format: uuid
        description: Filter by specific provider ID.
      - in: query
        name: filter[provider_id__in]
        schema:
          type: array
          items:
            type: string
            format: uuid
        description: Filter by multiple provider IDs (comma-separated).
      - in: query
        name: filter[provider_type]
        schema:
          type: string
        description: Filter by provider type (e.g., aws, azure, gcp).
      - in: query
        name: filter[provider_type__in]
        schema:
          type: array
          items:
            type: string
        description: Filter by multiple provider types (comma-separated).
      - in: query
        name: filter[region]
        schema:
@@ -394,8 +365,8 @@ paths:
        schema:
          type: string
          format: uuid
        description: Optional scan ID. If provided, returns compliance for that scan.
          If omitted, returns compliance for the latest completed scan per provider.
        description: Related scan ID.
        required: true
      - name: filter[search]
        required: false
        in: query
@@ -635,77 +606,6 @@ paths:
        schema:
          type: string
          format: date-time
      - in: query
        name: filter[provider_id]
        schema:
          type: string
          format: uuid
      - in: query
        name: filter[provider_id__in]
        schema:
          type: array
          items:
            type: string
            format: uuid
        description: Multiple values may be separated by commas.
        explode: false
        style: form
      - in: query
        name: filter[provider_type]
        schema:
          type: string
          x-spec-enum-id: eca8c51e6bd28935
          enum:
          - aws
          - azure
          - gcp
          - github
          - iac
          - kubernetes
          - m365
          - mongodbatlas
          - oraclecloud
        description: |-
          * `aws` - AWS
          * `azure` - Azure
          * `gcp` - GCP
          * `kubernetes` - Kubernetes
          * `m365` - M365
          * `github` - GitHub
          * `mongodbatlas` - MongoDB Atlas
          * `iac` - IaC
          * `oraclecloud` - Oracle Cloud Infrastructure
      - in: query
        name: filter[provider_type__in]
        schema:
          type: array
          items:
            type: string
            x-spec-enum-id: eca8c51e6bd28935
            enum:
            - aws
            - azure
            - gcp
            - github
            - iac
            - kubernetes
            - m365
            - mongodbatlas
            - oraclecloud
        description: |-
          Multiple values may be separated by commas.

          * `aws` - AWS
          * `azure` - Azure
          * `gcp` - GCP
          * `kubernetes` - Kubernetes
          * `m365` - M365
          * `github` - GitHub
          * `mongodbatlas` - MongoDB Atlas
          * `iac` - IaC
          * `oraclecloud` - Oracle Cloud Infrastructure
        explode: false
        style: form
      - in: query
        name: filter[region]
        schema:
@@ -4597,6 +4497,60 @@ paths:
      responses:
        '204':
          description: No response body
  /api/v1/overviews/attack-surfaces:
    get:
      operationId: overviews_attack_surfaces_retrieve
      description: Retrieve aggregated attack surface metrics from latest completed
        scans per provider.
      summary: Get attack surface overview
      parameters:
      - in: query
        name: fields[attack-surface-overviews]
        schema:
          type: array
          items:
            type: string
            enum:
            - id
            - total_findings
            - failed_findings
            - muted_failed_findings
            - check_ids
        description: endpoint return only specific fields in the response on a per-type
          basis by including a fields[TYPE] query parameter.
        explode: false
      - in: query
        name: filter[provider_id.in]
        schema:
          type: string
        description: Filter by multiple provider IDs (comma-separated UUIDs)
      - in: query
        name: filter[provider_id]
        schema:
          type: string
          format: uuid
        description: Filter by specific provider ID
      - in: query
        name: filter[provider_type.in]
        schema:
          type: string
        description: Filter by multiple provider types (comma-separated)
      - in: query
        name: filter[provider_type]
        schema:
          type: string
        description: Filter by provider type (aws, azure, gcp, etc.)
      tags:
      - Overview
      security:
      - JWT or API Key: []
      responses:
        '200':
          content:
            application/vnd.api+json:
              schema:
                $ref: '#/components/schemas/AttackSurfaceOverviewResponse'
          description: ''
  /api/v1/overviews/findings:
    get:
      operationId: overviews_findings_retrieve
@@ -4986,6 +4940,154 @@ paths:
          schema:
            $ref: '#/components/schemas/OverviewSeverityResponse'
          description: ''
  /api/v1/overviews/findings_severity/timeseries:
    get:
      operationId: overviews_findings_severity_timeseries_retrieve
      description: Retrieve daily aggregated findings data grouped by severity levels
        over a date range. Returns one data point per day with counts of failed findings
        by severity (critical, high, medium, low, informational) and muted findings.
        Days without scans are filled forward with the most recent known values. Use
        date_from (required) and date_to filters to specify the range.
      summary: Get findings severity data over time
      parameters:
      - in: query
        name: fields[findings-severity-timeseries]
        schema:
          type: array
          items:
            type: string
            enum:
            - id
            - critical
            - high
            - medium
            - low
            - informational
            - muted
            - scan_ids
        description: endpoint return only specific fields in the response on a per-type
          basis by including a fields[TYPE] query parameter.
        explode: false
      - in: query
        name: filter[date_from]
        schema:
          type: string
      - in: query
        name: filter[date_to]
        schema:
          type: string
      - in: query
        name: filter[provider_id]
        schema:
          type: string
          format: uuid
      - in: query
        name: filter[provider_id__in]
        schema:
          type: array
          items:
            type: string
            format: uuid
        description: Multiple values may be separated by commas.
        explode: false
        style: form
      - in: query
        name: filter[provider_type]
        schema:
          type: string
          enum:
          - aws
          - azure
          - gcp
          - github
          - iac
          - kubernetes
          - m365
          - mongodbatlas
          - oraclecloud
        description: |-
          * `aws` - AWS
          * `azure` - Azure
          * `gcp` - GCP
          * `kubernetes` - Kubernetes
          * `m365` - M365
          * `github` - GitHub
          * `mongodbatlas` - MongoDB Atlas
          * `iac` - IaC
          * `oraclecloud` - Oracle Cloud Infrastructure
      - in: query
        name: filter[provider_type__in]
        schema:
          type: array
          items:
            type: string
            enum:
            - aws
            - azure
            - gcp
            - github
            - iac
            - kubernetes
            - m365
            - mongodbatlas
            - oraclecloud
        description: |-
          Multiple values may be separated by commas.

          * `aws` - AWS
          * `azure` - Azure
          * `gcp` - GCP
          * `kubernetes` - Kubernetes
          * `m365` - M365
          * `github` - GitHub
          * `mongodbatlas` - MongoDB Atlas
          * `iac` - IaC
          * `oraclecloud` - Oracle Cloud Infrastructure
        explode: false
        style: form
      - name: filter[search]
        required: false
        in: query
        description: A search term.
        schema:
          type: string
      - name: sort
        required: false
        in: query
        description: '[list of fields to sort by](https://jsonapi.org/format/#fetching-sorting)'
        schema:
          type: array
          items:
            type: string
            enum:
            - id
            - -id
            - critical
            - -critical
            - high
            - -high
            - medium
            - -medium
            - low
            - -low
            - informational
            - -informational
            - muted
            - -muted
            - scan_ids
            - -scan_ids
        explode: false
      tags:
      - Overview
      security:
      - JWT or API Key: []
      responses:
        '200':
          content:
            application/vnd.api+json:
              schema:
                $ref: '#/components/schemas/FindingsSeverityOverTimeResponse'
          description: ''
  /api/v1/overviews/providers:
    get:
      operationId: overviews_providers_retrieve
@@ -5068,6 +5170,8 @@ paths:
            type: string
            enum:
            - id
            - provider_type
            - region
            - total
            - fail
            - muted
@@ -5200,6 +5304,10 @@ paths:
            enum:
            - id
            - -id
            - provider_type
            - -provider_type
            - region
            - -region
            - total
            - -total
            - fail
@@ -8984,116 +9092,12 @@ paths:
          description: CSV file containing the compliance report
        '404':
          description: Compliance report not found
  /api/v1/scans/{id}/report:
    get:
      operationId: scans_report_retrieve
      description: Returns a ZIP file containing the requested report
      summary: Download ZIP report
      parameters:
      - in: query
        name: fields[scan-reports]
        schema:
          type: array
          items:
            type: string
            enum:
            - id
        description: endpoint returns only specific fields in the response on a per-type
          basis by including a fields[TYPE] query parameter.
        explode: false
      - in: path
        name: id
        schema:
          type: string
          format: uuid
        description: A UUID string identifying this scan.
        required: true
      tags:
      - Scan
      security:
      - JWT or API Key: []
      responses:
        '200':
          description: Report obtained successfully
        '202':
          description: The task is in progress
        '403':
          description: There is a problem with credentials
        '404':
          description: The scan has no reports, or the report generation task has
            not started yet
  /api/v1/scans/{id}/threatscore:
    get:
      operationId: scans_threatscore_retrieve
      description: Download a specific threatscore report (e.g., 'prowler_threatscore_aws')
        as a PDF file.
      summary: Retrieve threatscore report
      parameters:
      - in: query
        name: fields[scans]
        schema:
          type: array
          items:
            type: string
            enum:
            - name
            - trigger
            - state
            - unique_resource_count
            - progress
            - duration
            - provider
            - task
            - inserted_at
            - started_at
            - completed_at
            - scheduled_at
            - next_scan_at
            - processor
            - url
        description: endpoint returns only specific fields in the response on a per-type
          basis by including a fields[TYPE] query parameter.
        explode: false
      - in: path
        name: id
        schema:
          type: string
          format: uuid
        description: A UUID string identifying this scan.
        required: true
      - in: query
        name: include
        schema:
          type: array
          items:
            type: string
            enum:
            - provider
        description: include query parameter to allow the client to customize which
          related resources should be returned.
        explode: false
      tags:
      - Scan
      security:
      - JWT or API Key: []
      responses:
        '200':
          description: PDF file containing the threatscore report
        '202':
          description: The task is in progress
        '401':
          description: API key missing or user not authenticated
        '403':
          description: There is a problem with credentials
        '404':
          description: The scan has no threatscore reports, or the threatscore report
            generation task has not started yet
  /api/v1/scans/{id}/ens:
    get:
      operationId: scans_ens_retrieve
      description: Download a specific ENS compliance report (e.g., 'prowler_ens_aws')
      description: Download ENS RD2022 compliance report (e.g., 'ens_rd2022_aws')
        as a PDF file.
      summary: Retrieve ENS compliance report
      summary: Retrieve ENS RD2022 compliance report
      parameters:
      - in: query
        name: fields[scans]
@@ -9220,6 +9224,110 @@ paths:
        '404':
          description: The scan has no NIS2 reports, or the NIS2 report generation
            task has not started yet
  /api/v1/scans/{id}/report:
    get:
      operationId: scans_report_retrieve
      description: Returns a ZIP file containing the requested report
      summary: Download ZIP report
      parameters:
      - in: query
        name: fields[scan-reports]
        schema:
          type: array
          items:
            type: string
            enum:
            - id
        description: endpoint returns only specific fields in the response on a per-type
          basis by including a fields[TYPE] query parameter.
        explode: false
      - in: path
        name: id
        schema:
          type: string
          format: uuid
        description: A UUID string identifying this scan.
        required: true
      tags:
      - Scan
      security:
      - JWT or API Key: []
      responses:
        '200':
          description: Report obtained successfully
        '202':
          description: The task is in progress
        '403':
          description: There is a problem with credentials
        '404':
          description: The scan has no reports, or the report generation task has
            not started yet
  /api/v1/scans/{id}/threatscore:
    get:
      operationId: scans_threatscore_retrieve
      description: Download a specific threatscore report (e.g., 'prowler_threatscore_aws')
        as a PDF file.
      summary: Retrieve threatscore report
      parameters:
      - in: query
        name: fields[scans]
        schema:
          type: array
          items:
            type: string
            enum:
            - name
            - trigger
            - state
            - unique_resource_count
            - progress
            - duration
            - provider
            - task
            - inserted_at
            - started_at
            - completed_at
            - scheduled_at
            - next_scan_at
            - processor
            - url
        description: endpoint returns only specific fields in the response on a per-type
          basis by including a fields[TYPE] query parameter.
        explode: false
      - in: path
        name: id
        schema:
          type: string
          format: uuid
        description: A UUID string identifying this scan.
        required: true
      - in: query
        name: include
        schema:
          type: array
          items:
            type: string
            enum:
            - provider
        description: include query parameter to allow the client to customize which
          related resources should be returned.
        explode: false
      tags:
      - Scan
      security:
      - JWT or API Key: []
      responses:
        '200':
          description: PDF file containing the threatscore report
        '202':
          description: The task is in progress
        '401':
          description: API key missing or user not authenticated
        '403':
          description: There is a problem with credentials
        '404':
          description: The scan has no threatscore reports, or the threatscore report
            generation task has not started yet
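A client can poll this endpoint until the PDF is ready. A minimal sketch using the requests library (the base URL, token, and scan UUID are placeholders for your own deployment):

    import requests

    BASE = "https://api.example.com"  # placeholder Prowler API host
    SCAN_ID = "3fa85f64-5717-4562-b3fc-2c963f66afa6"  # placeholder scan UUID
    headers = {"Authorization": "Bearer YOUR_TOKEN"}

    resp = requests.get(f"{BASE}/api/v1/scans/{SCAN_ID}/threatscore", headers=headers)
    if resp.status_code == 200:
        # 200 returns the PDF bytes directly
        with open("threatscore.pdf", "wb") as f:
            f.write(resp.content)
    elif resp.status_code == 202:
        # Generation task is still in progress; retry after a delay
        print("Report generation still in progress; retry later.")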
  /api/v1/schedules/daily:
    post:
      operationId: schedules_daily_create
@@ -10712,6 +10820,49 @@ paths:
          description: ''
components:
  schemas:
    AttackSurfaceOverview:
      type: object
      required:
      - type
      - id
      additionalProperties: false
      properties:
        type:
          type: string
          description: The [type](https://jsonapi.org/format/#document-resource-object-identification)
            member is used to describe resource objects that share common attributes
            and relationships.
          enum:
          - attack-surface-overviews
        id: {}
        attributes:
          type: object
          properties:
            id:
              type: string
            total_findings:
              type: integer
            failed_findings:
              type: integer
            muted_failed_findings:
              type: integer
            check_ids:
              type: array
              items:
                type: string
              readOnly: true
          required:
          - id
          - total_findings
          - failed_findings
          - muted_failed_findings
    AttackSurfaceOverviewResponse:
      type: object
      properties:
        data:
          $ref: '#/components/schemas/AttackSurfaceOverview'
      required:
      - data
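A response document shaped by this schema would look roughly like the following (the attack-surface id, counts, and check id are illustrative, not taken from the spec):

    {
        "data": {
            "type": "attack-surface-overviews",
            "id": "internet-exposed",
            "attributes": {
                "id": "internet-exposed",
                "total_findings": 120,
                "failed_findings": 34,
                "muted_failed_findings": 3,
                "check_ids": ["ec2_instance_public_ip"],
            },
        }
    }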
    ComplianceOverview:
      type: object
      required:
@@ -11096,6 +11247,60 @@ components:
            $ref: '#/components/schemas/Finding'
        required:
        - data
    FindingsSeverityOverTime:
      type: object
      required:
      - type
      - id
      additionalProperties: false
      properties:
        type:
          type: string
          description: The [type](https://jsonapi.org/format/#document-resource-object-identification)
            member is used to describe resource objects that share common attributes
            and relationships.
          enum:
          - findings-severity-over-time
        id: {}
        attributes:
          type: object
          properties:
            id:
              type: string
              format: date
            critical:
              type: integer
            high:
              type: integer
            medium:
              type: integer
            low:
              type: integer
            informational:
              type: integer
            muted:
              type: integer
            scan_ids:
              type: array
              items:
                type: string
                format: uuid
          required:
          - id
          - critical
          - high
          - medium
          - low
          - informational
          - muted
          - scan_ids
    FindingsSeverityOverTimeResponse:
      type: object
      properties:
        data:
          $ref: '#/components/schemas/FindingsSeverityOverTime'
      required:
      - data
    Integration:
      type: object
      required:
@@ -12652,26 +12857,46 @@ components:
                pattern: ^sk-[\w-]+$
            required:
            - api_key
          - type: object
            title: AWS Bedrock Credentials
            properties:
              access_key_id:
                type: string
                description: AWS access key ID.
                pattern: ^AKIA[0-9A-Z]{16}$
              secret_access_key:
                type: string
                description: AWS secret access key.
                pattern: ^[A-Za-z0-9/+=]{40}$
              region:
                type: string
                description: 'AWS region identifier where Bedrock is available.
                  Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                pattern: ^[a-z]{2}-[a-z]+-\d+$
            required:
            - access_key_id
            - secret_access_key
            - region
          - title: AWS Bedrock Credentials
            oneOf:
            - title: IAM Access Key Pair
              type: object
              description: Authenticate with AWS access key and secret key. Recommended
                when you manage IAM users or roles.
              properties:
                access_key_id:
                  type: string
                  description: AWS access key ID.
                  pattern: ^AKIA[0-9A-Z]{16}$
                secret_access_key:
                  type: string
                  description: AWS secret access key.
                  pattern: ^[A-Za-z0-9/+=]{40}$
                region:
                  type: string
                  description: 'AWS region identifier where Bedrock is available.
                    Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                  pattern: ^[a-z]{2}-[a-z]+-\d+$
              required:
              - access_key_id
              - secret_access_key
              - region
            - title: Amazon Bedrock API Key
              type: object
              description: Authenticate with an Amazon Bedrock API key (bearer
                token). Region is still required.
              properties:
                api_key:
                  type: string
                  description: Amazon Bedrock API key (bearer token).
                region:
                  type: string
                  description: 'AWS region identifier where Bedrock is available.
                    Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                  pattern: ^[a-z]{2}-[a-z]+-\d+$
              required:
              - api_key
              - region
          - type: object
            title: OpenAI Compatible Credentials
            properties:
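Under this oneOf, exactly one of the two credential shapes validates. Illustrative payloads (the values are fabricated but match the declared patterns):

    # IAM access key pair variant
    {"access_key_id": "AKIAIOSFODNN7EXAMPLE",
     "secret_access_key": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
     "region": "us-east-1"}

    # Amazon Bedrock API key variant (region still required)
    {"api_key": "YOUR_BEDROCK_API_KEY", "region": "us-east-1"}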
@@ -12739,26 +12964,46 @@ components:
                pattern: ^sk-[\w-]+$
            required:
            - api_key
          - type: object
            title: AWS Bedrock Credentials
            properties:
              access_key_id:
                type: string
                description: AWS access key ID.
                pattern: ^AKIA[0-9A-Z]{16}$
              secret_access_key:
                type: string
                description: AWS secret access key.
                pattern: ^[A-Za-z0-9/+=]{40}$
              region:
                type: string
                description: 'AWS region identifier where Bedrock is available.
                  Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                pattern: ^[a-z]{2}-[a-z]+-\d+$
            required:
            - access_key_id
            - secret_access_key
            - region
          - title: AWS Bedrock Credentials
            oneOf:
            - title: IAM Access Key Pair
              type: object
              description: Authenticate with AWS access key and secret key.
                Recommended when you manage IAM users or roles.
              properties:
                access_key_id:
                  type: string
                  description: AWS access key ID.
                  pattern: ^AKIA[0-9A-Z]{16}$
                secret_access_key:
                  type: string
                  description: AWS secret access key.
                  pattern: ^[A-Za-z0-9/+=]{40}$
                region:
                  type: string
                  description: 'AWS region identifier where Bedrock is available.
                    Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                  pattern: ^[a-z]{2}-[a-z]+-\d+$
              required:
              - access_key_id
              - secret_access_key
              - region
            - title: Amazon Bedrock API Key
              type: object
              description: Authenticate with an Amazon Bedrock API key (bearer
                token). Region is still required.
              properties:
                api_key:
                  type: string
                  description: Amazon Bedrock API key (bearer token).
                region:
                  type: string
                  description: 'AWS region identifier where Bedrock is available.
                    Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                  pattern: ^[a-z]{2}-[a-z]+-\d+$
              required:
              - api_key
              - region
          - type: object
            title: OpenAI Compatible Credentials
            properties:
@@ -12844,26 +13089,46 @@ components:
                pattern: ^sk-[\w-]+$
            required:
            - api_key
          - type: object
            title: AWS Bedrock Credentials
            properties:
              access_key_id:
                type: string
                description: AWS access key ID.
                pattern: ^AKIA[0-9A-Z]{16}$
              secret_access_key:
                type: string
                description: AWS secret access key.
                pattern: ^[A-Za-z0-9/+=]{40}$
              region:
                type: string
                description: 'AWS region identifier where Bedrock is available.
                  Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                pattern: ^[a-z]{2}-[a-z]+-\d+$
            required:
            - access_key_id
            - secret_access_key
            - region
          - title: AWS Bedrock Credentials
            oneOf:
            - title: IAM Access Key Pair
              type: object
              description: Authenticate with AWS access key and secret key. Recommended
                when you manage IAM users or roles.
              properties:
                access_key_id:
                  type: string
                  description: AWS access key ID.
                  pattern: ^AKIA[0-9A-Z]{16}$
                secret_access_key:
                  type: string
                  description: AWS secret access key.
                  pattern: ^[A-Za-z0-9/+=]{40}$
                region:
                  type: string
                  description: 'AWS region identifier where Bedrock is available.
                    Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                  pattern: ^[a-z]{2}-[a-z]+-\d+$
              required:
              - access_key_id
              - secret_access_key
              - region
            - title: Amazon Bedrock API Key
              type: object
              description: Authenticate with an Amazon Bedrock API key (bearer
                token). Region is still required.
              properties:
                api_key:
                  type: string
                  description: Amazon Bedrock API key (bearer token).
                region:
                  type: string
                  description: 'AWS region identifier where Bedrock is available.
                    Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                  pattern: ^[a-z]{2}-[a-z]+-\d+$
              required:
              - api_key
              - region
          - type: object
            title: OpenAI Compatible Credentials
            properties:
@@ -13558,6 +13823,11 @@ components:
      properties:
        id:
          type: string
          readOnly: true
        provider_type:
          type: string
        region:
          type: string
        total:
          type: integer
        fail:
@@ -13567,7 +13837,8 @@ components:
        pass:
          type: integer
      required:
      - id
      - provider_type
      - region
      - total
      - fail
      - muted
@@ -14280,26 +14551,46 @@ components:
                pattern: ^sk-[\w-]+$
            required:
            - api_key
          - type: object
            title: AWS Bedrock Credentials
            properties:
              access_key_id:
                type: string
                description: AWS access key ID.
                pattern: ^AKIA[0-9A-Z]{16}$
              secret_access_key:
                type: string
                description: AWS secret access key.
                pattern: ^[A-Za-z0-9/+=]{40}$
              region:
                type: string
                description: 'AWS region identifier where Bedrock is available.
                  Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                pattern: ^[a-z]{2}-[a-z]+-\d+$
            required:
            - access_key_id
            - secret_access_key
            - region
          - title: AWS Bedrock Credentials
            oneOf:
            - title: IAM Access Key Pair
              type: object
              description: Authenticate with AWS access key and secret key.
                Recommended when you manage IAM users or roles.
              properties:
                access_key_id:
                  type: string
                  description: AWS access key ID.
                  pattern: ^AKIA[0-9A-Z]{16}$
                secret_access_key:
                  type: string
                  description: AWS secret access key.
                  pattern: ^[A-Za-z0-9/+=]{40}$
                region:
                  type: string
                  description: 'AWS region identifier where Bedrock is available.
                    Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                  pattern: ^[a-z]{2}-[a-z]+-\d+$
              required:
              - access_key_id
              - secret_access_key
              - region
            - title: Amazon Bedrock API Key
              type: object
              description: Authenticate with an Amazon Bedrock API key (bearer
                token). Region is still required.
              properties:
                api_key:
                  type: string
                  description: Amazon Bedrock API key (bearer token).
                region:
                  type: string
                  description: 'AWS region identifier where Bedrock is available.
                    Examples: us-east-1, us-west-2, eu-west-1, ap-northeast-1.'
                  pattern: ^[a-z]{2}-[a-z]+-\d+$
              required:
              - api_key
              - region
          - type: object
            title: OpenAI Compatible Credentials
            properties:
@@ -2,9 +2,12 @@ import uuid
from unittest.mock import call, patch

import pytest
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError

from api.db_utils import POSTGRES_TENANT_VAR, SET_CONFIG_QUERY
from api.decorators import set_tenant
from api.decorators import handle_provider_deletion, set_tenant
from api.exceptions import ProviderDeletedException


@pytest.mark.django_db
@@ -34,3 +37,142 @@ class TestSetTenantDecorator:

        with pytest.raises(KeyError):
            random_func("test_arg")
@pytest.mark.django_db
class TestHandleProviderDeletionDecorator:
    def test_success_no_exception(self, tenants_fixture, providers_fixture):
        """Decorated function runs normally when no exception is raised."""
        tenant = tenants_fixture[0]
        provider = providers_fixture[0]

        @handle_provider_deletion
        def task_func(**kwargs):
            return "success"

        result = task_func(
            tenant_id=str(tenant.id),
            provider_id=str(provider.id),
        )
        assert result == "success"

    @patch("api.decorators.rls_transaction")
    @patch("api.decorators.Provider.objects.filter")
    def test_provider_deleted_with_provider_id(
        self, mock_filter, mock_rls, tenants_fixture
    ):
        """Raises ProviderDeletedException when provider_id provided and provider deleted."""
        tenant = tenants_fixture[0]
        deleted_provider_id = str(uuid.uuid4())

        mock_rls.return_value.__enter__ = lambda s: None
        mock_rls.return_value.__exit__ = lambda s, *args: None
        mock_filter.return_value.exists.return_value = False

        @handle_provider_deletion
        def task_func(**kwargs):
            raise ObjectDoesNotExist("Some object not found")

        with pytest.raises(ProviderDeletedException) as exc_info:
            task_func(tenant_id=str(tenant.id), provider_id=deleted_provider_id)

        assert deleted_provider_id in str(exc_info.value)

    @patch("api.decorators.rls_transaction")
    @patch("api.decorators.Provider.objects.filter")
    @patch("api.decorators.Scan.objects.filter")
    def test_provider_deleted_with_scan_id(
        self, mock_scan_filter, mock_provider_filter, mock_rls, tenants_fixture
    ):
        """Raises ProviderDeletedException when scan exists but provider deleted."""
        tenant = tenants_fixture[0]
        scan_id = str(uuid.uuid4())
        provider_id = str(uuid.uuid4())

        mock_rls.return_value.__enter__ = lambda s: None
        mock_rls.return_value.__exit__ = lambda s, *args: None

        mock_scan = type("MockScan", (), {"provider_id": provider_id})()
        mock_scan_filter.return_value.first.return_value = mock_scan
        mock_provider_filter.return_value.exists.return_value = False

        @handle_provider_deletion
        def task_func(**kwargs):
            raise ObjectDoesNotExist("Some object not found")

        with pytest.raises(ProviderDeletedException) as exc_info:
            task_func(tenant_id=str(tenant.id), scan_id=scan_id)

        assert provider_id in str(exc_info.value)

    @patch("api.decorators.rls_transaction")
    @patch("api.decorators.Scan.objects.filter")
    def test_scan_deleted_cascade(self, mock_scan_filter, mock_rls, tenants_fixture):
        """Raises ProviderDeletedException when scan was deleted (CASCADE from provider)."""
        tenant = tenants_fixture[0]
        scan_id = str(uuid.uuid4())

        mock_rls.return_value.__enter__ = lambda s: None
        mock_rls.return_value.__exit__ = lambda s, *args: None
        mock_scan_filter.return_value.first.return_value = None

        @handle_provider_deletion
        def task_func(**kwargs):
            raise ObjectDoesNotExist("Some object not found")

        with pytest.raises(ProviderDeletedException) as exc_info:
            task_func(tenant_id=str(tenant.id), scan_id=scan_id)

        assert scan_id in str(exc_info.value)

    @patch("api.decorators.rls_transaction")
    @patch("api.decorators.Provider.objects.filter")
    def test_provider_exists_reraises_original(
        self, mock_filter, mock_rls, tenants_fixture, providers_fixture
    ):
        """Re-raises original exception when provider still exists."""
        tenant = tenants_fixture[0]
        provider = providers_fixture[0]

        mock_rls.return_value.__enter__ = lambda s: None
        mock_rls.return_value.__exit__ = lambda s, *args: None
        mock_filter.return_value.exists.return_value = True

        @handle_provider_deletion
        def task_func(**kwargs):
            raise ObjectDoesNotExist("Actual object missing")

        with pytest.raises(ObjectDoesNotExist):
            task_func(tenant_id=str(tenant.id), provider_id=str(provider.id))

    @patch("api.decorators.rls_transaction")
    @patch("api.decorators.Provider.objects.filter")
    def test_integrity_error_provider_deleted(
        self, mock_filter, mock_rls, tenants_fixture
    ):
        """Raises ProviderDeletedException on IntegrityError when provider deleted."""
        tenant = tenants_fixture[0]
        deleted_provider_id = str(uuid.uuid4())

        mock_rls.return_value.__enter__ = lambda s: None
        mock_rls.return_value.__exit__ = lambda s, *args: None
        mock_filter.return_value.exists.return_value = False

        @handle_provider_deletion
        def task_func(**kwargs):
            raise IntegrityError("FK constraint violation")

        with pytest.raises(ProviderDeletedException):
            task_func(tenant_id=str(tenant.id), provider_id=deleted_provider_id)

    def test_missing_provider_and_scan_raises_assertion(self, tenants_fixture):
        """Raises AssertionError when neither provider_id nor scan_id in kwargs."""

        @handle_provider_deletion
        def task_func(**kwargs):
            raise ObjectDoesNotExist("Some object not found")

        with pytest.raises(AssertionError) as exc_info:
            task_func(tenant_id=str(tenants_fixture[0].id))

        assert "provider or scan" in str(exc_info.value)
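Taken together, these tests pin down the decorator's contract: catch ObjectDoesNotExist and IntegrityError, resolve the provider either directly or via the scan, re-raise if the provider still exists, and raise ProviderDeletedException otherwise. A minimal sketch that satisfies them (hypothetical; the shipped api.decorators implementation may differ in detail):

    import functools

    from django.core.exceptions import ObjectDoesNotExist
    from django.db import IntegrityError

    from api.db_utils import rls_transaction
    from api.exceptions import ProviderDeletedException
    from api.models import Provider, Scan


    def handle_provider_deletion(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except (ObjectDoesNotExist, IntegrityError):
                provider_id = kwargs.get("provider_id")
                scan_id = kwargs.get("scan_id")
                # Assertion message must mention "provider or scan" per the tests
                assert provider_id or scan_id, "kwargs must include a provider or scan id"
                with rls_transaction(kwargs["tenant_id"]):
                    if provider_id is None:
                        scan = Scan.objects.filter(pk=scan_id).first()
                        if scan is None:
                            # Scan row gone too: CASCADE from the deleted provider.
                            # Assumes the exception message embeds the given id.
                            raise ProviderDeletedException(scan_id)
                        provider_id = scan.provider_id
                    if Provider.objects.filter(pk=provider_id).exists():
                        raise  # provider still exists: propagate the original error
                raise ProviderDeletedException(provider_id)

        return wrapper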
@@ -21,6 +21,7 @@ from prowler.providers.aws.lib.security_hub.security_hub import SecurityHubConne
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.github.github_provider import GithubProvider
from prowler.providers.iac.iac_provider import IacProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from prowler.providers.m365.m365_provider import M365Provider
from prowler.providers.mongodbatlas.mongodbatlas_provider import MongodbatlasProvider
@@ -114,6 +115,7 @@ class TestReturnProwlerProvider:
        (Provider.ProviderChoices.GITHUB.value, GithubProvider),
        (Provider.ProviderChoices.MONGODBATLAS.value, MongodbatlasProvider),
        (Provider.ProviderChoices.ORACLECLOUD.value, OraclecloudProvider),
        (Provider.ProviderChoices.IAC.value, IacProvider),
    ],
)
def test_return_prowler_provider(self, provider_type, expected_provider):
@@ -254,6 +256,72 @@ class TestGetProwlerProviderKwargs:
        expected_result = {**secret_dict, "mutelist_content": {"key": "value"}}
        assert result == expected_result
    def test_get_prowler_provider_kwargs_iac_provider(self):
        """Test that IaC provider gets correct kwargs with repository URL."""
        provider_uid = "https://github.com/org/repo"
        secret_dict = {"access_token": "test_token"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict

        provider = MagicMock()
        provider.provider = Provider.ProviderChoices.IAC.value
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider)

        expected_result = {
            "scan_repository_url": provider_uid,
            "oauth_app_token": "test_token",
        }
        assert result == expected_result

    def test_get_prowler_provider_kwargs_iac_provider_without_token(self):
        """Test that IaC provider works without access token for public repos."""
        provider_uid = "https://github.com/org/public-repo"
        secret_dict = {}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict

        provider = MagicMock()
        provider.provider = Provider.ProviderChoices.IAC.value
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider)

        expected_result = {"scan_repository_url": provider_uid}
        assert result == expected_result

    def test_get_prowler_provider_kwargs_iac_provider_ignores_mutelist(self):
        """Test that IaC provider does NOT receive mutelist_content.

        IaC provider uses Trivy's built-in mutelist logic, so it should not
        receive mutelist_content even when a mutelist processor is configured.
        """
        provider_uid = "https://github.com/org/repo"
        secret_dict = {"access_token": "test_token"}
        secret_mock = MagicMock()
        secret_mock.secret = secret_dict

        mutelist_processor = MagicMock()
        mutelist_processor.configuration = {"Mutelist": {"key": "value"}}

        provider = MagicMock()
        provider.provider = Provider.ProviderChoices.IAC.value
        provider.secret = secret_mock
        provider.uid = provider_uid

        result = get_prowler_provider_kwargs(provider, mutelist_processor)

        # IaC provider should NOT have mutelist_content
        assert "mutelist_content" not in result
        expected_result = {
            "scan_repository_url": provider_uid,
            "oauth_app_token": "test_token",
        }
        assert result == expected_result

    def test_get_prowler_provider_kwargs_unsupported_provider(self):
        # Setup
        provider_uid = "provider_uid"
@@ -158,7 +158,8 @@ def get_prowler_provider_kwargs(

    if mutelist_processor:
        mutelist_content = mutelist_processor.configuration.get("Mutelist", {})
        if mutelist_content:
        # IaC provider doesn't support mutelist (uses Trivy's built-in logic)
        if mutelist_content and provider.provider != Provider.ProviderChoices.IAC.value:
            prowler_provider_kwargs["mutelist_content"] = mutelist_content

    return prowler_provider_kwargs
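The tests above exercise exactly this branch. Illustratively (values taken from those tests, not from a live run): with a mutelist processor configured, a non-IaC provider's kwargs gain mutelist_content, while an IaC provider's stay limited to the repository URL and optional token:

    get_prowler_provider_kwargs(aws_provider, mutelist_processor)
    # => {**secret_dict, "mutelist_content": {"key": "value"}}
    get_prowler_provider_kwargs(iac_provider, mutelist_processor)
    # => {"scan_repository_url": "https://github.com/org/repo", "oauth_app_token": "test_token"}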
@@ -40,11 +40,16 @@ class BedrockCredentialsSerializer(serializers.Serializer):
    """
    Serializer for AWS Bedrock credentials validation.

    Validates long-term AWS credentials (AKIA) and region format.
    Supports two authentication methods:
    1. AWS access key + secret key
    2. Bedrock API key (bearer token)

    In both cases, region is mandatory.
    """

    access_key_id = serializers.CharField()
    secret_access_key = serializers.CharField()
    access_key_id = serializers.CharField(required=False, allow_blank=False)
    secret_access_key = serializers.CharField(required=False, allow_blank=False)
    api_key = serializers.CharField(required=False, allow_blank=False)
    region = serializers.CharField()

    def validate_access_key_id(self, value: str) -> str:
@@ -65,6 +70,15 @@ class BedrockCredentialsSerializer(serializers.Serializer):
            )
        return value

    def validate_api_key(self, value: str) -> str:
        """
        Validate Bedrock API key (bearer token).
        """
        pattern = r"^ABSKQmVkcm9ja0FQSUtleS[A-Za-z0-9+/=]{110}$"
        if not re.match(pattern, value or ""):
            raise serializers.ValidationError("Invalid Bedrock API key format.")
        return value

    def validate_region(self, value: str) -> str:
        """Validate AWS region format."""
        pattern = r"^[a-z]{2}-[a-z]+-\d+$"
@@ -74,6 +88,50 @@ class BedrockCredentialsSerializer(serializers.Serializer):
            )
        return value
    def validate(self, attrs):
        """
        Enforce either:
          - access_key_id + secret_access_key + region
        OR
          - api_key + region
        """
        access_key_id = attrs.get("access_key_id")
        secret_access_key = attrs.get("secret_access_key")
        api_key = attrs.get("api_key")
        region = attrs.get("region")

        errors = {}

        if not region:
            errors["region"] = ["Region is required."]

        using_access_keys = bool(access_key_id or secret_access_key)
        using_api_key = api_key is not None and api_key != ""

        if using_access_keys and using_api_key:
            errors["non_field_errors"] = [
                "Provide either access key + secret key OR api key, not both."
            ]
        elif not using_access_keys and not using_api_key:
            errors["non_field_errors"] = [
                "You must provide either access key + secret key OR api key."
            ]
        elif using_access_keys:
            # Both access_key_id and secret_access_key must be present together
            if not access_key_id:
                errors.setdefault("access_key_id", []).append(
                    "AWS access key ID is required when using access key authentication."
                )
            if not secret_access_key:
                errors.setdefault("secret_access_key", []).append(
                    "AWS secret access key is required when using access key authentication."
                )

        if errors:
            raise serializers.ValidationError(errors)

        return attrs

    def to_internal_value(self, data):
        """Check for unknown fields before DRF filters them out."""
        if not isinstance(data, dict):
@@ -111,6 +169,15 @@ class BedrockCredentialsUpdateSerializer(BedrockCredentialsSerializer):
        for field in self.fields.values():
            field.required = False

    def validate(self, attrs):
        """
        For updates, this serializer only checks individual fields.
        It does NOT enforce the "either access keys OR api key" rule.
        That rule is applied later, after merging with existing stored
        credentials, in LighthouseProviderConfigUpdateSerializer.
        """
        return attrs
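A hedged sketch of the create-time rules in practice (the import path is assumed; the credential values are fabricated to match the declared patterns):

    from api.v1.serializers import BedrockCredentialsSerializer  # assumed location

    # API-key method: api_key + region is sufficient.
    ok = BedrockCredentialsSerializer(
        data={"api_key": "ABSKQmVkcm9ja0FQSUtleS" + "A" * 110, "region": "us-east-1"}
    )
    assert ok.is_valid()

    # Mixing both methods trips the "not both" non_field_errors branch.
    mixed = BedrockCredentialsSerializer(
        data={
            "api_key": "ABSKQmVkcm9ja0FQSUtleS" + "A" * 110,
            "access_key_id": "AKIAIOSFODNN7EXAMPLE",
            "secret_access_key": "A" * 40,
            "region": "us-east-1",
        }
    )
    assert not mixed.is_valid()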
class OpenAICompatibleCredentialsSerializer(serializers.Serializer):
    """
@@ -168,27 +235,51 @@ class OpenAICompatibleCredentialsSerializer(serializers.Serializer):
            "required": ["api_key"],
        },
        {
            "type": "object",
            "title": "AWS Bedrock Credentials",
            "properties": {
                "access_key_id": {
                    "type": "string",
                    "description": "AWS access key ID.",
                    "pattern": "^AKIA[0-9A-Z]{16}$",
            "oneOf": [
                {
                    "title": "IAM Access Key Pair",
                    "type": "object",
                    "description": "Authenticate with AWS access key and secret key. Recommended when you manage IAM users or roles.",
                    "properties": {
                        "access_key_id": {
                            "type": "string",
                            "description": "AWS access key ID.",
                            "pattern": "^AKIA[0-9A-Z]{16}$",
                        },
                        "secret_access_key": {
                            "type": "string",
                            "description": "AWS secret access key.",
                            "pattern": "^[A-Za-z0-9/+=]{40}$",
                        },
                        "region": {
                            "type": "string",
                            "description": "AWS region identifier where Bedrock is available. Examples: us-east-1, "
                            "us-west-2, eu-west-1, ap-northeast-1.",
                            "pattern": "^[a-z]{2}-[a-z]+-\\d+$",
                        },
                    },
                    "required": ["access_key_id", "secret_access_key", "region"],
                },
                "secret_access_key": {
                    "type": "string",
                    "description": "AWS secret access key.",
                    "pattern": "^[A-Za-z0-9/+=]{40}$",
                {
                    "title": "Amazon Bedrock API Key",
                    "type": "object",
                    "description": "Authenticate with an Amazon Bedrock API key (bearer token). Region is still required.",
                    "properties": {
                        "api_key": {
                            "type": "string",
                            "description": "Amazon Bedrock API key (bearer token).",
                        },
                        "region": {
                            "type": "string",
                            "description": "AWS region identifier where Bedrock is available. Examples: us-east-1, "
                            "us-west-2, eu-west-1, ap-northeast-1.",
                            "pattern": "^[a-z]{2}-[a-z]+-\\d+$",
                        },
                    },
                    "required": ["api_key", "region"],
                },
                "region": {
                    "type": "string",
                    "description": "AWS region identifier where Bedrock is available. Examples: us-east-1, "
                    "us-west-2, eu-west-1, ap-northeast-1.",
                    "pattern": "^[a-z]{2}-[a-z]+-\\d+$",
                },
            },
            "required": ["access_key_id", "secret_access_key", "region"],
            ],
        },
        {
            "type": "object",
@@ -72,6 +72,42 @@ from api.v1.serializer_utils.processors import ProcessorConfigField
from api.v1.serializer_utils.providers import ProviderSecretField
from prowler.lib.mutelist.mutelist import Mutelist

# Base


class BaseModelSerializerV1(serializers.ModelSerializer):
    def get_root_meta(self, _resource, _many):
        return {"version": "v1"}


class BaseSerializerV1(serializers.Serializer):
    def get_root_meta(self, _resource, _many):
        return {"version": "v1"}


class BaseWriteSerializer(BaseModelSerializerV1):
    def validate(self, data):
        if hasattr(self, "initial_data"):
            initial_data = set(self.initial_data.keys()) - {"id", "type"}
            unknown_keys = initial_data - set(self.fields.keys())
            if unknown_keys:
                raise ValidationError(f"Invalid fields: {unknown_keys}")
        return data


class RLSSerializer(BaseModelSerializerV1):
    def create(self, validated_data):
        tenant_id = self.context.get("tenant_id")
        validated_data["tenant_id"] = tenant_id
        return super().create(validated_data)


class StateEnumSerializerField(serializers.ChoiceField):
    def __init__(self, **kwargs):
        kwargs["choices"] = StateChoices.choices
        super().__init__(**kwargs)


# Tokens


@@ -179,7 +215,7 @@ class TokenSocialLoginSerializer(BaseTokenSerializer):


# TODO: Check if we can change the parent class to TokenRefreshSerializer from rest_framework_simplejwt.serializers
class TokenRefreshSerializer(serializers.Serializer):
class TokenRefreshSerializer(BaseSerializerV1):
    refresh = serializers.CharField()

    # Output token
@@ -213,7 +249,7 @@ class TokenRefreshSerializer(serializers.Serializer):
            raise ValidationError({"refresh": "Invalid or expired token"})


class TokenSwitchTenantSerializer(serializers.Serializer):
class TokenSwitchTenantSerializer(BaseSerializerV1):
    tenant_id = serializers.UUIDField(
        write_only=True, help_text="The tenant ID for which to request a new token."
    )
@@ -237,41 +273,10 @@ class TokenSwitchTenantSerializer(serializers.Serializer):
        return generate_tokens(user, tenant_id)


# Base


class BaseSerializerV1(serializers.ModelSerializer):
    def get_root_meta(self, _resource, _many):
        return {"version": "v1"}


class BaseWriteSerializer(BaseSerializerV1):
    def validate(self, data):
        if hasattr(self, "initial_data"):
            initial_data = set(self.initial_data.keys()) - {"id", "type"}
            unknown_keys = initial_data - set(self.fields.keys())
            if unknown_keys:
                raise ValidationError(f"Invalid fields: {unknown_keys}")
        return data


class RLSSerializer(BaseSerializerV1):
    def create(self, validated_data):
        tenant_id = self.context.get("tenant_id")
        validated_data["tenant_id"] = tenant_id
        return super().create(validated_data)


class StateEnumSerializerField(serializers.ChoiceField):
    def __init__(self, **kwargs):
        kwargs["choices"] = StateChoices.choices
        super().__init__(**kwargs)


# Users


class UserSerializer(BaseSerializerV1):
class UserSerializer(BaseModelSerializerV1):
    """
    Serializer for the User model.
    """
@@ -402,7 +407,7 @@ class UserUpdateSerializer(BaseWriteSerializer):
        return super().update(instance, validated_data)


class RoleResourceIdentifierSerializer(serializers.Serializer):
class RoleResourceIdentifierSerializer(BaseSerializerV1):
    resource_type = serializers.CharField(source="type")
    id = serializers.UUIDField()

@@ -585,7 +590,7 @@ class TaskSerializer(RLSSerializer, TaskBase):
# Tenants


class TenantSerializer(BaseSerializerV1):
class TenantSerializer(BaseModelSerializerV1):
    """
    Serializer for the Tenant model.
    """
@@ -597,7 +602,7 @@ class TenantSerializer(BaseSerializerV1):
        fields = ["id", "name", "memberships"]


class TenantIncludeSerializer(BaseSerializerV1):
class TenantIncludeSerializer(BaseModelSerializerV1):
    class Meta:
        model = Tenant
        fields = ["id", "name"]
@@ -773,7 +778,7 @@ class ProviderGroupUpdateSerializer(ProviderGroupSerializer):
        return super().update(instance, validated_data)


class ProviderResourceIdentifierSerializer(serializers.Serializer):
class ProviderResourceIdentifierSerializer(BaseSerializerV1):
    resource_type = serializers.CharField(source="type")
    id = serializers.UUIDField()

@@ -1110,7 +1115,7 @@ class ScanTaskSerializer(RLSSerializer):
        ]


class ScanReportSerializer(serializers.Serializer):
class ScanReportSerializer(BaseSerializerV1):
    id = serializers.CharField(source="scan")

    class Meta:
@@ -1118,7 +1123,7 @@ class ScanReportSerializer(serializers.Serializer):
        fields = ["id"]


class ScanComplianceReportSerializer(serializers.Serializer):
class ScanComplianceReportSerializer(BaseSerializerV1):
    id = serializers.CharField(source="scan")
    name = serializers.CharField()

@@ -1267,7 +1272,7 @@ class ResourceIncludeSerializer(RLSSerializer):
        return fields


class ResourceMetadataSerializer(serializers.Serializer):
class ResourceMetadataSerializer(BaseSerializerV1):
    services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    types = serializers.ListField(child=serializers.CharField(), allow_empty=True)
@@ -1337,7 +1342,7 @@ class FindingIncludeSerializer(RLSSerializer):


# To be removed when the related endpoint is removed as well
class FindingDynamicFilterSerializer(serializers.Serializer):
class FindingDynamicFilterSerializer(BaseSerializerV1):
    services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)

@@ -1345,7 +1350,7 @@ class FindingDynamicFilterSerializer(serializers.Serializer):
        resource_name = "finding-dynamic-filters"


class FindingMetadataSerializer(serializers.Serializer):
class FindingMetadataSerializer(BaseSerializerV1):
    services = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)
    resource_types = serializers.ListField(
@@ -2039,7 +2044,7 @@ class RoleProviderGroupRelationshipSerializer(RLSSerializer, BaseWriteSerializer
# Compliance overview


class ComplianceOverviewSerializer(serializers.Serializer):
class ComplianceOverviewSerializer(BaseSerializerV1):
    """
    Serializer for compliance requirement status aggregated by compliance framework.

@@ -2061,7 +2066,7 @@ class ComplianceOverviewSerializer(serializers.Serializer):
        resource_name = "compliance-overviews"


class ComplianceOverviewDetailSerializer(serializers.Serializer):
class ComplianceOverviewDetailSerializer(BaseSerializerV1):
    """
    Serializer for detailed compliance requirement information.

@@ -2090,7 +2095,7 @@ class ComplianceOverviewDetailThreatscoreSerializer(ComplianceOverviewDetailSeri
    total_findings = serializers.IntegerField()


class ComplianceOverviewAttributesSerializer(serializers.Serializer):
class ComplianceOverviewAttributesSerializer(BaseSerializerV1):
    id = serializers.CharField()
    compliance_name = serializers.CharField()
    framework_description = serializers.CharField()
@@ -2104,7 +2109,7 @@ class ComplianceOverviewAttributesSerializer(serializers.Serializer):
        resource_name = "compliance-requirements-attributes"


class ComplianceOverviewMetadataSerializer(serializers.Serializer):
class ComplianceOverviewMetadataSerializer(BaseSerializerV1):
    regions = serializers.ListField(child=serializers.CharField(), allow_empty=True)

    class JSONAPIMeta:
@@ -2114,7 +2119,7 @@ class ComplianceOverviewMetadataSerializer(serializers.Serializer):
# Overviews


class OverviewProviderSerializer(serializers.Serializer):
class OverviewProviderSerializer(BaseSerializerV1):
    id = serializers.CharField(source="provider")
    findings = serializers.SerializerMethodField(read_only=True)
    resources = serializers.SerializerMethodField(read_only=True)
@@ -2122,9 +2127,6 @@ class OverviewProviderSerializer(serializers.Serializer):
    class JSONAPIMeta:
        resource_name = "providers-overview"

    def get_root_meta(self, _resource, _many):
        return {"version": "v1"}

    @extend_schema_field(
        {
            "type": "object",
@@ -2158,18 +2160,15 @@ class OverviewProviderSerializer(serializers.Serializer):
    }


class OverviewProviderCountSerializer(serializers.Serializer):
class OverviewProviderCountSerializer(BaseSerializerV1):
    id = serializers.CharField(source="provider")
    count = serializers.IntegerField()

    class JSONAPIMeta:
        resource_name = "providers-count-overview"

    def get_root_meta(self, _resource, _many):
        return {"version": "v1"}


class OverviewFindingSerializer(serializers.Serializer):
class OverviewFindingSerializer(BaseSerializerV1):
    id = serializers.CharField(default="n/a")
    new = serializers.IntegerField()
    changed = serializers.IntegerField()
@@ -2188,15 +2187,12 @@ class OverviewFindingSerializer(serializers.Serializer):
    class JSONAPIMeta:
        resource_name = "findings-overview"

    def get_root_meta(self, _resource, _many):
        return {"version": "v1"}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields["pass"] = self.fields.pop("_pass")


class OverviewSeveritySerializer(serializers.Serializer):
class OverviewSeveritySerializer(BaseSerializerV1):
    id = serializers.CharField(default="n/a")
    critical = serializers.IntegerField()
    high = serializers.IntegerField()
@@ -2207,11 +2203,24 @@ class OverviewSeveritySerializer(serializers.Serializer):
    class JSONAPIMeta:
        resource_name = "findings-severity-overview"

    def get_root_meta(self, _resource, _many):
        return {"version": "v1"}

class FindingsSeverityOverTimeSerializer(BaseSerializerV1):
    """Serializer for daily findings severity trend data."""

    id = serializers.DateField(source="date")
    critical = serializers.IntegerField()
    high = serializers.IntegerField()
    medium = serializers.IntegerField()
    low = serializers.IntegerField()
    informational = serializers.IntegerField()
    muted = serializers.IntegerField()
    scan_ids = serializers.ListField(child=serializers.UUIDField())

    class JSONAPIMeta:
        resource_name = "findings-severity-over-time"


class OverviewServiceSerializer(serializers.Serializer):
class OverviewServiceSerializer(BaseSerializerV1):
    id = serializers.CharField(source="service")
    total = serializers.IntegerField()
    _pass = serializers.IntegerField()
@@ -2225,8 +2234,20 @@ class OverviewServiceSerializer(serializers.Serializer):
        super().__init__(*args, **kwargs)
        self.fields["pass"] = self.fields.pop("_pass")

    def get_root_meta(self, _resource, _many):
        return {"version": "v1"}

class AttackSurfaceOverviewSerializer(BaseSerializerV1):
    """Serializer for attack surface overview aggregations."""

    id = serializers.CharField(source="attack_surface_type")
    total_findings = serializers.IntegerField()
    failed_findings = serializers.IntegerField()
    muted_failed_findings = serializers.IntegerField()
    check_ids = serializers.ListField(
        child=serializers.CharField(), allow_empty=True, default=list, read_only=True
    )

    class JSONAPIMeta:
        resource_name = "attack-surface-overviews"


class OverviewRegionSerializer(serializers.Serializer):
@@ -2256,7 +2277,7 @@ class OverviewRegionSerializer(serializers.Serializer):
# Schedules


class ScheduleDailyCreateSerializer(serializers.Serializer):
class ScheduleDailyCreateSerializer(BaseSerializerV1):
    provider_id = serializers.UUIDField(required=True)

    class JSONAPIMeta:
@@ -2592,7 +2613,7 @@ class IntegrationUpdateSerializer(BaseWriteIntegrationSerializer):
        return representation


class IntegrationJiraDispatchSerializer(serializers.Serializer):
class IntegrationJiraDispatchSerializer(BaseSerializerV1):
    """
    Serializer for dispatching findings to JIRA integration.
    """
@@ -2755,14 +2776,14 @@ class ProcessorUpdateSerializer(BaseWriteSerializer):
# SSO


class SamlInitiateSerializer(serializers.Serializer):
class SamlInitiateSerializer(BaseSerializerV1):
    email_domain = serializers.CharField()

    class JSONAPIMeta:
        resource_name = "saml-initiate"


class SamlMetadataSerializer(serializers.Serializer):
class SamlMetadataSerializer(BaseSerializerV1):
    class JSONAPIMeta:
        resource_name = "saml-meta"

@@ -3294,6 +3315,19 @@ class LighthouseProviderConfigUpdateSerializer(BaseWriteSerializer):
            and provider_type
            == LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK
        ):
            # For updates, enforce that the authentication method (access keys vs API key)
            # is immutable. To switch methods, the UI must delete and recreate the provider.
            existing_credentials = (
                self.instance.credentials_decoded if self.instance else {}
            ) or {}

            existing_uses_api_key = "api_key" in existing_credentials
            existing_uses_access_keys = any(
                k in existing_credentials
                for k in ("access_key_id", "secret_access_key")
            )

            # First run field-level validation on the partial payload
            try:
                BedrockCredentialsUpdateSerializer(data=credentials).is_valid(
                    raise_exception=True
@@ -3304,6 +3338,31 @@ class LighthouseProviderConfigUpdateSerializer(BaseWriteSerializer):
                    e.detail[f"credentials/{key}"] = value
                    del e.detail[key]
                raise e

            # Then enforce invariants about not changing the auth method
            # If the existing config uses an API key, forbid introducing access keys.
            if existing_uses_api_key and any(
                k in credentials for k in ("access_key_id", "secret_access_key")
            ):
                raise ValidationError(
                    {
                        "credentials/non_field_errors": [
                            "Cannot change Bedrock authentication method from API key "
                            "to access key via update. Delete and recreate the provider instead."
                        ]
                    }
                )

            # If the existing config uses access keys, forbid introducing an API key.
            if existing_uses_access_keys and "api_key" in credentials:
                raise ValidationError(
                    {
                        "credentials/non_field_errors": [
                            "Cannot change Bedrock authentication method from access key "
                            "to API key via update. Delete and recreate the provider instead."
                        ]
                    }
                )
        elif (
            credentials is not None
            and provider_type
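In practice (an illustrative sketch; the endpoint path and payload values are placeholders, not taken from this diff): rotating credentials within the stored auth method succeeds, while switching methods is rejected:

    # PATCH against the Lighthouse provider config endpoint -- path assumed.
    # Stored config authenticates with an API key: rotating it is allowed.
    {"credentials": {"api_key": "NEW_BEDROCK_API_KEY", "region": "eu-west-1"}}

    # Introducing access keys on that same config fails with
    # credentials/non_field_errors: "Cannot change Bedrock authentication
    # method from API key to access key via update. Delete and recreate
    # the provider instead."
    {"credentials": {"access_key_id": "AKIAIOSFODNN7EXAMPLE"}}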
@@ -74,6 +74,7 @@ from rest_framework_json_api.views import RelationshipView, Response
from rest_framework_simplejwt.exceptions import InvalidToken, TokenError
from tasks.beat import schedule_provider_scan
from tasks.jobs.export import get_s3_client
from tasks.jobs.scan import _get_attack_surface_mapping_from_provider
from tasks.tasks import (
    backfill_compliance_summaries_task,
    backfill_scan_resource_summaries_task,
@@ -98,8 +99,10 @@ from api.db_router import MainRouter
from api.db_utils import rls_transaction
from api.exceptions import TaskFailedException
from api.filters import (
    AttackSurfaceOverviewFilter,
    ComplianceOverviewFilter,
    CustomDjangoFilterBackend,
    DailySeveritySummaryFilter,
    FindingFilter,
    IntegrationFilter,
    IntegrationJiraFindingsFilter,
@@ -126,8 +129,10 @@ from api.filters import (
    UserFilter,
)
from api.models import (
    AttackSurfaceOverview,
    ComplianceOverviewSummary,
    ComplianceRequirementOverview,
    DailySeveritySummary,
    Finding,
    Integration,
    Invitation,
@@ -172,6 +177,7 @@ from api.utils import (
from api.uuid_utils import datetime_to_uuid7, uuid7_start
from api.v1.mixins import DisablePaginationMixin, PaginateByPkMixin, TaskManagementMixin
from api.v1.serializers import (
    AttackSurfaceOverviewSerializer,
    ComplianceOverviewAttributesSerializer,
    ComplianceOverviewDetailSerializer,
    ComplianceOverviewDetailThreatscoreSerializer,
@@ -180,6 +186,7 @@ from api.v1.serializers import (
    FindingDynamicFilterSerializer,
    FindingMetadataSerializer,
    FindingSerializer,
    FindingsSeverityOverTimeSerializer,
    IntegrationCreateSerializer,
    IntegrationJiraDispatchSerializer,
    IntegrationSerializer,
@@ -350,7 +357,7 @@ class SchemaView(SpectacularAPIView):

    def get(self, request, *args, **kwargs):
        spectacular_settings.TITLE = "Prowler API"
        spectacular_settings.VERSION = "1.15.0"
        spectacular_settings.VERSION = "1.16.0"
        spectacular_settings.DESCRIPTION = (
            "Prowler API specification.\n\nThis file is auto-generated."
        )
@@ -3359,50 +3366,15 @@ class RoleProviderGroupRelationshipView(RelationshipView, BaseRLSViewSet):
@extend_schema_view(
    list=extend_schema(
        tags=["Compliance Overview"],
        summary="List compliance overviews",
        description=(
            "Retrieve an overview of all compliance frameworks. "
            "If scan_id is provided, returns compliance data for that specific scan. "
            "If scan_id is omitted, returns compliance data aggregated from the latest completed scan of each provider."
        ),
        summary="List compliance overviews for a scan",
        description="Retrieve an overview of all the compliance in a given scan.",
        parameters=[
            OpenApiParameter(
                name="filter[scan_id]",
                required=False,
                required=True,
                type=OpenApiTypes.UUID,
                location=OpenApiParameter.QUERY,
                description=(
                    "Optional scan ID. If provided, returns compliance for that scan. "
                    "If omitted, returns compliance for the latest completed scan per provider."
                ),
            ),
            OpenApiParameter(
                name="filter[provider_id]",
                required=False,
                type=OpenApiTypes.UUID,
                location=OpenApiParameter.QUERY,
                description="Filter by specific provider ID.",
            ),
            OpenApiParameter(
                name="filter[provider_id__in]",
                required=False,
                type={"type": "array", "items": {"type": "string", "format": "uuid"}},
                location=OpenApiParameter.QUERY,
                description="Filter by multiple provider IDs (comma-separated).",
            ),
            OpenApiParameter(
                name="filter[provider_type]",
                required=False,
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                description="Filter by provider type (e.g., aws, azure, gcp).",
            ),
            OpenApiParameter(
                name="filter[provider_type__in]",
                required=False,
                type={"type": "array", "items": {"type": "string"}},
                location=OpenApiParameter.QUERY,
                description="Filter by multiple provider types (comma-separated).",
                description="Related scan ID.",
            ),
        ],
        responses={
@@ -3581,7 +3553,19 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
    def _get_compliance_template(self, *, provider=None, scan_id=None):
        """Return the compliance template for the given provider or scan."""
        if provider is None and scan_id is not None:
            scan = Scan.all_objects.select_related("provider").get(pk=scan_id)
            try:
                scan = Scan.all_objects.select_related("provider").get(pk=scan_id)
            except Scan.DoesNotExist:
                raise ValidationError(
                    [
                        {
                            "detail": "Scan not found",
                            "status": 404,
                            "source": {"pointer": "filter[scan_id]"},
                            "code": "not_found",
                        }
                    ]
                )
            provider = scan.provider

        if not provider:
@@ -3723,93 +3707,47 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):

    def list(self, request, *args, **kwargs):
        scan_id = request.query_params.get("filter[scan_id]")
        tenant_id = self.request.tenant_id

        if scan_id:
            # Specific scan requested - use optimized summaries with region support
            region_filter = request.query_params.get(
                "filter[region]"
            ) or request.query_params.get("filter[region__in]")
            # Specific scan requested - use optimized summaries with region support
            region_filter = request.query_params.get(
                "filter[region]"
            ) or request.query_params.get("filter[region__in]")

            if region_filter:
                # Fall back to detailed query with region filtering
                return self._list_with_region_filter(scan_id, region_filter)
            if region_filter:
                # Fall back to detailed query with region filtering
                return self._list_with_region_filter(scan_id, region_filter)

            summaries = list(self._compliance_summaries_queryset(scan_id))
            if not summaries:
                # Trigger async backfill for next time
                backfill_compliance_summaries_task.delay(
                    tenant_id=self.request.tenant_id, scan_id=scan_id
                )
                # Use fallback aggregation for this request
                return self._list_without_region_aggregation(scan_id)
            summaries = list(self._compliance_summaries_queryset(scan_id))
            if not summaries:
                # Trigger async backfill for next time
                backfill_compliance_summaries_task.delay(
                    tenant_id=self.request.tenant_id, scan_id=scan_id
                )
                # Use fallback aggregation for this request
                return self._list_without_region_aggregation(scan_id)

            # Get compliance template for provider to enrich with framework/version
            compliance_template = self._get_compliance_template(scan_id=scan_id)
            # Get compliance template for provider to enrich with framework/version
            compliance_template = self._get_compliance_template(scan_id=scan_id)

            # Convert to response format with framework/version enrichment
            response_data = []
            for summary in summaries:
                compliance_metadata = compliance_template.get(summary.compliance_id, {})
                response_data.append(
                    {
                        "id": summary.compliance_id,
                        "compliance_id": summary.compliance_id,
                        "framework": compliance_metadata.get("framework", ""),
                        "version": compliance_metadata.get("version", ""),
                        "requirements_passed": summary.requirements_passed,
                        "requirements_failed": summary.requirements_failed,
                        "requirements_manual": summary.requirements_manual,
                        "total_requirements": summary.total_requirements,
                    }
                )

            serializer = self.get_serializer(response_data, many=True)
            return Response(serializer.data)
        else:
            # No scan_id provided - use latest scans per provider
            # First, check if provider filters are present
            provider_id = request.query_params.get("filter[provider_id]")
            provider_id__in = request.query_params.get("filter[provider_id__in]")
            provider_type = request.query_params.get("filter[provider_type]")
            provider_type__in = request.query_params.get("filter[provider_type__in]")

            scan_filters = {"tenant_id": tenant_id, "state": StateChoices.COMPLETED}

            # Apply provider ID filters
            if provider_id:
                scan_filters["provider_id"] = provider_id
            elif provider_id__in:
                # Convert comma-separated string to list
                provider_ids = [pid.strip() for pid in provider_id__in.split(",")]
                scan_filters["provider_id__in"] = provider_ids

            # Apply provider type filters
            if provider_type:
                scan_filters["provider__provider"] = provider_type
            elif provider_type__in:
                # Convert comma-separated string to list
                provider_types = [pt.strip() for pt in provider_type__in.split(",")]
                scan_filters["provider__provider__in"] = provider_types

            latest_scan_ids = (
                Scan.all_objects.filter(**scan_filters)
                .order_by("provider_id", "-inserted_at")
                .distinct("provider_id")
                .values_list("id", flat=True)
            # Convert to response format with framework/version enrichment
            response_data = []
            for summary in summaries:
                compliance_metadata = compliance_template.get(summary.compliance_id, {})
                response_data.append(
                    {
                        "id": summary.compliance_id,
                        "compliance_id": summary.compliance_id,
                        "framework": compliance_metadata.get("framework", ""),
                        "version": compliance_metadata.get("version", ""),
                        "requirements_passed": summary.requirements_passed,
                        "requirements_failed": summary.requirements_failed,
                        "requirements_manual": summary.requirements_manual,
                        "total_requirements": summary.total_requirements,
                    }
                )

            base_queryset = self.get_queryset()
            queryset = self.filter_queryset(
                base_queryset.filter(scan_id__in=latest_scan_ids)
            )

            # Aggregate compliance data across latest scans
            compliance_template = self._get_compliance_template()
            data = self._aggregate_compliance_overview(
                queryset, template_metadata=compliance_template
            )
            return Response(data)
            serializer = self.get_serializer(response_data, many=True)
            return Response(serializer.data)

    @action(detail=False, methods=["get"], url_name="metadata")
    def metadata(self, request):
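A note on the multi-value filters in the list() branch above: the comma-separated values of filter[provider_type__in] are split and stripped into a plain list before being passed to the ORM as __in kwargs. A minimal standalone sketch (not the project's code; names hypothetical):

# Sketch: turning a JSON:API multi-value filter into Django ORM kwargs,
# as the list() else-branch above does for provider filters.
def build_scan_filters(tenant_id, params):
    filters = {"tenant_id": tenant_id, "state": "completed"}
    provider_type__in = params.get("filter[provider_type__in]")
    if provider_type__in:
        # "aws, gcp" -> ["aws", "gcp"]
        filters["provider__provider__in"] = [
            pt.strip() for pt in provider_type__in.split(",")
        ]
    return filters

assert build_scan_filters("t1", {"filter[provider_type__in]": "aws, gcp"}) == {
    "tenant_id": "t1",
    "state": "completed",
    "provider__provider__in": ["aws", "gcp"],
}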
@@ -4074,6 +4012,47 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
        ),
        filters=True,
    ),
    findings_severity_timeseries=extend_schema(
        summary="Get findings severity data over time",
        description=(
            "Retrieve daily aggregated findings data grouped by severity levels over a date range. "
            "Returns one data point per day with counts of failed findings by severity (critical, high, "
            "medium, low, informational) and muted findings. Days without scans are filled forward with "
            "the most recent known values. Use date_from (required) and date_to filters to specify the range."
        ),
        filters=True,
    ),
    attack_surface=extend_schema(
        summary="Get attack surface overview",
        description="Retrieve aggregated attack surface metrics from latest completed scans per provider.",
        tags=["Overview"],
        parameters=[
            OpenApiParameter(
                name="filter[provider_id]",
                type=OpenApiTypes.UUID,
                location=OpenApiParameter.QUERY,
                description="Filter by specific provider ID",
            ),
            OpenApiParameter(
                name="filter[provider_id.in]",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                description="Filter by multiple provider IDs (comma-separated UUIDs)",
            ),
            OpenApiParameter(
                name="filter[provider_type]",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                description="Filter by provider type (aws, azure, gcp, etc.)",
            ),
            OpenApiParameter(
                name="filter[provider_type.in]",
                type=OpenApiTypes.STR,
                location=OpenApiParameter.QUERY,
                description="Filter by multiple provider types (comma-separated)",
            ),
        ],
    ),
)
@method_decorator(CACHE_DECORATOR, name="list")
class OverviewViewSet(BaseRLSViewSet):
@@ -4091,7 +4070,16 @@ class OverviewViewSet(BaseRLSViewSet):
        if not role.unlimited_visibility:
            self.allowed_providers = providers

        return ScanSummary.all_objects.filter(tenant_id=self.request.tenant_id)
        tenant_id = self.request.tenant_id

        # Return appropriate queryset per action
        if self.action == "findings_severity_timeseries":
            qs = DailySeveritySummary.objects.filter(tenant_id=tenant_id)
            if hasattr(self, "allowed_providers"):
                qs = qs.filter(provider_id__in=self.allowed_providers)
            return qs

        return ScanSummary.all_objects.filter(tenant_id=tenant_id)

    def get_serializer_class(self):
        if self.action == "providers":
@@ -4102,12 +4090,16 @@ class OverviewViewSet(BaseRLSViewSet):
            return OverviewFindingSerializer
        elif self.action == "findings_severity":
            return OverviewSeveritySerializer
        elif self.action == "findings_severity_timeseries":
            return FindingsSeverityOverTimeSerializer
        elif self.action == "services":
            return OverviewServiceSerializer
        elif self.action == "regions":
            return OverviewRegionSerializer
        elif self.action == "threatscore":
            return ThreatScoreSnapshotSerializer
        elif self.action == "attack_surface":
            return AttackSurfaceOverviewSerializer
        return super().get_serializer_class()

    def get_filterset_class(self):
@@ -4117,8 +4109,18 @@ class OverviewViewSet(BaseRLSViewSet):
            return ScanSummaryFilter
        elif self.action == "findings_severity":
            return ScanSummarySeverityFilter
        elif self.action == "findings_severity_timeseries":
            return DailySeveritySummaryFilter
        return None

    def filter_queryset(self, queryset):
        # Skip OrderingFilter for findings_severity_timeseries (no inserted_at field)
        if self.action == "findings_severity_timeseries":
            return CustomDjangoFilterBackend().filter_queryset(
                self.request, queryset, self
            )
        return super().filter_queryset(queryset)

    @extend_schema(exclude=True)
    def list(self, request, *args, **kwargs):
        raise MethodNotAllowed(method="GET")
@@ -4156,6 +4158,68 @@ class OverviewViewSet(BaseRLSViewSet):
            tenant_id=tenant_id, scan_id__in=latest_scan_ids
        )

    def _normalize_jsonapi_params(self, query_params, exclude_keys=None):
        """Convert JSON:API filter params (filter[X]) to flat params (X)."""
        exclude_keys = exclude_keys or set()
        normalized = QueryDict(mutable=True)
        for key, values in query_params.lists():
            normalized_key = (
                key[7:-1] if key.startswith("filter[") and key.endswith("]") else key
            )
            if normalized_key not in exclude_keys:
                normalized.setlist(normalized_key, values)
        return normalized

    def _ensure_allowed_providers(self):
        """Populate allowed providers for RBAC-aware queries once per request."""
        if getattr(self, "_providers_initialized", False):
            return
        self.get_queryset()
        self._providers_initialized = True

    def _get_provider_filter(self, provider_field="provider"):
        self._ensure_allowed_providers()
        if hasattr(self, "allowed_providers"):
            return {f"{provider_field}__in": self.allowed_providers}
        return {}

    def _apply_provider_filter(self, queryset, provider_field="provider"):
        provider_filter = self._get_provider_filter(provider_field)
        if provider_filter:
            return queryset.filter(**provider_filter)
        return queryset

    def _apply_filterset(self, queryset, filterset_class, exclude_keys=None):
        normalized_params = self._normalize_jsonapi_params(
            self.request.query_params, exclude_keys=set(exclude_keys or [])
        )
        filterset = filterset_class(normalized_params, queryset=queryset)
        return filterset.qs

    def _latest_scan_ids_for_allowed_providers(self, tenant_id):
        provider_filter = self._get_provider_filter()
        return (
            Scan.all_objects.filter(
                tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
            )
            .order_by("provider_id", "-inserted_at")
            .distinct("provider_id")
            .values_list("id", flat=True)
        )

    def _attack_surface_check_ids_by_provider_types(self, provider_types):
        check_ids_by_type = {
            attack_surface_type: set()
            for attack_surface_type in AttackSurfaceOverview.AttackSurfaceTypeChoices.values
        }
        for provider_type in provider_types:
            attack_surface_mapping = _get_attack_surface_mapping_from_provider(
                provider_type=provider_type
            )
            for attack_surface_type, check_ids in attack_surface_mapping.items():
                check_ids_by_type[attack_surface_type].update(check_ids)
        return check_ids_by_type

    @action(detail=False, methods=["get"], url_name="providers")
    def providers(self, request):
        tenant_id = self.request.tenant_id
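The _normalize_jsonapi_params helper above strips the filter[...] wrapper so a plain FilterSet can consume the params. A minimal standalone sketch of that normalization, using a plain dict in place of Django's QueryDict so it runs on its own:

# Sketch of the filter[X] -> X normalization (len("filter[") == 7, hence [7:-1]).
def normalize_jsonapi_params(query_params, exclude_keys=()):
    normalized = {}
    for key, values in query_params.items():
        flat = key[7:-1] if key.startswith("filter[") and key.endswith("]") else key
        if flat not in exclude_keys:
            normalized[flat] = values
    return normalized

params = {"filter[provider_id]": ["123"], "page[size]": ["10"]}
assert normalize_jsonapi_params(params) == {"provider_id": ["123"], "page[size]": ["10"]}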
@@ -4333,6 +4397,108 @@ class OverviewViewSet(BaseRLSViewSet):

        return Response(serializer.data, status=status.HTTP_200_OK)

    @action(
        detail=False,
        methods=["get"],
        url_path="findings_severity/timeseries",
        url_name="findings_severity_timeseries",
    )
    def findings_severity_timeseries(self, request):
        """
        Daily severity trends for charts. Uses DailySeveritySummary pre-aggregation.
        Requires date_from filter.
        """
        # Get queryset with RBAC, provider, and date filters applied
        # Date validation is handled by DailySeveritySummaryFilter
        daily_qs = self.filter_queryset(self.get_queryset())

        date_from = request._date_from
        date_to = request._date_to

        if not daily_qs.exists():
            # No data matches filters - return zeros
            result = self._generate_zero_result(date_from, date_to)
            serializer = self.get_serializer(result, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)

        # Fetch all data for fill-forward logic
        daily_summaries = list(
            daily_qs.order_by("provider_id", "-date").values(
                "provider_id",
                "scan_id",
                "date",
                "critical",
                "high",
                "medium",
                "low",
                "informational",
                "muted",
            )
        )

        if not daily_summaries:
            result = self._generate_zero_result(date_from, date_to)
            serializer = self.get_serializer(result, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)

        # Build provider_data: {provider_id: [(date, data), ...]} sorted by date desc
        provider_data = defaultdict(list)
        for summary in daily_summaries:
            provider_data[summary["provider_id"]].append(summary)

        # For each day, find the latest data per provider and sum values
        result = []
        current_date = date_from
        while current_date <= date_to:
            day_totals = {
                "critical": 0,
                "high": 0,
                "medium": 0,
                "low": 0,
                "informational": 0,
                "muted": 0,
            }
            day_scan_ids = []

            for provider_id, summaries in provider_data.items():
                # Find the latest data for this provider <= current_date
                for summary in summaries:  # Already sorted by date desc
                    if summary["date"] <= current_date:
                        day_totals["critical"] += summary["critical"] or 0
                        day_totals["high"] += summary["high"] or 0
                        day_totals["medium"] += summary["medium"] or 0
                        day_totals["low"] += summary["low"] or 0
                        day_totals["informational"] += summary["informational"] or 0
                        day_totals["muted"] += summary["muted"] or 0
                        day_scan_ids.append(summary["scan_id"])
                        break  # Found the latest data for this provider

            result.append(
                {"date": current_date, "scan_ids": day_scan_ids, **day_totals}
            )
            current_date += timedelta(days=1)

        serializer = self.get_serializer(result, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)

    def _generate_zero_result(self, date_from, date_to):
        """Generate a list of zero-filled results for each date in range."""
        result = []
        current_date = date_from
        zero_values = {
            "critical": 0,
            "high": 0,
            "medium": 0,
            "low": 0,
            "informational": 0,
            "muted": 0,
            "scan_ids": [],
        }
        while current_date <= date_to:
            result.append({"date": current_date, **zero_values})
            current_date += timedelta(days=1)
        return result

    @extend_schema(
        summary="Get ThreatScore snapshots",
        description=(
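The fill-forward loop above carries each provider's most recent known summary into days without scans, exactly as the timeseries endpoint's schema description promises. A minimal standalone sketch of that behavior with hypothetical data:

# Sketch: per-provider rows sorted by date descending; for each day, the
# first row with date <= day is the latest known value and is carried forward.
from datetime import date, timedelta

rows = [
    {"provider": "p1", "date": date(2025, 1, 3), "critical": 5},
    {"provider": "p1", "date": date(2025, 1, 1), "critical": 2},
]

day, end, series = date(2025, 1, 1), date(2025, 1, 4), []
while day <= end:
    total = 0
    for summary in rows:
        if summary["date"] <= day:
            total += summary["critical"]
            break
    series.append((day, total))
    day += timedelta(days=1)

# Jan 1-2 reuse the Jan 1 value, Jan 3-4 the Jan 3 value.
assert [t for _, t in series] == [2, 2, 5, 5]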
@@ -4385,11 +4551,9 @@ class OverviewViewSet(BaseRLSViewSet):
        snapshot_id = request.query_params.get("snapshot_id")

        # Base queryset with RLS
        base_queryset = ThreatScoreSnapshot.objects.filter(tenant_id=tenant_id)

        # Apply RBAC filtering
        if hasattr(self, "allowed_providers"):
            base_queryset = base_queryset.filter(provider__in=self.allowed_providers)
        base_queryset = self._apply_provider_filter(
            ThreatScoreSnapshot.objects.filter(tenant_id=tenant_id)
        )

        # Case 1: Specific snapshot requested
        if snapshot_id:
@@ -4405,17 +4569,9 @@ class OverviewViewSet(BaseRLSViewSet):
        # Case 2: Latest snapshot per provider (default)
        # Apply filters manually: this @action is outside the standard list endpoint flow,
        # so DRF's filter backends don't execute and we must flatten JSON:API params ourselves.
        normalized_params = QueryDict(mutable=True)
        for param_key, values in request.query_params.lists():
            normalized_key = param_key
            if param_key.startswith("filter[") and param_key.endswith("]"):
                normalized_key = param_key[7:-1]
            if normalized_key == "snapshot_id":
                continue
            normalized_params.setlist(normalized_key, values)

        filterset = ThreatScoreSnapshotFilter(normalized_params, queryset=base_queryset)
        filtered_queryset = filterset.qs
        filtered_queryset = self._apply_filterset(
            base_queryset, ThreatScoreSnapshotFilter, exclude_keys={"snapshot_id"}
        )

        # Get distinct provider IDs from filtered queryset
        # Pick the latest snapshot per provider using Postgres DISTINCT ON pattern.
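The "DISTINCT ON pattern" mentioned above keeps only the first row per provider after ordering by (provider_id, -inserted_at), i.e. each provider's newest record. A standalone sketch of the same semantics in plain Python:

# Sketch: sort by (provider_id, -inserted_at), then the first row seen per
# provider_id wins -- the in-memory equivalent of Postgres DISTINCT ON.
rows = [
    {"id": 1, "provider_id": "p1", "inserted_at": 10},
    {"id": 2, "provider_id": "p1", "inserted_at": 30},
    {"id": 3, "provider_id": "p2", "inserted_at": 20},
]
rows.sort(key=lambda r: (r["provider_id"], -r["inserted_at"]))
latest = {}
for row in rows:
    latest.setdefault(row["provider_id"], row["id"])  # first row wins
assert sorted(latest.values()) == [2, 3]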
@@ -4659,6 +4815,67 @@ class OverviewViewSet(BaseRLSViewSet):

        return aggregated_snapshot

    @action(
        detail=False,
        methods=["get"],
        url_name="attack-surface",
        url_path="attack-surfaces",
    )
    def attack_surface(self, request):
        tenant_id = request.tenant_id
        latest_scan_ids = self._latest_scan_ids_for_allowed_providers(tenant_id)

        # Build base queryset and apply user filters via FilterSet
        base_queryset = AttackSurfaceOverview.objects.filter(
            tenant_id=tenant_id, scan_id__in=latest_scan_ids
        )
        filtered_queryset = self._apply_filterset(
            base_queryset, AttackSurfaceOverviewFilter
        )
        provider_types = list(
            filtered_queryset.values_list(
                "scan__provider__provider", flat=True
            ).distinct()
        )
        attack_surface_check_ids = self._attack_surface_check_ids_by_provider_types(
            provider_types
        )
        # Aggregate attack surface data
        aggregation = filtered_queryset.values("attack_surface_type").annotate(
            total_findings=Coalesce(Sum("total_findings"), 0),
            failed_findings=Coalesce(Sum("failed_findings"), 0),
            muted_failed_findings=Coalesce(Sum("muted_failed_findings"), 0),
        )

        results = {
            attack_surface_type: {
                "total_findings": 0,
                "failed_findings": 0,
                "muted_failed_findings": 0,
            }
            for attack_surface_type in AttackSurfaceOverview.AttackSurfaceTypeChoices.values
        }
        for item in aggregation:
            results[item["attack_surface_type"]] = {
                "total_findings": item["total_findings"],
                "failed_findings": item["failed_findings"],
                "muted_failed_findings": item["muted_failed_findings"],
            }

        response_data = [
            {
                "attack_surface_type": key,
                **value,
                "check_ids": attack_surface_check_ids.get(key, []),
            }
            for key, value in results.items()
        ]

        return Response(
            self.get_serializer(response_data, many=True).data,
            status=status.HTTP_200_OK,
        )


@extend_schema(tags=["Schedule"])
@extend_schema_view(
@@ -36,6 +36,14 @@ DATABASES = {
        "HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
        "PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
    },
    "admin_replica": {
        "ENGINE": "psqlextra.backend",
        "NAME": env("POSTGRES_REPLICA_DB", default=default_db_name),
        "USER": env("POSTGRES_ADMIN_USER", default="prowler"),
        "PASSWORD": env("POSTGRES_ADMIN_PASSWORD", default="S3cret"),
        "HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
        "PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
    },
}

DATABASES["default"] = DATABASES["prowler_user"]
@@ -37,6 +37,14 @@ DATABASES = {
        "HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
        "PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
    },
    "admin_replica": {
        "ENGINE": "psqlextra.backend",
        "NAME": env("POSTGRES_REPLICA_DB", default=default_db_name),
        "USER": env("POSTGRES_ADMIN_USER"),
        "PASSWORD": env("POSTGRES_ADMIN_PASSWORD"),
        "HOST": env("POSTGRES_REPLICA_HOST", default=default_db_host),
        "PORT": env("POSTGRES_REPLICA_PORT", default=default_db_port),
    },
}

DATABASES["default"] = DATABASES["prowler_user"]
@@ -19,6 +19,8 @@ PORT = env("DJANGO_PORT", default=8000)

# Server settings
bind = f"{BIND_ADDRESS}:{PORT}"
# TODO: Remove after the category filter is implemented
limit_request_line = 0

workers = env.int("DJANGO_WORKERS", default=multiprocessing.cpu_count() * 2 + 1)
reload = DEBUG
@@ -5,6 +5,9 @@ IGNORED_EXCEPTIONS = [
    # Provider is not connected due to credentials errors
    "is not connected",
    "ProviderConnectionError",
    # Provider was deleted during a scan
    "ProviderDeletedException",
    "violates foreign key constraint",
    # Authentication Errors from AWS
    "InvalidToken",
    "AccessDeniedException",
@@ -15,6 +15,7 @@ from tasks.jobs.backfill import backfill_resource_scan_summaries

from api.db_utils import rls_transaction
from api.models import (
    AttackSurfaceOverview,
    ComplianceOverview,
    ComplianceRequirementOverview,
    Finding,
@@ -1469,6 +1470,21 @@ def mute_rules_fixture(tenants_fixture, create_test_user, findings_fixture):
    return mute_rule1, mute_rule2


@pytest.fixture
def create_attack_surface_overview():
    def _create(tenant, scan, attack_surface_type, total=10, failed=5, muted_failed=2):
        return AttackSurfaceOverview.objects.create(
            tenant=tenant,
            scan=scan,
            attack_surface_type=attack_surface_type,
            total_findings=total,
            failed_findings=failed,
            muted_failed_findings=muted_failed,
        )

    return _create


def get_authorization_header(access_token: str) -> dict:
    return {"Authorization": f"Bearer {access_token}"}

@@ -1,14 +1,18 @@
from collections import defaultdict

from django.db.models import Sum

from api.db_router import READ_REPLICA_ALIAS
from api.db_utils import rls_transaction
from api.models import (
    ComplianceOverviewSummary,
    ComplianceRequirementOverview,
    DailySeveritySummary,
    Resource,
    ResourceFindingMapping,
    ResourceScanSummary,
    Scan,
    ScanSummary,
    StateChoices,
)
@@ -175,3 +179,100 @@ def backfill_compliance_summaries(tenant_id: str, scan_id: str):
        )

    return {"status": "backfilled", "inserted": len(summary_objects)}


def backfill_daily_severity_summaries(tenant_id: str, days: int = None):
    """
    Backfill DailySeveritySummary from completed scans.
    Groups by provider+date, keeps latest scan per day.
    """
    from datetime import timedelta

    from django.utils import timezone

    created_count = 0
    updated_count = 0

    with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
        scan_filter = {
            "tenant_id": tenant_id,
            "state": StateChoices.COMPLETED,
            "completed_at__isnull": False,
        }

        if days is not None:
            cutoff_date = timezone.now() - timedelta(days=days)
            scan_filter["completed_at__gte"] = cutoff_date

        completed_scans = (
            Scan.objects.filter(**scan_filter)
            .order_by("provider_id", "-completed_at")
            .values("id", "provider_id", "completed_at")
        )

        if not completed_scans:
            return {"status": "no scans to backfill"}

        # Keep only latest scan per provider/day
        latest_scans_by_day = {}
        for scan in completed_scans:
            key = (scan["provider_id"], scan["completed_at"].date())
            if key not in latest_scans_by_day:
                latest_scans_by_day[key] = scan

    # Process each provider/day
    for (provider_id, scan_date), scan in latest_scans_by_day.items():
        scan_id = scan["id"]

        with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
            severity_totals = (
                ScanSummary.objects.filter(
                    tenant_id=tenant_id,
                    scan_id=scan_id,
                )
                .values("severity")
                .annotate(total_fail=Sum("fail"), total_muted=Sum("muted"))
            )

            severity_data = {
                "critical": 0,
                "high": 0,
                "medium": 0,
                "low": 0,
                "informational": 0,
                "muted": 0,
            }

            for row in severity_totals:
                severity = row["severity"]
                if severity in severity_data:
                    severity_data[severity] = row["total_fail"] or 0
                severity_data["muted"] += row["total_muted"] or 0

        with rls_transaction(tenant_id):
            _, created = DailySeveritySummary.objects.update_or_create(
                tenant_id=tenant_id,
                provider_id=provider_id,
                date=scan_date,
                defaults={
                    "scan_id": scan_id,
                    "critical": severity_data["critical"],
                    "high": severity_data["high"],
                    "medium": severity_data["medium"],
                    "low": severity_data["low"],
                    "informational": severity_data["informational"],
                    "muted": severity_data["muted"],
                },
            )

        if created:
            created_count += 1
        else:
            updated_count += 1

    return {
        "status": "backfilled",
        "created": created_count,
        "updated": updated_count,
        "total_days": len(latest_scans_by_day),
    }
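The dedup step in the backfill above relies on the scans being sorted newest-first per provider, so the first scan seen for a (provider, day) key is kept. A standalone sketch of that invariant with hypothetical data:

# Sketch: scans ordered by (provider_id, -completed_at); the first entry per
# (provider, day) key is the most recent scan of that day.
from datetime import datetime

scans = [
    {"id": "s2", "provider_id": "p1", "completed_at": datetime(2025, 1, 1, 18)},
    {"id": "s1", "provider_id": "p1", "completed_at": datetime(2025, 1, 1, 9)},
]

latest_by_day = {}
for scan in scans:
    key = (scan["provider_id"], scan["completed_at"].date())
    if key not in latest_by_day:
        latest_by_day[key] = scan

assert latest_by_day[("p1", datetime(2025, 1, 1).date())]["id"] == "s2"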
@@ -2,6 +2,8 @@ from typing import Dict

import boto3
import openai
from botocore import UNSIGNED
from botocore.config import Config
from botocore.exceptions import BotoCoreError, ClientError
from celery.utils.log import get_task_logger
@@ -10,6 +12,39 @@ from api.models import LighthouseProviderConfiguration, LighthouseProviderModels
logger = get_task_logger(__name__)


def _extract_error_message(e: Exception) -> str:
    """
    Extract a user-friendly error message from various exception types.

    This function handles exceptions from different providers (OpenAI, AWS Bedrock)
    and extracts the most relevant error message for display to users.

    Args:
        e: The exception to extract a message from.

    Returns:
        str: A user-friendly error message.
    """
    # For OpenAI SDK errors (>= v1.0)
    # OpenAI exceptions have a 'body' attribute with error details
    if hasattr(e, "body") and isinstance(e.body, dict):
        if "message" in e.body:
            return e.body["message"]
        # Sometimes nested under 'error' key
        if "error" in e.body and isinstance(e.body["error"], dict):
            return e.body["error"].get("message", str(e))

    # For boto3 ClientError
    # Boto3 exceptions have a 'response' attribute with error details
    if hasattr(e, "response") and isinstance(e.response, dict):
        error_info = e.response.get("Error", {})
        if error_info.get("Message"):
            return error_info["Message"]

    # Fallback to string representation for unknown error types
    return str(e)


def _extract_openai_api_key(
    provider_cfg: LighthouseProviderConfiguration,
) -> str | None:
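The extraction order in _extract_error_message above is: an OpenAI-style 'body' dict first, then a boto3-style response["Error"]["Message"], then str(e). A standalone sketch of the first branch with a hypothetical exception class:

# Sketch: a 'body' dict (OpenAI-style) wins over the generic str(e) fallback.
class FakeOpenAIError(Exception):  # hypothetical, for illustration only
    def __init__(self, body):
        super().__init__("api error")
        self.body = body

err = FakeOpenAIError({"error": {"message": "Invalid API key"}})
if hasattr(err, "body") and isinstance(err.body, dict):
    message = err.body.get("message") or err.body.get("error", {}).get("message")
else:
    message = str(err)
assert message == "Invalid API key"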
@@ -56,21 +91,39 @@ def _extract_bedrock_credentials(
    """
    Safely extract AWS Bedrock credentials from a provider configuration.

    Supports two authentication methods:
    1. AWS access key + secret key + region
    2. Bedrock API key (bearer token) + region

    Args:
        provider_cfg (LighthouseProviderConfiguration): The provider configuration instance
            containing the credentials.

    Returns:
        Dict[str, str] | None: Dictionary with 'access_key_id', 'secret_access_key', and
            'region' if present and valid, otherwise None.
        Dict[str, str] | None: Dictionary with either:
            - 'access_key_id', 'secret_access_key', and 'region' for access key auth
            - 'api_key' and 'region' for API key (bearer token) auth
            Returns None if credentials are invalid or missing.
    """
    creds = provider_cfg.credentials_decoded
    if not isinstance(creds, dict):
        return None

    region = creds.get("region")
    if not isinstance(region, str) or not region:
        return None

    # Check for API key authentication first
    api_key = creds.get("api_key")
    if isinstance(api_key, str) and api_key:
        return {
            "api_key": api_key,
            "region": region,
        }

    # Fall back to access key authentication
    access_key_id = creds.get("access_key_id")
    secret_access_key = creds.get("secret_access_key")
    region = creds.get("region")

    # Validate all required fields are present and are strings
    if (
@@ -78,8 +131,6 @@ def _extract_bedrock_credentials(
        or not access_key_id
        or not isinstance(secret_access_key, str)
        or not secret_access_key
        or not isinstance(region, str)
        or not region
    ):
        return None

@@ -90,6 +141,51 @@ def _extract_bedrock_credentials(
    }


def _create_bedrock_client(
    bedrock_creds: Dict[str, str], service_name: str = "bedrock"
):
    """
    Create a boto3 Bedrock client with the appropriate authentication method.

    Supports two authentication methods:
    1. API key (bearer token) - uses unsigned requests with Authorization header
    2. AWS access key + secret key - uses standard SigV4 signing

    Args:
        bedrock_creds: Dictionary with either:
            - 'api_key' and 'region' for API key (bearer token) auth
            - 'access_key_id', 'secret_access_key', and 'region' for access key auth
        service_name: The Bedrock service name. Use 'bedrock' for control plane
            operations (list_foundation_models, etc.) or 'bedrock-runtime' for
            inference operations.

    Returns:
        boto3 client configured for the specified Bedrock service.
    """
    region = bedrock_creds["region"]

    if "api_key" in bedrock_creds:
        bearer_token = bedrock_creds["api_key"]
        client = boto3.client(
            service_name=service_name,
            region_name=region,
            config=Config(signature_version=UNSIGNED),
        )

        def inject_bearer_token(request, **kwargs):
            request.headers["Authorization"] = f"Bearer {bearer_token}"

        client.meta.events.register("before-send.*.*", inject_bearer_token)
        return client

    return boto3.client(
        service_name=service_name,
        region_name=region,
        aws_access_key_id=bedrock_creds["access_key_id"],
        aws_secret_access_key=bedrock_creds["secret_access_key"],
    )


def check_lighthouse_provider_connection(provider_config_id: str) -> Dict:
    """
    Validate a Lighthouse provider configuration by calling the provider API and
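A hypothetical usage sketch of the _create_bedrock_client factory above (not executed here, since it would call AWS): the same credentials dict drives either bearer-token or SigV4 auth, and service_name selects the control plane versus inference endpoint.

# creds as produced by _extract_bedrock_credentials; the api_key value is a
# placeholder, not a real token.
creds = {"api_key": "bedrock-api-key-placeholder", "region": "us-east-1"}
# client = _create_bedrock_client(creds, service_name="bedrock-runtime")
# With an api_key present, every request goes out unsigned and the before-send
# event hook registered in the factory adds "Authorization: Bearer <api_key>".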
@@ -141,12 +237,7 @@ def check_lighthouse_provider_connection(provider_config_id: str) -> Dict:
        }

        # Test connection by listing foundation models
        bedrock_client = boto3.client(
            "bedrock",
            aws_access_key_id=bedrock_creds["access_key_id"],
            aws_secret_access_key=bedrock_creds["secret_access_key"],
            region_name=bedrock_creds["region"],
        )
        bedrock_client = _create_bedrock_client(bedrock_creds)
        _ = bedrock_client.list_foundation_models()

    elif (
@@ -179,12 +270,13 @@ def check_lighthouse_provider_connection(provider_config_id: str) -> Dict:
        return {"connected": True, "error": None}

    except Exception as e:
        error_message = _extract_error_message(e)
        logger.warning(
            "%s connection check failed: %s", provider_cfg.provider_type, str(e)
            "%s connection check failed: %s", provider_cfg.provider_type, error_message
        )
        provider_cfg.is_active = False
        provider_cfg.save()
        return {"connected": False, "error": str(e)}
        return {"connected": False, "error": error_message}


def _fetch_openai_models(api_key: str) -> Dict[str, str]:
@@ -232,105 +324,219 @@ def _fetch_openai_compatible_models(base_url: str, api_key: str) -> Dict[str, st
    return available_models


def _fetch_bedrock_models(bedrock_creds: Dict[str, str]) -> Dict[str, str]:
def _get_region_prefix(region: str) -> str:
    """
    Fetch available models from AWS Bedrock with entitlement verification.
    Determine geographic prefix for AWS region.

    This function:
    1. Lists foundation models with TEXT modality support
    2. Lists inference profiles with TEXT modality support
    3. Verifies user has entitlement access to each model
    Examples: ap-south-1 -> apac, us-east-1 -> us, eu-west-1 -> eu
    """
    if region.startswith(("us-", "ca-", "sa-")):
        return "us"
    elif region.startswith("eu-"):
        return "eu"
    elif region.startswith("ap-"):
        return "apac"
    return "global"

    Args:
        bedrock_creds: Dictionary with 'access_key_id', 'secret_access_key', and 'region'.

def _clean_inference_profile_name(profile_name: str) -> str:
    """
    Remove geographic prefix from inference profile name.

    AWS includes geographic prefixes in profile names which are redundant
    since the profile ID already contains this information.

    Examples:
        "APAC Anthropic Claude 3.5 Sonnet" -> "Anthropic Claude 3.5 Sonnet"
        "GLOBAL Claude Sonnet 4.5" -> "Claude Sonnet 4.5"
        "US Anthropic Claude 3 Haiku" -> "Anthropic Claude 3 Haiku"
    """
    prefixes = ["APAC ", "GLOBAL ", "US ", "EU ", "APAC-", "GLOBAL-", "US-", "EU-"]

    for prefix in prefixes:
        if profile_name.upper().startswith(prefix.upper()):
            return profile_name[len(prefix) :].strip()

    return profile_name


def _supports_text_modality(input_modalities: list, output_modalities: list) -> bool:
    """Check if model supports TEXT for both input and output."""
    return "TEXT" in input_modalities and "TEXT" in output_modalities


def _get_foundation_model_modalities(
    bedrock_client, model_id: str
) -> tuple[list, list] | None:
    """
    Fetch input and output modalities for a foundation model.

    Returns:
        Dict mapping model_id to model_name for all accessible models.

    Raises:
        BotoCoreError, ClientError: If AWS API calls fail.
        (input_modalities, output_modalities) or None if fetch fails
    """
    bedrock_client = boto3.client(
        "bedrock",
        aws_access_key_id=bedrock_creds["access_key_id"],
        aws_secret_access_key=bedrock_creds["secret_access_key"],
        region_name=bedrock_creds["region"],
    )
    try:
        model_info = bedrock_client.get_foundation_model(modelIdentifier=model_id)
        model_details = model_info.get("modelDetails", {})
        input_mods = model_details.get("inputModalities", [])
        output_mods = model_details.get("outputModalities", [])
        return (input_mods, output_mods)
    except (BotoCoreError, ClientError) as e:
        logger.debug("Could not fetch model details for %s: %s", model_id, str(e))
        return None

    models_to_check: Dict[str, str] = {}

    # Step 1: Get foundation models with TEXT modality
def _extract_foundation_model_ids(profile_models: list) -> list[str]:
    """
    Extract foundation model IDs from inference profile model ARNs.

    Args:
        profile_models: List of model references from inference profile

    Returns:
        List of foundation model IDs extracted from ARNs
    """
    model_ids = []
    for model_ref in profile_models:
        model_arn = model_ref.get("modelArn", "")
        if "foundation-model/" in model_arn:
            model_id = model_arn.split("foundation-model/")[1]
            model_ids.append(model_id)
    return model_ids


def _build_inference_profile_map(
    bedrock_client, region: str
) -> Dict[str, tuple[str, str]]:
    """
    Build map of foundation_model_id -> best inference profile.

    Returns:
        Dict mapping foundation_model_id to (profile_id, profile_name)
        Only includes profiles with TEXT modality support
        Prefers region-matched profiles over others
    """
    region_prefix = _get_region_prefix(region)
    model_to_profile: Dict[str, tuple[str, str]] = {}

    try:
        response = bedrock_client.list_inference_profiles()
        profiles = response.get("inferenceProfileSummaries", [])

        for profile in profiles:
            profile_id = profile.get("inferenceProfileId")
            profile_name = profile.get("inferenceProfileName")

            if not profile_id or not profile_name:
                continue

            profile_models = profile.get("models", [])
            if not profile_models:
                continue

            foundation_model_ids = _extract_foundation_model_ids(profile_models)
            if not foundation_model_ids:
                continue

            modalities = _get_foundation_model_modalities(
                bedrock_client, foundation_model_ids[0]
            )
            if not modalities:
                continue

            input_mods, output_mods = modalities
            if not _supports_text_modality(input_mods, output_mods):
                continue

            is_preferred = profile_id.startswith(f"{region_prefix}.")
            clean_name = _clean_inference_profile_name(profile_name)

            for foundation_model_id in foundation_model_ids:
                if foundation_model_id not in model_to_profile:
                    model_to_profile[foundation_model_id] = (profile_id, clean_name)
                elif is_preferred and not model_to_profile[foundation_model_id][
                    0
                ].startswith(f"{region_prefix}."):
                    model_to_profile[foundation_model_id] = (profile_id, clean_name)

    except (BotoCoreError, ClientError) as e:
        logger.info("Could not fetch inference profiles in %s: %s", region, str(e))

    return model_to_profile


def _check_on_demand_availability(bedrock_client, model_id: str) -> bool:
    """Check if an ON_DEMAND foundation model is entitled and available."""
    try:
        availability = bedrock_client.get_foundation_model_availability(
            modelId=model_id
        )
        entitlement = availability.get("entitlementAvailability")
        return entitlement == "AVAILABLE"
    except (BotoCoreError, ClientError) as e:
        logger.debug("Could not check availability for %s: %s", model_id, str(e))
        return False


def _fetch_bedrock_models(bedrock_creds: Dict[str, str]) -> Dict[str, str]:
    """
    Fetch available models from AWS Bedrock, preferring inference profiles over ON_DEMAND.

    Strategy:
    1. Build map of foundation_model -> best_inference_profile (with TEXT validation)
    2. For each TEXT-capable foundation model:
       - Use inference profile ID if available (preferred - better throughput)
       - Fallback to foundation model ID if only ON_DEMAND available
    3. Verify entitlement for ON_DEMAND models

    Args:
        bedrock_creds: Dict with 'region' and auth credentials

    Returns:
        Dict mapping model_id to model_name. IDs can be:
        - Inference profile IDs (e.g., "apac.anthropic.claude-3-5-sonnet-20240620-v1:0")
        - Foundation model IDs (e.g., "anthropic.claude-3-5-sonnet-20240620-v1:0")
    """
    bedrock_client = _create_bedrock_client(bedrock_creds)
    region = bedrock_creds["region"]

    model_to_profile = _build_inference_profile_map(bedrock_client, region)

    foundation_response = bedrock_client.list_foundation_models()
    model_summaries = foundation_response.get("modelSummaries", [])

    for model in model_summaries:
        # Check if model supports TEXT input and output modality
        input_modalities = model.get("inputModalities", [])
        output_modalities = model.get("outputModalities", [])
    models_to_return: Dict[str, str] = {}
    on_demand_models: set[str] = set()

        if "TEXT" not in input_modalities or "TEXT" not in output_modalities:
    for model in model_summaries:
        input_mods = model.get("inputModalities", [])
        output_mods = model.get("outputModalities", [])

        if not _supports_text_modality(input_mods, output_mods):
            continue

        model_id = model.get("modelId")
        if not model_id:
        model_name = model.get("modelName")

        if not model_id or not model_name:
            continue

        inference_types = model.get("inferenceTypesSupported", [])
        if model_id in model_to_profile:
            profile_id, profile_name = model_to_profile[model_id]
            models_to_return[profile_id] = profile_name
        else:
            inference_types = model.get("inferenceTypesSupported", [])
            if "ON_DEMAND" in inference_types:
                models_to_return[model_id] = model_name
                on_demand_models.add(model_id)

        # Only include models with ON_DEMAND inference support
        if "ON_DEMAND" in inference_types:
            models_to_check[model_id] = model["modelName"]

    # Step 2: Get inference profiles
    try:
        inference_profiles_response = bedrock_client.list_inference_profiles()
        inference_profiles = inference_profiles_response.get(
            "inferenceProfileSummaries", []
        )

        for profile in inference_profiles:
            # Check if profile supports TEXT modality
            input_modalities = profile.get("inputModalities", [])
            output_modalities = profile.get("outputModalities", [])

            if "TEXT" not in input_modalities or "TEXT" not in output_modalities:
                continue

            profile_id = profile.get("inferenceProfileId")
            if profile_id:
                models_to_check[profile_id] = profile["inferenceProfileName"]

    except (BotoCoreError, ClientError) as e:
        logger.info(
            "Could not fetch inference profiles in %s: %s",
            bedrock_creds["region"],
            str(e),
        )

    # Step 3: Verify entitlement availability for each model
    available_models: Dict[str, str] = {}

    for model_id, model_name in models_to_check.items():
        try:
            availability = bedrock_client.get_foundation_model_availability(
                modelId=model_id
            )

            entitlement = availability.get("entitlementAvailability")

            # Only include models user has access to
            if entitlement == "AVAILABLE":
    for model_id, model_name in models_to_return.items():
        if model_id in on_demand_models:
            if _check_on_demand_availability(bedrock_client, model_id):
                available_models[model_id] = model_name
            else:
                logger.debug(
                    "Skipping model %s - entitlement status: %s", model_id, entitlement
                )

        except (BotoCoreError, ClientError) as e:
            logger.debug(
                "Could not check availability for model %s: %s", model_id, str(e)
            )
            continue
        else:
            available_models[model_id] = model_name

    return available_models

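The profile-preference rule in _build_inference_profile_map above: a profile whose ID starts with the caller's geographic prefix replaces a non-matching entry, but an already region-matched entry is never downgraded. A standalone sketch of that selection (profile IDs here are illustrative):

# Sketch: region prefix resolution plus "region-matched profile wins".
def get_region_prefix(region):
    if region.startswith(("us-", "ca-", "sa-")):
        return "us"
    if region.startswith("eu-"):
        return "eu"
    if region.startswith("ap-"):
        return "apac"
    return "global"

prefix = get_region_prefix("ap-south-1")  # -> "apac"
profiles = [
    "us.anthropic.claude-3-5-sonnet-v1:0",
    "apac.anthropic.claude-3-5-sonnet-v1:0",
]
best = None
for profile_id in profiles:
    if best is None or (
        profile_id.startswith(f"{prefix}.") and not best.startswith(f"{prefix}.")
    ):
        best = profile_id
assert best.startswith("apac.")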
@@ -359,7 +565,6 @@ def refresh_lighthouse_provider_models(provider_config_id: str) -> Dict:
    provider_cfg = LighthouseProviderConfiguration.objects.get(pk=provider_config_id)
    fetched_models: Dict[str, str] = {}

    # Fetch models from the appropriate provider
    try:
        if (
            provider_cfg.provider_type
@@ -414,12 +619,13 @@ def refresh_lighthouse_provider_models(provider_config_id: str) -> Dict:
        }

    except Exception as e:
        error_message = _extract_error_message(e)
        logger.warning(
            "Unexpected error refreshing %s models: %s",
            provider_cfg.provider_type,
            str(e),
            error_message,
        )
        return {"created": 0, "updated": 0, "deleted": 0, "error": str(e)}
        return {"created": 0, "updated": 0, "deleted": 0, "error": error_message}

    # Upsert models into the catalog
    created = 0

@@ -1,6 +1,7 @@
import io
import os
from collections import defaultdict
from functools import partial
from pathlib import Path
from shutil import rmtree

@@ -772,7 +773,9 @@ def _create_section_score_chart(
    return buffer


def _add_pdf_footer(canvas_obj: canvas.Canvas, doc: SimpleDocTemplate) -> None:
def _add_pdf_footer(
    canvas_obj: canvas.Canvas, doc: SimpleDocTemplate, compliance_name: str
) -> None:
    """
    Add footer with page number and branding to each page of the PDF.

@@ -782,7 +785,9 @@ def _add_pdf_footer(canvas_obj: canvas.Canvas, doc: SimpleDocTemplate) -> None:
    """
    canvas_obj.saveState()
    width, height = doc.pagesize
    page_num_text = f"Página {doc.page}"
    page_num_text = (
        f"{'Página' if 'ens' in compliance_name.lower() else 'Page'} {doc.page}"
    )
    canvas_obj.setFont("PlusJakartaSans", 9)
    canvas_obj.setFillColorRGB(0.4, 0.4, 0.4)
    canvas_obj.drawString(30, 20, page_num_text)
@@ -1595,7 +1600,11 @@ def generate_threatscore_report(
        elements.append(PageBreak())

        # Build the PDF
        doc.build(elements, onFirstPage=_add_pdf_footer, onLaterPages=_add_pdf_footer)
        doc.build(
            elements,
            onFirstPage=partial(_add_pdf_footer, compliance_name=compliance_name),
            onLaterPages=partial(_add_pdf_footer, compliance_name=compliance_name),
        )
    except Exception as e:
        tb_lineno = e.__traceback__.tb_lineno if e.__traceback__ else "unknown"
        logger.info(f"Error building the document, line {tb_lineno} -- {e}")
@@ -2229,12 +2238,20 @@ def generate_ens_report(
            [
                "CUMPLE",
                str(passed_requirements),
                f"{(passed_requirements / total_requirements * 100):.1f}%",
                (
                    f"{(passed_requirements / total_requirements * 100):.1f}%"
                    if total_requirements > 0
                    else "0.0%"
                ),
            ],
            [
                "NO CUMPLE",
                str(failed_requirements),
                f"{(failed_requirements / total_requirements * 100):.1f}%",
                (
                    f"{(failed_requirements / total_requirements * 100):.1f}%"
                    if total_requirements > 0
                    else "0.0%"
                ),
            ],
            ["TOTAL", str(total_requirements), "100%"],
        ]
@@ -2818,7 +2835,11 @@ def generate_ens_report(

        # Build the PDF
        logger.info("Building PDF...")
        doc.build(elements, onFirstPage=_add_pdf_footer, onLaterPages=_add_pdf_footer)
        doc.build(
            elements,
            onFirstPage=partial(_add_pdf_footer, compliance_name=compliance_name),
            onLaterPages=partial(_add_pdf_footer, compliance_name=compliance_name),
        )
    except Exception as e:
        tb_lineno = e.__traceback__.tb_lineno if e.__traceback__ else "unknown"
        logger.error(f"Error building ENS report, line {tb_lineno} -- {e}")
@@ -3365,7 +3386,11 @@ def generate_nis2_report(

        # Build the PDF
        logger.info("Building NIS2 PDF...")
        doc.build(elements, onFirstPage=_add_pdf_footer, onLaterPages=_add_pdf_footer)
        doc.build(
            elements,
            onFirstPage=partial(_add_pdf_footer, compliance_name=compliance_name),
            onLaterPages=partial(_add_pdf_footer, compliance_name=compliance_name),
        )
        logger.info(f"NIS2 report successfully generated at {output_path}")

    except Exception as e:

@@ -12,7 +12,7 @@ from celery.utils.log import get_task_logger
from config.env import env
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
from django.db import IntegrityError, OperationalError
from django.db.models import Case, Count, IntegerField, Prefetch, Sum, When
from django.db.models import Case, Count, IntegerField, Prefetch, Q, Sum, When
from tasks.utils import CustomEncoder

from api.compliance import PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE
@@ -26,8 +26,10 @@ from api.db_utils import (
)
from api.exceptions import ProviderConnectionError
from api.models import (
    AttackSurfaceOverview,
    ComplianceOverviewSummary,
    ComplianceRequirementOverview,
    DailySeveritySummary,
    Finding,
    MuteRule,
    Processor,
@@ -43,6 +45,7 @@ from api.models import (
from api.models import StatusChoices as FindingStatus
from api.utils import initialize_prowler_provider, return_prowler_provider
from api.v1.serializers import ScanTaskSerializer
from prowler.lib.check.models import CheckMetadata
from prowler.lib.outputs.finding import Finding as ProwlerFinding
from prowler.lib.scan.scan import Scan as ProwlerScan
@@ -75,6 +78,44 @@ FINDINGS_MICRO_BATCH_SIZE = env.int("DJANGO_FINDINGS_MICRO_BATCH_SIZE", default=
SCAN_DB_BATCH_SIZE = env.int("DJANGO_SCAN_DB_BATCH_SIZE", default=500)


ATTACK_SURFACE_PROVIDER_COMPATIBILITY = {
    "internet-exposed": None,  # Compatible with all providers
    "secrets": None,  # Compatible with all providers
    "privilege-escalation": ["aws", "kubernetes"],
    "ec2-imdsv1": ["aws"],
}

_ATTACK_SURFACE_MAPPING_CACHE: dict[str, dict] = {}


def _get_attack_surface_mapping_from_provider(provider_type: str) -> dict:
    global _ATTACK_SURFACE_MAPPING_CACHE

    if provider_type in _ATTACK_SURFACE_MAPPING_CACHE:
        return _ATTACK_SURFACE_MAPPING_CACHE[provider_type]

    attack_surface_check_mappings = {
        "internet-exposed": None,
        "secrets": None,
        "privilege-escalation": {
            "iam_policy_allows_privilege_escalation",
            "iam_inline_policy_allows_privilege_escalation",
        },
        "ec2-imdsv1": {
            "ec2_instance_imdsv2_enabled"
        },  # AWS only - IMDSv1 enabled findings
    }
    for category_name, check_ids in attack_surface_check_mappings.items():
        if check_ids is None:
            sdk_check_ids = CheckMetadata.list(
                provider=provider_type, category=category_name
            )
            attack_surface_check_mappings[category_name] = sdk_check_ids

    _ATTACK_SURFACE_MAPPING_CACHE[provider_type] = attack_surface_check_mappings
    return attack_surface_check_mappings


def _create_finding_delta(
    last_status: FindingStatus | None | str, new_status: FindingStatus | None
) -> Finding.DeltaChoices:
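The mapping function above memoizes per provider type at module level, so the CheckMetadata lookup runs once per worker process per provider. A standalone sketch of the same pattern, with the resolver as a stand-in for CheckMetadata.list:

# Sketch: module-level memoization keyed by provider type; the second call
# for the same provider is served from the cache without invoking the resolver.
_CACHE: dict[str, dict] = {}

def get_mapping(provider_type: str, resolver) -> dict:
    if provider_type not in _CACHE:
        _CACHE[provider_type] = {"internet-exposed": resolver(provider_type)}
    return _CACHE[provider_type]

calls = []
fake_resolver = lambda p: calls.append(p) or {f"{p}_check_1"}
get_mapping("aws", fake_resolver)
get_mapping("aws", fake_resolver)  # cache hit; resolver not called again
assert calls == ["aws"]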
@@ -330,7 +371,7 @@ def _create_compliance_summaries(
    if summary_objects:
        with rls_transaction(tenant_id):
            ComplianceOverviewSummary.objects.bulk_create(
                summary_objects, batch_size=500
                summary_objects, batch_size=500, ignore_conflicts=True
            )

@@ -979,11 +1020,14 @@ def _aggregate_findings_by_region(
    findings_count_by_compliance = {}

    with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
        # Fetch findings with resources in a single efficient query
        # Use select_related for finding fields and prefetch_related for many-to-many resources
        # Fetch only PASS/FAIL findings (optimized query reduces data transfer)
        # Other statuses are not needed for check_status or ThreatScore calculation
        findings = (
            Finding.all_objects.filter(
                tenant_id=tenant_id, scan_id=scan_id, muted=False
                tenant_id=tenant_id,
                scan_id=scan_id,
                muted=False,
                status__in=["PASS", "FAIL"],
            )
            .only("id", "check_id", "status", "compliance")
            .prefetch_related(
@@ -1001,6 +1045,8 @@ def _aggregate_findings_by_region(
        )

        for finding in findings:
            status = finding.status

            for resource in finding.small_resources:
                region = resource.region

@@ -1008,7 +1054,7 @@ def _aggregate_findings_by_region(
                current_status = check_status_by_region.setdefault(region, {})
                # Priority: FAIL > any other status
                if current_status.get(finding.check_id) != "FAIL":
                    current_status[finding.check_id] = finding.status
                    current_status[finding.check_id] = status

                # Aggregate ThreatScore compliance counts
                if modeled_threatscore_compliance_id in (finding.compliance or {}):
@@ -1023,7 +1069,7 @@ def _aggregate_findings_by_region(
                        requirement_id, {"total": 0, "pass": 0}
                    )
                    requirement_stats["total"] += 1
                    if finding.status == "PASS":
                    if status == "PASS":
                        requirement_stats["pass"] += 1

    return check_status_by_region, findings_count_by_compliance
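The "FAIL wins" merge in _aggregate_findings_by_region above means a region's status for a check is only overwritten while it is not already FAIL, so one failing finding taints the check for that region. A standalone sketch:

# Sketch: once a check is FAIL for a region, later PASS findings cannot
# overwrite it -- the guard only updates while the current value is not FAIL.
findings = [("check_a", "PASS"), ("check_a", "FAIL"), ("check_a", "PASS")]
current_status = {}
for check_id, status in findings:
    if current_status.get(check_id) != "FAIL":
        current_status[check_id] = status
assert current_status["check_a"] == "FAIL"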
@@ -1191,3 +1237,184 @@ def create_compliance_requirements(tenant_id: str, scan_id: str):
    except Exception as e:
        logger.error(f"Error creating compliance requirements for scan {scan_id}: {e}")
        raise e


def aggregate_attack_surface(tenant_id: str, scan_id: str):
    """
    Aggregate findings into attack surface overview records.

    Creates one AttackSurfaceOverview record per attack surface type
    for the given scan, based on check_id mappings.

    Args:
        tenant_id: Tenant that owns the scan.
        scan_id: Scan UUID whose findings should be aggregated.
    """
    with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
        scan_instance = Scan.all_objects.select_related("provider").get(pk=scan_id)
        provider_type = scan_instance.provider.provider

    provider_attack_surface_mapping = _get_attack_surface_mapping_from_provider(
        provider_type=provider_type
    )

    # Filter out attack surfaces that are not compatible or have no resolved check IDs
    supported_mappings: dict[str, list[str]] = {}
    for attack_surface_type, check_ids in provider_attack_surface_mapping.items():
        compatible_providers = ATTACK_SURFACE_PROVIDER_COMPATIBILITY.get(
            attack_surface_type
        )
        if (
            compatible_providers is not None
            and provider_type not in compatible_providers
        ):
            logger.info(
                f"Skipping {attack_surface_type} - not supported for {provider_type}"
            )
            continue

        if not check_ids:
            logger.info(
                f"Skipping {attack_surface_type} - no check IDs resolved for {provider_type}"
            )
            continue

        supported_mappings[attack_surface_type] = list(check_ids)

    if not supported_mappings:
        logger.info(
            f"No attack surface mappings available for scan {scan_id} and provider {provider_type}"
        )
        logger.info(f"No attack surface overview records created for scan {scan_id}")
        return

    # Map every check_id to its attack surface, so we can aggregate with a single query
    check_id_to_surface: dict[str, str] = {}
    for attack_surface_type, check_ids in supported_mappings.items():
        for check_id in check_ids:
            check_id_to_surface[check_id] = attack_surface_type

    aggregated_counts = {
        attack_surface_type: {"total": 0, "failed": 0, "muted": 0}
        for attack_surface_type in supported_mappings.keys()
    }

    with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
        finding_stats = (
            Finding.all_objects.filter(
                tenant_id=tenant_id,
                scan_id=scan_id,
                check_id__in=list(check_id_to_surface.keys()),
            )
            .values("check_id")
            .annotate(
                total=Count("id"),
                failed=Count("id", filter=Q(status="FAIL", muted=False)),
                muted=Count("id", filter=Q(status="FAIL", muted=True)),
            )
        )

        for stats in finding_stats:
            attack_surface_type = check_id_to_surface.get(stats["check_id"])
            if not attack_surface_type:
                continue

            aggregated_counts[attack_surface_type]["total"] += stats["total"] or 0
            aggregated_counts[attack_surface_type]["failed"] += stats["failed"] or 0
            aggregated_counts[attack_surface_type]["muted"] += stats["muted"] or 0

    overview_objects = []
    for attack_surface_type, counts in aggregated_counts.items():
        total = counts["total"]
        if not total:
            continue

        overview_objects.append(
            AttackSurfaceOverview(
                tenant_id=tenant_id,
                scan_id=scan_id,
                attack_surface_type=attack_surface_type,
                total_findings=total,
                failed_findings=counts["failed"],
                muted_failed_findings=counts["muted"],
            )
        )

    # Bulk create overview records
    if overview_objects:
        with rls_transaction(tenant_id):
            AttackSurfaceOverview.objects.bulk_create(overview_objects, batch_size=500)
        logger.info(
            f"Created {len(overview_objects)} attack surface overview records for scan {scan_id}"
        )
    else:
        logger.info(f"No attack surface overview records created for scan {scan_id}")

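# A plain-Python sketch of the single-query roll-up above, assuming rows shaped
# like the .values("check_id").annotate(...) output; values are illustrative.
check_id_to_surface = {
    "check_internet_1": "internet-exposed",
    "check_secrets_1": "secrets",
}
rows = [
    {"check_id": "check_internet_1", "total": 10, "failed": 3, "muted": 1},
    {"check_id": "check_secrets_1", "total": 5, "failed": 2, "muted": 0},
]
counts = {s: {"total": 0, "failed": 0, "muted": 0} for s in set(check_id_to_surface.values())}
for row in rows:
    surface = check_id_to_surface.get(row["check_id"])
    if surface:
        for key in ("total", "failed", "muted"):
            counts[surface][key] += row[key] or 0
assert counts["internet-exposed"] == {"total": 10, "failed": 3, "muted": 1}
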
def aggregate_daily_severity(tenant_id: str, scan_id: str):
    """Aggregate scan severity counts into DailySeveritySummary (one record per provider/day)."""
    with rls_transaction(tenant_id, using=READ_REPLICA_ALIAS):
        scan = Scan.objects.filter(
            tenant_id=tenant_id,
            id=scan_id,
            state=StateChoices.COMPLETED,
        ).first()

        if not scan:
            logger.warning(f"Scan {scan_id} not found or not completed")
            return {"status": "scan is not completed"}

        provider_id = scan.provider_id
        scan_date = scan.completed_at.date()

        severity_totals = (
            ScanSummary.objects.filter(
                tenant_id=tenant_id,
                scan_id=scan_id,
            )
            .values("severity")
            .annotate(total_fail=Sum("fail"), total_muted=Sum("muted"))
        )

        severity_data = {
            "critical": 0,
            "high": 0,
            "medium": 0,
            "low": 0,
            "informational": 0,
            "muted": 0,
        }

        for row in severity_totals:
            severity = row["severity"]
            if severity in severity_data:
                severity_data[severity] = row["total_fail"] or 0
            severity_data["muted"] += row["total_muted"] or 0

    with rls_transaction(tenant_id):
        summary, created = DailySeveritySummary.objects.update_or_create(
            tenant_id=tenant_id,
            provider_id=provider_id,
            date=scan_date,
            defaults={
                "scan_id": scan_id,
                "critical": severity_data["critical"],
                "high": severity_data["high"],
                "medium": severity_data["medium"],
                "low": severity_data["low"],
                "informational": severity_data["informational"],
                "muted": severity_data["muted"],
            },
        )

    action = "created" if created else "updated"
    logger.info(
        f"Daily severity summary {action} for provider {provider_id} on {scan_date}"
    )

    return {
        "status": action,
        "provider_id": str(provider_id),
        "date": str(scan_date),
        "severity_data": severity_data,
    }

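# Sketch of the idempotency contract provided by update_or_create above:
# one row per (provider, date), so re-running the task for the same day
# overwrites the summary instead of duplicating it. Illustrative stand-in only.
daily_summaries: dict[tuple[str, str], dict] = {}

def update_or_create(provider_id: str, date: str, defaults: dict) -> bool:
    created = (provider_id, date) not in daily_summaries
    daily_summaries[(provider_id, date)] = defaults
    return created

assert update_or_create("p1", "2025-01-01", {"critical": 2}) is True
assert update_or_create("p1", "2025-01-01", {"critical": 3}) is False  # updated
assert daily_summaries[("p1", "2025-01-01")]["critical"] == 3
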
@@ -10,6 +10,7 @@ from config.django.base import DJANGO_FINDINGS_BATCH_SIZE, DJANGO_TMP_OUTPUT_DIR
from django_celery_beat.models import PeriodicTask
from tasks.jobs.backfill import (
    backfill_compliance_summaries,
    backfill_daily_severity_summaries,
    backfill_resource_scan_summaries,
)
from tasks.jobs.connection import (
@@ -37,6 +38,8 @@ from tasks.jobs.lighthouse_providers import (
from tasks.jobs.muting import mute_historical_findings
from tasks.jobs.report import generate_compliance_reports_job
from tasks.jobs.scan import (
    aggregate_attack_surface,
    aggregate_daily_severity,
    aggregate_findings,
    create_compliance_requirements,
    perform_prowler_scan,
@@ -46,7 +49,7 @@ from tasks.utils import batched, get_next_execution_datetime
from api.compliance import get_compliance_frameworks
from api.db_router import READ_REPLICA_ALIAS
from api.db_utils import rls_transaction
from api.decorators import set_tenant
from api.decorators import handle_provider_deletion, set_tenant
from api.models import Finding, Integration, Provider, Scan, ScanSummary, StateChoices
from api.utils import initialize_prowler_provider
from api.v1.serializers import ScanTaskSerializer
@@ -69,10 +72,16 @@ def _perform_scan_complete_tasks(tenant_id: str, scan_id: str, provider_id: str)
    create_compliance_requirements_task.apply_async(
        kwargs={"tenant_id": tenant_id, "scan_id": scan_id}
    )
    aggregate_attack_surface_task.apply_async(
        kwargs={"tenant_id": tenant_id, "scan_id": scan_id}
    )
    chain(
        perform_scan_summary_task.si(tenant_id=tenant_id, scan_id=scan_id),
        generate_outputs_task.si(
            scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
        group(
            aggregate_daily_severity_task.si(tenant_id=tenant_id, scan_id=scan_id),
            generate_outputs_task.si(
                scan_id=scan_id, provider_id=provider_id, tenant_id=tenant_id
            ),
        ),
        group(
            # Use optimized task that generates both reports with shared queries
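# A self-contained sketch of the canvas shape used above: steps in a chain run
# sequentially, while signatures inside a group may run in parallel. The no-op
# tasks and app name are hypothetical stand-ins, not the real task definitions.
from celery import Celery, chain, group

sketch_app = Celery("canvas_sketch")

@sketch_app.task
def summarize():
    return "summary"

@sketch_app.task
def daily_severity():
    return "severity"

@sketch_app.task
def outputs():
    return "outputs"

# Constructing the canvas needs no broker; the group runs after summarize.
workflow = chain(summarize.si(), group(daily_severity.si(), outputs.si()))
print(workflow)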
@@ -140,6 +149,7 @@ def delete_provider_task(provider_id: str, tenant_id: str):


@shared_task(base=RLSTask, name="scan-perform", queue="scans")
@handle_provider_deletion
def perform_scan_task(
    tenant_id: str, scan_id: str, provider_id: str, checks_to_execute: list[str] = None
):
@@ -172,6 +182,7 @@ def perform_scan_task(


@shared_task(base=RLSTask, bind=True, name="scan-perform-scheduled", queue="scans")
@handle_provider_deletion
def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):
    """
    Task to perform a scheduled Prowler scan on a given provider.
@@ -277,6 +288,7 @@ def perform_scheduled_scan_task(self, tenant_id: str, provider_id: str):


@shared_task(name="scan-summary", queue="overview")
@handle_provider_deletion
def perform_scan_summary_task(tenant_id: str, scan_id: str):
    return aggregate_findings(tenant_id=tenant_id, scan_id=scan_id)

@@ -292,6 +304,7 @@ def delete_tenant_task(tenant_id: str):
    queue="scan-reports",
)
@set_tenant(keep_tenant=True)
@handle_provider_deletion
def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
    """
    Process findings in batches and generate output files in multiple formats.
@@ -487,6 +500,7 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):


@shared_task(name="backfill-scan-resource-summaries", queue="backfill")
@handle_provider_deletion
def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):
    """
    Tries to backfill the resource scan summaries table for a given scan.
@@ -499,6 +513,7 @@ def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):


@shared_task(name="backfill-compliance-summaries", queue="backfill")
@handle_provider_deletion
def backfill_compliance_summaries_task(tenant_id: str, scan_id: str):
    """
    Tries to backfill compliance overview summaries for a completed scan.
@@ -513,7 +528,14 @@ def backfill_compliance_summaries_task(tenant_id: str, scan_id: str):
    return backfill_compliance_summaries(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(name="backfill-daily-severity-summaries", queue="backfill")
def backfill_daily_severity_summaries_task(tenant_id: str, days: int = None):
    """Backfill DailySeveritySummary from historical scans. Use days param to limit scope."""
    return backfill_daily_severity_summaries(tenant_id=tenant_id, days=days)
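# Hedged usage sketch: queue the daily-severity backfill for one tenant,
# limited to the last 30 days. The tenant UUID is a placeholder, and this
# assumes tasks.tasks is importable with a configured broker.
backfill_daily_severity_summaries_task.apply_async(
    kwargs={"tenant_id": "00000000-0000-0000-0000-000000000000", "days": 30}
)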


@shared_task(base=RLSTask, name="scan-compliance-overviews", queue="compliance")
@handle_provider_deletion
def create_compliance_requirements_task(tenant_id: str, scan_id: str):
    """
    Creates detailed compliance requirement records for a scan.
@@ -529,6 +551,29 @@ def create_compliance_requirements_task(tenant_id: str, scan_id: str):
    return create_compliance_requirements(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(name="scan-attack-surface-overviews", queue="overview")
@handle_provider_deletion
def aggregate_attack_surface_task(tenant_id: str, scan_id: str):
    """
    Creates attack surface overview records for a scan.

    This task processes findings and aggregates them into attack surface categories
    (internet-exposed, secrets, privilege-escalation, ec2-imdsv1) for quick overview queries.

    Args:
        tenant_id (str): The tenant ID for which to create records.
        scan_id (str): The ID of the scan for which to create records.
    """
    return aggregate_attack_surface(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(name="scan-daily-severity", queue="overview")
@handle_provider_deletion
def aggregate_daily_severity_task(tenant_id: str, scan_id: str):
    """Aggregate scan severity into DailySeveritySummary for findings_severity/timeseries endpoint."""
    return aggregate_daily_severity(tenant_id=tenant_id, scan_id=scan_id)


@shared_task(base=RLSTask, name="lighthouse-connection-check")
@set_tenant
def check_lighthouse_connection_task(lighthouse_config_id: str, tenant_id: str = None):
@@ -567,6 +612,7 @@ def refresh_lighthouse_provider_models_task(


@shared_task(name="integration-check")
@handle_provider_deletion
def check_integrations_task(tenant_id: str, provider_id: str, scan_id: str = None):
    """
    Check and execute all configured integrations for a provider.
@@ -631,6 +677,7 @@ def check_integrations_task(tenant_id: str, provider_id: str, scan_id: str = Non
    name="integration-s3",
    queue="integrations",
)
@handle_provider_deletion
def s3_integration_task(
    tenant_id: str,
    provider_id: str,
@@ -690,6 +737,7 @@ def jira_integration_task(
    name="scan-compliance-reports",
    queue="scan-reports",
)
@handle_provider_deletion
def generate_compliance_reports_task(tenant_id: str, scan_id: str, provider_id: str):
    """
    Optimized task to generate ThreatScore, ENS, and NIS2 reports with shared queries.

|
||||
|
||||
import pytest
|
||||
from tasks.jobs.scan import (
|
||||
_ATTACK_SURFACE_MAPPING_CACHE,
|
||||
_aggregate_findings_by_region,
|
||||
_copy_compliance_requirement_rows,
|
||||
_create_compliance_summaries,
|
||||
_create_finding_delta,
|
||||
_get_attack_surface_mapping_from_provider,
|
||||
_normalized_compliance_key,
|
||||
_persist_compliance_requirement_rows,
|
||||
_process_finding_micro_batch,
|
||||
_store_resources,
|
||||
aggregate_attack_surface,
|
||||
aggregate_findings,
|
||||
create_compliance_requirements,
|
||||
perform_prowler_scan,
|
||||
@@ -3338,7 +3341,10 @@ class TestAggregateFindingsByRegion:
|
||||
|
||||
# Verify filter was called with muted=False
|
||||
mock_findings_filter.assert_called_once_with(
|
||||
tenant_id=tenant_id, scan_id=scan_id, muted=False
|
||||
tenant_id=tenant_id,
|
||||
scan_id=scan_id,
|
||||
muted=False,
|
||||
status__in=["PASS", "FAIL"],
|
||||
)
|
||||
|
||||
@patch("tasks.jobs.scan.Finding.all_objects.filter")
|
||||
@@ -3471,3 +3477,282 @@ class TestAggregateFindingsByRegion:
|
||||
|
||||
assert check_status_by_region == {}
|
||||
assert findings_count_by_compliance == {}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestAggregateAttackSurface:
|
||||
"""Test aggregate_attack_surface function and related caching."""
|
||||
|
||||
def setup_method(self):
|
||||
"""Clear cache before each test."""
|
||||
_ATTACK_SURFACE_MAPPING_CACHE.clear()
|
||||
|
||||
def teardown_method(self):
|
||||
"""Clear cache after each test."""
|
||||
_ATTACK_SURFACE_MAPPING_CACHE.clear()
|
||||
|
||||
@patch("tasks.jobs.scan.CheckMetadata.list")
|
||||
def test_get_attack_surface_mapping_caches_result(self, mock_check_metadata_list):
|
||||
"""Test that _get_attack_surface_mapping_from_provider caches results."""
|
||||
mock_check_metadata_list.return_value = {"check_internet_exposed_1"}
|
||||
|
||||
# First call should hit CheckMetadata.list
|
||||
result1 = _get_attack_surface_mapping_from_provider("aws")
|
||||
assert mock_check_metadata_list.call_count == 2 # internet-exposed, secrets
|
||||
|
||||
# Second call should use cache
|
||||
result2 = _get_attack_surface_mapping_from_provider("aws")
|
||||
assert mock_check_metadata_list.call_count == 2 # No additional calls
|
||||
|
||||
assert result1 is result2
|
||||
assert "aws" in _ATTACK_SURFACE_MAPPING_CACHE
|
||||
|
||||
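    # A minimal sketch of the module-level memoization exercised above;
    # expensive_lookup is a stand-in for the CheckMetadata.list calls.
    #
    # _MAPPING_CACHE: dict[str, dict] = {}
    # lookup_calls = {"count": 0}
    #
    # def expensive_lookup(provider: str) -> dict:
    #     lookup_calls["count"] += 1
    #     return {"internet-exposed": {f"{provider}_check_1"}}
    #
    # def get_mapping(provider: str) -> dict:
    #     if provider not in _MAPPING_CACHE:
    #         _MAPPING_CACHE[provider] = expensive_lookup(provider)
    #     return _MAPPING_CACHE[provider]
    #
    # assert get_mapping("aws") is get_mapping("aws")  # same cached object
    # assert lookup_calls["count"] == 1  # second call served from the cache
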
@patch("tasks.jobs.scan.CheckMetadata.list")
|
||||
def test_get_attack_surface_mapping_different_providers(
|
||||
self, mock_check_metadata_list
|
||||
):
|
||||
"""Test caching works independently for different providers."""
|
||||
mock_check_metadata_list.return_value = {"check_1"}
|
||||
|
||||
_get_attack_surface_mapping_from_provider("aws")
|
||||
aws_call_count = mock_check_metadata_list.call_count
|
||||
|
||||
_get_attack_surface_mapping_from_provider("gcp")
|
||||
gcp_call_count = mock_check_metadata_list.call_count
|
||||
|
||||
# Both providers should have made calls
|
||||
assert gcp_call_count > aws_call_count
|
||||
assert "aws" in _ATTACK_SURFACE_MAPPING_CACHE
|
||||
assert "gcp" in _ATTACK_SURFACE_MAPPING_CACHE
|
||||
|
||||
@patch("tasks.jobs.scan.CheckMetadata.list")
|
||||
def test_get_attack_surface_mapping_returns_hardcoded_checks(
|
||||
self, mock_check_metadata_list
|
||||
):
|
||||
"""Test that hardcoded check IDs are returned for privilege-escalation and ec2-imdsv1."""
|
||||
mock_check_metadata_list.return_value = set()
|
||||
|
||||
result = _get_attack_surface_mapping_from_provider("aws")
|
||||
|
||||
# Hardcoded checks should be present
|
||||
assert (
|
||||
"iam_policy_allows_privilege_escalation" in result["privilege-escalation"]
|
||||
)
|
||||
assert (
|
||||
"iam_inline_policy_allows_privilege_escalation"
|
||||
in result["privilege-escalation"]
|
||||
)
|
||||
assert "ec2_instance_imdsv2_enabled" in result["ec2-imdsv1"]
|
||||
|
||||
@patch("tasks.jobs.scan.AttackSurfaceOverview.objects.bulk_create")
|
||||
@patch("tasks.jobs.scan.Finding.all_objects.filter")
|
||||
@patch("tasks.jobs.scan._get_attack_surface_mapping_from_provider")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
def test_aggregate_attack_surface_creates_overview_records(
|
||||
self,
|
||||
mock_rls_transaction,
|
||||
mock_get_mapping,
|
||||
mock_findings_filter,
|
||||
mock_bulk_create,
|
||||
tenants_fixture,
|
||||
scans_fixture,
|
||||
):
|
||||
"""Test that aggregate_attack_surface creates AttackSurfaceOverview records."""
|
||||
tenant = tenants_fixture[0]
|
||||
scan = scans_fixture[0]
|
||||
scan.provider.provider = "aws"
|
||||
scan.provider.save()
|
||||
|
||||
mock_get_mapping.return_value = {
|
||||
"internet-exposed": {"check_internet_1", "check_internet_2"},
|
||||
"secrets": {"check_secrets_1"},
|
||||
"privilege-escalation": {"check_privesc_1"},
|
||||
"ec2-imdsv1": {"check_imdsv1_1"},
|
||||
}
|
||||
|
||||
# Mock findings aggregation
|
||||
mock_queryset = MagicMock()
|
||||
mock_queryset.values.return_value = mock_queryset
|
||||
mock_queryset.annotate.return_value = [
|
||||
{"check_id": "check_internet_1", "total": 10, "failed": 3, "muted": 1},
|
||||
{"check_id": "check_secrets_1", "total": 5, "failed": 2, "muted": 0},
|
||||
]
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__.return_value = None
|
||||
ctx.__exit__.return_value = False
|
||||
mock_rls_transaction.return_value = ctx
|
||||
mock_findings_filter.return_value = mock_queryset
|
||||
|
||||
aggregate_attack_surface(str(tenant.id), str(scan.id))
|
||||
|
||||
mock_bulk_create.assert_called_once()
|
||||
args, kwargs = mock_bulk_create.call_args
|
||||
objects = args[0]
|
||||
|
||||
# Should create records for internet-exposed and secrets (the ones with findings)
|
||||
assert len(objects) == 2
|
||||
assert kwargs["batch_size"] == 500
|
||||
|
||||
@patch("tasks.jobs.scan.AttackSurfaceOverview.objects.bulk_create")
|
||||
@patch("tasks.jobs.scan.Finding.all_objects.filter")
|
||||
@patch("tasks.jobs.scan._get_attack_surface_mapping_from_provider")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
def test_aggregate_attack_surface_skips_unsupported_provider(
|
||||
self,
|
||||
mock_rls_transaction,
|
||||
mock_get_mapping,
|
||||
mock_findings_filter,
|
||||
mock_bulk_create,
|
||||
tenants_fixture,
|
||||
scans_fixture,
|
||||
):
|
||||
"""Test that ec2-imdsv1 is skipped for non-AWS providers."""
|
||||
tenant = tenants_fixture[0]
|
||||
scan = scans_fixture[0]
|
||||
scan.provider.provider = "gcp"
|
||||
scan.provider.uid = "gcp-test-project-id"
|
||||
scan.provider.save()
|
||||
|
||||
mock_get_mapping.return_value = {
|
||||
"internet-exposed": {"check_internet_1"},
|
||||
"secrets": {"check_secrets_1"},
|
||||
"privilege-escalation": set(), # Not supported for GCP
|
||||
"ec2-imdsv1": {"check_imdsv1_1"}, # Should be skipped for GCP
|
||||
}
|
||||
|
||||
mock_queryset = MagicMock()
|
||||
mock_queryset.values.return_value = mock_queryset
|
||||
mock_queryset.annotate.return_value = [
|
||||
{"check_id": "check_internet_1", "total": 5, "failed": 1, "muted": 0},
|
||||
]
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__.return_value = None
|
||||
ctx.__exit__.return_value = False
|
||||
mock_rls_transaction.return_value = ctx
|
||||
mock_findings_filter.return_value = mock_queryset
|
||||
|
||||
aggregate_attack_surface(str(tenant.id), str(scan.id))
|
||||
|
||||
# ec2-imdsv1 check_ids should not be in the filter
|
||||
filter_call = mock_findings_filter.call_args
|
||||
check_ids_in_filter = filter_call[1]["check_id__in"]
|
||||
assert "check_imdsv1_1" not in check_ids_in_filter
|
||||
|
||||
@patch("tasks.jobs.scan.AttackSurfaceOverview.objects.bulk_create")
|
||||
@patch("tasks.jobs.scan.Finding.all_objects.filter")
|
||||
@patch("tasks.jobs.scan._get_attack_surface_mapping_from_provider")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
def test_aggregate_attack_surface_no_findings(
|
||||
self,
|
||||
mock_rls_transaction,
|
||||
mock_get_mapping,
|
||||
mock_findings_filter,
|
||||
mock_bulk_create,
|
||||
tenants_fixture,
|
||||
scans_fixture,
|
||||
):
|
||||
"""Test that no records are created when there are no findings."""
|
||||
tenant = tenants_fixture[0]
|
||||
scan = scans_fixture[0]
|
||||
|
||||
mock_get_mapping.return_value = {
|
||||
"internet-exposed": {"check_1"},
|
||||
"secrets": {"check_2"},
|
||||
"privilege-escalation": set(),
|
||||
"ec2-imdsv1": set(),
|
||||
}
|
||||
|
||||
mock_queryset = MagicMock()
|
||||
mock_queryset.values.return_value = mock_queryset
|
||||
mock_queryset.annotate.return_value = [] # No findings
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__.return_value = None
|
||||
ctx.__exit__.return_value = False
|
||||
mock_rls_transaction.return_value = ctx
|
||||
mock_findings_filter.return_value = mock_queryset
|
||||
|
||||
aggregate_attack_surface(str(tenant.id), str(scan.id))
|
||||
|
||||
mock_bulk_create.assert_not_called()
|
||||
|
||||
@patch("tasks.jobs.scan.AttackSurfaceOverview.objects.bulk_create")
|
||||
@patch("tasks.jobs.scan.Finding.all_objects.filter")
|
||||
@patch("tasks.jobs.scan._get_attack_surface_mapping_from_provider")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
def test_aggregate_attack_surface_aggregates_counts_correctly(
|
||||
self,
|
||||
mock_rls_transaction,
|
||||
mock_get_mapping,
|
||||
mock_findings_filter,
|
||||
mock_bulk_create,
|
||||
tenants_fixture,
|
||||
scans_fixture,
|
||||
):
|
||||
"""Test that counts from multiple check_ids are aggregated per attack surface type."""
|
||||
tenant = tenants_fixture[0]
|
||||
scan = scans_fixture[0]
|
||||
scan.provider.provider = "aws"
|
||||
scan.provider.save()
|
||||
|
||||
mock_get_mapping.return_value = {
|
||||
"internet-exposed": {"check_internet_1", "check_internet_2"},
|
||||
"secrets": set(),
|
||||
"privilege-escalation": set(),
|
||||
"ec2-imdsv1": set(),
|
||||
}
|
||||
|
||||
mock_queryset = MagicMock()
|
||||
mock_queryset.values.return_value = mock_queryset
|
||||
mock_queryset.annotate.return_value = [
|
||||
{"check_id": "check_internet_1", "total": 10, "failed": 3, "muted": 1},
|
||||
{"check_id": "check_internet_2", "total": 5, "failed": 2, "muted": 0},
|
||||
]
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__.return_value = None
|
||||
ctx.__exit__.return_value = False
|
||||
mock_rls_transaction.return_value = ctx
|
||||
mock_findings_filter.return_value = mock_queryset
|
||||
|
||||
aggregate_attack_surface(str(tenant.id), str(scan.id))
|
||||
|
||||
args, kwargs = mock_bulk_create.call_args
|
||||
objects = args[0]
|
||||
|
||||
assert len(objects) == 1
|
||||
overview = objects[0]
|
||||
assert overview.attack_surface_type == "internet-exposed"
|
||||
assert overview.total_findings == 15 # 10 + 5
|
||||
assert overview.failed_findings == 5 # 3 + 2
|
||||
assert overview.muted_failed_findings == 1 # 1 + 0
|
||||
|
||||
@patch("tasks.jobs.scan.Scan.all_objects.select_related")
|
||||
@patch("tasks.jobs.scan.rls_transaction")
|
||||
def test_aggregate_attack_surface_uses_select_related(
|
||||
self, mock_rls_transaction, mock_select_related, tenants_fixture, scans_fixture
|
||||
):
|
||||
"""Test that select_related is used to avoid N+1 query."""
|
||||
tenant = tenants_fixture[0]
|
||||
scan = scans_fixture[0]
|
||||
|
||||
mock_scan = MagicMock()
|
||||
mock_scan.provider.provider = "aws"
|
||||
|
||||
mock_select_related.return_value.get.return_value = mock_scan
|
||||
|
||||
ctx = MagicMock()
|
||||
ctx.__enter__.return_value = None
|
||||
ctx.__exit__.return_value = False
|
||||
mock_rls_transaction.return_value = ctx
|
||||
|
||||
with patch(
|
||||
"tasks.jobs.scan._get_attack_surface_mapping_from_provider"
|
||||
) as mock_map:
|
||||
mock_map.return_value = {}
|
||||
|
||||
aggregate_attack_surface(str(tenant.id), str(scan.id))
|
||||
|
||||
mock_select_related.assert_called_once_with("provider")
|
||||
|
||||
@@ -4,6 +4,10 @@ from unittest.mock import MagicMock, patch
|
||||
import openai
|
||||
import pytest
|
||||
from botocore.exceptions import ClientError
|
||||
from tasks.jobs.lighthouse_providers import (
|
||||
_create_bedrock_client,
|
||||
_extract_bedrock_credentials,
|
||||
)
|
||||
from tasks.tasks import (
|
||||
_perform_scan_complete_tasks,
|
||||
check_integrations_task,
|
||||
@@ -21,6 +25,198 @@ from api.models import (
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestExtractBedrockCredentials:
|
||||
"""Unit tests for _extract_bedrock_credentials helper function."""
|
||||
|
||||
def test_extract_access_key_credentials(self, tenants_fixture):
|
||||
"""Test extraction of access key + secret key credentials."""
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
provider_cfg.credentials_decoded = {
|
||||
"access_key_id": "AKIAIOSFODNN7EXAMPLE",
|
||||
"secret_access_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
|
||||
"region": "us-east-1",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is not None
|
||||
assert result["access_key_id"] == "AKIAIOSFODNN7EXAMPLE"
|
||||
assert result["secret_access_key"] == "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
|
||||
assert result["region"] == "us-east-1"
|
||||
assert "api_key" not in result
|
||||
|
||||
def test_extract_api_key_credentials(self, tenants_fixture):
|
||||
"""Test extraction of API key (bearer token) credentials."""
|
||||
valid_api_key = "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110)
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
provider_cfg.credentials_decoded = {
|
||||
"api_key": valid_api_key,
|
||||
"region": "us-west-2",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is not None
|
||||
assert result["api_key"] == valid_api_key
|
||||
assert result["region"] == "us-west-2"
|
||||
assert "access_key_id" not in result
|
||||
assert "secret_access_key" not in result
|
||||
|
||||
def test_api_key_takes_precedence_over_access_keys(self, tenants_fixture):
|
||||
"""Test that API key is preferred when both auth methods are present."""
|
||||
valid_api_key = "ABSKQmVkcm9ja0FQSUtleS" + ("B" * 110)
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
provider_cfg.credentials_decoded = {
|
||||
"api_key": valid_api_key,
|
||||
"access_key_id": "AKIAIOSFODNN7EXAMPLE",
|
||||
"secret_access_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
|
||||
"region": "eu-west-1",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is not None
|
||||
assert result["api_key"] == valid_api_key
|
||||
assert result["region"] == "eu-west-1"
|
||||
assert "access_key_id" not in result
|
||||
|
||||
def test_missing_region_returns_none(self, tenants_fixture):
|
||||
"""Test that missing region returns None."""
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
provider_cfg.credentials_decoded = {
|
||||
"api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110),
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_empty_credentials_returns_none(self, tenants_fixture):
|
||||
"""Test that empty credentials dict returns None (region only is not enough)."""
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
# Only region, no auth credentials - should return None
|
||||
provider_cfg.credentials_decoded = {
|
||||
"region": "us-east-1",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is None
|
||||
|
||||
def test_non_dict_credentials_returns_none(self, tenants_fixture):
|
||||
"""Test that non-dict credentials returns None."""
|
||||
provider_cfg = LighthouseProviderConfiguration(
|
||||
tenant_id=tenants_fixture[0].id,
|
||||
provider_type=LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
|
||||
is_active=True,
|
||||
)
|
||||
# Store valid credentials first to pass model validation
|
||||
provider_cfg.credentials_decoded = {
|
||||
"api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110),
|
||||
"region": "us-east-1",
|
||||
}
|
||||
provider_cfg.save()
|
||||
|
||||
# Mock the credentials_decoded property to return a non-dict value
|
||||
# This simulates corrupted/invalid stored data
|
||||
with patch.object(
|
||||
type(provider_cfg),
|
||||
"credentials_decoded",
|
||||
new_callable=lambda: property(lambda self: "invalid"),
|
||||
):
|
||||
result = _extract_bedrock_credentials(provider_cfg)
|
||||
|
||||
assert result is None
|
||||
|
||||
|
||||
class TestCreateBedrockClient:
|
||||
"""Unit tests for _create_bedrock_client helper function."""
|
||||
|
||||
@patch("tasks.jobs.lighthouse_providers.boto3.client")
|
||||
def test_create_client_with_access_keys(self, mock_boto_client):
|
||||
"""Test creating client with access key authentication."""
|
||||
mock_client = MagicMock()
|
||||
mock_boto_client.return_value = mock_client
|
||||
|
||||
creds = {
|
||||
"access_key_id": "AKIAIOSFODNN7EXAMPLE",
|
||||
"secret_access_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
|
||||
"region": "us-east-1",
|
||||
}
|
||||
|
||||
result = _create_bedrock_client(creds)
|
||||
|
||||
assert result == mock_client
|
||||
mock_boto_client.assert_called_once_with(
|
||||
service_name="bedrock",
|
||||
region_name="us-east-1",
|
||||
aws_access_key_id="AKIAIOSFODNN7EXAMPLE",
|
||||
aws_secret_access_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
|
||||
)
|
||||
|
||||
@patch("tasks.jobs.lighthouse_providers.Config")
|
||||
@patch("tasks.jobs.lighthouse_providers.boto3.client")
|
||||
def test_create_client_with_api_key(self, mock_boto_client, mock_config):
|
||||
"""Test creating client with API key authentication."""
|
||||
mock_client = MagicMock()
|
||||
mock_events = MagicMock()
|
||||
mock_client.meta.events = mock_events
|
||||
mock_boto_client.return_value = mock_client
|
||||
mock_config_instance = MagicMock()
|
||||
mock_config.return_value = mock_config_instance
|
||||
valid_api_key = "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110)
|
||||
|
||||
creds = {
|
||||
"api_key": valid_api_key,
|
||||
"region": "us-west-2",
|
||||
}
|
||||
|
||||
result = _create_bedrock_client(creds)
|
||||
|
||||
assert result == mock_client
|
||||
mock_boto_client.assert_called_once_with(
|
||||
service_name="bedrock",
|
||||
region_name="us-west-2",
|
||||
config=mock_config_instance,
|
||||
)
|
||||
mock_events.register.assert_called_once()
|
||||
call_args = mock_events.register.call_args
|
||||
assert call_args[0][0] == "before-send.*.*"
|
||||
|
||||
# Verify handler injects bearer token
|
||||
handler_fn = call_args[0][1]
|
||||
mock_request = MagicMock()
|
||||
mock_request.headers = {}
|
||||
handler_fn(mock_request)
|
||||
assert mock_request.headers["Authorization"] == f"Bearer {valid_api_key}"
|
||||
|
||||
|
||||
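# A standalone sketch of the handler contract verified above: register a
# before-send hook that injects the bearer token on every outgoing request.
# The unsigned-config detail is an assumption, since the test mocks Config.
import boto3
from botocore import UNSIGNED
from botocore.config import Config


def bedrock_client_with_bearer(api_key: str, region: str):
    client = boto3.client(
        service_name="bedrock",
        region_name=region,
        config=Config(signature_version=UNSIGNED),
    )

    def _add_bearer(request, **kwargs):
        request.headers["Authorization"] = f"Bearer {api_key}"

    client.meta.events.register("before-send.*.*", _add_bearer)
    return client
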
# TODO Move this to outputs/reports jobs
@pytest.mark.django_db
class TestGenerateOutputs:
@@ -529,6 +725,7 @@ class TestGenerateOutputs:


class TestScanCompleteTasks:
    @patch("tasks.tasks.aggregate_attack_surface_task.apply_async")
    @patch("tasks.tasks.create_compliance_requirements_task.apply_async")
    @patch("tasks.tasks.perform_scan_summary_task.si")
    @patch("tasks.tasks.generate_outputs_task.si")
@@ -541,6 +738,7 @@ class TestScanCompleteTasks:
        mock_outputs_task,
        mock_scan_summary_task,
        mock_compliance_requirements_task,
        mock_attack_surface_task,
    ):
        """Test that scan complete tasks are properly orchestrated with optimized reports."""
        _perform_scan_complete_tasks("tenant-id", "scan-id", "provider-id")
@@ -550,6 +748,11 @@ class TestScanCompleteTasks:
            kwargs={"tenant_id": "tenant-id", "scan_id": "scan-id"},
        )

        # Verify attack surface task is called
        mock_attack_surface_task.assert_called_once_with(
            kwargs={"tenant_id": "tenant-id", "scan_id": "scan-id"},
        )

        # Verify scan summary task is called
        mock_scan_summary_task.assert_called_once_with(
            scan_id="scan-id",
@@ -1145,6 +1348,16 @@ class TestCheckLighthouseProviderConnectionTask:
            None,
            {"connected": True, "error": None},
        ),
        # Bedrock API key authentication
        (
            LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
            {
                "api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110),
                "region": "us-east-1",
            },
            None,
            {"connected": True, "error": None},
        ),
    ],
)
def test_check_connection_success_all_providers(
@@ -1213,6 +1426,24 @@ class TestCheckLighthouseProviderConnectionTask:
                "list_foundation_models",
            ),
        ),
        # Bedrock API key authentication failure
        (
            LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
            {
                "api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("X" * 110),
                "region": "us-east-1",
            },
            None,
            ClientError(
                {
                    "Error": {
                        "Code": "UnrecognizedClientException",
                        "Message": "Invalid API key",
                    }
                },
                "list_foundation_models",
            ),
        ),
    ],
)
def test_check_connection_api_failure(
@@ -1337,6 +1568,17 @@ class TestRefreshLighthouseProviderModelsTask:
            {"openai.gpt-oss-120b-1:0": "gpt-oss-120b"},
            1,
        ),
        # Bedrock API key authentication
        (
            LighthouseProviderConfiguration.LLMProviderChoices.BEDROCK,
            {
                "api_key": "ABSKQmVkcm9ja0FQSUtleS" + ("A" * 110),
                "region": "us-east-1",
            },
            None,
            {"anthropic.claude-v3": "Claude 3"},
            1,
        ),
    ],
)
def test_refresh_models_create_new(


(binary diff not rendered: new image added, 90 KiB)
@@ -0,0 +1,24 @@
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
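# Hedged usage sketch for get_table above; the row values are hypothetical
# but mirror the columns the compliance CSV is expected to carry.
import pandas as pd

sample = pd.DataFrame(
    [
        {
            "REQUIREMENTS_ID": "1.1",
            "REQUIREMENTS_DESCRIPTION": "Ensure MFA is enabled for RAM users",
            "REQUIREMENTS_ATTRIBUTES_SECTION": "1. Identity and Access Management",
            "CHECKID": "ram_user_mfa_enabled",
            "STATUS": "PASS",
            "REGION": "cn-hangzhou",
            "ACCOUNTID": "123456789012",
            "RESOURCEID": "user/alice",
        }
    ]
)
containers = get_table(sample)  # builds the collapsible CIS section containers
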
@@ -61,6 +61,7 @@ def create_layout_overview(
                html.Div(className="flex", id="gcp_card", n_clicks=0),
                html.Div(className="flex", id="k8s_card", n_clicks=0),
                html.Div(className="flex", id="m365_card", n_clicks=0),
                html.Div(className="flex", id="alibabacloud_card", n_clicks=0),
            ],
            className=f"grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-{amount_providers}",
        ),

@@ -78,6 +78,8 @@ def load_csv_files(csv_files):
            result = result.replace("_KUBERNETES", " - KUBERNETES")
        if "M65" in result:
            result = result.replace("_M65", " - M65")
        if "ALIBABACLOUD" in result:
            result = result.replace("_ALIBABACLOUD", " - ALIBABACLOUD")
        results.append(result)

    unique_results = set(results)
@@ -125,7 +127,7 @@
    )
else:

    data["ASSESSMENTDATE"] = pd.to_datetime(data["ASSESSMENTDATE"])
    data["ASSESSMENTDATE"] = pd.to_datetime(data["ASSESSMENTDATE"], format="mixed")
    data["ASSESSMENT_TIME"] = data["ASSESSMENTDATE"].dt.strftime("%Y-%m-%d %H:%M:%S")

    data_values = data["ASSESSMENT_TIME"].unique()
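# Why format="mixed" (pandas >= 2.0): per-element format inference, so CSVs
# written by different Prowler versions with differing timestamp formats parse
# in one pass. Illustrative values; the rationale is inferred, not documented.
import pandas as pd

dates = pd.Series(["2024-01-05 10:00:00", "2024-06-30T18:45:12"])
parsed = pd.to_datetime(dates, format="mixed")
print(parsed.dt.strftime("%Y-%m-%d %H:%M:%S").tolist())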
@@ -278,9 +280,13 @@ def display_data(
        data["REQUIREMENTS_ATTRIBUTES_PROFILE"] = data[
            "REQUIREMENTS_ATTRIBUTES_PROFILE"
        ].apply(lambda x: x.split(" - ")[0])

    # Rename the column LOCATION to REGION for Alibaba Cloud
    if "alibabacloud" in analytics_input:
        data = data.rename(columns={"LOCATION": "REGION"})
    # Filter the chosen level of the CIS
    if is_level_1:
        data = data[data["REQUIREMENTS_ATTRIBUTES_PROFILE"] == "Level 1"]
        data = data[data["REQUIREMENTS_ATTRIBUTES_PROFILE"].str.contains("Level 1")]

    # Rename the column PROJECTID to ACCOUNTID for GCP
    if data.columns.str.contains("PROJECTID").any():

@@ -79,6 +79,9 @@ ks8_provider_logo = html.Img(
m365_provider_logo = html.Img(
    src="assets/images/providers/m365_provider.png", alt="m365 provider"
)
alibabacloud_provider_logo = html.Img(
    src="assets/images/providers/alibabacloud_provider.png", alt="alibabacloud provider"
)


def load_csv_files(csv_files):
@@ -253,6 +256,8 @@ else:
            accounts.append(account + " - AWS")
        if "kubernetes" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
            accounts.append(account + " - K8S")
        if "alibabacloud" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
            accounts.append(account + " - ALIBABACLOUD")

    account_dropdown = create_account_dropdown(accounts)

@@ -298,6 +303,8 @@ else:
            services.append(service + " - GCP")
        if "m365" in list(data[data["SERVICE_NAME"] == service]["PROVIDER"]):
            services.append(service + " - M365")
        if "alibabacloud" in list(data[data["SERVICE_NAME"] == service]["PROVIDER"]):
            services.append(service + " - ALIBABACLOUD")

    services = ["All"] + services
    services = [
@@ -520,6 +527,7 @@ else:
            Output("gcp_card", "children"),
            Output("k8s_card", "children"),
            Output("m365_card", "children"),
            Output("alibabacloud_card", "children"),
            Output("subscribe_card", "children"),
            Output("info-file-over", "title"),
            Output("severity-filter", "value"),
@@ -537,6 +545,7 @@ else:
            Output("gcp_card", "n_clicks"),
            Output("k8s_card", "n_clicks"),
            Output("m365_card", "n_clicks"),
            Output("alibabacloud_card", "n_clicks"),
        ],
        Input("cloud-account-filter", "value"),
        Input("region-filter", "value"),
@@ -560,6 +569,7 @@ else:
        Input("sort_button_region", "n_clicks"),
        Input("sort_button_service", "n_clicks"),
        Input("sort_button_account", "n_clicks"),
        Input("alibabacloud_card", "n_clicks"),
    )
    def filter_data(
        cloud_account_values,
@@ -584,6 +594,7 @@ def filter_data(
        sort_button_region,
        sort_button_service,
        sort_button_account,
        alibabacloud_clicks,
    ):
        # Use n_clicks for vulture
        n_clicks_csv = n_clicks_csv
@@ -599,6 +610,7 @@ def filter_data(
            gcp_clicks = 0
            k8s_clicks = 0
            m365_clicks = 0
            alibabacloud_clicks = 0
        if azure_clicks > 0:
            filtered_data = data.copy()
            if azure_clicks % 2 != 0 and "azure" in list(data["PROVIDER"]):
@@ -607,6 +619,7 @@ def filter_data(
            gcp_clicks = 0
            k8s_clicks = 0
            m365_clicks = 0
            alibabacloud_clicks = 0
        if gcp_clicks > 0:
            filtered_data = data.copy()
            if gcp_clicks % 2 != 0 and "gcp" in list(data["PROVIDER"]):
@@ -615,6 +628,7 @@ def filter_data(
            azure_clicks = 0
            k8s_clicks = 0
            m365_clicks = 0
            alibabacloud_clicks = 0
        if k8s_clicks > 0:
            filtered_data = data.copy()
            if k8s_clicks % 2 != 0 and "kubernetes" in list(data["PROVIDER"]):
@@ -623,6 +637,7 @@ def filter_data(
            azure_clicks = 0
            gcp_clicks = 0
            m365_clicks = 0
            alibabacloud_clicks = 0
        if m365_clicks > 0:
            filtered_data = data.copy()
            if m365_clicks % 2 != 0 and "m365" in list(data["PROVIDER"]):
@@ -631,7 +646,16 @@ def filter_data(
            azure_clicks = 0
            gcp_clicks = 0
            k8s_clicks = 0

            alibabacloud_clicks = 0
        if alibabacloud_clicks > 0:
            filtered_data = data.copy()
            if alibabacloud_clicks % 2 != 0 and "alibabacloud" in list(data["PROVIDER"]):
                filtered_data = filtered_data[filtered_data["PROVIDER"] == "alibabacloud"]
            aws_clicks = 0
            azure_clicks = 0
            gcp_clicks = 0
            k8s_clicks = 0
            m365_clicks = 0
        # For all the data, we will add to the status column the value 'MUTED (FAIL)' and 'MUTED (PASS)' depending on the value of the column 'STATUS' and 'MUTED'
        if "MUTED" in filtered_data.columns:
            filtered_data["STATUS"] = filtered_data.apply(
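# The per-provider click handling above repeats one invariant; a one-function
# sketch of that toggle rule:
def provider_filter_active(n_clicks: int) -> bool:
    # Odd click counts mean the provider card filter is on; an even count
    # (clicking again) clears it, matching the `% 2 != 0` checks above.
    return n_clicks % 2 != 0

assert provider_filter_active(1) is True
assert provider_filter_active(2) is False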
@@ -723,6 +747,8 @@ def filter_data(
                all_account_ids.append(account)
            if "kubernetes" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
                all_account_ids.append(account)
            if "alibabacloud" in list(data[data["ACCOUNT_UID"] == account]["PROVIDER"]):
                all_account_ids.append(account)

        all_account_names = []
        if "ACCOUNT_NAME" in filtered_data.columns:
@@ -745,6 +771,10 @@ def filter_data(
                    cloud_accounts_options.append(item + " - AWS")
                if "kubernetes" in list(data[data["ACCOUNT_UID"] == item]["PROVIDER"]):
                    cloud_accounts_options.append(item + " - K8S")
                if "alibabacloud" in list(
                    data[data["ACCOUNT_UID"] == item]["PROVIDER"]
                ):
                    cloud_accounts_options.append(item + " - ALIBABACLOUD")
            if "ACCOUNT_NAME" in filtered_data.columns:
                if "azure" in list(data[data["ACCOUNT_NAME"] == item]["PROVIDER"]):
                    cloud_accounts_options.append(item + " - AZURE")
@@ -873,6 +903,10 @@ def filter_data(
                    filtered_data[filtered_data["SERVICE_NAME"] == item]["PROVIDER"]
                ):
                    service_filter_options.append(item + " - M365")
                if "alibabacloud" in list(
                    filtered_data[filtered_data["SERVICE_NAME"] == item]["PROVIDER"]
                ):
                    service_filter_options.append(item + " - ALIBABACLOUD")

        # Filter Service
        if service_values == ["All"]:
@@ -1324,6 +1358,12 @@ def filter_data(
                    filtered_data.loc[
                        filtered_data["ACCOUNT_UID"] == account, "ACCOUNT_UID"
                    ] = (account + " - M365")
                if "alibabacloud" in list(
                    data[data["ACCOUNT_UID"] == account]["PROVIDER"]
                ):
                    filtered_data.loc[
                        filtered_data["ACCOUNT_UID"] == account, "ACCOUNT_UID"
                    ] = (account + " - ALIBABACLOUD")

        table_collapsible = []
        for item in filtered_data.to_dict("records"):
@@ -1410,6 +1450,13 @@ def filter_data(
        else:
            m365_card = None

        if "alibabacloud" in list(data["PROVIDER"].unique()):
            alibabacloud_card = create_provider_card(
                "alibabacloud", alibabacloud_provider_logo, "Accounts", full_filtered_data
            )
        else:
            alibabacloud_card = None

        # Subscribe to Prowler Cloud card
        subscribe_card = [
            html.Div(
@@ -1454,6 +1501,7 @@ def filter_data(
                gcp_card,
                k8s_card,
                m365_card,
                alibabacloud_card,
                subscribe_card,
                list_files,
                severity_values,
@@ -1469,6 +1517,7 @@ def filter_data(
                gcp_clicks,
                k8s_clicks,
                m365_clicks,
                alibabacloud_clicks,
            )
        else:
            return (
@@ -1487,6 +1536,7 @@ def filter_data(
                gcp_card,
                k8s_card,
                m365_card,
                alibabacloud_card,
                subscribe_card,
                list_files,
                severity_values,
@@ -1504,6 +1554,7 @@ def filter_data(
                gcp_clicks,
                k8s_clicks,
                m365_clicks,
                alibabacloud_clicks,
            )


@@ -52,7 +52,7 @@
    {
        "group": "Prowler Lighthouse AI",
        "pages": [
            "user-guide/tutorials/prowler-app-lighthouse"
            "getting-started/products/prowler-lighthouse-ai"
        ]
    },
    {
@@ -109,7 +109,13 @@
            "user-guide/tutorials/prowler-app-jira-integration"
        ]
    },
    "user-guide/tutorials/prowler-app-lighthouse",
    {
        "group": "Lighthouse AI",
        "pages": [
            "user-guide/tutorials/prowler-app-lighthouse",
            "user-guide/tutorials/prowler-app-lighthouse-multi-llm"
        ]
    },
    "user-guide/tutorials/prowler-cloud-public-ips",
    {
        "group": "Tutorials",
@@ -192,6 +198,13 @@
            "user-guide/providers/gcp/retry-configuration"
        ]
    },
    {
        "group": "Alibaba Cloud",
        "pages": [
            "user-guide/providers/alibabacloud/getting-started-alibabacloud",
            "user-guide/providers/alibabacloud/authentication"
        ]
    },
    {
        "group": "Kubernetes",
        "pages": [

@@ -4,12 +4,12 @@ title: "Installation"
|
||||
|
||||
### Installation
|
||||
|
||||
Prowler App supports multiple installation methods based on your environment.
|
||||
Prowler App offers flexible installation methods tailored to various environments.
|
||||
|
||||
Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detailed usage instructions.
|
||||
|
||||
<Warning>
|
||||
Prowler configuration is based in `.env` files. Every version of Prowler can have differences on that file, so, please, use the file that corresponds with that version or repository branch or tag.
|
||||
Prowler configuration is based on `.env` files. Every version of Prowler can have differences on that file, so, please, use the file that corresponds with that version or repository branch or tag.
|
||||
</Warning>
|
||||
|
||||
<Tabs>
|
||||
@@ -26,8 +26,6 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
curl -sLO "https://raw.githubusercontent.com/prowler-cloud/prowler/refs/tags/${VERSION}/.env"
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
> Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.
|
||||
</Tab>
|
||||
<Tab title="GitHub">
|
||||
_Requirements_:
|
||||
@@ -106,11 +104,13 @@ Refer to the [Prowler App Tutorial](/user-guide/tutorials/prowler-app) for detai
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
### Update Prowler App
|
||||
### Updating Prowler App
|
||||
|
||||
Upgrade Prowler App installation using one of two options:
|
||||
|
||||
#### Option 1: Update Environment File
|
||||
#### Option 1: Updating the Environment File
|
||||
|
||||
To update the environment file:
|
||||
|
||||
Edit the `.env` file and change version values:
|
||||
|
||||
@@ -119,7 +119,7 @@ PROWLER_UI_VERSION="5.9.0"
|
||||
PROWLER_API_VERSION="5.9.0"
|
||||
```
|
||||
|
||||
#### Option 2: Use Docker Compose Pull
|
||||
#### Option 2: Using Docker Compose Pull
|
||||
|
||||
```bash
|
||||
docker compose pull --policy always
|
||||
@@ -133,7 +133,7 @@ The `--policy always` flag ensures that Docker pulls the latest images even if t
|
||||
Everything is preserved, nothing will be deleted after the update.
|
||||
</Note>
|
||||
|
||||
### Troubleshooting
|
||||
### Troubleshooting Installation Issues
|
||||
|
||||
If containers don't start, check logs for errors:
|
||||
|
||||
@@ -145,16 +145,16 @@ docker compose logs
|
||||
docker images | grep prowler
|
||||
```
|
||||
|
||||
If you encounter issues, you can rollback to the previous version by changing the `.env` file back to your previous version and running:
|
||||
If issues are encountered, rollback to the previous version by changing the `.env` file back to the previous version and running:
|
||||
|
||||
```bash
|
||||
docker compose pull
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
### Container versions
|
||||
### Container Versions
|
||||
|
||||
The available versions of Prowler CLI are the following:
|
||||
The available versions of Prowler App are the following:
|
||||
|
||||
- `latest`: in sync with `master` branch (please note that it is not a stable version)
|
||||
- `v4-latest`: in sync with `v4` branch (please note that it is not a stable version)
|
||||
|
||||
@@ -4,7 +4,7 @@ title: 'Installation'

## Installation

Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/). Install it as a Python package with `Python >= 3.9, <= 3.12`:
To install Prowler as a Python package, use `Python >= 3.9, <= 3.12`. Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/):

<Tabs>
<Tab title="pipx">
@@ -41,7 +41,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/).
    prowler -v
    ```

    Upgrade Prowler to the latest version:
    To upgrade Prowler to the latest version:

    ``` bash
    pip install --upgrade prowler
@@ -54,8 +54,6 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/).
    * In the command below, change `-v` to your local directory path in order to access the reports.
    * AWS, GCP, Azure and/or Kubernetes credentials

    > Containers are built for `linux/amd64`. If your workstation's architecture is different, please set `DOCKER_DEFAULT_PLATFORM=linux/amd64` in your environment or use the `--platform linux/amd64` flag in the docker command.

    _Commands_:

    ``` bash
@@ -75,7 +73,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/).

    _Commands_:

    ```
    ```bash
    git clone https://github.com/prowler-cloud/prowler
    cd prowler
    poetry install
@@ -94,7 +92,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/).

    _Commands_:

    ```
    ```bash
    python3 -m pip install --user pipx
    python3 -m pipx ensurepath
    pipx install prowler
@@ -104,7 +102,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/).
<Tab title="Ubuntu">
    _Requirements_:

    * `Ubuntu 23.04` or above, if you are using an older version of Ubuntu check [pipx installation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) and ensure you have `Python >= 3.9, <= 3.12`.
    * `Ubuntu 23.04` or above. For older Ubuntu versions, check [pipx installation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) and ensure `Python >= 3.9, <= 3.12` is installed.
    * `Python >= 3.9, <= 3.12`
    * AWS, GCP, Azure and/or Kubernetes credentials

@@ -121,7 +119,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/).
<Tab title="Brew">
    _Requirements_:

    * `Brew` installed in your Mac or Linux
    * `Brew` installed on Mac or Linux
    * AWS, GCP, Azure and/or Kubernetes credentials

    _Commands_:
@@ -171,7 +169,8 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/).
    ```
</Tab>
</Tabs>
## Container versions

## Container Versions

The available versions of Prowler CLI are the following:

@@ -0,0 +1,180 @@
---
title: 'Overview'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.8.0" />

Prowler Lighthouse AI is a Cloud Security Analyst chatbot that helps you understand, prioritize, and remediate security findings in your cloud environments. It's designed to provide security expertise for teams without dedicated resources, acting as your 24/7 virtual cloud security analyst.

<img src="/images/prowler-app/lighthouse-intro.png" alt="Prowler Lighthouse" />

<Card title="Set Up Lighthouse AI" icon="rocket" href="/user-guide/tutorials/prowler-app-lighthouse#set-up">
  Learn how to configure Lighthouse AI with your preferred LLM provider
</Card>

## Capabilities

Prowler Lighthouse AI is designed to be your AI security team member, with capabilities including:

### Natural Language Querying

Ask questions in plain English about your security findings. Examples:

- "What are my highest risk findings?"
- "Show me all S3 buckets with public access."
- "What security issues were found in my production accounts?"

<img src="/images/prowler-app/lighthouse-feature1.png" alt="Natural language querying" />

### Detailed Remediation Guidance

Get tailored step-by-step instructions for fixing security issues:

- Clear explanations of the problem and its impact
- Commands or console steps to implement fixes
- Alternative approaches with different solutions

<img src="/images/prowler-app/lighthouse-feature2.png" alt="Detailed Remediation" />

### Enhanced Context and Analysis

Lighthouse AI can provide additional context to help you understand the findings:

- Explain security concepts related to findings in simple terms
- Provide risk assessments based on your environment and context
- Connect related findings to show broader security patterns

<img src="/images/prowler-app/lighthouse-config.png" alt="Business Context" />

<img src="/images/prowler-app/lighthouse-feature3.png" alt="Contextual Responses" />

## Important Notes

Prowler Lighthouse AI is powerful, but there are limitations:

- **Continuous improvement**: Please report any issues, as the feature may make mistakes or encounter errors, despite extensive testing.
- **Access limitations**: Lighthouse AI can only access data the logged-in user can view. If you can't see certain information, Lighthouse AI can't see it either.
- **NextJS session dependence**: If your Prowler application session expires or you log out, Lighthouse AI will error out. Refresh and log back in to continue.
- **Response quality**: Response quality depends on the selected LLM provider and model. Choose models with strong tool-calling capabilities for best results. We recommend OpenAI's `gpt-5` model.

### Getting Help

If you encounter issues with Prowler Lighthouse AI or have suggestions for improvements, please [reach out through our Slack channel](https://goto.prowler.com/slack).

### What Data Is Shared to LLM Providers?

The following API endpoints are accessible to Prowler Lighthouse AI. Data from these endpoints could be shared with the LLM provider, depending on the scope of the user's query; a minimal request sketch follows the endpoint list.

#### Accessible API Endpoints

**User Management:**

- List all users - `/api/v1/users`
- Retrieve the current user's information - `/api/v1/users/me`

**Provider Management:**

- List all providers - `/api/v1/providers`
- Retrieve data from a provider - `/api/v1/providers/{id}`

**Scan Management:**

- List all scans - `/api/v1/scans`
- Retrieve data from a specific scan - `/api/v1/scans/{id}`

**Resource Management:**

- List all resources - `/api/v1/resources`
- Retrieve data for a resource - `/api/v1/resources/{id}`

**Findings Management:**

- List all findings - `/api/v1/findings`
- Retrieve data from a specific finding - `/api/v1/findings/{id}`
- Retrieve metadata values from findings - `/api/v1/findings/metadata`

**Overview Data:**

- Get aggregated findings data - `/api/v1/overviews/findings`
- Get findings data by severity - `/api/v1/overviews/findings_severity`
- Get aggregated provider data - `/api/v1/overviews/providers`
- Get findings data by service - `/api/v1/overviews/services`

**Compliance Management:**

- List compliance overviews (optionally filter by scan) - `/api/v1/compliance-overviews`
- Retrieve data from a specific compliance overview - `/api/v1/compliance-overviews/{id}`
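A minimal sketch of calling one of these GET endpoints directly; the base URL, token, and filter parameter are placeholders, not values from this page:

```python
import requests

API_BASE = "https://your-prowler-api.example.com"  # placeholder
TOKEN = "<your-api-token>"  # placeholder

response = requests.get(
    f"{API_BASE}/api/v1/findings",
    headers={"Authorization": f"Bearer {TOKEN}", "Accept": "application/vnd.api+json"},
    params={"filter[severity]": "critical"},  # assumed filter syntax
)
response.raise_for_status()
print(response.json()["data"][:1])
```
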
#### Excluded API Endpoints

Not all Prowler API endpoints are integrated with Lighthouse AI. The remaining endpoints are intentionally excluded for the following reasons:

- OpenAI and other LLM providers should not have access to sensitive data (such as provider secrets and other sensitive configuration)
- User queries don't need responses from those API endpoints (for example: tasks, tenant details, downloading zip reports)

**Excluded Endpoints:**

**User Management:**

- Retrieve a specific user's information - `/api/v1/users/{id}`
- List user memberships - `/api/v1/users/{user_pk}/memberships`
- Retrieve membership data from the user - `/api/v1/users/{user_pk}/memberships/{id}`

**Tenant Management:**

- List all tenants - `/api/v1/tenants`
- Retrieve data from a tenant - `/api/v1/tenants/{id}`
- List tenant memberships - `/api/v1/tenants/{tenant_pk}/memberships`
- List all invitations - `/api/v1/tenants/invitations`
- Retrieve data from a tenant invitation - `/api/v1/tenants/invitations/{id}`

**Security and Configuration:**

- List all secrets - `/api/v1/providers/secrets`
- Retrieve data from a secret - `/api/v1/providers/secrets/{id}`
- List all provider groups - `/api/v1/provider-groups`
- Retrieve data from a provider group - `/api/v1/provider-groups/{id}`

**Reports and Tasks:**

- Download zip report - `/api/v1/scans/{id}/report`
- List all tasks - `/api/v1/tasks`
- Retrieve data from a specific task - `/api/v1/tasks/{id}`

**Lighthouse AI Configuration:**

- List LLM providers - `/api/v1/lighthouse/providers`
- Retrieve LLM provider - `/api/v1/lighthouse/providers/{id}`
- List available models - `/api/v1/lighthouse/models`
- Retrieve tenant configuration - `/api/v1/lighthouse/configuration`

<Note>
Agents can only call GET endpoints; no other HTTP methods are available to them.
</Note>
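
For reference, each agent tool call is an ordinary read-only HTTP request against one of the endpoints listed above. A minimal sketch, assuming a Prowler API at a placeholder base URL, a bearer token in `PROWLER_API_TOKEN`, and an illustrative filter parameter (none of these are taken from the product docs):

```bash
# Hypothetical example of the kind of GET request Lighthouse agents issue.
# Base URL, token variable, and query filter are placeholders.
curl -s "https://api.prowler.example.com/api/v1/findings?filter[severity]=critical" \
  -H "Authorization: Bearer $PROWLER_API_TOKEN" \
  -H "Accept: application/vnd.api+json"
```
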
## FAQs

**1. Which LLM providers are supported?**

Lighthouse AI supports three providers:

- **OpenAI** - GPT models (GPT-5, GPT-4o, etc.)
- **Amazon Bedrock** - Claude, Llama, Titan, and other models via AWS
- **OpenAI Compatible** - Custom endpoints like OpenRouter, Ollama, or any OpenAI-compatible service

For detailed configuration instructions, see [Using Multiple LLM Providers with Lighthouse](/user-guide/tutorials/prowler-app-lighthouse-multi-llm).

**2. Why a multi-agent supervisor model?**

Context windows are limited. While demo data fits inside the context window, querying real-world data often exceeds it. A multi-agent architecture lets each agent fetch only the data it needs and return the minimum required information to the supervisor, spreading context window usage across agents.

**3. Is my security data shared with LLM providers?**

Minimal data is shared to generate useful responses. Agents can access security findings and remediation details when needed. Provider secrets are protected by design and cannot be read. The LLM provider credentials configured with Lighthouse AI are only accessible to our NextJS server and are never sent to the LLM providers. Resource metadata (names, tags, account/project IDs, etc.) may be shared with the configured LLM provider based on query requirements.

**4. Can the Lighthouse AI change my cloud environment?**

No. The agent doesn't have the tools to make changes, even if the configured cloud provider API keys include permissions to modify resources.

@@ -33,7 +33,7 @@ The supported providers right now are:

| [Github](/user-guide/providers/github/getting-started-github) | Official | UI, API, CLI |
| [Oracle Cloud](/user-guide/providers/oci/getting-started-oci) | Official | UI, API, CLI |
| [Infra as Code](/user-guide/providers/iac/getting-started-iac) | Official | UI, API, CLI |
| [MongoDB Atlas](/user-guide/providers/mongodbatlas/getting-started-mongodbatlas) | Official | CLI, API |
| [MongoDB Atlas](/user-guide/providers/mongodbatlas/getting-started-mongodbatlas) | Official | UI, API, CLI |
| [LLM](/user-guide/providers/llm/getting-started-llm) | Official | CLI |
| **NHN** | Unofficial | CLI |

@@ -0,0 +1,112 @@
---
title: 'Alibaba Cloud Authentication in Prowler'
---

Prowler requires Alibaba Cloud credentials to perform security checks. Authentication is supported via multiple methods, prioritized as follows:

1. **Credentials URI**
2. **OIDC Role Authentication**
3. **ECS RAM Role**
4. **RAM Role Assumption**
5. **STS Temporary Credentials**
6. **Permanent Access Keys**
7. **Default Credential Chain**

## Authentication Methods

### Credentials URI (Recommended for Centralized Services)

If `--credentials-uri` is provided (or the `ALIBABA_CLOUD_CREDENTIALS_URI` environment variable is set), Prowler retrieves credentials from the specified external URI endpoint. The URI must return credentials in the standard JSON format.

```bash
export ALIBABA_CLOUD_CREDENTIALS_URI="http://localhost:8080/credentials"
prowler alibabacloud
```
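
The exact JSON schema is defined by the Alibaba Cloud SDK; as a rough sketch (field names assumed from the SDK's ECS-metadata-style credential format, not from this guide), the endpoint can be inspected directly:

```bash
# Inspect what the external credential service returns.
# The SDK expects a JSON body roughly of the form (assumed field names):
#   {"Code":"Success","AccessKeyId":"...","AccessKeySecret":"...",
#    "SecurityToken":"...","Expiration":"2030-01-01T00:00:00Z"}
curl -s "$ALIBABA_CLOUD_CREDENTIALS_URI"
```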

### OIDC Role Authentication (Recommended for ACK/Kubernetes)

If the OIDC environment variables are set, Prowler uses OIDC authentication to assume the specified role. This is the most secure method for containerized applications running in ACK (Alibaba Cloud Container Service for Kubernetes) with RRSA enabled.

Required environment variables:

- `ALIBABA_CLOUD_ROLE_ARN`
- `ALIBABA_CLOUD_OIDC_PROVIDER_ARN`
- `ALIBABA_CLOUD_OIDC_TOKEN_FILE`

```bash
export ALIBABA_CLOUD_ROLE_ARN="acs:ram::123456789012:role/YourRole"
export ALIBABA_CLOUD_OIDC_PROVIDER_ARN="acs:ram::123456789012:oidc-provider/ack-rrsa-provider"
export ALIBABA_CLOUD_OIDC_TOKEN_FILE="/var/run/secrets/tokens/oidc-token"
prowler alibabacloud
```

### ECS RAM Role (Recommended for ECS Instances)

When running on an ECS instance with an attached RAM role, Prowler can obtain credentials from the ECS instance metadata service.

```bash
# Using CLI argument
prowler alibabacloud --ecs-ram-role RoleName

# Or using environment variable
export ALIBABA_CLOUD_ECS_METADATA="RoleName"
prowler alibabacloud
```
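
Under the hood, the credentials come from the instance metadata service. If needed, the attached role can be verified from the instance itself; a minimal sketch, assuming the standard Alibaba Cloud metadata address (an assumption, not stated in this guide):

```bash
# Run on the ECS instance: list RAM roles exposed by instance metadata,
# then fetch the temporary credentials for the attached role.
curl -s http://100.100.100.200/latest/meta-data/ram/security-credentials/
curl -s http://100.100.100.200/latest/meta-data/ram/security-credentials/RoleName
```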

### RAM Role Assumption (Recommended for Cross-Account)

For cross-account access, use RAM role assumption. You must provide the initial credentials (access keys) and the target role ARN.

```bash
export ALIBABA_CLOUD_ACCESS_KEY_ID="your-access-key-id"
export ALIBABA_CLOUD_ACCESS_KEY_SECRET="your-access-key-secret"
export ALIBABA_CLOUD_ROLE_ARN="acs:ram::123456789012:role/ProwlerAuditRole"
prowler alibabacloud
```

### STS Temporary Credentials

If you already have temporary STS credentials, you can provide them via environment variables.

```bash
export ALIBABA_CLOUD_ACCESS_KEY_ID="your-sts-access-key-id"
export ALIBABA_CLOUD_ACCESS_KEY_SECRET="your-sts-access-key-secret"
export ALIBABA_CLOUD_SECURITY_TOKEN="your-sts-security-token"
prowler alibabacloud
```

### Permanent Access Keys

You can use standard permanent access keys via environment variables.

```bash
export ALIBABA_CLOUD_ACCESS_KEY_ID="your-access-key-id"
export ALIBABA_CLOUD_ACCESS_KEY_SECRET="your-access-key-secret"
prowler alibabacloud
```

## Required Permissions

Grant the credentials used by Prowler only the read permissions needed for auditing. The following permissions are recommended at a minimum:

- `ram:GetUser`
- `ram:ListUsers`
- `ram:GetPasswordPolicy`
- `ram:GetAccountSummary`
- `ram:ListVirtualMFADevices`
- `ram:ListGroups`
- `ram:ListPolicies`
- `ram:ListAccessKeys`
- `ram:GetLoginProfile`
- `ram:ListPoliciesForUser`
- `ram:ListGroupsForUser`
- `actiontrail:DescribeTrails`
- `oss:GetBucketLogging`
- `oss:GetBucketAcl`
- `rds:DescribeDBInstances`
- `rds:DescribeDBInstanceAttribute`
- `ecs:DescribeInstances`
- `vpc:DescribeVpcs`
- `sls:ListProject`
- `sls:ListAlerts`
- `sls:ListLogStores`
- `sls:GetLogStore`
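
These actions can be bundled into a custom RAM policy and attached to the auditing principal. A minimal sketch, assuming the Alibaba Cloud CLI (`aliyun`) is configured; the policy name and file path are illustrative, and the `ram:Get*`/`ram:List*` wildcards are slightly broader than the exact list above:

```bash
# Hypothetical helper: create a read-only RAM policy for Prowler audits.
cat > prowler-audit-policy.json <<'EOF'
{
  "Version": "1",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "ram:Get*", "ram:List*",
        "actiontrail:DescribeTrails",
        "oss:GetBucketLogging", "oss:GetBucketAcl",
        "rds:DescribeDBInstances", "rds:DescribeDBInstanceAttribute",
        "ecs:DescribeInstances", "vpc:DescribeVpcs",
        "sls:ListProject", "sls:ListAlerts", "sls:ListLogStores", "sls:GetLogStore"
      ],
      "Resource": "*"
    }
  ]
}
EOF
aliyun ram CreatePolicy --PolicyName ProwlerAudit --PolicyDocument "$(cat prowler-audit-policy.json)"
```
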
@@ -0,0 +1,132 @@
---
title: 'Getting Started With Alibaba Cloud on Prowler'
---

## Prowler CLI

### Configure Alibaba Cloud Credentials

Prowler requires Alibaba Cloud credentials to perform security checks. Authentication is available through the following methods (in order of priority):

1. **Credentials URI** (Recommended for centralized credential services)
2. **OIDC Role Authentication** (Recommended for ACK/Kubernetes)
3. **ECS RAM Role** (Recommended for ECS instances)
4. **RAM Role Assumption** (Recommended for cross-account access)
5. **STS Temporary Credentials**
6. **Permanent Access Keys**
7. **Default Credential Chain**

<Warning>
Prowler does not accept credentials through command-line arguments. Provide credentials through environment variables or the Alibaba Cloud credential chain.
</Warning>

#### Option 1: Environment Variables (Permanent Credentials)

```bash
export ALIBABA_CLOUD_ACCESS_KEY_ID="your-access-key-id"
export ALIBABA_CLOUD_ACCESS_KEY_SECRET="your-access-key-secret"
prowler alibabacloud
```

#### Option 2: Environment Variables (STS Temporary Credentials)

```bash
export ALIBABA_CLOUD_ACCESS_KEY_ID="your-sts-access-key-id"
export ALIBABA_CLOUD_ACCESS_KEY_SECRET="your-sts-access-key-secret"
export ALIBABA_CLOUD_SECURITY_TOKEN="your-sts-security-token"
prowler alibabacloud
```

#### Option 3: RAM Role Assumption (Environment Variables)

```bash
export ALIBABA_CLOUD_ACCESS_KEY_ID="your-access-key-id"
export ALIBABA_CLOUD_ACCESS_KEY_SECRET="your-access-key-secret"
export ALIBABA_CLOUD_ROLE_ARN="acs:ram::123456789012:role/ProwlerAuditRole"
export ALIBABA_CLOUD_ROLE_SESSION_NAME="ProwlerAssessmentSession" # Optional
prowler alibabacloud
```

#### Option 4: RAM Role Assumption (CLI + Environment Variables)

```bash
# Set credentials via environment variables
export ALIBABA_CLOUD_ACCESS_KEY_ID="your-access-key-id"
export ALIBABA_CLOUD_ACCESS_KEY_SECRET="your-access-key-secret"
# Specify role via CLI argument
prowler alibabacloud --role-arn acs:ram::123456789012:role/ProwlerAuditRole --role-session-name ProwlerAssessmentSession
```

#### Option 5: ECS Instance Metadata (ECS RAM Role)

```bash
# When running on an ECS instance with an attached RAM role
prowler alibabacloud --ecs-ram-role RoleName

# Or using environment variable
export ALIBABA_CLOUD_ECS_METADATA="RoleName"
prowler alibabacloud
```

#### Option 6: OIDC Role Authentication (for ACK/Kubernetes)

```bash
# For applications running in ACK (Alibaba Cloud Container Service for Kubernetes) with RRSA enabled
export ALIBABA_CLOUD_ROLE_ARN="acs:ram::123456789012:role/YourRole"
export ALIBABA_CLOUD_OIDC_PROVIDER_ARN="acs:ram::123456789012:oidc-provider/ack-rrsa-provider"
export ALIBABA_CLOUD_OIDC_TOKEN_FILE="/var/run/secrets/tokens/oidc-token"
export ALIBABA_CLOUD_ROLE_SESSION_NAME="ProwlerOIDCSession" # Optional
prowler alibabacloud

# Or using CLI argument
prowler alibabacloud --oidc-role-arn acs:ram::123456789012:role/YourRole
```

#### Option 7: Credentials URI (External Credential Service)

```bash
# Retrieve credentials from an external URI endpoint
export ALIBABA_CLOUD_CREDENTIALS_URI="http://localhost:8080/credentials"
prowler alibabacloud

# Or using CLI argument
prowler alibabacloud --credentials-uri http://localhost:8080/credentials
```

#### Option 8: Default Credential Chain

The SDK automatically checks credentials in the following order:

1. Environment variables (`ALIBABA_CLOUD_*` or `ALIYUN_*`)
2. OIDC authentication (if OIDC environment variables are set)
3. Configuration file (`~/.aliyun/config.json`)
4. ECS instance metadata (if running on ECS)
5. Credentials URI (if `ALIBABA_CLOUD_CREDENTIALS_URI` is set)

```bash
prowler alibabacloud
```
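
For step 3 of the chain, a minimal sketch of a static-key profile in `~/.aliyun/config.json`; the key names are an assumption based on the Alibaba Cloud CLI's profile format, not taken from this guide:

```bash
# Hypothetical example profile for the configuration-file step of the chain.
mkdir -p ~/.aliyun
cat > ~/.aliyun/config.json <<'EOF'
{
  "current": "default",
  "profiles": [
    {
      "name": "default",
      "mode": "AK",
      "access_key_id": "your-access-key-id",
      "access_key_secret": "your-access-key-secret",
      "region_id": "cn-hangzhou"
    }
  ]
}
EOF
prowler alibabacloud
```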

### Specify Regions

To run checks only in specific regions:

```bash
prowler alibabacloud --regions cn-hangzhou cn-shanghai
```

### Run Specific Checks

To run specific checks:

```bash
prowler alibabacloud --checks ram_no_root_access_key ram_user_mfa_enabled_console_access
```

### Run Compliance Framework

To run a specific compliance framework:

```bash
prowler alibabacloud --compliance cis_2.0_alibabacloud
```

@@ -49,8 +49,9 @@ This method grants permanent access and is the recommended setup for production




!!! info
    An **External ID** is required when assuming the *ProwlerScan* role to comply with AWS [confused deputy prevention](https://docs.aws.amazon.com/IAM/latest/UserGuide/confused-deputy.html).
<Info>
An **External ID** is required when assuming the *ProwlerScan* role to prevent the [confused deputy problem](https://docs.aws.amazon.com/IAM/latest/UserGuide/confused-deputy.html).
</Info>

6. Acknowledge the IAM resource creation warning and proceed

@@ -37,7 +37,7 @@ title: 'Getting Started With AWS on Prowler'

6. Choose the preferred authentication method (next step)




### Step 3: Set Up AWS Authentication

@@ -76,7 +76,7 @@ For Google Cloud, first enter your `GCP Project ID` and then select the authenti

7. Click "Next", then "Launch Scan"




---

@@ -2,25 +2,38 @@
title: "Microsoft 365 Authentication in Prowler"
---

Prowler for Microsoft 365 supports multiple authentication types. Authentication methods vary between Prowler App and Prowler CLI:
Prowler for Microsoft 365 supports multiple authentication types across Prowler Cloud and Prowler CLI.

**Prowler App:**
## Navigation
- [Common Setup](#common-setup)
- [Prowler Cloud Authentication](#prowler-cloud-authentication)
- [Prowler CLI Authentication](#prowler-cli-authentication)
- [Supported PowerShell Versions](#supported-powershell-versions)
- [Required PowerShell Modules](#required-powershell-modules)

- [**Application Certificate Authentication**](#certificate-based-authentication) (**Recommended**)
- [**Application Client Secret Authentication**](#client-secret-authentication)
## Common Setup

### Authentication Methods Overview

Prowler Cloud uses app-only authentication. Prowler CLI supports the same app-only options and two delegated flows.

**Prowler Cloud:**

- [**Application Certificate Authentication**](#application-certificate-authentication-recommended) (**Recommended**)
- [**Application Client Secret Authentication**](#application-client-secret-authentication)

**Prowler CLI:**

- [**Application Certificate Authentication**](#certificate-based-authentication) (**Recommended**)
- [**Application Client Secret Authentication**](#client-secret-authentication)
- [**Application Certificate Authentication**](#application-certificate-authentication-recommended) (**Recommended**)
- [**Application Client Secret Authentication**](#application-client-secret-authentication)
- [**Azure CLI Authentication**](#azure-cli-authentication)
- [**Interactive Browser Authentication**](#interactive-browser-authentication)

## Required Permissions
### Required Permissions

To run the full Prowler provider, including PowerShell checks, two types of permission scopes must be set in **Microsoft Entra ID**.

### Application Permissions for App-Only Authentication
#### Application Permissions for App-Only Authentication

When using service principal authentication, add these **Application Permissions**:

@@ -44,6 +57,7 @@ When using service principal authentication, add these **Application Permissions

These permissions enable application-based authentication methods (client secret and certificate). Using certificate-based authentication is the recommended way to run the full M365 provider, including PowerShell checks.

</Note>

### Browser Authentication Permissions

When using browser authentication, permissions are delegated to the user, so the user must have the appropriate permissions rather than the application.

@@ -52,37 +66,38 @@ When using browser authentication, permissions are delegated to the user, so the

Browser and Azure CLI authentication methods limit scanning capabilities to checks that operate through Microsoft Graph API. Checks requiring PowerShell modules will not execute, as they need application-level permissions that cannot be delegated through browser authentication.

</Warning>

### Step-by-Step Permission Assignment

#### Create Application Registration

1. Access **Microsoft Entra ID**
1. Access **Microsoft Entra ID**.



2. Navigate to "Applications" > "App registrations"
2. Navigate to "Applications" > "App registrations".



3. Click "+ New registration", complete the form, and click "Register"
3. Click "+ New registration", complete the form, and click "Register".



4. Go to "Certificates & secrets" > "Client secrets" > "+ New client secret"
4. Go to "Certificates & secrets" > "Client secrets" > "+ New client secret".



5. Fill in the required fields and click "Add", then copy the generated value (this will be `AZURE_CLIENT_SECRET`)
5. Fill in the required fields and click "Add", then copy the generated value (this will be `AZURE_CLIENT_SECRET`).



#### Grant Microsoft Graph API Permissions

1. Go to App Registration > Select your Prowler App > click on "API permissions"
1. Open **API permissions** for the Prowler application registration.



2. Click "+ Add a permission" > "Microsoft Graph" > "Application permissions"
2. Click "+ Add a permission" > "Microsoft Graph" > "Application permissions".


@@ -97,38 +112,39 @@ Browser and Azure CLI authentication methods limit scanning capabilities to chec




4. Click "Add permissions", then click "Grant admin consent for `<your-tenant-name>`"
4. Click "Add permissions", then click "Grant admin consent for `<your-tenant-name>`".

<a id="grant-powershell-module-permissions-for-app-only-authentication"></a>
#### Grant PowerShell Module Permissions
1. **Add Exchange API:**

- Search and select "Office 365 Exchange Online" API in **APIs my organization uses**
- Search and select "Office 365 Exchange Online" API in **APIs my organization uses**.



- Select "Exchange.ManageAsApp" permission and click "Add permissions"
- Select "Exchange.ManageAsApp" permission and click "Add permissions".



- Assign `Global Reader` role to the app: Go to `Roles and administrators` > click `here` for directory level assignment
- Assign `Global Reader` role to the app: Go to `Roles and administrators` > click `here` for directory level assignment.



- Search for `Global Reader` and assign it to your application
- Search for `Global Reader` and assign it to the application.



2. **Add Teams API:**

- Search and select "Skype and Teams Tenant Admin API" in **APIs my organization uses**
- Search and select "Skype and Teams Tenant Admin API" in **APIs my organization uses**.



- Select "application_access" permission and click "Add permissions"
- Select "application_access" permission and click "Add permissions".



3. Click "Grant admin consent for `<your-tenant-name>`" to grant admin consent
3. Click "Grant admin consent for `<your-tenant-name>`" to grant admin consent.


@@ -136,11 +152,13 @@ Final permissions should look like this:



Use the same application registration for both Prowler Cloud and Prowler CLI while switching authentication methods as needed.

<a id="client-secret-authentication"></a>
<a id="certificate-based-authentication"></a>
## Application Certificate Authentication (Recommended)

_Available for both Prowler App and Prowler CLI_
_Available for both Prowler Cloud and Prowler CLI_

**Authentication flag for CLI:** `--certificate-auth`

@@ -173,11 +191,11 @@ Guard `prowlerm365.key` and `prowlerm365.pfx`. Only upload the `.cer` file to th

</Warning>

If your organization uses a certificate authority, you can replace step 2 with a CSR workflow and import the signed certificate instead.
If an internal certificate authority is preferred, replace step 2 with a CSR workflow and import the signed certificate instead.

### Upload the Certificate to Microsoft Entra ID

1. Open **Microsoft Entra ID** > **App registrations** > your application.
1. Open **Microsoft Entra ID** > **App registrations** > the Prowler application.
2. Go to **Certificates & secrets** > **Certificates**.
3. Select **Upload certificate** and choose `prowlerm365.cer`.
4. Confirm the certificate appears with the expected expiration date.

@@ -189,45 +207,37 @@ base64 -i prowlerm365.pfx -o prowlerm365.pfx.b64
cat prowlerm365.pfx.b64 | tr -d '\n'
```

Copy the resulting single-line Base64 string (or the contents of `prowlerm365.pfx.b64`)—you will use it in the next step.
Copy the resulting single-line Base64 string (or the contents of `prowlerm365.pfx.b64`) for the next step.

### Provide the Certificate to Prowler

You can supply the private certificate to Prowler in two ways:
- **Prowler Cloud:** Paste the Base64-encoded PFX in the `certificate_content` field when configuring the Microsoft 365 provider in Prowler Cloud.
- **Prowler CLI:** Export credential variables or pass the local file path when running Prowler.

- **Environment variables (recommended for headless execution)**
```console
export AZURE_CLIENT_ID="00000000-0000-0000-0000-000000000000"
export AZURE_TENANT_ID="11111111-1111-1111-1111-111111111111"
export M365_CERTIFICATE_CONTENT="$(base64 < prowlerm365.pfx | tr -d '\n')"
```

```console
export AZURE_CLIENT_ID="00000000-0000-0000-0000-000000000000"
export AZURE_TENANT_ID="11111111-1111-1111-1111-111111111111"
export M365_CERTIFICATE_CONTENT="$(base64 < prowlerm365.pfx | tr -d '\n')"
```
Store the PFX securely and reference it when running the CLI:

The `M365_CERTIFICATE_CONTENT` variable must contain a single-line Base64 string. Remove any line breaks or spaces before exporting.
```console
python3 prowler-cli.py m365 --certificate-auth --certificate-path /secure/path/prowlerm365.pfx
```

- **Local file path**

Store the PFX securely and reference it when you run the CLI:

```console
python3 prowler-cli.py m365 --certificate-auth --certificate-path /secure/path/prowlerm365.pfx
```

The CLI still needs `AZURE_CLIENT_ID` and `AZURE_TENANT_ID` in the environment when you use `--certificate-path`.

For the **Prowler App**, paste the Base64-encoded PFX in the `certificate_content` field when you configure the provider secrets. The platform persists the encrypted certificate and supplies it during scans.
The CLI still needs `AZURE_CLIENT_ID` and `AZURE_TENANT_ID` in the environment when `--certificate-path` is used.

<Note>
Do not mix certificate authentication with a client secret. Provide either a certificate **or** a secret to the application registration and Prowler configuration.
</Note>

<a id="client-secret-authentication"></a>
<a id="service-principal-authentication"></a>
<a id="service-principal-authentication-recommended"></a>
## Application Client Secret Authentication

_Available for both Prowler App and Prowler CLI_
_Available for both Prowler Cloud and Prowler CLI_

**Authentication flag for CLI:** `--sp-env-auth`

@@ -239,35 +249,59 @@ export AZURE_CLIENT_SECRET="XXXXXXXXX"
export AZURE_TENANT_ID="XXXXXXXXX"
```

If these variables are not set or exported, execution using `--sp-env-auth` will fail.

Refer to the [Step-by-Step Permission Assignment](#step-by-step-permission-assignment) section below for setup instructions.

If the external API permissions described in the mentioned section above are not added only checks that work through MS Graph will be executed. This means that the full provider will not be executed.

This workflow is helpful for initial validation or temporary access. Plan to transition to certificate-based authentication to remove long-lived secrets and keep full provider coverage in unattended environments.
If these variables are not set or exported, execution using `--sp-env-auth` will fail. This workflow is helpful for initial validation or temporary access. Plan to transition to certificate-based authentication to remove long-lived secrets and keep full provider coverage in unattended environments.

<Note>
To scan every M365 check, ensure the required permissions are added to the application registration. Refer to the [PowerShell Module Permissions](#grant-powershell-module-permissions-for-app-only-authentication) section for more information.
</Note>

### Run Prowler with Certificate Authentication
If the external API permissions described above are not added, only checks that work through Microsoft Graph will be executed. This means that the full provider will not be executed.

After the variables or path are in place, run the Microsoft 365 provider as usual:
## Prowler Cloud Authentication

Use the shared permissions and credentials above, then complete the Microsoft 365 provider form in Prowler Cloud. The platform persists the encrypted credentials and supplies them during scans.

### Application Certificate Authentication (Recommended)

1. Select **Application Certificate Authentication**.
2. Enter the **tenant ID** and **application (client) ID**.
3. Paste the Base64-encoded certificate content.

This method keeps all Microsoft 365 checks available, including PowerShell-based checks.

### Application Client Secret Authentication

1. Select **Application Client Secret Authentication**.
2. Enter the **tenant ID** and **application (client) ID**.
3. Enter the **client secret**.

## Prowler CLI Authentication

### Certificate Authentication

**Authentication flag for CLI:** `--certificate-auth`

After credentials are exported, launch the Microsoft 365 provider with certificate authentication:

```console
python3 prowler-cli.py m365 --certificate-auth --init-modules --log-level ERROR
```

The command above initializes PowerShell modules if needed. You can combine other standard flags (for example, `--region M365USGovernment` or custom outputs) with `--certificate-auth`.
Prowler prints the certificate thumbprint during execution so the correct credential can be verified.

Prowler prints the certificate thumbprint during execution so you can confirm the correct credential is in use.
### Client Secret Authentication

**Authentication flag for CLI:** `--sp-env-auth`

After exporting the secret-based variables, run:

```console
python3 prowler-cli.py m365 --sp-env-auth --init-modules --log-level ERROR
```

<a id="azure-cli-authentication"></a>
## Azure CLI Authentication

_Available only for Prowler CLI_
### Azure CLI Authentication

**Authentication flag for CLI:** `--az-cli-auth`

@@ -279,7 +313,7 @@ az login --tenant <TENANT_ID>
az account set --tenant <TENANT_ID>
```

If you prefer to reuse the same service principal that powers certificate-based authentication, authenticate it through Azure CLI instead of exporting environment variables. Azure CLI expects the certificate in PEM format; convert the PFX produced earlier and sign in:
If reusing the same service principal that powers certificate-based authentication, authenticate it through Azure CLI instead of exporting environment variables. Azure CLI expects the certificate in PEM format; convert the PFX produced earlier and sign in:

```console
openssl pkcs12 -in prowlerm365.pfx -out prowlerm365.pem -nodes
@@ -297,11 +331,9 @@ python3 prowler-cli.py m365 --az-cli-auth

The Azure CLI identity must hold the same Microsoft Graph and external API permissions required for the full provider. Signing in with a user account limits the scan to delegated Microsoft Graph endpoints and skips PowerShell-based checks. Use a service principal with the necessary application permissions to keep complete coverage.

## Interactive Browser Authentication
### Interactive Browser Authentication

_Available only for Prowler CLI_

**Authentication flag:** `--browser-auth`
**Authentication flag for CLI:** `--browser-auth`

Authenticate against Azure using the default browser to start the scan. The `--tenant-id` flag is also required.

@@ -8,73 +8,81 @@ title: 'Getting Started With Microsoft 365 on Prowler'

Government cloud accounts or tenants (Microsoft 365 Government) are currently unsupported, but we expect to add support for them in the near future.

</Note>

## Prerequisites

Configure authentication for Microsoft 365 by following the [Microsoft 365 Authentication](/user-guide/providers/microsoft365/authentication) guide. This includes:
Set up authentication for Microsoft 365 with the [Microsoft 365 Authentication](/user-guide/providers/microsoft365/authentication) guide before starting either path:

- Registering an application in Microsoft Entra ID
- Granting all required Microsoft Graph and external API permissions
- Generating the application certificate (recommended) or client secret
- Setting up PowerShell module permissions (for full security coverage)
- Register an application in Microsoft Entra ID
- Grant the Microsoft Graph and external API permissions listed for the provider
- Generate an application certificate (recommended) or client secret
- Prepare PowerShell module permissions to enable every check

## Prowler App
<CardGroup cols={2}>
<Card title="Prowler Cloud" icon="cloud" href="#prowler-cloud">
Onboard Microsoft 365 using Prowler Cloud
</Card>
<Card title="Prowler CLI" icon="terminal" href="#prowler-cli">
Onboard Microsoft 365 using Prowler CLI
</Card>
</CardGroup>

### Step 1: Obtain Domain ID
## Prowler Cloud

1. Go to the Entra ID portal, then search for "Domain" or go to Identity > Settings > Domain Names
### Step 1: Locate the Domain ID

1. Open the Entra ID portal, then search for "Domain" or go to Identity > Settings > Domain Names.




2. Select the domain to use as unique identifier for the Microsoft 365 account in Prowler App
2. Select the domain that acts as the unique identifier for the Microsoft 365 account in Prowler Cloud.

### Step 2: Access Prowler App
### Step 2: Open Prowler Cloud

1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app)
2. Navigate to "Configuration" > "Cloud Providers"
1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app).
2. Navigate to "Configuration" > "Cloud Providers".



3. Click on "Add Cloud Provider"
3. Click "Add Cloud Provider".



4. Select "Microsoft 365"
4. Select "Microsoft 365".



5. Add the Domain ID and an optional alias, then click "Next"
5. Add the Domain ID and an optional alias, then click "Next".



### Step 3: Select Authentication Method and Provide Credentials
### Step 3: Choose and Provide Authentication

Prowler App now separates Microsoft 365 authentication into two app-only options. After adding the Domain ID (primary tenant domain), choose the method that matches your setup:
After the Domain ID is in place, select the app-only authentication option that matches the Microsoft Entra ID setup:

<img src="/images/providers/m365-auth-selection-form.png" alt="M365 authentication method selection" width="700" />

#### Application Certificate Authentication (Recommended)

1. Enter your **tenant ID**: This is the unique identifier for your Microsoft Entra ID directory.
2. Enter your **application (client) ID**: This is the unique identifier assigned to your app registration in Microsoft Entra ID.
3. Upload your **certificate file content**: This is the Base64 encoded certificate content used to authenticate your application.
1. Enter the **tenant ID**, the unique identifier for the Microsoft Entra ID directory.
2. Enter the **application (client) ID**, the identifier for the Entra application registration.
3. Upload the **certificate file content** (Base64-encoded PFX).

<img src="/images/providers/certificate-form.png" alt="M365 certificate authentication form" width="700" />

Use this method whenever possible to avoid managing client secrets and to unlock every Microsoft 365 check, including those that require PowerShell modules.

For detailed instructions on how to setup Application Certificate Authentication, see the [Authentication](/user-guide/providers/microsoft365/authentication#application-certificate-authentication-recommended) page.
Use this method to avoid managing secrets and to unlock all Microsoft 365 checks, including the PowerShell-based ones. Full setup steps are in the [Authentication guide](/user-guide/providers/microsoft365/authentication#application-certificate-authentication-recommended).

#### Application Client Secret Authentication

1. Enter your **tenant ID**: This is the unique identifier for your Microsoft Entra ID directory.
2. Enter your **application (client) ID**: This is the unique identifier assigned to your app registration in Microsoft Entra ID.
3. Enter your **client secret**: This is the secret key used to authenticate your application.
1. Enter the **tenant ID**.
2. Enter the **application (client) ID**.
3. Enter the **client secret**.

<img src="/images/providers/secret-form.png" alt="M365 client secret authentication form" width="700" />

For detailed instructions on how to setup Application Client Secret Authentication, see the [Authentication](/user-guide/providers/microsoft365/authentication#application-client-secret-authentication) page.
For the complete setup workflow, follow the [Authentication guide](/user-guide/providers/microsoft365/authentication#application-client-secret-authentication).

### Step 4: Launch the Scan

@@ -90,30 +98,30 @@ For detailed instructions on how to setup Application Client Secret Authenticati

## Prowler CLI

Use Prowler CLI to scan Microsoft 365 environments.
### Step 1: Confirm PowerShell Coverage

### PowerShell Requirements
PowerShell 7.4+ keeps the full Microsoft 365 coverage. Installation options are listed in the [Authentication guide](/user-guide/providers/microsoft365/authentication#supported-powershell-versions).

PowerShell 7.4+ is required for comprehensive Microsoft 365 security coverage. Installation instructions are available in the [Authentication guide](/user-guide/providers/microsoft365/authentication#supported-powershell-versions).
### Step 2: Select an Authentication Method

### Authentication Options

Select an authentication method from the [Microsoft 365 Authentication](/user-guide/providers/microsoft365/authentication) guide:
Choose the matching flag from the [Microsoft 365 Authentication](/user-guide/providers/microsoft365/authentication) guide:

- **Application Certificate Authentication** (recommended): `--certificate-auth`
- **Application Client Secret Authentication**: `--sp-env-auth`
- **Azure CLI Authentication**: `--az-cli-auth`
- **Interactive Browser Authentication**: `--browser-auth`

### Basic Usage
### Step 3: Run the First Scan

After configuring authentication, run a basic scan:
Run a baseline scan after credentials are configured:

```console
prowler m365 --sp-env-auth
```

For comprehensive scans including PowerShell checks:
### Step 4: Enable Full Coverage

Include PowerShell module initialization to run every check:

```console
prowler m365 --sp-env-auth --init-modules

@@ -4,7 +4,41 @@ title: 'Getting Started with Oracle Cloud Infrastructure (OCI)'

Prowler supports security scanning of Oracle Cloud Infrastructure (OCI) environments. This guide will help you get started with using Prowler to audit your OCI tenancy.

## Prerequisites
## Prowler Cloud

The following steps apply to Prowler Cloud and the self-hosted Prowler App.

### Step 1: Collect OCI Identifiers
1. Sign in to the [OCI Console](https://cloud.oracle.com/) and open **Tenancy Details** to copy the Tenancy OCID.
2. Go to **Identity & Security** → **Users**, select the principal that owns the API key, and copy the **User OCID**.
3. Generate or locate the API key fingerprint and private key for that user. Follow the [Config File Authentication steps](/user-guide/providers/oci/authentication#config-file-authentication-manual-api-key-setup) to create or rotate the key pair and copy the fingerprint.
4. Note the **Region** identifier to scan (for example, `us-ashburn-1`).

### Step 2: Access Prowler Cloud or Prowler App
1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app).
2. Go to **Configuration** → **Cloud Providers** and click **Add Cloud Provider**.

3. Select **Oracle Cloud** and enter the **Tenancy OCID** and an optional alias, then choose **Next**.


### Step 3: Add OCI API Key Credentials
Prowler App connects to OCI with API key credentials. Provide:

- **User OCID** for the API key owner
- **Fingerprint** of the API key
- **Region** (for example, `us-ashburn-1`)
- **Private Key Content** (paste the full PEM value)
- **Passphrase (Optional)** if the private key is encrypted

Select **Next**, then **Launch Scan** to validate the connection and start the first OCI scan. The private key content is encoded for secure transmission.



---

## Prowler CLI

### Prerequisites

Before you begin, ensure you have:

@@ -22,13 +56,13 @@ Before you begin, ensure you have:

3. **OCI Account Access** with appropriate permissions to read resources in your tenancy.

## Authentication
### Authentication

Prowler supports multiple authentication methods for OCI. For detailed authentication setup, see the [OCI Authentication Guide](./authentication.mdx).

**Note:** OCI Session Authentication and Config File Authentication both use the same `~/.oci/config` file. The difference is how the config file is generated - automatically via browser (session auth) or manually with API keys.

### Quick Start: OCI Session Authentication (Recommended)
#### Quick Start: OCI Session Authentication (Recommended)

The easiest and most secure method is using OCI session authentication, which automatically generates your config file via browser login.

@@ -71,13 +105,13 @@ The easiest and most secure method is using OCI session authentication, which au
prowler oci
```

### Alternative: Manual API Key Setup
#### Alternative: Manual API Key Setup

If you prefer to manually generate API keys instead of using browser-based session authentication, see the detailed instructions in the [Authentication Guide](./authentication.mdx#config-file-authentication-manual-api-key-setup).

**Note:** Both methods use the same `~/.oci/config` file - the difference is that manual setup uses static API keys while session authentication uses temporary session tokens.

#### Using a Specific Profile
##### Using a Specific Profile

If you have multiple profiles in your OCI config:

@@ -85,13 +119,13 @@ If you have multiple profiles in your OCI config:
prowler oci --profile production
```

#### Using a Custom Config File
##### Using a Custom Config File

```bash
prowler oci --config-file /path/to/custom/config
```

### 2. Instance Principal Authentication
#### Instance Principal Authentication

**IMPORTANT:** This authentication method **only works when Prowler is running inside an OCI compute instance**. If you're running Prowler from your local machine, use [OCI Session Authentication](#quick-start-oci-session-authentication-recommended) instead.

@@ -110,39 +144,39 @@ prowler oci --use-instance-principal
Allow dynamic-group prowler-instances to read all-resources in tenancy
```

## Basic Usage
### Basic Usage

### Scan Entire Tenancy
#### Scan Entire Tenancy

```bash
prowler oci
```

### Scan Specific Region
#### Scan Specific Region

```bash
prowler oci --region us-phoenix-1
```

### Scan Specific Compartments
#### Scan Specific Compartments

```bash
prowler oci --compartment-id ocid1.compartment.oc1..example1 ocid1.compartment.oc1..example2
```

### Run Specific Checks
#### Run Specific Checks

```bash
prowler oci --check identity_password_policy_minimum_length_14
```

### Run Specific Services
#### Run Specific Services

```bash
prowler oci --service identity network
```

### Compliance Frameworks
#### Compliance Frameworks

Run CIS OCI Foundations Benchmark v3.0:

@@ -150,11 +184,11 @@ Run CIS OCI Foundations Benchmark v3.0:
prowler oci --compliance cis_3.0_oci
```

## Required Permissions
### Required Permissions

Prowler requires **read-only** permissions to audit your OCI tenancy. Below are the minimum required permissions:

### Tenancy-Level Policy
#### Tenancy-Level Policy

Create a group `prowler-users` and add your user to it, then create this policy:

@@ -167,7 +201,7 @@ Allow group prowler-users to read cloud-guard-problems in tenancy
Allow group prowler-users to read cloud-guard-targets in tenancy
```
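
For reference, the group and policy can also be created from the OCI CLI. A minimal sketch, assuming the CLI is configured with sufficient IAM permissions; the single broad `read all-resources` statement shown here is a coarse alternative to the granular statements in this guide, and the names are illustrative:

```bash
# Hypothetical setup of the prowler-users group and a broad read-only policy.
oci iam group create --name prowler-users --description "Read-only group for Prowler audits"
oci iam policy create \
  --compartment-id "<tenancy-ocid>" \
  --name prowler-users-policy \
  --description "Read-only permissions for Prowler" \
  --statements '["Allow group prowler-users to read all-resources in tenancy"]'
```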

### Service-Specific Permissions
#### Service-Specific Permissions

For more granular control, you can grant specific permissions:

@@ -217,33 +251,33 @@ Allow group prowler-users to inspect ons-subscriptions in tenancy
Allow group prowler-users to inspect rules in tenancy
```

## Output Formats
### Output Formats

Prowler supports multiple output formats for OCI:

### JSON
#### JSON
```bash
prowler oci --output-formats json
```

### CSV
#### CSV
```bash
prowler oci --output-formats csv
```

### HTML
#### HTML
```bash
prowler oci --output-formats html
```

### Multiple Formats
#### Multiple Formats
```bash
prowler oci --output-formats json csv html
```

## Common Scenarios
### Common Scenarios

### Security Assessment
#### Security Assessment

Full security assessment with CIS compliance:

@@ -254,7 +288,7 @@ prowler oci \
--output-directory ./oci-assessment-$(date +%Y%m%d)
```

### Continuous Monitoring
#### Continuous Monitoring

Run specific security-critical checks:

@@ -266,7 +300,7 @@ prowler oci \
--output-formats json
```

### Compartment-Specific Audit
#### Compartment-Specific Audit

Audit a specific project compartment:

@@ -277,9 +311,9 @@ prowler oci \
--region us-ashburn-1
```

## Troubleshooting
### Troubleshooting

### Authentication Issues
#### Authentication Issues

**Error: "Could not find a valid config file"**
- Ensure `~/.oci/config` exists and is properly formatted

@@ -291,23 +325,23 @@ prowler oci \

- Ensure the public key is uploaded to your OCI user account
- Check that the private key file is accessible

### Permission Issues
#### Permission Issues

**Error: "Authorization failed or requested resource not found"**
- Verify your user has the required policies (see [Required Permissions](#required-permissions))
- Check that policies apply to the correct compartments
- Ensure policies are not restricted by conditions that exclude your user

### Region Issues
#### Region Issues

**Error: "Invalid region"**
- Check available regions: `prowler oci --list-regions`
- Verify your tenancy is subscribed to the region
- Use the region identifier (e.g., `us-ashburn-1`), not the display name

## Advanced Usage
### Advanced Usage

### Using Mutelist
#### Using Mutelist

Create a mutelist file to suppress specific findings:

@@ -329,7 +363,7 @@ Run with mutelist:
prowler oci --mutelist-file oci-mutelist.yaml
```
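
The mutelist file itself is YAML. A minimal sketch of what `oci-mutelist.yaml` can look like, with the layout assumed from Prowler's general mutelist format (the account, check, region, and resource patterns shown are illustrative):

```bash
# Hypothetical mutelist: silence one identity check everywhere.
cat > oci-mutelist.yaml <<'EOF'
Mutelist:
  Accounts:
    "*":
      Checks:
        "identity_password_policy_minimum_length_14":
          Regions:
            - "*"
          Resources:
            - "*"
EOF
prowler oci --mutelist-file oci-mutelist.yaml
```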

### Custom Checks Metadata
#### Custom Checks Metadata

Override check metadata:

@@ -346,7 +380,7 @@ Run with custom metadata:
prowler oci --custom-checks-metadata-file custom-metadata.yaml
```
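
As with the mutelist, the metadata file is YAML. A sketch of what `custom-metadata.yaml` might contain, with the layout assumed by analogy with Prowler's documented custom-metadata format for other providers:

```bash
# Hypothetical override: raise the severity of one check.
cat > custom-metadata.yaml <<'EOF'
CustomChecksMetadata:
  oci:
    Checks:
      identity_password_policy_minimum_length_14:
        Severity: high
EOF
prowler oci --custom-checks-metadata-file custom-metadata.yaml
```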

### Filtering by Status
#### Filtering by Status

Only show failed checks:

@@ -354,7 +388,7 @@ Only show failed checks:
prowler oci --status FAIL
```

### Filtering by Severity
#### Filtering by Severity

Only show critical and high severity findings:

@@ -362,13 +396,13 @@ Only show critical and high severity findings:
prowler oci --severity critical high
```

## Next Steps
### Next Steps

- Learn about [Compliance Frameworks](/user-guide/cli/tutorials/compliance) in Prowler
- Review [Prowler Output Formats](/user-guide/cli/tutorials/reporting)
- Explore [Integrations](/user-guide/cli/tutorials/integrations) with SIEM and ticketing systems

## Additional Resources
### Additional Resources

- [OCI Documentation](https://docs.oracle.com/en-us/iaas/Content/home.htm)
- [CIS OCI Foundations Benchmark](https://www.cisecurity.org/benchmark/oracle_cloud)

@@ -0,0 +1,169 @@
---
title: 'Using Multiple LLM Providers with Lighthouse'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.14.0" />

Prowler Lighthouse AI supports multiple Large Language Model (LLM) providers, offering flexibility to choose the provider that best fits infrastructure, compliance requirements, and cost considerations. This guide explains how to configure and use different LLM providers with Lighthouse AI.

## Supported Providers

Lighthouse AI supports the following LLM providers:

- **OpenAI**: Provides access to GPT models (GPT-4o, GPT-4, etc.)
- **Amazon Bedrock**: Offers AWS-hosted access to Claude, Llama, Titan, and other models
- **OpenAI Compatible**: Supports custom endpoints like OpenRouter, Ollama, or any OpenAI-compatible service

## Model Requirements

For Lighthouse AI to work properly, models **must** support all of the following capabilities:

- **Text input**: Ability to receive text prompts.
- **Text output**: Ability to generate text responses.
- **Tool calling**: Ability to invoke tools and functions.

If any of these capabilities are missing, the model will not be compatible with Lighthouse AI.

## How Default Providers Work

All three providers can be configured for a tenant, but only one can be set as the default provider. The first configured provider automatically becomes the default.

When visiting Lighthouse AI chat, the default provider's default model loads automatically. Users can switch to any available LLM model (including those from non-default providers) using the dropdown in chat.

<img src="/images/prowler-app/lighthouse-switch-models.png" alt="Switch models in Lighthouse AI chat interface" />

## Configuring Providers

Navigate to **Configuration** → **Lighthouse AI** to see all three provider options with a **Connect** button under each.

<img src="/images/prowler-app/lighthouse-configuration.png" alt="Prowler Lighthouse Configuration" />

### Connecting a Provider

To connect a provider:

1. Click **Connect** under the desired provider
2. Enter the required credentials
3. Select a default model for that provider
4. Click **Connect** to save

<Tabs>
<Tab title="OpenAI">
### Required Information

- **API Key**: OpenAI API key (starts with `sk-` or `sk-proj-`). API keys can be created from the [OpenAI platform](https://platform.openai.com/api-keys).
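
Before connecting, the key can be sanity-checked against the standard OpenAI models endpoint (the environment variable name is just a placeholder):

```bash
# Quick validity check: a 200 response listing models means the key is active.
curl -s https://api.openai.com/v1/models -H "Authorization: Bearer $OPENAI_API_KEY"
```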

### Before Connecting

- Ensure the OpenAI account has sufficient credits.
- Verify that the `gpt-5` model (recommended for Lighthouse AI) is not blocked in the OpenAI organization settings.
</Tab>

<Tab title="Amazon Bedrock">
Prowler connects to Amazon Bedrock using either [Amazon Bedrock API keys](https://docs.aws.amazon.com/bedrock/latest/userguide/getting-started-api-keys.html) or IAM credentials.

<Note>
Amazon Bedrock models depend on AWS region and account entitlements. Lighthouse AI displays only accessible models that support tool calling and text input/output.
</Note>

### Amazon Bedrock Long-Term API Key

<VersionBadge version="5.15.0" />

<Warning>
Amazon Bedrock Long-Term API keys are recommended only for exploration purposes. For production environments, use AWS IAM Access Keys with properly scoped permissions.
</Warning>

Amazon Bedrock API keys provide simpler authentication with automatically assigned permissions.

#### Required Information

- **Bedrock Long-Term API Key**: The API key generated from Amazon Bedrock.
- **AWS Region**: Region where Bedrock is available.

<Note>
Amazon Bedrock Long-Term API keys are automatically assigned the necessary permissions (`AmazonBedrockLimitedAccess` policy).

Learn more: [Getting Started with Amazon Bedrock API Keys](https://docs.aws.amazon.com/bedrock/latest/userguide/getting-started-api-keys.html)
</Note>

### AWS IAM Access Keys

Standard AWS IAM credentials can be used as an alternative authentication method.

#### Required Information

- **AWS Access Key ID**: The access key ID for the IAM user.
- **AWS Secret Access Key**: The secret access key for the IAM user.
- **AWS Region**: Region where Bedrock is available.

#### Required Permissions

The AWS IAM user must have the `AmazonBedrockLimitedAccess` managed policy attached:

```text
arn:aws:iam::aws:policy/AmazonBedrockLimitedAccess
```
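
If the policy is not yet attached, one way to do it with the AWS CLI (the user name is a placeholder):

```bash
# Attach the managed policy to the IAM user whose keys Lighthouse AI will use.
aws iam attach-user-policy \
  --user-name lighthouse-bedrock-user \
  --policy-arn arn:aws:iam::aws:policy/AmazonBedrockLimitedAccess
```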

<Note>
Access to all Amazon Bedrock foundation models is enabled by default. When you select a model or invoke it for the first time (using Prowler or otherwise), you agree to Amazon's EULA. More info: [Amazon Bedrock Model Access](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access.html)
</Note>

</Tab>

<Tab title="OpenAI Compatible">
Use this option to connect to any LLM provider exposing an OpenAI-compatible API endpoint (OpenRouter, Ollama, etc.).

### Required Information

- **API Key**: API key from the compatible service.
- **Base URL**: API endpoint URL including the API version (e.g., `https://openrouter.ai/api/v1`).

### Example: OpenRouter

1. Create an account at [OpenRouter](https://openrouter.ai/)
2. [Generate an API key](https://openrouter.ai/docs/guides/overview/auth/provisioning-api-keys) from the OpenRouter dashboard
3. Configure in Lighthouse AI:
   - **API Key**: OpenRouter API key
   - **Base URL**: `https://openrouter.ai/api/v1`
|
||||
</Tab>
|
||||
</Tabs>
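
Before saving the configuration, you can optionally smoke-test the endpoint and key. A minimal sketch using `httpx` (the base URL and key below are placeholders) that lists the models the key can access:

```python
import httpx

BASE_URL = "https://openrouter.ai/api/v1"  # the Base URL you plan to configure
API_KEY = "sk-or-..."  # placeholder API key

# OpenAI-compatible services expose GET {base_url}/models
response = httpx.get(
    f"{BASE_URL}/models",
    headers={"Authorization": f"Bearer {API_KEY}"},
    timeout=10.0,
)
response.raise_for_status()
print([model["id"] for model in response.json()["data"]])
```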

## Changing the Default Provider

To set a different provider as default:

1. Navigate to **Configuration** → **Lighthouse AI**
2. Click **Configure** under the provider you want as the default
3. Click **Set as Default**

<img src="/images/prowler-app/lighthouse-set-default-provider.png" alt="Set default LLM provider" />

## Updating Provider Credentials

To update credentials for a connected provider:

1. Navigate to **Configuration** → **Lighthouse AI**
2. Click **Configure** under the provider
3. Enter the new credentials
4. Click **Update**

## Deleting a Provider

To remove a configured provider:

1. Navigate to **Configuration** → **Lighthouse AI**
2. Click **Configure** under the provider
3. Click **Delete**

## Model Recommendations

For best results with Lighthouse AI, the recommended model is `gpt-5` from OpenAI.

Models from other providers, such as Amazon Bedrock and OpenAI Compatible endpoints, can be connected and used, but performance is not guaranteed. Ensure that any selected model supports text input, text output, and tool calling.

## Getting Help

For issues or suggestions, [reach out through our Slack channel](https://goto.prowler.com/slack).

@@ -1,26 +1,20 @@
---
title: 'Prowler Lighthouse AI'
title: 'How It Works'
---

import { VersionBadge } from "/snippets/version-badge.mdx"

<VersionBadge version="5.8.0" />

Prowler Lighthouse AI is a Cloud Security Analyst chatbot that helps you understand, prioritize, and remediate security findings in your cloud environments. It's designed to provide security expertise for teams without dedicated resources, acting as your 24/7 virtual cloud security analyst.

<img src="/images/prowler-app/lighthouse-intro.png" alt="Prowler Lighthouse" />

## How It Works

Prowler Lighthouse AI uses OpenAI's language models and integrates with your Prowler security findings data.
Prowler Lighthouse AI integrates Large Language Models (LLMs) with Prowler security findings data.

Here's what's happening behind the scenes:

- The system uses a multi-agent architecture built with [LanggraphJS](https://github.com/langchain-ai/langgraphjs) for LLM logic and [Vercel AI SDK UI](https://sdk.vercel.ai/docs/ai-sdk-ui/overview) for the frontend chatbot.
- It uses a ["supervisor" architecture](https://langchain-ai.lang.chat/langgraphjs/tutorials/multi_agent/agent_supervisor/) that interacts with different agents for specialized tasks. For example, `findings_agent` can analyze detected security findings, while `overview_agent` provides a summary of connected cloud accounts.
- The system connects to OpenAI models to understand, fetch the right data, and respond to the user's query.
- The system connects to the configured LLM provider to understand the user's query, fetch the right data, and respond to it.
<Note>
Lighthouse AI is tested against `gpt-4o` and `gpt-4o-mini` OpenAI models.
Lighthouse AI supports multiple LLM providers including OpenAI, Amazon Bedrock, and OpenAI-compatible services. For configuration details, see [Using Multiple LLM Providers with Lighthouse](/user-guide/tutorials/prowler-app-lighthouse-multi-llm).
</Note>
- The supervisor agent is the main contact point. It is what users interact with directly from the chat interface. It coordinates with other agents to answer users' questions comprehensively.

@@ -30,16 +24,22 @@ Lighthouse AI is tested against `gpt-4o` and `gpt-4o-mini` OpenAI models.
All agents can only read relevant security data. They cannot modify your data or access sensitive information like configured secrets or tenant details.

</Note>

## Set up

Getting started with Prowler Lighthouse AI is easy:

1. Go to the configuration page in your Prowler dashboard.
2. Enter your OpenAI API key.
3. Select your preferred model. The recommended one for best results is `gpt-4o`.
4. (Optional) Add business context to improve response quality and prioritization.
1. Navigate to **Configuration** → **Lighthouse AI**
2. Click **Connect** under the desired provider (OpenAI, Amazon Bedrock, or OpenAI Compatible)
3. Enter the required credentials
4. Select a default model
5. Click **Connect** to save

<img src="/images/prowler-app/lighthouse-config.png" alt="Lighthouse AI Configuration" />
<Note>
For detailed configuration instructions for each provider, see [Using Multiple LLM Providers with Lighthouse](/user-guide/tutorials/prowler-app-lighthouse-multi-llm).
</Note>

<img src="/images/prowler-app/lighthouse-configuration.png" alt="Lighthouse AI Configuration" />

### Adding Business Context

@@ -51,163 +51,3 @@ The optional business context field lets you provide additional information to h
- Current security initiatives or focus areas

Better context leads to more relevant responses and prioritization that aligns with your needs.

## Capabilities

Prowler Lighthouse AI is designed to be your AI security team member, with capabilities including:

### Natural Language Querying

Ask questions in plain English about your security findings. Examples:

- "What are my highest risk findings?"
- "Show me all S3 buckets with public access."
- "What security issues were found in my production accounts?"

<img src="/images/prowler-app/lighthouse-feature1.png" alt="Natural language querying" />

### Detailed Remediation Guidance

Get tailored step-by-step instructions for fixing security issues:

- Clear explanations of the problem and its impact
- Commands or console steps to implement fixes
- Alternative approaches with different solutions

<img src="/images/prowler-app/lighthouse-feature2.png" alt="Detailed Remediation" />

### Enhanced Context and Analysis

Lighthouse AI can provide additional context to help you understand the findings:

- Explain security concepts related to findings in simple terms
- Provide risk assessments based on your environment and context
- Connect related findings to show broader security patterns

<img src="/images/prowler-app/lighthouse-config.png" alt="Business Context" />

<img src="/images/prowler-app/lighthouse-feature3.png" alt="Contextual Responses" />

## Important Notes

Prowler Lighthouse AI is powerful, but there are limitations:

- **Continuous improvement**: Despite extensive testing, the feature may make mistakes or encounter errors, so please report any issues.
- **Access limitations**: Lighthouse AI can only access data the logged-in user can view. If you can't see certain information, Lighthouse AI can't see it either.
- **NextJS session dependence**: If your Prowler application session expires or logs out, Lighthouse AI will error out. Refresh and log back in to continue.
- **Response quality**: The response quality depends on the selected OpenAI model. For best results, use `gpt-4o`.

### Getting Help

If you encounter issues with Prowler Lighthouse AI or have suggestions for improvements, please [reach out through our Slack channel](https://goto.prowler.com/slack).

### What Data Is Shared to OpenAI?

The following API endpoints are accessible to Prowler Lighthouse AI. Data from them could be shared with OpenAI depending on the scope of the user's query:

#### Accessible API Endpoints

**User Management:**

- List all users - `/api/v1/users`
- Retrieve the current user's information - `/api/v1/users/me`

**Provider Management:**

- List all providers - `/api/v1/providers`
- Retrieve data from a provider - `/api/v1/providers/{id}`

**Scan Management:**

- List all scans - `/api/v1/scans`
- Retrieve data from a specific scan - `/api/v1/scans/{id}`

**Resource Management:**

- List all resources - `/api/v1/resources`
- Retrieve data for a resource - `/api/v1/resources/{id}`

**Findings Management:**

- List all findings - `/api/v1/findings`
- Retrieve data from a specific finding - `/api/v1/findings/{id}`
- Retrieve metadata values from findings - `/api/v1/findings/metadata`

**Overview Data:**

- Get aggregated findings data - `/api/v1/overviews/findings`
- Get findings data by severity - `/api/v1/overviews/findings_severity`
- Get aggregated provider data - `/api/v1/overviews/providers`
- Get findings data by service - `/api/v1/overviews/services`

**Compliance Management:**

- List compliance overviews (optionally filter by scan) - `/api/v1/compliance-overviews`
- Retrieve data from a specific compliance overview - `/api/v1/compliance-overviews/{id}`

#### Excluded API Endpoints

Not all Prowler API endpoints are integrated with Lighthouse AI. Some are intentionally excluded for the following reasons:

- OpenAI and other LLM providers shouldn't have access to sensitive data (such as provider secrets and other sensitive configuration)
- User queries don't need responses from those API endpoints (e.g., tasks, tenant details, downloading zip files)

**Excluded Endpoints:**

**User Management:**

- List specific users' information - `/api/v1/users/{id}`
- List user memberships - `/api/v1/users/{user_pk}/memberships`
- Retrieve membership data from the user - `/api/v1/users/{user_pk}/memberships/{id}`

**Tenant Management:**

- List all tenants - `/api/v1/tenants`
- Retrieve data from a tenant - `/api/v1/tenants/{id}`
- List tenant memberships - `/api/v1/tenants/{tenant_pk}/memberships`
- List all invitations - `/api/v1/tenants/invitations`
- Retrieve data from a tenant invitation - `/api/v1/tenants/invitations/{id}`

**Security and Configuration:**

- List all secrets - `/api/v1/providers/secrets`
- Retrieve data from a secret - `/api/v1/providers/secrets/{id}`
- List all provider groups - `/api/v1/provider-groups`
- Retrieve data from a provider group - `/api/v1/provider-groups/{id}`

**Reports and Tasks:**

- Download zip report - `/api/v1/scans/{id}/report`
- List all tasks - `/api/v1/tasks`
- Retrieve data from a specific task - `/api/v1/tasks/{id}`

**Lighthouse AI Configuration:**

- List OpenAI configuration - `/api/v1/lighthouse-config`
- Retrieve OpenAI key and configuration - `/api/v1/lighthouse-config/{id}`

<Note>
Agents can only call GET endpoints. They don't have access to other HTTP methods.
</Note>

## FAQs

**1. Why only OpenAI models?**

During feature development, we evaluated other LLM models.

- **Claude AI** - Claude models have [tier-based rate limits](https://docs.anthropic.com/en/api/rate-limits#requirements-to-advance-tier). Answering even a slightly complex question requires a handful of API calls to the LLM provider within a few seconds. With Claude's tiering system, users must purchase $400 in credits or convert their subscription to monthly invoicing after talking to the Anthropic sales team. This pricing may not suit all Prowler users.
- **Gemini Models** - Gemini lacks tool calling as robust as OpenAI's: it calls functions recursively until it exceeds limits. Gemini-2.5-Pro-Experimental is better than previous models at tool calling and responding, but it's still experimental.
- **Deepseek V3** - Doesn't support system prompt messages.

**2. Why a multi-agent supervisor model?**

Context windows are limited. While demo data fits inside the context window, querying real-world data often exceeds it. A multi-agent architecture is used so different agents fetch different sizes of data and respond with the minimum required data to the supervisor. This spreads the context window usage across agents.

**3. Is my security data shared with OpenAI?**

Minimal data is shared to generate useful responses. Agents can access security findings and remediation details when needed. Provider secrets are protected by design and cannot be read. The OpenAI key configured with Lighthouse AI is only accessible to our NextJS server and is never sent to LLMs. Resource metadata (names, tags, account/project IDs, etc.) may be shared with OpenAI based on your query requirements.

**4. Can Lighthouse AI change my cloud environment?**

No. The agent doesn't have the tools to make changes, even if the configured cloud provider API keys contain permissions to modify resources.

@@ -68,6 +68,8 @@ To perform security scans, link a cloud provider account. Prowler supports the f

- **GitHub**

- **Oracle Cloud Infrastructure (OCI)**

Steps to add a provider:

1. Navigate to `Settings > Cloud Providers`.
@@ -93,6 +95,9 @@ For detailed instructions on configuring credentials for each provider, refer to
<Card title="Google Cloud" icon="google" href="/user-guide/providers/gcp/getting-started-gcp">
Configure GCP authentication with Service Account or Application Default Credentials.
</Card>
<Card title="Oracle Cloud Infrastructure" icon="cloud" href="/user-guide/providers/oci/getting-started-oci">
Connect OCI with API key credentials to scan compartments and regions.
</Card>
<Card title="Kubernetes" icon="cloud" href="/user-guide/providers/kubernetes/getting-started-k8s">
Set up Kubernetes authentication using kubeconfig files for cluster access.
</Card>

@@ -2,14 +2,26 @@

All notable changes to the **Prowler MCP Server** are documented in this file.

## [0.1.1] (Prowler 5.14.0)

### Fixed
- Fix documentation MCP Server to return a list of dictionaries [(#9205)](https://github.com/prowler-cloud/prowler/pull/9205)

## [0.1.0] (Prowler 5.13.0)
## [0.2.0] (Prowler UNRELEASED)

### Added

- Remove all Prowler App MCP tools and add new MCP Server tools for Prowler Findings and Compliance [(#9300)](https://github.com/prowler-cloud/prowler/pull/9300)

---

## [0.1.1] (Prowler v5.14.0)

### Fixed

- Fix documentation MCP Server to return a list of dictionaries [(#9205)](https://github.com/prowler-cloud/prowler/pull/9205)

---

## [0.1.0] (Prowler v5.13.0)

### Added

- Initial release of Prowler MCP Server [(#8695)](https://github.com/prowler-cloud/prowler/pull/8695)
- Set appropriate user-agent in requests [(#8724)](https://github.com/prowler-cloud/prowler/pull/8724)
- Basic logger functionality [(#8740)](https://github.com/prowler-cloud/prowler/pull/8740)

@@ -33,8 +33,6 @@ def main():
    try:
        args = parse_arguments()

        print(f"args.transport: {args.transport}")

        if args.transport is None:
            args.transport = os.getenv("PROWLER_MCP_TRANSPORT_MODE", "stdio")
        else:

@@ -0,0 +1,24 @@
"""Pydantic models for Prowler App MCP Server."""

from prowler_mcp_server.prowler_app.models.base import MinimalSerializerMixin

from prowler_mcp_server.prowler_app.models.findings import (
    CheckMetadata,
    CheckRemediation,
    DetailedFinding,
    FindingsListResponse,
    FindingsOverview,
    SimplifiedFinding,
)

__all__ = [
    # Base models
    "MinimalSerializerMixin",
    # Findings models
    "CheckMetadata",
    "CheckRemediation",
    "DetailedFinding",
    "FindingsListResponse",
    "FindingsOverview",
    "SimplifiedFinding",
]

@@ -0,0 +1,59 @@
"""Base models and mixins for Prowler MCP Server models."""

from typing import Any

from pydantic import BaseModel, SerializerFunctionWrapHandler, model_serializer


class MinimalSerializerMixin(BaseModel):
    """Mixin that excludes empty values from serialization.

    This mixin optimizes model serialization for LLM consumption by removing noise
    and reducing token usage. It excludes:
    - None values
    - Empty strings
    - Empty lists
    - Empty dicts
    """

    @model_serializer(mode="wrap")
    def _serialize(self, handler: SerializerFunctionWrapHandler) -> dict[str, Any]:
        """Serialize model excluding empty values.

        Args:
            handler: Pydantic serializer function wrapper

        Returns:
            Dictionary with non-empty values only
        """
        data = handler(self)
        return {k: v for k, v in data.items() if not self._should_exclude(v)}

    def _should_exclude(self, value: Any) -> bool:
        """Determine if a value should be excluded from serialization.

        Override this method in subclasses for custom exclusion logic.

        Args:
            value: Field value

        Returns:
            True if the value should be excluded, False otherwise
        """
        # None values
        if value is None:
            return True

        # Empty strings
        if value == "":
            return True

        # Empty lists
        if isinstance(value, list) and not value:
            return True

        # Empty dicts
        if isinstance(value, dict) and not value:
            return True

        return False
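
A quick illustration of the mixin's effect on `model_dump()` (the `Example` model below is hypothetical, not part of the server):

```python
class Example(MinimalSerializerMixin):
    name: str
    tags: list[str] = []
    note: str | None = None

# Empty "tags" and None "note" are dropped from the serialized output.
print(Example(name="s3_bucket_public_access").model_dump())
# -> {'name': 's3_bucket_public_access'}
```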

@@ -0,0 +1,333 @@
"""Pydantic models for simplified security findings responses."""

from typing import Literal

from prowler_mcp_server.prowler_app.models.base import MinimalSerializerMixin
from pydantic import BaseModel, ConfigDict, Field


class CheckRemediation(MinimalSerializerMixin, BaseModel):
    """Remediation information for a security check."""

    model_config = ConfigDict(frozen=True)

    cli: str | None = Field(
        default=None,
        description="Command-line interface commands for remediation",
    )
    terraform: str | None = Field(
        default=None,
        description="Terraform code snippet with best practices for remediation",
    )
    recommendation_text: str | None = Field(
        default=None, description="Text description with best practices"
    )
    recommendation_url: str | None = Field(
        default=None,
        description="URL to external remediation documentation",
    )


class CheckMetadata(MinimalSerializerMixin, BaseModel):
    """Essential metadata for a security check."""

    model_config = ConfigDict(frozen=True)

    check_id: str = Field(
        description="Unique provider identifier for the security check (e.g., 's3_bucket_public_access')",
    )
    title: str = Field(
        description="Human-readable title of the security check",
    )
    description: str = Field(
        description="Detailed description of what the check validates",
    )
    provider: str = Field(
        description="Prowler provider this check belongs to (e.g., 'aws', 'azure', 'gcp')",
    )
    service: str = Field(
        description="Prowler service being checked (e.g., 's3', 'ec2', 'keyvault')",
    )
    resource_type: str = Field(
        description="Type of resource being evaluated (e.g., 'AwsS3Bucket')",
    )
    risk: str | None = Field(
        default=None,
        description="Risk description if the check fails",
    )
    remediation: CheckRemediation | None = Field(
        default=None,
        description="Remediation guidance including CLI commands and recommendations",
    )
    related_url: str | None = Field(
        default=None,
        description="URL to additional documentation or references",
    )
    categories: list[str] = Field(
        default_factory=list,
        description="Categories this check belongs to (e.g., ['encryption', 'logging'])",
    )

    @classmethod
    def from_api_response(cls, data: dict) -> "CheckMetadata":
        """Transform API check_metadata to simplified format."""
        remediation_data = data.get("remediation")

        remediation = None
        if remediation_data:
            code = remediation_data.get("code", {})
            recommendation = remediation_data.get("recommendation", {})

            remediation = CheckRemediation(
                cli=code.get("cli"),
                terraform=code.get("terraform"),
                recommendation_text=recommendation.get("text"),
                recommendation_url=recommendation.get("url"),
            )

        return cls(
            check_id=data["checkid"],
            title=data["checktitle"],
            description=data["description"],
            provider=data["provider"],
            risk=data.get("risk"),
            service=data["servicename"],
            resource_type=data["resourcetype"],
            remediation=remediation,
            related_url=data.get("relatedurl"),
            categories=data.get("categories", []),
        )


class SimplifiedFinding(MinimalSerializerMixin, BaseModel):
    """Simplified security finding with only LLM-relevant information."""

    model_config = ConfigDict(frozen=True)

    id: str = Field(
        description="Unique UUIDv4 identifier for this finding in Prowler database"
    )
    uid: str = Field(
        description="Human-readable unique identifier assigned by Prowler. Format: prowler-{provider}-{check_id}-{account_uid}-{region}-{resource_name}",
    )
    status: Literal["FAIL", "PASS", "MANUAL"] = Field(
        description="Result status: FAIL (security issue found), PASS (no issue), MANUAL (requires manual verification)",
    )
    severity: Literal["critical", "high", "medium", "low", "informational"] = Field(
        description="Severity level of the finding",
    )
    check_metadata: CheckMetadata = Field(
        description="Metadata about the security check that generated this finding",
    )
    status_extended: str = Field(
        description="Extended status information providing additional context",
    )
    # Optional with default None so that unchanged findings (delta is None) validate
    delta: Literal["new", "changed"] | None = Field(
        default=None,
        description="Change status: 'new' (not seen before), 'changed' (modified since last scan), or None (unchanged)",
    )
    muted: bool = Field(
        description="Whether this finding has been muted/suppressed by the user",
    )
    muted_reason: str | None = Field(
        default=None,
        description="Reason provided when muting this finding (3-500 chars if muted)",
    )

    @classmethod
    def from_api_response(cls, data: dict) -> "SimplifiedFinding":
        """Transform JSON:API finding response to simplified format."""
        attributes = data["attributes"]
        check_metadata = attributes["check_metadata"]

        return cls(
            id=data["id"],
            uid=attributes["uid"],
            status=attributes["status"],
            severity=attributes["severity"],
            check_metadata=CheckMetadata.from_api_response(check_metadata),
            status_extended=attributes["status_extended"],
            delta=attributes["delta"],
            muted=attributes["muted"],
            muted_reason=attributes["muted_reason"],
        )


class DetailedFinding(SimplifiedFinding):
    """Detailed security finding with comprehensive information for deep analysis.

    Extends SimplifiedFinding with temporal metadata and relationships to scans and resources.
    Use this when you need complete context about a specific finding.
    """

    model_config = ConfigDict(frozen=True)

    inserted_at: str = Field(
        description="ISO 8601 timestamp when this finding was first inserted into the database",
    )
    updated_at: str = Field(
        description="ISO 8601 timestamp when this finding was last updated",
    )
    first_seen_at: str | None = Field(
        default=None,
        description="ISO 8601 timestamp when this finding was first detected across all scans",
    )
    scan_id: str | None = Field(
        default=None,
        description="UUID of the scan that generated this finding",
    )
    resource_ids: list[str] = Field(
        default_factory=list,
        description="List of UUIDs for cloud resources associated with this finding",
    )

    @classmethod
    def from_api_response(cls, data: dict) -> "DetailedFinding":
        """Transform JSON:API finding response to detailed format."""
        attributes = data["attributes"]
        check_metadata = attributes["check_metadata"]
        relationships = data.get("relationships", {})

        # Parse scan relationship
        scan_id = None
        scan_data = relationships.get("scan", {}).get("data")
        if scan_data:
            scan_id = scan_data["id"]

        # Parse resources relationship
        resource_ids = []
        resources_data = relationships.get("resources", {}).get("data", [])
        if resources_data:
            resource_ids = [r["id"] for r in resources_data]

        return cls(
            id=data["id"],
            uid=attributes["uid"],
            status=attributes["status"],
            severity=attributes["severity"],
            check_metadata=CheckMetadata.from_api_response(check_metadata),
            status_extended=attributes.get("status_extended"),
            delta=attributes.get("delta"),
            muted=attributes["muted"],
            muted_reason=attributes.get("muted_reason"),
            inserted_at=attributes["inserted_at"],
            updated_at=attributes["updated_at"],
            first_seen_at=attributes.get("first_seen_at"),
            scan_id=scan_id,
            resource_ids=resource_ids,
        )


class FindingsListResponse(BaseModel):
    """Simplified response for findings list queries."""

    model_config = ConfigDict(frozen=True)

    findings: list[SimplifiedFinding] = Field(
        description="List of security findings matching the query",
    )
    total_num_finding: int = Field(
        description="Total number of findings matching the query across all pages",
        ge=0,
    )
    total_num_pages: int = Field(
        description="Total number of pages available",
        ge=0,
    )
    current_page: int = Field(
        description="Current page number (1-indexed)",
        ge=1,
    )

    @classmethod
    def from_api_response(cls, response: dict) -> "FindingsListResponse":
        """Transform JSON:API response to simplified format."""
        data = response["data"]
        meta = response["meta"]
        pagination = meta["pagination"]

        findings = [SimplifiedFinding.from_api_response(item) for item in data]

        return cls(
            findings=findings,
            total_num_finding=pagination["count"],
            total_num_pages=pagination["pages"],
            current_page=pagination["page"],
        )


class FindingsOverview(BaseModel):
    """Simplified findings overview with aggregate statistics."""

    model_config = ConfigDict(frozen=True)

    total: int = Field(
        description="Total number of findings",
        ge=0,
    )
    fail: int = Field(
        description="Total number of failed security checks",
        ge=0,
    )
    # Named 'passed' instead of 'pass' since 'pass' is a Python keyword
    passed: int = Field(
        description="Total number of passed security checks",
        ge=0,
    )
    muted: int = Field(
        description="Total number of muted findings",
        ge=0,
    )
    new: int = Field(
        description="Total number of new findings (not seen in previous scans)",
        ge=0,
    )
    changed: int = Field(
        description="Total number of changed findings (modified since last scan)",
        ge=0,
    )
    fail_new: int = Field(
        description="Number of new findings with FAIL status",
        ge=0,
    )
    fail_changed: int = Field(
        description="Number of changed findings with FAIL status",
        ge=0,
    )
    pass_new: int = Field(
        description="Number of new findings with PASS status",
        ge=0,
    )
    pass_changed: int = Field(
        description="Number of changed findings with PASS status",
        ge=0,
    )
    muted_new: int = Field(
        description="Number of new muted findings",
        ge=0,
    )
    muted_changed: int = Field(
        description="Number of changed muted findings",
        ge=0,
    )

    @classmethod
    def from_api_response(cls, response: dict) -> "FindingsOverview":
        """Transform JSON:API overview response to simplified format."""
        data = response["data"]
        attributes = data["attributes"]

        return cls(
            total=attributes["total"],
            fail=attributes["fail"],
            passed=attributes["pass"],
            muted=attributes["muted"],
            new=attributes["new"],
            changed=attributes["changed"],
            fail_new=attributes["fail_new"],
            fail_changed=attributes["fail_changed"],
            pass_new=attributes["pass_new"],
            pass_changed=attributes["pass_changed"],
            muted_new=attributes["muted_new"],
            muted_changed=attributes["muted_changed"],
        )
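
To see the transformation end to end, here is a hypothetical, heavily trimmed JSON:API payload and its simplified form (all field values are made up):

```python
api_response = {
    "data": [
        {
            "id": "019ac0d6-90d5-73e9-9acf-c22e256f1bac",
            "attributes": {
                "uid": "prowler-aws-s3_bucket_public_access-123456789012-eu-west-1-mybucket",
                "status": "FAIL",
                "severity": "high",
                "status_extended": "Bucket mybucket allows public access.",
                "delta": "new",
                "muted": False,
                "muted_reason": None,
                "check_metadata": {
                    "checkid": "s3_bucket_public_access",
                    "checktitle": "Ensure S3 buckets block public access",
                    "description": "Checks whether the bucket blocks public access.",
                    "provider": "aws",
                    "servicename": "s3",
                    "resourcetype": "AwsS3Bucket",
                },
            },
        }
    ],
    "meta": {"pagination": {"count": 1, "pages": 1, "page": 1}},
}

response = FindingsListResponse.from_api_response(api_response)
print(response.total_num_finding, response.findings[0].uid)
```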

@@ -0,0 +1,8 @@
from fastmcp import FastMCP
from prowler_mcp_server.prowler_app.utils.tool_loader import load_all_tools

# Initialize MCP server
app_mcp_server = FastMCP("prowler-app")

# Auto-discover and load all tools from the tools package
load_all_tools(app_mcp_server)
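
If you want to exercise this module on its own, a possible sketch (assuming FastMCP's standard `run()` entry point; the project may ship its own launcher):

```python
if __name__ == "__main__":
    # Serve the auto-discovered tools over the default stdio transport.
    app_mcp_server.run()
```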

@@ -0,0 +1,7 @@
"""Domain-specific tools for Prowler App MCP Server.

Each module in this package contains a BaseTool subclass that registers
and implements tools for a specific domain (findings, providers, scans, etc.).

Tools are automatically discovered and loaded by the load_all_tools() function.
"""

@@ -0,0 +1,102 @@
import inspect
from abc import ABC
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from fastmcp import FastMCP

from prowler_mcp_server.lib.logger import logger
from prowler_mcp_server.prowler_app.utils.api_client import ProwlerAPIClient


class BaseTool(ABC):
    """Abstract base class for all MCP tools.

    This class defines the contract that all domain-specific tools must follow.
    It ensures consistency across tool registration and provides common utilities.

    Key responsibilities:
    - Automatically register public async methods as tools via register_tools()
    - Provide shared access to API client and logger
    - Define common patterns for tool registration
    - Support dependency injection for the FastMCP instance

    Attributes:
        _api_client: Singleton instance of ProwlerAPIClient for API requests
        _logger: Logger instance for structured logging

    Example:
        class FindingsTools(BaseTool):
            async def search_security_findings(self, severity: list[str] = Field(...)):
                # Implementation with access to self.api_client
                response = await self.api_client.get("/api/v1/findings")
                return response

        # All public async methods are picked up automatically:
        FindingsTools().register_tools(mcp)
    """

    def __init__(self):
        """Initialize the tool.

        Sets up shared dependencies that all tools can access:
        - API client (singleton) for making authenticated requests
        - Logger instance for structured logging
        """
        self._api_client = ProwlerAPIClient()
        self._logger = logger

    @property
    def api_client(self) -> ProwlerAPIClient:
        """Get the shared API client instance.

        Returns:
            Singleton instance of ProwlerAPIClient for making API requests
        """
        return self._api_client

    @property
    def logger(self):
        """Get the logger instance.

        Returns:
            Logger instance for structured logging
        """
        return self._logger

    def register_tools(self, mcp: "FastMCP") -> None:
        """Automatically register all public async methods as tools with FastMCP.

        This method inspects the subclass and automatically registers all public
        async methods (not starting with '_') as tools. Subclasses do not need
        to override this method.

        Args:
            mcp: The FastMCP instance to register tools with
        """
        # Get all methods from the subclass
        registered_count = 0

        for name, method in inspect.getmembers(self, predicate=inspect.ismethod):
            # Skip private/protected methods
            if name.startswith("_"):
                continue

            # Skip methods inherited from BaseTool
            if name in ["register_tools"]:
                continue

            # Skip property getters
            if name in ["api_client", "logger"]:
                continue

            # Check if the method is a coroutine function (async)
            if inspect.iscoroutinefunction(method):
                mcp.tool(method)
                registered_count += 1
                self.logger.debug(f"Auto-registered tool: {name}")

        self.logger.info(
            f"Auto-registered {registered_count} tools from {self.__class__.__name__}"
        )
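
As a concrete sketch, a hypothetical new domain module would only need to define async methods; `register_tools()` discovers them:

```python
from fastmcp import FastMCP

class ScansTools(BaseTool):
    async def list_recent_scans(self) -> dict:
        """List the five most recent scans."""
        return await self.api_client.get("/api/v1/scans", params={"page[size]": 5})

mcp = FastMCP("example")
ScansTools().register_tools(mcp)  # logs "Auto-registered 1 tools from ScansTools"
```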

@@ -0,0 +1,300 @@
"""Security Findings tools for Prowler App MCP Server.

This module provides tools for searching, viewing, and analyzing security findings
across all cloud providers.
"""

from typing import Any, Literal

from prowler_mcp_server.prowler_app.models.findings import (
    DetailedFinding,
    FindingsListResponse,
    FindingsOverview,
)
from prowler_mcp_server.prowler_app.tools.base import BaseTool
from pydantic import Field


class FindingsTools(BaseTool):
    """Tools for security findings operations.

    Provides tools for:
    - Searching and filtering security findings
    - Getting detailed finding information
    - Viewing findings overview/statistics
    """

    async def search_security_findings(
        self,
        severity: list[
            Literal["critical", "high", "medium", "low", "informational"]
        ] = Field(
            default=[],
            description="Filter by severity levels. Multiple values allowed: critical, high, medium, low, informational. If empty, all severities are returned.",
        ),
        status: list[Literal["FAIL", "PASS", "MANUAL"]] = Field(
            default=["FAIL"],
            description="Filter by finding status. Multiple values allowed: FAIL (security issue found), PASS (no issue found), MANUAL (requires manual verification). Default: ['FAIL'] - only returns findings with security issues. To get all findings, pass an empty list [].",
        ),
        provider_type: list[str] = Field(
            default=[],
            description="Filter by cloud provider type. Multiple values allowed. If the parameter is not provided, all providers are returned. For valid values, please refer to Prowler Hub/Prowler Documentation, also available as tools in this MCP Server.",
        ),
        provider_alias: str | None = Field(
            default=None,
            description="Filter by specific provider alias/name (partial match supported)",
        ),
        region: list[str] = Field(
            default=[],
            description="Filter by cloud regions. Multiple values allowed (e.g., us-east-1, eu-west-1). If empty, all regions are returned.",
        ),
        service: list[str] = Field(
            default=[],
            description="Filter by cloud service. Multiple values allowed (e.g., s3, ec2, iam, keyvault). If empty, all services are returned.",
        ),
        resource_type: list[str] = Field(
            default=[],
            description="Filter by resource type. Multiple values allowed. If empty, all resource types are returned.",
        ),
        check_id: list[str] = Field(
            default=[],
            description="Filter by specific security check IDs. Multiple values allowed. If empty, all check IDs are returned.",
        ),
        muted: (
            bool | str | None
        ) = Field(  # Wrong `str` hint type due to bad MCP client implementations
            default=None,
            description="Filter by muted status. True for muted findings only, False for active findings only. If not specified, returns both",
        ),
        delta: list[Literal["new", "changed"]] = Field(
            default=[],
            description="Show only new or changed findings. Multiple values allowed: new (not seen in previous scans), changed (modified since last scan). If empty, all findings are returned.",
        ),
        date_from: str | None = Field(
            default=None,
            description="Start date for range query in ISO 8601 format (YYYY-MM-DD, e.g., '2025-01-15'). Full date required - partial dates like '2025' or '2025-01' are not accepted. IMPORTANT: Maximum date range is 2 days. If only date_from is provided, date_to is automatically set to 2 days later. If only one boundary is provided, the other will be auto-calculated to maintain the 2-day window.",
        ),
        date_to: str | None = Field(
            default=None,
            description="End date for range query in ISO 8601 format (YYYY-MM-DD, e.g., '2025-01-15'). Full date required - partial dates are not accepted. If only date_to is provided, date_from is automatically set to 2 days earlier. Can be used alone or with date_from.",
        ),
        search: str | None = Field(
            default=None, description="Free-text search term across finding details"
        ),
        page_size: int = Field(
            default=50, description="Number of results to return per page"
        ),
        page_number: int = Field(
            default=1, description="Page number to retrieve (1-indexed)"
        ),
    ) -> dict[str, Any]:
        """Search and filter security findings across all cloud providers with rich filtering capabilities.

        This is the primary tool for browsing and filtering security findings. Returns lightweight findings
        optimized for searching across large result sets. For detailed information about a specific finding,
        use get_finding_details.

        Default behavior:
        - Returns latest findings from most recent scans (no date parameters needed)
        - Filters to FAIL status only (security issues found)
        - Returns 50 results per page

        Date filtering:
        - Without dates: queries findings from the most recent completed scan across all providers (most efficient). This returns the latest snapshot of findings, not a time-based query.
        - With dates: queries historical findings (2-day maximum range)

        Each finding includes:
        - Core identification: id, uid, check_id
        - Security context: status, severity, check_metadata (title, description, remediation)
        - State tracking: delta (new/changed), muted status
        - Extended details: status_extended for additional context

        Returns:
            Paginated list of simplified findings with total count and pagination metadata
        """
        # Validate page_size parameter
        self.api_client.validate_page_size(page_size)

        # Determine endpoint based on date parameters
        date_range = self.api_client.normalize_date_range(
            date_from, date_to, max_days=2
        )

        if date_range is None:
            # No dates provided - use latest findings endpoint
            endpoint = "/api/v1/findings/latest"
            params = {}
        else:
            # Dates provided - use historical findings endpoint
            endpoint = "/api/v1/findings"
            params = {
                "filter[inserted_at__gte]": date_range[0],
                "filter[inserted_at__lte]": date_range[1],
            }

        # Build filter parameters
        if severity:
            params["filter[severity__in]"] = severity
        if status:
            params["filter[status__in]"] = status
        if provider_type:
            params["filter[provider_type__in]"] = provider_type
        if provider_alias:
            params["filter[provider_alias__icontains]"] = provider_alias
        if region:
            params["filter[region__in]"] = region
        if service:
            params["filter[service__in]"] = service
        if resource_type:
            params["filter[resource_type__in]"] = resource_type
        if check_id:
            params["filter[check_id__in]"] = check_id
        if muted is not None:
            params["filter[muted]"] = (
                muted if isinstance(muted, bool) else muted == "true"
            )
        if delta:
            params["filter[delta__in]"] = delta
        if search:
            params["filter[search]"] = search

        # Pagination
        params["page[size]"] = page_size
        params["page[number]"] = page_number

        # Return only LLM-relevant fields
        params["fields[findings]"] = (
            "uid,status,severity,check_id,check_metadata,status_extended,delta,muted,muted_reason"
        )
        params["sort"] = "severity,-inserted_at"

        # Convert lists to comma-separated strings
        clean_params = self.api_client.build_filter_params(params)

        # Get API response and transform to simplified format
        api_response = await self.api_client.get(endpoint, params=clean_params)
        simplified_response = FindingsListResponse.from_api_response(api_response)

        return simplified_response.model_dump()

    async def get_finding_details(
        self,
        finding_id: str = Field(
            description="UUID of the finding to retrieve (must be a valid UUID format, e.g., '019ac0d6-90d5-73e9-9acf-c22e256f1bac'). Returns an error if the finding ID is invalid or not found."
        ),
    ) -> dict[str, Any]:
        """Retrieve comprehensive details about a specific security finding by its ID.

        This tool provides MORE detailed information than search_security_findings. Use this when you need
        to deeply analyze a specific finding or understand its complete context and history.

        Additional information compared to search_security_findings:
        - Temporal metadata: when the finding was first seen, inserted, and last updated
        - Scan relationship: ID of the scan that generated this finding
        - Resource relationships: IDs of all cloud resources associated with this finding

        Workflow:
        1. Use search_security_findings to browse and filter across many findings
        2. Use get_finding_details to drill down into specific findings of interest

        Returns:
            dict containing detailed finding with comprehensive security metadata, temporal information,
            and relationships to scans and resources
        """
        params = {
            # Return comprehensive fields including temporal metadata
            "fields[findings]": "uid,status,severity,check_id,check_metadata,status_extended,delta,muted,muted_reason,inserted_at,updated_at,first_seen_at",
            # Include relationships to scan and resources
            "include": "scan,resources",
        }

        # Get API response and transform to detailed format
        api_response = await self.api_client.get(
            f"/api/v1/findings/{finding_id}", params=params
        )
        detailed_finding = DetailedFinding.from_api_response(
            api_response.get("data", {})
        )

        return detailed_finding.model_dump()

    async def get_findings_overview(
        self,
        provider_type: list[str] = Field(
            default=[],
            description="Filter statistics by cloud provider. Multiple values allowed. If empty, all providers are returned. For valid values, please refer to Prowler Hub/Prowler Documentation, also available as tools in this MCP Server.",
        ),
    ) -> dict[str, Any]:
        """Get high-level statistics about security findings formatted as a human-readable markdown report.

        Use this tool to get a quick overview of your security posture without retrieving individual findings.
        Perfect for understanding trends, identifying areas of concern, and tracking improvements over time.

        The report includes:
        - Summary statistics: total findings, fail/pass/muted counts with percentages
        - Delta analysis: breakdown of new vs changed findings
        - Trending information: how findings are evolving over time

        Output format: Markdown-formatted report ready to present to users or include in documentation.

        Use cases:
        - Quick security posture assessment
        - Tracking remediation progress over time
        - Identifying which providers have the most issues
        - Understanding finding trends (improving or degrading)

        Returns:
            Dictionary with 'report' key containing markdown-formatted summary statistics
        """
        params = {
            # Return only LLM-relevant aggregate statistics
            "fields[findings-overview]": "new,changed,fail_new,fail_changed,pass_new,pass_changed,muted_new,muted_changed,total,fail,muted,pass"
        }

        if provider_type:
            params["filter[provider_type__in]"] = provider_type

        clean_params = self.api_client.build_filter_params(params)

        # Get API response and transform to simplified format
        api_response = await self.api_client.get(
            "/api/v1/overviews/findings", params=clean_params
        )
        overview = FindingsOverview.from_api_response(api_response)

        # Format as markdown report
        total = overview.total
        fail = overview.fail
        passed = overview.passed
        muted = overview.muted
        new = overview.new
        changed = overview.changed

        # Calculate percentages
        fail_pct = (fail / total * 100) if total > 0 else 0
        passed_pct = (passed / total * 100) if total > 0 else 0
        muted_pct = (muted / total * 100) if total > 0 else 0
        unchanged = total - new - changed

        # Build markdown report
        report = f"""# Security Findings Overview

## Summary Statistics
- **Total Findings**: {total:,}
- **Failed Checks**: {fail:,} ({fail_pct:.1f}%)
- **Passed Checks**: {passed:,} ({passed_pct:.1f}%)
- **Muted Findings**: {muted:,} ({muted_pct:.1f}%)

## Delta Analysis
- **New Findings**: {new:,}
  - New failures: {overview.fail_new:,}
  - New passes: {overview.pass_new:,}
  - New muted: {overview.muted_new:,}
- **Changed Findings**: {changed:,}
  - Changed to fail: {overview.fail_changed:,}
  - Changed to pass: {overview.pass_changed:,}
  - Changed to muted: {overview.muted_changed:,}
- **Unchanged**: {unchanged:,}
"""

        return {"report": report}
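
To make the wire format concrete, here is an illustrative sketch of the query parameters a typical search builds internally (assuming `ProwlerAppAuth` can initialize from the environment; the filter values are examples):

```python
from prowler_mcp_server.prowler_app.utils.api_client import ProwlerAPIClient

client = ProwlerAPIClient()

# After build_filter_params() flattens lists to comma-separated strings:
print(client.build_filter_params({
    "filter[severity__in]": ["critical", "high"],
    "filter[service__in]": ["s3"],
    "page[size]": 50,
    "page[number]": 1,
}))
# {'filter[severity__in]': 'critical,high', 'filter[service__in]': 's3',
#  'page[size]': 50, 'page[number]': 1}
```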

@@ -0,0 +1,292 @@
"""Shared API client utilities for Prowler App tools."""

from datetime import datetime, timedelta
from enum import Enum
from typing import Any, Dict

import httpx
from prowler_mcp_server.lib.logger import logger
from prowler_mcp_server.prowler_app.utils.auth import ProwlerAppAuth


class HTTPMethod(str, Enum):
    """HTTP methods enum."""

    GET = "GET"
    POST = "POST"
    PATCH = "PATCH"
    DELETE = "DELETE"


class SingletonMeta(type):
    """Metaclass that implements the Singleton pattern.

    This metaclass ensures that only one instance of a class exists.
    All calls to the constructor return the same instance.
    """

    _instances: Dict[type, Any] = {}

    def __call__(cls, *args, **kwargs):
        """Control instance creation to ensure singleton behavior."""
        if cls not in cls._instances:
            instance = super().__call__(*args, **kwargs)
            cls._instances[cls] = instance
        return cls._instances[cls]


class ProwlerAPIClient(metaclass=SingletonMeta):
    """Shared API client with smart defaults and helper methods.

    This class uses the Singleton pattern via metaclass to ensure only one
    instance exists across the application, reducing initialization overhead
    and enabling HTTP connection pooling.
    """

    def __init__(self) -> None:
        """Initialize the API client (only called once due to singleton pattern)."""
        self.auth_manager: ProwlerAppAuth = ProwlerAppAuth()
        self.client: httpx.AsyncClient = httpx.AsyncClient(timeout=30.0)

    async def _make_request(
        self,
        method: HTTPMethod,
        path: str,
        params: dict[str, Any] | None = None,
        json_data: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Make authenticated API request.

        Args:
            method: HTTP method (GET, POST, PATCH, DELETE)
            path: API endpoint path
            params: Optional query parameters
            json_data: Optional JSON body data

        Returns:
            API response as dictionary

        Raises:
            Exception: If API request fails
        """
        try:
            token: str = await self.auth_manager.get_valid_token()
            url: str = f"{self.auth_manager.base_url}{path}"
            headers: dict[str, str] = self.auth_manager.get_headers(token)

            response: httpx.Response = await self.client.request(
                method=method.value,
                url=url,
                headers=headers,
                params=params,
                json=json_data,
            )
            response.raise_for_status()

            return response.json()
        except httpx.HTTPStatusError as e:
            logger.error(f"HTTP error during {method.value} {path}: {e}")
            error_detail: str = ""
            try:
                error_data: dict[str, Any] = e.response.json()
                error_detail = error_data.get("errors", [{}])[0].get("detail", "")
            except Exception:
                error_detail = e.response.text

            raise Exception(
                f"API request failed: {e.response.status_code} - {error_detail}"
            )
        except Exception as e:
            logger.error(f"Error during {method.value} {path}: {e}")
            raise

    async def get(
        self, path: str, params: dict[str, Any] | None = None
    ) -> dict[str, Any]:
        """Make GET request.

        Args:
            path: API endpoint path
            params: Optional query parameters

        Returns:
            API response as dictionary

        Raises:
            Exception: If API request fails
        """
        return await self._make_request(HTTPMethod.GET, path, params=params)

    async def post(
        self,
        path: str,
        params: dict[str, Any] | None = None,
        json_data: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Make POST request.

        Args:
            path: API endpoint path
            params: Optional query parameters
            json_data: Optional JSON body data

        Returns:
            API response as dictionary

        Raises:
            Exception: If API request fails
        """
        return await self._make_request(
            HTTPMethod.POST, path, params=params, json_data=json_data
        )

    async def patch(
        self,
        path: str,
        params: dict[str, Any] | None = None,
        json_data: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Make PATCH request.

        Args:
            path: API endpoint path
            params: Optional query parameters
            json_data: Optional JSON body data

        Returns:
            API response as dictionary

        Raises:
            Exception: If API request fails
        """
        return await self._make_request(
            HTTPMethod.PATCH, path, params=params, json_data=json_data
        )

    async def delete(
        self, path: str, params: dict[str, Any] | None = None
    ) -> dict[str, Any]:
        """Make DELETE request.

        Args:
            path: API endpoint path
            params: Optional query parameters

        Returns:
            API response as dictionary

        Raises:
            Exception: If API request fails
        """
        return await self._make_request(HTTPMethod.DELETE, path, params=params)
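
A brief sketch of the singleton guarantee in practice (assuming the auth manager can initialize from the environment):

```python
a = ProwlerAPIClient()
b = ProwlerAPIClient()
assert a is b  # SingletonMeta returns the same instance,
               # so every tool shares one httpx.AsyncClient pool
```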

    def _validate_date_format(self, date_str: str, param_name: str) -> datetime:
        """Validate date string format.

        Args:
            date_str: Date string to validate
            param_name: Parameter name for error messages

        Returns:
            Parsed datetime object

        Raises:
            ValueError: If date format is invalid
        """
        try:
            return datetime.strptime(date_str, "%Y-%m-%d")
        except ValueError:
            raise ValueError(
                f"Invalid date format for {param_name}. Expected YYYY-MM-DD (e.g., '2025-01-15'), got '{date_str}'. "
                f"Full date required - partial dates like '2025' or '2025-01' are not accepted."
            )

    def validate_page_size(self, page_size: int) -> None:
        """Validate page size parameter.

        Args:
            page_size: Page size to validate

        Raises:
            ValueError: If page size is out of valid range (1-1000)
        """
        if page_size < 1 or page_size > 1000:
            raise ValueError(
                f"Invalid page_size: {page_size}. Must be between 1 and 1000 (inclusive)."
            )

    def normalize_date_range(
        self, date_from: str | None, date_to: str | None, max_days: int = 2
    ) -> tuple[str, str] | None:
        """Normalize and validate date range, auto-completing a missing boundary.

        The Prowler API has a 2-day limit for historical queries. This helper:
        1. Returns None if no dates are provided (signals: use latest/default endpoint)
        2. Auto-completes a missing boundary to maintain the 2-day window
        3. Validates the range doesn't exceed max_days

        Args:
            date_from: Start date (YYYY-MM-DD format) or None
            date_to: End date (YYYY-MM-DD format) or None
            max_days: Maximum allowed days between dates (default: 2)

        Returns:
            None if no dates provided, otherwise tuple of (date_from, date_to) as strings

        Raises:
            ValueError: If date range exceeds max_days or date format is invalid
        """
        if not date_from and not date_to:
            return None

        # Parse and validate provided dates
        from_date: datetime | None = (
            self._validate_date_format(date_from, "date_from") if date_from else None
        )
        to_date: datetime | None = (
            self._validate_date_format(date_to, "date_to") if date_to else None
        )

        # Auto-complete missing boundary to maintain max_days window
        if from_date and not to_date:
            to_date = from_date + timedelta(days=max_days - 1)
        elif to_date and not from_date:
            from_date = to_date - timedelta(days=max_days - 1)

        # Validate range doesn't exceed max_days
        delta: int = (to_date - from_date).days + 1
        if delta > max_days:
            raise ValueError(
                f"Date range cannot exceed {max_days} days. "
                f"Requested range: {from_date.date()} to {to_date.date()} ({delta} days)"
            )

        return from_date.strftime("%Y-%m-%d"), to_date.strftime("%Y-%m-%d")
|
||||
|
||||
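
    # Illustrative usage of normalize_date_range (a hedged sketch, not part of the
    # original file; `client` stands in for any instance of this class):
    #
    #   client.normalize_date_range("2025-01-15", None)
    #   # -> ("2025-01-15", "2025-01-16"): the missing end is auto-completed to the 2-day window
    #   client.normalize_date_range(None, None)
    #   # -> None: no dates given, so the caller should use the latest/default endpoint
    #   client.normalize_date_range("2025-01-01", "2025-01-05")
    #   # -> raises ValueError: a 5-day span exceeds max_days=2
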
    def build_filter_params(
        self, params: dict[str, Any], exclude_none: bool = True
    ) -> dict[str, Any]:
        """Build filter parameters for the API, converting values to API-compatible formats.

        Args:
            params: Dictionary of parameters
            exclude_none: If True, exclude None values from the result

        Returns:
            Cleaned parameter dictionary ready for the API
        """
        result: dict[str, Any] = {}
        for key, value in params.items():
            if value is None and exclude_none:
                continue

            # Convert boolean values to lowercase strings for API compatibility
            if isinstance(value, bool):
                result[key] = str(value).lower()
            # Convert lists/tuples to comma-separated strings
            elif isinstance(value, (list, tuple)):
                result[key] = ",".join(str(v) for v in value)
            else:
                result[key] = value

        return result
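
    # Illustrative behavior of build_filter_params (a hedged sketch, not part of the
    # original file; `client` stands in for any instance of this class):
    #
    #   client.build_filter_params(
    #       {"filter[muted]": True, "filter[severity__in]": ["critical", "high"], "sort": None}
    #   )
    #   # -> {"filter[muted]": "true", "filter[severity__in]": "critical,high"}
    #   # Booleans become lowercase strings, sequences become comma-separated
    #   # strings, and None values are dropped.
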
@@ -1,732 +0,0 @@
{
  "endpoints": {
    "* /api/v1/providers*": {
      "parameters": {
        "id": {
          "name": "provider_id",
          "description": "The UUID of the provider. This UUID is generated by Prowler and is not related to the UID of the provider (which is the one set by the provider).\n\tThe format is UUIDv4: \"4d0e2614-6385-4fa7-bf0b-c2e2f75c6877\""
        }
      }
    },
    "GET /api/v1/providers": {
      "name": "list_providers",
      "description": "List all providers with options for filtering by various criteria.",
      "parameters": {
        "fields[providers]": {
          "name": "fields",
          "description": "The tool will return only the specified fields; if not set, all are returned (comma-separated, e.g. \"uid,delta,status\")"
        },
        "filter[alias]": {
          "name": "filter_alias",
          "description": "Filter by exact alias name"
        },
        "filter[alias__icontains]": {
          "name": "filter_alias_contains",
          "description": "Filter by partial alias match"
        },
        "filter[alias__in]": {
          "name": "filter_alias_in",
          "description": "Filter by multiple aliases (comma-separated, e.g. \"aws_alias_1,azure_alias_2\"). Useful when searching for multiple providers at once."
        },
        "filter[connected]": {
          "name": "filter_connected",
          "description": "Filter by connected status (True for connected, False for connection failed; if not set, both are returned).\n\tIf the connection has not been attempted yet, the status is None and this filter does not apply."
        },
        "filter[id]": {
          "name": "filter_id",
          "description": "Filter by exact ID of the provider (UUID)"
        },
        "filter[id__in]": {
          "name": "filter_id_in",
          "description": "Filter by multiple IDs of the providers (comma-separated UUIDs, e.g. \"a1b2c3d4-5678-90ab-cdef-1234567890ab,deadbeef-1234-5678-9abc-def012345678,0f1e2d3c-4b5a-6978-8c9d-0e1f2a3b4c5d\"). Useful when searching for multiple providers at once."
        },
        "filter[inserted_at]": {
          "name": "filter_inserted_at",
          "description": "Filter by exact date (format: YYYY-MM-DD). This is the date when the provider was inserted into the database."
        },
        "filter[inserted_at__gte]": {
          "name": "filter_inserted_at_gte",
          "description": "Filter providers inserted on or after this date (format: YYYY-MM-DD)"
        },
        "filter[inserted_at__lte]": {
          "name": "filter_inserted_at_lte",
          "description": "Filter providers inserted on or before this date (format: YYYY-MM-DD)"
        },
        "filter[provider]": {
          "name": "filter_provider",
          "description": "Filter by single provider type"
        },
        "filter[provider__in]": {
          "name": "filter_provider_in",
          "description": "Filter by multiple provider types (comma-separated, e.g. \"aws,azure,gcp\")"
        },
        "filter[search]": {
          "name": "filter_search",
          "description": "A search term across \"provider\", \"alias\" and \"uid\""
        },
        "filter[uid]": {
          "name": "filter_uid",
          "description": "Filter by exact provider UID"
        },
        "filter[uid__icontains]": {
          "name": "filter_uid_contains",
          "description": "Filter by partial provider UID match"
        },
        "filter[uid__in]": {
          "name": "filter_uid_in",
          "description": "Filter by multiple provider UIDs (comma-separated)"
        },
        "filter[updated_at]": {
          "name": "filter_updated_at",
          "description": "Filter by exact date (format: YYYY-MM-DD). This is the date when the provider was updated in the database."
        },
        "filter[updated_at__gte]": {
          "name": "filter_updated_at_gte",
          "description": "Filter providers updated on or after this date (format: YYYY-MM-DD)"
        },
        "filter[updated_at__lte]": {
          "name": "filter_updated_at_lte",
          "description": "Filter providers updated on or before this date (format: YYYY-MM-DD)"
        },
        "include": {
          "name": "include",
          "description": "Include related resources in the response; for now only \"provider_groups\" is supported"
        },
        "page[number]": {
          "name": "page_number",
          "description": "Page number to retrieve (default: 1)"
        },
        "page[size]": {
          "name": "page_size",
          "description": "Number of results per page (default: 100)"
        },
        "sort": {
          "name": "sort",
          "description": "Sort the results by the specified fields. Use a '-' prefix for descending order (e.g. \"-provider,inserted_at\" first sorts by provider alphabetically and then, within each provider, by inserted_at date)"
        }
      }
    },
    "POST /api/v1/providers": {
      "name": "create_provider",
      "description": "Create a new provider in the current Prowler Tenant.\n\tThis only creates a new provider; it does not add or configure credentials. To add credentials to an existing provider, use the add_provider_secret tool from the Prowler MCP server",
      "parameters": {
        "alias": {
          "description": "Pseudonym used to identify the provider"
        },
        "provider": {
          "description": "Type of provider to create"
        },
        "uid": {
          "description": "UID for the provider. This UID depends on the provider type: \n\tAWS: AWS account ID\n\tAzure: Azure subscription ID\n\tGCP: GCP project ID\n\tKubernetes: Kubernetes namespace\n\tM365: M365 domain ID\n\tGitHub: GitHub username or organization name"
        }
      }
    },
    "GET /api/v1/providers/{id}": {
      "name": "get_provider",
      "description": "Get detailed information about a specific provider",
      "parameters": {
        "fields[providers]": {
          "name": "fields",
          "description": "The tool will return only the specified fields; if not set, all are returned (comma-separated, e.g. \"uid,alias,connection\")."
        },
        "include": {
          "description": "Include related resources in the response; for now only \"provider_groups\" is supported"
        }
      }
    },
    "PATCH /api/v1/providers/{id}": {
      "name": "update_provider",
      "description": "Update the details of a specific provider",
      "parameters": {
        "alias": {
          "description": "Pseudonym used to identify the provider; if not set, the alias will not be updated"
        }
      }
    },
    "DELETE /api/v1/providers/{id}": {
      "name": "delete_provider",
      "description": "Delete a specific provider"
    },
    "POST /api/v1/providers/{id}/connection": {
      "name": "test_provider_connection",
      "description": "Test the connection status of a specific provider with the credentials set in the provider secret. This must be done before running a scan."
    },
    "GET /api/v1/providers/secrets": {
      "name": "list_provider_secrets",
      "description": "List all provider secrets with options for filtering by various criteria",
      "parameters": {
        "fields[provider-secrets]": {
          "name": "fields",
          "description": "The tool will return only the specified fields; if not set, all are returned (comma-separated, e.g. \"name,secret_type,provider\")"
        },
        "filter[inserted_at]": {
          "name": "filter_inserted_at",
          "description": "Filter by exact date when the secret was inserted (format: YYYY-MM-DD)"
        },
        "filter[name]": {
          "name": "filter_name",
          "description": "Filter by exact secret name"
        },
        "filter[name__icontains]": {
          "name": "filter_name_contains",
          "description": "Filter by partial secret name match"
        },
        "filter[provider]": {
          "name": "filter_provider",
          "description": "Filter by Prowler provider UUID (UUIDv4)"
        },
        "filter[search]": {
          "name": "filter_search",
          "description": "Search term in the name attribute"
        },
        "filter[updated_at]": {
          "name": "filter_updated_at",
          "description": "Filter by exact update date (format: YYYY-MM-DD)"
        },
        "page[number]": {
          "name": "page_number",
          "description": "Page number to retrieve (default: 1)"
        },
        "page[size]": {
          "name": "page_size",
          "description": "Number of results per page"
        },
        "sort": {
          "name": "sort",
          "description": "Sort the results by the specified fields. You can specify multiple fields separated by commas; the results will be sorted by the first field, then by the second within each group of the first, and so on. Use '-' as a prefix to a field name for descending order (e.g. \"-name,inserted_at\" sorts by name descending, then by inserted_at ascending within each name). If not set, the default sort order will be applied"
        }
      }
    },
    "* /api/v1/providers/secrets*": {
      "parameters": {
        "secret": {
          "name": "credentials",
          "description": "Provider-specific credentials dictionary. Supported formats:\n - AWS Static: {\"aws_access_key_id\": \"...\", \"aws_secret_access_key\": \"...\", \"aws_session_token\": \"...\"}\n - AWS Assume Role: {\"role_arn\": \"...\", \"external_id\": \"...\", \"session_duration\": 3600, \"role_session_name\": \"...\"}\n - Azure: {\"tenant_id\": \"...\", \"client_id\": \"...\", \"client_secret\": \"...\"}\n - M365: {\"tenant_id\": \"...\", \"client_id\": \"...\", \"client_secret\": \"...\", \"user\": \"...\", \"password\": \"...\"}\n - GCP Static: {\"client_id\": \"...\", \"client_secret\": \"...\", \"refresh_token\": \"...\"}\n - GCP Service Account: {\"service_account_key\": {...}}\n - Kubernetes: {\"kubeconfig_content\": \"...\"}\n - GitHub PAT: {\"personal_access_token\": \"...\"}\n - GitHub OAuth: {\"oauth_app_token\": \"...\"}\n - GitHub App: {\"github_app_id\": 123, \"github_app_key\": \"path/to/key\"}"
        },
        "secret_type": {
          "description": "Type of secret:\n\tstatic: Static credentials\n\trole: Assume role credentials (for now only AWS is supported)\n\tservice_account: Service account credentials (for now only GCP is supported)"
        }
      }
    },
    "POST /api/v1/providers/secrets": {
      "name": "add_provider_secret",
      "description": "Add or update complete credentials for an existing provider",
      "parameters": {
        "provider_id": {
          "description": "The UUID of the provider. This UUID is generated by Prowler and is not related to the UID of the provider; the format is UUIDv4: \"4d0e2614-6385-4fa7-bf0b-c2e2f75c6877\""
        },
        "name": {
          "name": "secret_name",
          "description": "Name for the credential secret. This must be between 3 and 100 characters long"
        }
      }
    },
    "GET /api/v1/providers/secrets/{id}": {
      "name": "get_provider_secret",
      "description": "Get detailed information about a specific provider secret",
      "parameters": {
        "id": {
          "name": "provider_secret_id",
          "description": "The UUID of the provider secret"
        },
        "fields[provider-secrets]": {
          "name": "fields",
          "description": "The tool will return only the specified fields; if not set, all are returned (comma-separated, e.g. \"name,secret_type,provider\")"
        }
      }
    },
    "PATCH /api/v1/providers/secrets/{id}": {
      "name": "update_provider_secret",
      "description": "Update the details of a specific provider secret",
      "parameters": {
        "id": {
          "name": "provider_secret_id",
          "description": "The UUID of the provider secret."
        },
        "name": {
          "name": "secret_name",
          "description": "Name for the credential secret. This must be between 3 and 100 characters long"
        }
      }
    },
    "DELETE /api/v1/providers/secrets/{id}": {
      "name": "delete_provider_secret",
      "description": "Delete a specific provider secret",
      "parameters": {
        "id": {
          "name": "provider_secret_id",
          "description": "The UUID of the provider secret."
        }
      }
    },
    "GET /api/v1/findings*": {
      "parameters": {
        "fields[findings]": {
          "name": "fields",
          "description": "The tool will return only the specified fields; if not set, all are returned (comma-separated, e.g. \"uid,delta,status,status_extended,severity,check_id,scan\")"
        },
        "filter[check_id]": {
          "name": "filter_check_id",
          "description": "Filter by exact check ID (e.g. ec2_launch_template_imdsv2_required). To get the list of available checks for a provider, use the get_checks tool from the Prowler Hub MCP server"
        },
        "filter[check_id__icontains]": {
          "name": "filter_check_id_contains",
          "description": "Filter by partial check ID match (e.g. \"iam\" matches all IAM-related checks for all providers)"
        },
        "filter[check_id__in]": {
          "name": "filter_check_id_in",
          "description": "Filter by multiple check IDs (comma-separated, e.g. \"ec2_launch_template_imdsv2_required,bedrock_guardrail_prompt_attack_filter_enabled,vpc_endpoint_multi_az_enabled\")"
        },
        "filter[delta]": {
          "name": "filter_delta",
          "description": "Filter by finding delta status"
        },
        "filter[id]": {
          "name": "filter_id",
          "description": "Filter by exact finding ID (the main key in the database; it is a UUIDv7). It is not the same as the finding UID."
        },
        "filter[id__in]": {
          "name": "filter_id_in",
          "description": "Filter by multiple finding IDs (comma-separated UUIDs)"
        },
        "filter[inserted_at]": {
          "name": "filter_inserted_at",
          "description": "Filter by exact date (format: YYYY-MM-DD)."
        },
        "filter[inserted_at__date]": {
          "name": "filter_inserted_at_date",
          "description": "Filter by exact date (format: YYYY-MM-DD). Same as the filter_inserted_at parameter."
        },
        "filter[inserted_at__gte]": {
          "name": "filter_inserted_at_gte",
          "description": "Filter findings inserted on or after this date (format: YYYY-MM-DD)"
        },
        "filter[inserted_at__lte]": {
          "name": "filter_inserted_at_lte",
          "description": "Filter findings inserted on or before this date (format: YYYY-MM-DD)"
        },
        "filter[muted]": {
          "name": "filter_muted",
          "description": "Filter by muted status (True for muted, False for non-muted; if not set, both are returned). A muted finding is a finding that has been muted by the user to ignore it."
        },
        "filter[provider]": {
          "name": "filter_provider",
          "description": "Filter by exact provider UUID (UUIDv4). This UUID is generated by Prowler and is not related to the UID of the provider (which is the one set by the provider). The format is UUIDv4: \"4d0e2614-6385-4fa7-bf0b-c2e2f75c6877\""
        },
        "filter[provider__in]": {
          "name": "filter_provider_in",
          "description": "Filter by multiple provider UUIDs (comma-separated UUIDs, e.g. \"4d0e2614-6385-4fa7-bf0b-c2e2f75c6877,deadbeef-1234-5678-9abc-def012345678,0f1e2d3c-4b5a-6978-8c9d-0e1f2a3b4c5d\"). Useful when searching for multiple providers at once."
        },
        "filter[provider_alias]": {
          "name": "filter_provider_alias",
          "description": "Filter by exact provider alias name"
        },
        "filter[provider_alias__icontains]": {
          "name": "filter_provider_alias_contains",
          "description": "Filter by partial provider alias match"
        },
        "filter[provider_alias__in]": {
          "name": "filter_provider_alias_in",
          "description": "Filter by multiple provider aliases (comma-separated)"
        },
        "filter[provider_id]": {
          "name": "filter_provider_id",
          "description": "Filter by exact provider ID (UUID)"
        },
        "filter[provider_id__in]": {
          "name": "filter_provider_id_in",
          "description": "Filter by multiple provider IDs (comma-separated UUIDs)"
        },
        "filter[provider_type]": {
          "name": "filter_provider_type",
          "description": "Filter by single provider type"
        },
        "filter[provider_type__in]": {
          "name": "filter_provider_type_in",
          "description": "Filter by multiple provider types (comma-separated, e.g. \"aws,azure,gcp\"). Allowed values are: aws, azure, gcp, kubernetes, m365, github"
        },
        "filter[provider_uid]": {
          "name": "filter_provider_uid",
          "description": "Filter by exact provider UID. This UID depends on the provider type: \n\tAWS: AWS account ID\n\tAzure: Azure subscription ID\n\tGCP: GCP project ID\n\tKubernetes: Kubernetes namespace\n\tM365: M365 domain ID\n\tGitHub: GitHub username or organization name"
        },
        "filter[provider_uid__icontains]": {
          "name": "filter_provider_uid_contains",
          "description": "Filter by partial provider UID match"
        },
        "filter[provider_uid__in]": {
          "name": "filter_provider_uid_in",
          "description": "Filter by multiple provider UIDs (comma-separated)"
        },
        "filter[region]": {
          "name": "filter_region",
          "description": "Filter by exact region name (e.g. us-east-1, eu-west-1, etc.). To get a list of the available regions in a subset of findings, use the get_findings_metadata tool from the Prowler MCP server"
        },
        "filter[region__icontains]": {
          "name": "filter_region_contains",
          "description": "Filter by partial region match (e.g. \"us-\" matches all US regions)"
        },
        "filter[region__in]": {
          "name": "filter_region_in",
          "description": "Filter by multiple regions (comma-separated, e.g. \"us-east-1,us-west-2,eu-west-1\")"
        },
        "filter[resource_name]": {
          "name": "filter_resource_name",
          "description": "Filter by the exact name of the resource the finding is associated with"
        },
        "filter[resource_name__icontains]": {
          "name": "filter_resource_name_contains",
          "description": "Filter by a partial match on the name of the resource the finding is associated with"
        },
        "filter[resource_name__in]": {
          "name": "filter_resource_name_in",
          "description": "Filter by multiple names (comma-separated) of resources the finding is associated with"
        },
        "filter[resource_type]": {
          "name": "filter_resource_type",
          "description": "Filter by the exact type of the resource the finding is associated with"
        },
        "filter[resource_type__icontains]": {
          "name": "filter_resource_type_contains",
          "description": "Filter by a partial match on the type of the resource the finding is associated with"
        },
        "filter[resource_type__in]": {
          "name": "filter_resource_type_in",
          "description": "Filter by multiple types (comma-separated) of resources the finding is associated with"
        },
        "filter[resource_uid]": {
          "name": "filter_resource_uid",
          "description": "Filter by the exact UID of the resource the finding is associated with"
        },
        "filter[resource_uid__icontains]": {
          "name": "filter_resource_uid_contains",
          "description": "Filter by a partial match on the UID of the resource the finding is associated with"
        },
        "filter[resource_uid__in]": {
          "name": "filter_resource_uid_in",
          "description": "Filter by multiple UIDs (comma-separated) of resources the finding is associated with"
        },
        "filter[resources]": {
          "name": "filter_resources",
          "description": "Filter by multiple resources (comma-separated) the finding is associated with. The accepted values are internal Prowler-generated resource UUIDs"
        },
        "filter[scan]": {
          "name": "filter_scan",
          "description": "Filter by scan UUID"
        },
        "filter[scan__in]": {
          "name": "filter_scan_in",
          "description": "Filter by multiple scan UUIDs (comma-separated UUIDs)"
        },
        "filter[service]": {
          "name": "filter_service",
          "description": "Filter by exact service name (e.g. s3, rds, ec2, keyvault, etc.). To get the list of available services, use the list_providers tool from the Prowler Hub MCP server"
        },
        "filter[service__icontains]": {
          "name": "filter_service_contains",
          "description": "Filter by partial service name match (e.g. \"storage\" matches all storage-related services)"
        },
        "filter[service__in]": {
          "name": "filter_service_in",
          "description": "Filter by multiple service names (comma-separated, e.g. \"s3,ec2,iam\")"
        },
        "filter[severity]": {
          "name": "filter_severity",
          "description": "Filter by single severity (critical, high, medium, low, informational)"
        },
        "filter[severity__in]": {
          "name": "filter_severity_in",
          "description": "Filter by multiple severities (comma-separated, e.g. \"critical,high\")"
        },
        "filter[status]": {
          "name": "filter_status",
          "description": "Filter by single status"
        },
        "filter[status__in]": {
          "name": "filter_status_in",
          "description": "Filter by multiple statuses (comma-separated, e.g. \"FAIL,MANUAL\"). Allowed values are: PASS, FAIL, MANUAL"
        },
        "filter[uid]": {
          "name": "filter_uid",
          "description": "Filter by the exact finding UID assigned by Prowler"
        },
        "filter[uid__in]": {
          "name": "filter_uid_in",
          "description": "Filter by multiple finding UIDs (comma-separated)"
        },
        "filter[updated_at]": {
          "name": "filter_updated_at",
          "description": "Filter by exact update date (format: YYYY-MM-DD)"
        },
        "filter[updated_at__gte]": {
          "name": "filter_updated_at_gte",
          "description": "Filter by update date on or after this date (format: YYYY-MM-DD)"
        },
        "filter[updated_at__lte]": {
          "name": "filter_updated_at_lte",
          "description": "Filter by update date on or before this date (format: YYYY-MM-DD)"
        },
        "include": {
          "name": "include",
          "description": "Include related resources in the response; supported values are \"resources\" and \"scan\""
        },
        "page[number]": {
          "name": "page_number",
          "description": "Page number to retrieve (default: 1)"
        },
        "page[size]": {
          "name": "page_size",
          "description": "Number of results per page (default: 100)"
        },
        "sort": {
          "name": "sort",
          "description": "Sort the results by the specified fields. You can specify multiple fields separated by commas; the results will be sorted by the first field, then by the second within each group of the first, and so on. Use '-' as a prefix to a field name for descending order (e.g. \"status,-severity\" sorts by status ascending alphabetically and then by severity descending within each status)"
        }
      }
    },
    "GET /api/v1/findings": {
      "name": "list_findings",
      "description": "List security findings from Prowler scans with advanced filtering.\n\tAt least one of the filter[inserted_at] variations is required. If none is provided, this defaults to findings from the last day."
    },
    "GET /api/v1/findings/{id}": {
      "name": "get_finding",
      "description": "Get detailed information about a specific security finding",
      "parameters": {
        "id": {
          "name": "finding_id",
          "description": "The UUID of the finding"
        }
      }
    },
    "GET /api/v1/findings/latest": {
      "name": "get_latest_findings",
      "description": "Retrieve a list of the latest findings from the latest scans for each provider, with advanced filtering options"
    },
    "GET /api/v1/findings/metadata": {
      "name": "get_findings_metadata",
      "description": "Fetch unique metadata values from a filtered set of findings. This is useful for dynamic filtering",
      "parameters": {
        "fields[findings-metadata]": {
          "name": "metadata_fields",
          "description": "Specific metadata fields to return (comma-separated, e.g. 'regions,services,check_ids')"
        }
      }
    },
    "GET /api/v1/findings/metadata/latest": {
      "name": "get_latest_findings_metadata",
      "description": "Fetch unique metadata values from the latest findings across all providers"
    },
    "* /api/v1/scans*": {
      "parameters": {
        "id": {
          "name": "scan_id",
          "description": "The UUID of the scan. The format is UUIDv4: \"4d0e2614-6385-4fa7-bf0b-c2e2f75c6877\""
        }
      }
    },
    "GET /api/v1/scans": {
      "name": "list_scans",
      "description": "List all scans with options for filtering by various criteria.",
      "parameters": {
        "fields[scans]": {
          "name": "fields",
          "description": "The tool will return only the specified fields; if not set, all are returned (comma-separated, e.g. \"name,state,progress,duration\")"
        },
        "filter[completed_at]": {
          "name": "filter_completed_at",
          "description": "Filter by exact completion date (format: YYYY-MM-DD)"
        },
        "filter[inserted_at]": {
          "name": "filter_inserted_at",
          "description": "Filter by exact insertion date (format: YYYY-MM-DD)"
        },
        "filter[name]": {
          "name": "filter_name",
          "description": "Filter by exact scan name"
        },
        "filter[name__icontains]": {
          "name": "filter_name_contains",
          "description": "Filter by partial scan name match"
        },
        "filter[next_scan_at]": {
          "name": "filter_next_scan_at",
          "description": "Filter by exact next scan date (format: YYYY-MM-DD)"
        },
        "filter[next_scan_at__gte]": {
          "name": "filter_next_scan_at_gte",
          "description": "Filter scans scheduled on or after this date (format: YYYY-MM-DD)"
        },
        "filter[next_scan_at__lte]": {
          "name": "filter_next_scan_at_lte",
          "description": "Filter scans scheduled on or before this date (format: YYYY-MM-DD)"
        },
        "filter[provider]": {
          "name": "filter_provider",
          "description": "Filter by exact provider UUID (UUIDv4). This UUID is generated by Prowler and is not related to the UID of the provider (which is the one set by the provider). The format is UUIDv4: \"4d0e2614-6385-4fa7-bf0b-c2e2f75c6877\""
        },
        "filter[provider__in]": {
          "name": "filter_provider_in",
          "description": "Filter by multiple provider UUIDs (comma-separated UUIDs, e.g. \"4d0e2614-6385-4fa7-bf0b-c2e2f75c6877,deadbeef-1234-5678-9abc-def012345678,0f1e2d3c-4b5a-6978-8c9d-0e1f2a3b4c5d\"). Useful when searching for multiple providers at once."
        },
        "filter[provider_alias]": {
          "name": "filter_provider_alias",
          "description": "Filter by exact provider alias name"
        },
        "filter[provider_alias__icontains]": {
          "name": "filter_provider_alias_contains",
          "description": "Filter by partial provider alias match"
        },
        "filter[provider_alias__in]": {
          "name": "filter_provider_alias_in",
          "description": "Filter by multiple provider aliases (comma-separated)"
        },
        "filter[provider_type]": {
          "name": "filter_provider_type",
          "description": "Filter by single provider type (aws, azure, gcp, github, kubernetes, m365)"
        },
        "filter[provider_type__in]": {
          "name": "filter_provider_type_in",
          "description": "Filter by multiple provider types (comma-separated, e.g. \"aws,azure,gcp\"). Allowed values are: aws, azure, gcp, kubernetes, m365, github"
        },
        "filter[provider_uid]": {
          "name": "filter_provider_uid",
          "description": "Filter by exact provider UID. This UID depends on the provider type: \n\tAWS: AWS account ID\n\tAzure: Azure subscription ID\n\tGCP: GCP project ID\n\tKubernetes: Kubernetes namespace\n\tM365: M365 domain ID\n\tGitHub: GitHub username or organization name"
        },
        "filter[provider_uid__icontains]": {
          "name": "filter_provider_uid_contains",
          "description": "Filter by partial provider UID match"
        },
        "filter[provider_uid__in]": {
          "name": "filter_provider_uid_in",
          "description": "Filter by multiple provider UIDs (comma-separated)"
        },
        "filter[scheduled_at]": {
          "name": "filter_scheduled_at",
          "description": "Filter by exact scheduled date (format: YYYY-MM-DD)"
        },
        "filter[scheduled_at__gte]": {
          "name": "filter_scheduled_at_gte",
          "description": "Filter scans scheduled on or after this date (format: YYYY-MM-DD)"
        },
        "filter[scheduled_at__lte]": {
          "name": "filter_scheduled_at_lte",
          "description": "Filter scans scheduled on or before this date (format: YYYY-MM-DD)"
        },
        "filter[search]": {
          "name": "filter_search",
          "description": "Search term across multiple scan attributes, including: name (scan name), trigger (Manual/Scheduled), state (Available, Executing, Completed, Failed, etc.), unique_resource_count (number of resources found), progress (scan progress percentage), duration (scan duration), scheduled_at (when the scan is scheduled), started_at (when the scan started), completed_at (when the scan completed), and next_scan_at (next scheduled scan time)"
        },
        "filter[started_at]": {
          "name": "filter_started_at",
          "description": "Filter by exact start date (format: YYYY-MM-DD)"
        },
        "filter[started_at__gte]": {
          "name": "filter_started_at_gte",
          "description": "Filter scans started on or after this date (format: YYYY-MM-DD)"
        },
        "filter[started_at__lte]": {
          "name": "filter_started_at_lte",
          "description": "Filter scans started on or before this date (format: YYYY-MM-DD)"
        },
        "filter[state]": {
          "name": "filter_state",
          "description": "Filter by exact scan state"
        },
        "filter[state__in]": {
          "name": "filter_state_in",
          "description": "Filter by multiple scan states (comma-separated)"
        },
        "filter[trigger]": {
          "name": "filter_trigger",
          "description": "Filter by scan trigger type"
        },
        "filter[trigger__in]": {
          "name": "filter_trigger_in",
          "description": "Filter by multiple trigger types (comma-separated)"
        },
        "include": {
          "name": "include",
          "description": "Include related resources in the response; the supported value is \"provider\""
        },
        "page[number]": {
          "name": "page_number",
          "description": "Page number to retrieve (default: 1)"
        },
        "page[size]": {
          "name": "page_size",
          "description": "Number of results per page (default: 100)"
        },
        "sort": {
          "name": "sort",
          "description": "Sort the results by the specified fields. Use a '-' prefix for descending order (e.g. \"-started_at,name\")"
        }
      }
    },
    "POST /api/v1/scans": {
      "name": "create_scan",
      "description": "Trigger a manual scan for a specific provider",
      "parameters": {
        "provider_id": {
          "name": "provider_id",
          "description": "Prowler-generated UUID of the provider to scan. The format is UUIDv4: \"4d0e2614-6385-4fa7-bf0b-c2e2f75c6877\""
        },
        "name": {
          "description": "Optional name for the scan"
        }
      }
    },
    "GET /api/v1/scans/{id}": {
      "name": "get_scan",
      "description": "Get detailed information about a specific scan",
      "parameters": {
        "fields[scans]": {
          "name": "fields",
          "description": "The tool will return only the specified fields; if not set, all are returned (comma-separated, e.g. \"name,state,progress,duration\")"
        },
        "include": {
          "description": "Include related resources in the response; the supported value is \"provider\""
        }
      }
    },
    "PATCH /api/v1/scans/{id}": {
      "name": "update_scan",
      "description": "Update the details of a specific scan",
      "parameters": {
        "name": {
          "description": "New name for the scan"
        }
      }
    },
    "GET /api/v1/scans/{id}/compliance/{name}": {
      "name": "get_scan_compliance_report",
      "description": "Download a specific compliance report (e.g., 'cis_1.4_aws') as a CSV file",
      "parameters": {
        "name": {
          "name": "compliance_name"
        },
        "fields[scan-reports]": {
          "name": "fields",
          "description": "The tool will return only the specified fields; if not set, all are returned (comma-separated, e.g. \"id,name\")"
        }
      }
    },
    "GET /api/v1/scans/{id}/report": {
      "name": "get_scan_report",
      "description": "Download a ZIP file containing the scan report",
      "parameters": {
        "fields[scan-reports]": {
          "name": "fields",
          "description": "Do not use this parameter for now"
        }
      }
    },
    "POST /api/v1/schedules/daily": {
      "name": "schedules_daily_scan",
      "parameters": {
        "provider_id": {
          "name": "provider_id",
          "description": "Prowler-generated UUID of the provider to scan. The format is UUIDv4: \"4d0e2614-6385-4fa7-bf0b-c2e2f75c6877\""
        }
      }
    }
  }
}
@@ -1,974 +0,0 @@
#!/usr/bin/env python3
"""
Generate FastMCP server code from an OpenAPI specification.

This script parses an OpenAPI specification file and generates FastMCP tool functions
with proper type hints, parameters, and docstrings.
"""

import json
import os
import re
from datetime import datetime
from pathlib import Path
from typing import Optional

import requests
import yaml


class OpenAPIToMCPGenerator:
    def __init__(
        self,
        spec_file: str,
        custom_auth_module: Optional[str] = None,
        exclude_patterns: Optional[list[str]] = None,
        exclude_operations: Optional[list[str]] = None,
        exclude_tags: Optional[list[str]] = None,
        include_only_tags: Optional[list[str]] = None,
        config_file: Optional[str] = None,
    ):
        """
        Initialize the generator with an OpenAPI spec file.

        Args:
            spec_file: Path to the OpenAPI specification file
            custom_auth_module: Module path for custom authentication
            exclude_patterns: List of regex patterns for excluding endpoints (matched against the path)
            exclude_operations: List of operation IDs to exclude
            exclude_tags: List of tags to exclude
            include_only_tags: If specified, only include endpoints with these tags
            config_file: Path to a JSON configuration file for custom mappings
        """
        self.spec_file = spec_file
        self.custom_auth_module = custom_auth_module
        self.exclude_patterns = exclude_patterns or []
        self.exclude_operations = exclude_operations or []
        self.exclude_tags = exclude_tags or []
        self.include_only_tags = include_only_tags
        self.config_file = config_file
        self.config = self._load_config() if config_file else {}
        self.spec = self._load_spec()
        self.generated_tools = []
        self.imports = set()
        self.needs_query_array_normalizer = False
        self.type_mapping = {
            "string": "str",
            "integer": "str",
            "number": "str",
            "boolean": "bool | str",
            "array": "list[Any] | str",
            "object": "dict[str, Any] | str",
        }

    def _load_config(self) -> dict:
        """Load configuration from the JSON file."""
        try:
            with open(self.config_file, "r") as f:
                return json.load(f)
        except FileNotFoundError:
            return {}
        except json.JSONDecodeError:
            return {}

    def _load_spec(self) -> dict:
        """Load the OpenAPI specification from file."""
        with open(self.spec_file, "r") as f:
            if self.spec_file.endswith(".yaml") or self.spec_file.endswith(".yml"):
                return yaml.safe_load(f)
            else:
                return json.load(f)

    def _get_endpoint_config(self, path: str, method: str) -> dict:
        """Get the endpoint configuration from the config file, with pattern matching and inheritance.

        Configuration resolution order (most to least specific):
        1. Exact endpoint match (e.g., "GET /api/v1/findings/metadata")
        2. Pattern matches, sorted by specificity:
           - Patterns without wildcards are more specific
           - Longer patterns are more specific
           - Example: "GET /api/v1/findings/*" matches all findings endpoints

        When multiple configurations match, they are merged, with more specific
        configurations overriding less specific ones.
        """
        if not self.config:
            return {}

        endpoint_key = f"{method.upper()} {path}"
        merged_config = {}

        # Get the endpoints configuration (supports both exact and pattern matches)
        endpoints = self.config.get("endpoints", {})

        # Separate exact matches from patterns
        exact_match = None
        pattern_matches = []

        for config_key, config_value in endpoints.items():
            if "*" in config_key or "?" in config_key:
                # This is a pattern - convert wildcards to regex
                regex_pattern = config_key.replace("*", ".*").replace("?", ".")
                if re.match(f"^{regex_pattern}$", endpoint_key):
                    pattern_matches.append((config_key, config_value))
            elif config_key == endpoint_key:
                # Exact match
                exact_match = (config_key, config_value)

        # Also check for patterns in endpoint_patterns for backward compatibility
        endpoint_patterns = self.config.get("endpoint_patterns", {})
        for pattern, pattern_config in endpoint_patterns.items():
            regex_pattern = pattern.replace("*", ".*").replace("?", ".")
            if re.match(f"^{regex_pattern}$", endpoint_key):
                pattern_matches.append((pattern, pattern_config))

        # Sort pattern matches by specificity.
        # More specific patterns should be applied last so they override less specific ones.
        pattern_matches.sort(
            key=lambda x: (
                x[0].count("*") + x[0].count("?"),  # Fewer wildcards = more specific
                -len(
                    x[0]
                ),  # Longer patterns = more specific (negative for reverse sort)
            ),
            reverse=True,
        )  # Reverse so the least specific comes first

        # Apply configurations from least to most specific.
        # First apply pattern matches (from least to most specific).
        for pattern, pattern_config in pattern_matches:
            merged_config = self._merge_configs(merged_config, pattern_config)

        # Finally apply the exact match (most specific)
        if exact_match:
            merged_config = self._merge_configs(merged_config, exact_match[1])

        # Fall back to the old endpoint_mappings for backward compatibility
        if not merged_config:
            endpoint_mappings = self.config.get("endpoint_mappings", {})
            if endpoint_key in endpoint_mappings:
                merged_config = {"name": endpoint_mappings[endpoint_key]}

        return merged_config
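
    # Illustrative resolution for the JSON config shown above (a hedged sketch,
    # not part of the original file; `gen` stands in for a generator instance):
    #
    #   gen._get_endpoint_config("/api/v1/providers", "get")
    #   # 1. the pattern "* /api/v1/providers*" matches first and contributes the
    #   #    shared "id" -> "provider_id" parameter mapping
    #   # 2. the exact key "GET /api/v1/providers" is merged on top, so its
    #   #    "name": "list_providers" and parameter descriptions take precedence
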
    def _merge_configs(self, base_config: dict, override_config: dict) -> dict:
        """Merge two configurations, with override_config taking precedence.

        Special handling for parameters: parameter configurations are merged deeply.
        """
        import copy

        result = copy.deepcopy(base_config)

        for key, value in override_config.items():
            if key == "parameters" and key in result:
                # Deep merge parameters
                if not isinstance(result[key], dict):
                    result[key] = {}
                if isinstance(value, dict):
                    for param_name, param_config in value.items():
                        if param_name in result[key] and isinstance(
                            result[key][param_name], dict
                        ):
                            # Merge parameter configurations
                            result[key][param_name] = {
                                **result[key][param_name],
                                **param_config,
                            }
                        else:
                            result[key][param_name] = param_config
            else:
                # For other keys, override completely
                result[key] = value

        return result
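
    # Illustrative deep merge (a hedged sketch, not part of the original file;
    # `gen` stands in for a generator instance):
    #
    #   base = {"parameters": {"id": {"name": "provider_id"}}}
    #   override = {"name": "get_provider",
    #               "parameters": {"id": {"description": "The UUID of the provider"}}}
    #   gen._merge_configs(base, override)
    #   # -> {"parameters": {"id": {"name": "provider_id",
    #   #                           "description": "The UUID of the provider"}},
    #   #     "name": "get_provider"}
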
    def _sanitize_function_name(self, operation_id: str) -> str:
        """Convert an operation ID to a valid Python function name."""
        # Replace non-alphanumeric characters with underscores
        name = re.sub(r"[^a-zA-Z0-9_]", "_", operation_id)
        # Ensure it doesn't start with a digit
        if name and name[0].isdigit():
            name = f"op_{name}"
        return name.lower()

    def _get_python_type(self, schema: dict) -> tuple[str, str]:
        """Convert an OpenAPI schema to a Python type hint.

        Returns:
            Tuple of (type_hint, original_type), where original_type is used for casting
        """
        if not schema:
            return "Any", "any"

        # Handle oneOf/anyOf/allOf schemas - these are typically objects
        if "oneOf" in schema or "anyOf" in schema or "allOf" in schema:
            # These are complex schemas, typically representing different object variants
            return "dict[str, Any] | str", "object"

        schema_type = schema.get("type", "string")

        # Handle enums
        if "enum" in schema:
            enum_values = schema["enum"]
            if all(isinstance(v, str) for v in enum_values):
                # Create a Literal type for string enums - already strings, no casting needed
                self.imports.add("from typing import Literal")
                enum_str = ", ".join(f'"{v}"' for v in enum_values)
                return f"Literal[{enum_str}]", "string"
            else:
                return self.type_mapping.get(schema_type, "Any"), schema_type

        # Handle arrays
        if schema_type == "array":
            return "list[Any] | str", "array"

        # Handle format specifications
        if schema_type == "string":
            format_type = schema.get("format", "")
            if format_type in ["date", "date-time", "uuid", "email"]:
                return "str", "string"

        return self.type_mapping.get(schema_type, "Any"), schema_type
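
    # Illustrative mappings produced by _get_python_type (a hedged sketch, not
    # part of the original file):
    #
    #   {"type": "string", "enum": ["aws", "azure"]}  ->  ('Literal["aws", "azure"]', "string")
    #   {"type": "array"}                             ->  ("list[Any] | str", "array")
    #   {"oneOf": [...]}                              ->  ("dict[str, Any] | str", "object")
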
    def _resolve_ref(self, ref: str) -> dict:
        """Resolve a $ref reference in the OpenAPI spec."""
        if not ref.startswith("#/"):
            return {}

        # Split the reference path
        ref_parts = ref[2:].split("/")  # Remove '#/' and split

        # Navigate through the spec to find the referenced schema
        resolved = self.spec
        for part in ref_parts:
            resolved = resolved.get(part, {})

        return resolved

    def _extract_parameters(
        self, operation: dict, endpoint_config: Optional[dict] = None
    ) -> list[dict]:
        """Extract and process parameters from an operation."""
        parameters = []

        for param in operation.get("parameters", []):
            # Sanitize the parameter name for Python
            python_name = (
                param.get("name", "")
                .replace("[", "_")
                .replace("]", "")
                .replace(".", "_")
                .replace("-", "_")
            )  # Also replace hyphens

            type_hint, original_type = self._get_python_type(param.get("schema", {}))
            param_info = {
                "name": param.get("name", ""),
                "python_name": python_name,
                "in": param.get("in", "query"),
                "required": param.get("required", False),
                "description": param.get("description", ""),
                "type": type_hint,
                "original_type": original_type,
                "original_schema": param.get("schema", {}),
            }

            # Apply custom parameter configuration from the endpoint config
            if endpoint_config and "parameters" in endpoint_config:
                param_config = endpoint_config["parameters"]
                if param_info["name"] in param_config:
                    custom_param = param_config[param_info["name"]]
                    if "name" in custom_param:
                        param_info["python_name"] = custom_param["name"]
                    if "description" in custom_param:
                        param_info["description"] = custom_param["description"]

            parameters.append(param_info)

        # Handle the request body if present - extract it as individual parameters
        if "requestBody" in operation:
            body = operation["requestBody"]
            content = body.get("content", {})

            # Check for different content types
            schema = None
            if "application/vnd.api+json" in content:
                schema = content["application/vnd.api+json"].get("schema", {})
            elif "application/json" in content:
                schema = content["application/json"].get("schema", {})

            if schema:
                # Resolve $ref if present
                if "$ref" in schema:
                    schema = self._resolve_ref(schema["$ref"])

                # Try to extract individual fields from the schema
                body_params = self._extract_body_parameters(
                    schema, body.get("required", False)
                )

                # Apply custom parameter config to body parameters
                if endpoint_config and "parameters" in endpoint_config:
                    param_config = endpoint_config["parameters"]
                    for param in body_params:
                        if param["name"] in param_config:
                            custom_param = param_config[param["name"]]
                            if "name" in custom_param:
                                param["python_name"] = custom_param["name"]
                            if "description" in custom_param:
                                param["description"] = custom_param["description"]

                parameters.extend(body_params)

        return parameters

    def _extract_body_parameters(self, schema: dict, is_required: bool) -> list[dict]:
        """Extract individual parameters from a request body schema."""
        parameters = []

        # Handle the JSON:API format with its data.attributes structure
        if "properties" in schema:
            data = schema["properties"].get("data", {})
            if "properties" in data:
                # Extract attributes
                attributes = data["properties"].get("attributes", {})
                if "properties" in attributes:
                    # Get required fields from attributes
                    required_attrs = attributes.get("required", [])

                    for prop_name, prop_schema in attributes["properties"].items():
                        # Skip read-only fields for POST/PUT/PATCH operations
                        if prop_schema.get("readOnly", False):
                            continue

                        python_name = prop_name.replace("-", "_")
                        # Check if this field is required
                        is_field_required = prop_name in required_attrs

                        type_hint, original_type = self._get_python_type(prop_schema)
                        param_info = {
                            "name": prop_name,  # Keep the original name for the API
                            "python_name": python_name,
                            "in": "body",
                            "required": is_field_required,
                            "description": prop_schema.get(
                                "description",
                                prop_schema.get("title", f"{prop_name} parameter"),
                            ),
                            "type": type_hint,
                            "original_type": original_type,
                            "original_schema": prop_schema,
                            "resource_type": (
                                data["properties"]
                                .get("type", {})
                                .get("enum", ["resource"])[0]
                                if "type" in data["properties"]
                                else "resource"
                            ),
                        }
                        parameters.append(param_info)

                # Also check for relationships (like provider_id)
                relationships = data["properties"].get("relationships", {})
                if "properties" in relationships:
                    required_rels = relationships.get("required", [])
                    for rel_name, rel_schema in relationships["properties"].items():
                        # Extract the ID from the relationship
                        python_name = f"{rel_name}_id"
                        is_rel_required = rel_name in required_rels

                        param_info = {
                            "name": f"{rel_name}_id",
                            "python_name": python_name,
                            "in": "body",
                            "required": is_rel_required,
                            "description": f"ID of the related {rel_name}",
                            "type": "str",
                            "original_type": "string",
                            "original_schema": rel_schema,
                        }
                        parameters.append(param_info)

        # If no structured params were found, fall back to a generic body parameter
        if not parameters and schema:
            parameters.append(
                {
                    "name": "body",
                    "python_name": "body",
                    "in": "body",
                    "required": is_required,
                    "description": "Request body data",
                    "type": "dict[str, Any] | str",
                    "original_type": "object",
                    "original_schema": schema,
                }
            )

        return parameters

    def _generate_docstring(
        self,
        operation: dict,
        parameters: list[dict],
        path: str,
        method: str,
        endpoint_config: Optional[dict] = None,
    ) -> str:
        """Generate a comprehensive docstring for the tool function."""
        lines = []

        # Main description - use the custom one or the default
        endpoint_config = endpoint_config or {}

        # Use the custom description if provided, otherwise fall back to OpenAPI
        if "description" in endpoint_config:
            lines.append(f'    """{endpoint_config["description"]}')
        else:
            summary = operation.get("summary", "")
            description = operation.get("description", "")
            if summary:
                lines.append(f'    """{summary}')
            else:
                lines.append(f'    """Execute {method.upper()} {path}')

        if "description" not in endpoint_config:
            # Only add the OpenAPI description if no custom description was provided
            description = operation.get("description", "")
            if description and description != summary:
                lines.append("")
                # Clean up the description - remove extra whitespace
                clean_desc = " ".join(description.split())
                lines.append(f"    {clean_desc}")

        # Add endpoint info
        lines.append("")
        lines.append(f"    Endpoint: {method.upper()} {path}")

        # Parameters section
        if parameters:
            lines.append("")
            lines.append("    Args:")
            for param in parameters:
                # Use the custom description if available
                param_desc = param["description"] or "Self-explanatory parameter"

                # Handle multi-line descriptions properly
                required_text = "(required)" if param["required"] else "(optional)"

                if "\n" in param_desc:
                    # Split on actual newlines (not escaped)
                    desc_lines = param_desc.split("\n")
                    first_line = desc_lines[0].strip()
                    lines.append(
                        f"        {param['python_name']} {required_text}: {first_line}"
                    )
                    # Add subsequent lines with proper indentation (12 spaces for continuation)
                    for desc_line in desc_lines[1:]:
                        desc_line = desc_line.strip()
                        if desc_line:
                            lines.append(f"            {desc_line}")
                else:
                    # Clean up the parameter description for a single line
                    param_desc = " ".join(param_desc.split())
                    lines.append(
                        f"        {param['python_name']} {required_text}: {param_desc}"
                    )

                # Add enum values if present
                if "enum" in param.get("original_schema", {}):
                    enum_values = param["original_schema"]["enum"]
                    lines.append(
                        f"            Allowed values: {', '.join(str(v) for v in enum_values)}"
                    )

        # Returns section
        lines.append("")
        lines.append("    Returns:")
        lines.append("        dict containing the API response")

        lines.append('    """')
        return "\n".join(lines)

    def _generate_function_signature(
        self, func_name: str, parameters: list[dict]
    ) -> str:
        """Generate the function signature with proper type hints."""
        # Sort parameters: required first, then optional
        sorted_params = sorted(
            parameters, key=lambda x: (not x["required"], x["python_name"])
        )

        param_strings = []
        for param in sorted_params:
            if param["required"]:
                param_strings.append(f"    {param['python_name']}: {param['type']}")
            else:
                param_strings.append(
                    f"    {param['python_name']}: Optional[{param['type']}] = None"
                )

        if param_strings:
            params_str = ",\n".join(param_strings)
            return f"async def {func_name}(\n{params_str}\n) -> dict[str, Any]:"
        else:
            return f"async def {func_name}() -> dict[str, Any]:"

    def _get_cast_expression(self, param: dict) -> str:
        """Generate a type-casting expression for a parameter.

        Args:
            param: Parameter dict with 'python_name' and 'original_type'

        Returns:
            Expression string that casts the parameter value to the correct type
        """
        python_name = param["python_name"]
        original_type = param.get("original_type", "string")

        if original_type == "boolean":
            # Convert a string to a boolean using a simple comparison
            return f"({python_name}.lower() in ('true', '1', 'yes', 'on') if isinstance({python_name}, str) else {python_name})"
        elif original_type == "array":
            if param.get("in") == "query":
                self.needs_query_array_normalizer = True
                return f"_normalize_query_array({python_name})"
            return f"json.loads({python_name}) if isinstance({python_name}, str) else {python_name}"
        elif original_type == "object":
            return f"json.loads({python_name}) if isinstance({python_name}, str) else {python_name}"
        else:
            # string or any other type - no casting needed
            return python_name
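
    # Illustrative expressions emitted by _get_cast_expression (a hedged sketch,
    # not part of the original file):
    #
    #   {"python_name": "muted", "original_type": "boolean"}
    #   # -> "(muted.lower() in ('true', '1', 'yes', 'on') if isinstance(muted, str) else muted)"
    #   {"python_name": "regions", "original_type": "array", "in": "query"}
    #   # -> "_normalize_query_array(regions)"  (and marks the normalizer helper as needed)
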
    def _generate_function_body(
        self, path: str, method: str, parameters: list[dict], operation_id: str
    ) -> str:
        """Generate the function body for making API calls."""
        lines = []

        # Add try block
        lines.append("    try:")

        # Get authentication token if custom auth module is provided
        if self.custom_auth_module:
            lines.append("        token = await auth_manager.get_valid_token()")
            lines.append("")

        # Build parameters
        query_params = [p for p in parameters if p["in"] == "query"]
        path_params = [p for p in parameters if p["in"] == "path"]
        body_params = [p for p in parameters if p["in"] == "body"]

        # Add json import if needed for object or array type casting
        if any(p.get("original_type") in ["object", "array"] for p in parameters):
            self.imports.add("import json")

        # Build query parameters
        if query_params:
            lines.append("        params = {}")
            for param in query_params:
                cast_expr = self._get_cast_expression(param)
                if param["required"]:
                    lines.append(f"        params['{param['name']}'] = {cast_expr}")
                else:
                    lines.append(f"        if {param['python_name']} is not None:")
                    lines.append(f"            params['{param['name']}'] = {cast_expr}")
            lines.append("")

        # Build path with path parameters
        final_path = path
        for param in path_params:
            cast_expr = self._get_cast_expression(param)
            lines.append(
                f"        path = '{path}'.replace('{{{param['name']}}}', str({cast_expr}))"
            )
            final_path = "path"

        # Build request body if there are body parameters
        if body_params:
            # Check if we have individual params or a single body param
            if len(body_params) == 1 and body_params[0]["python_name"] == "body":
                # Single body parameter - use it directly with casting
                cast_expr = self._get_cast_expression(body_params[0])
                lines.append(f"        request_body = {cast_expr}")
            else:
                # Get resource type from first body param (they should all have the same)
                resource_type = (
                    body_params[0].get("resource_type", "resource")
                    if body_params
                    else "resource"
                )

                # Build JSON:API structure from individual parameters
                lines.append("        # Build request body")
                lines.append("        request_body = {")
                lines.append('            "data": {')
                lines.append(f'                "type": "{resource_type}"')

                # Separate attributes from relationships
                # Note: Check if param was originally from attributes section, not just by name
                attribute_params = []
                relationship_params = []

                for p in body_params:
                    # If this param came from the attributes section (has resource_type),
                    # it's an attribute even if its name ends with _id
                    if "resource_type" in p:
                        attribute_params.append(p)
                    elif p["python_name"].endswith("_id") and "resource_type" not in p:
                        relationship_params.append(p)
                    else:
                        attribute_params.append(p)

                if attribute_params:
                    lines.append(",")
                    lines.append('                "attributes": {}')

                lines.append("            }")
                lines.append("        }")

                if attribute_params:
                    lines.append("")
                    lines.append("        # Add attributes")
                    for param in attribute_params:
                        cast_expr = self._get_cast_expression(param)
                        if param["required"]:
                            lines.append(
                                f'        request_body["data"]["attributes"]["{param["name"]}"] = {cast_expr}'
                            )
                        else:
                            lines.append(
                                f"        if {param['python_name']} is not None:"
                            )
                            lines.append(
                                f'            request_body["data"]["attributes"]["{param["name"]}"] = {cast_expr}'
                            )

                if relationship_params:
                    lines.append("")
                    lines.append("        # Add relationships")
                    lines.append('        request_body["data"]["relationships"] = {}')
                    for param in relationship_params:
                        rel_name = param["python_name"].replace("_id", "")
                        cast_expr = self._get_cast_expression(param)
                        if param["required"]:
                            lines.append(
                                f'        request_body["data"]["relationships"]["{rel_name}"] = {{'
                            )
                            lines.append('            "data": {')
                            lines.append(f'                "type": "{rel_name}s",')
                            lines.append(f'                "id": {cast_expr}')
                            lines.append("            }")
                            lines.append("        }")
                        else:
                            lines.append(
                                f"        if {param['python_name']} is not None:"
                            )
                            lines.append(
                                f'            request_body["data"]["relationships"]["{rel_name}"] = {{'
                            )
                            lines.append('                "data": {')
                            lines.append(f'                    "type": "{rel_name}s",')
                            lines.append(f'                    "id": {cast_expr}')
                            lines.append("                }")
                            lines.append("            }")
            lines.append("")

        # Build the request URL
        url_line = (
            f'f"{{auth_manager.base_url}}{{{final_path}}}"'
            if final_path == "path"
            else f'f"{{auth_manager.base_url}}{path}"'
        )
        lines.append(f"        url = {url_line}")
        lines.append("")

        # Build request parameters
        request_params = ["url"]

        if self.custom_auth_module:
            request_params.append("headers=auth_manager.get_headers(token)")

        if query_params:
            request_params.append("params=params")

        if body_params:
            request_params.append("json=request_body")

        params_str = ",\n            ".join(request_params)

        lines.append(f"        response = await prowler_app_client.{method}(")
        lines.append(f"            {params_str}")
        lines.append("        )")
        lines.append("        response.raise_for_status()")
        lines.append("")

        # Parse response
        lines.append("        data = response.json()")
        lines.append("")
        lines.append("        return {")
        lines.append('            "success": True,')
        lines.append('            "data": data.get("data", data),')
        lines.append('            "meta": data.get("meta", {}),')
        lines.append("        }")
        lines.append("")

        # Exception handling
        lines.append("    except Exception as e:")
        lines.append("        return {")
        lines.append('            "success": False,')
        lines.append(
            f'            "error": f"Failed to execute {operation_id}: {{str(e)}}"'
        )
        lines.append("        }")

        return "\n".join(lines)

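    # Illustrative sketch (hypothetical "scans_list" tool for GET /api/v1/scans
    # with one optional query parameter): stitched together with the signature
    # and docstring generators, the emitted tool looks roughly like:
    #
    #   @app_mcp_server.tool()
    #   async def scans_list(
    #       provider_id: Optional[str] = None
    #   ) -> dict[str, Any]:
    #       """..."""
    #       try:
    #           token = await auth_manager.get_valid_token()
    #
    #           params = {}
    #           if provider_id is not None:
    #               params['provider_id'] = provider_id
    #
    #           url = f"{auth_manager.base_url}/api/v1/scans"
    #
    #           response = await prowler_app_client.get(
    #               url,
    #               headers=auth_manager.get_headers(token),
    #               params=params
    #           )
    #           response.raise_for_status()
    #
    #           data = response.json()
    #
    #           return {
    #               "success": True,
    #               "data": data.get("data", data),
    #               "meta": data.get("meta", {}),
    #           }
    #
    #       except Exception as e:
    #           return {
    #               "success": False,
    #               "error": f"Failed to execute scans_list: {str(e)}"
    #           }
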
    def _should_exclude_endpoint(self, path: str, operation: dict) -> bool:
        """
        Determine if an endpoint should be excluded from generation.

        Args:
            path: The API endpoint path
            operation: The operation dictionary from OpenAPI spec

        Returns:
            True if endpoint should be excluded, False otherwise
        """
        # Check if operation is marked as deprecated
        if operation.get("deprecated", False):
            return True

        # Check operation ID exclusion
        operation_id = operation.get("operationId", "")
        if operation_id in self.exclude_operations:
            return True

        # Check path pattern exclusion
        for pattern in self.exclude_patterns:
            if re.search(pattern, path):
                return True

        # Check tags
        tags = operation.get("tags", [])

        # If include_only_tags is specified, exclude if no matching tag
        if self.include_only_tags:
            if not any(tag in self.include_only_tags for tag in tags):
                return True

        # Check excluded tags
        if any(tag in self.exclude_tags for tag in tags):
            return True

        return False

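    # Illustrative examples (hypothetical configuration values): with
    # exclude_operations={"scans_destroy"} and exclude_patterns=[r"^/api/v1/internal"],
    # a deprecated operation, an operation whose operationId is "scans_destroy",
    # and any path under /api/v1/internal would all be skipped; with
    # include_only_tags=["Scan"], only operations tagged "Scan" survive.
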
    def generate_tools(self) -> str:
        """Generate all FastMCP tools from the OpenAPI spec."""
        output_lines = []

        # Generate header
        output_lines.append('"""')
        output_lines.append("Auto-generated FastMCP server from OpenAPI specification")
        output_lines.append(f"Generated on: {datetime.now().isoformat()}")
        output_lines.append(
            f"Source: {self.spec_file} (version: {self.spec.get('info', {}).get('version', 'unknown')})"
        )
        output_lines.append('"""')
        output_lines.append("")

        # Add imports
        self.imports.add("from typing import Any, Optional")
        self.imports.add("import httpx")
        self.imports.add("from fastmcp import FastMCP")

        if self.custom_auth_module:
            self.imports.add(f"from {self.custom_auth_module} import ProwlerAppAuth")

        # Process all paths and operations
        paths = self.spec.get("paths", {})

        tools_by_tag = {}  # Group tools by tag for better organization
        excluded_count = 0

        for path, path_item in paths.items():
            for method in ["get", "post", "put", "patch", "delete"]:
                if method in path_item:
                    operation = path_item[method]

                    # Check if this endpoint should be excluded
                    if self._should_exclude_endpoint(path, operation):
                        excluded_count += 1
                        continue

                    operation_id = operation.get("operationId", f"{method}_{path}")
                    tags = operation.get("tags", ["default"])

                    # Get endpoint configuration
                    endpoint_config = self._get_endpoint_config(path, method)

                    # Use custom function name if provided
                    if "name" in endpoint_config:
                        func_name = endpoint_config["name"]
                    else:
                        func_name = self._sanitize_function_name(operation_id)

                    parameters = self._extract_parameters(operation, endpoint_config)

                    tool_code = []

                    # Add @app_mcp_server.tool() decorator
                    tool_code.append("@app_mcp_server.tool()")

                    # Generate function signature
                    tool_code.append(
                        self._generate_function_signature(func_name, parameters)
                    )

                    # Generate docstring with custom description if provided
                    tool_code.append(
                        self._generate_docstring(
                            operation, parameters, path, method, endpoint_config
                        )
                    )

                    # Generate function body
                    tool_code.append(
                        self._generate_function_body(
                            path, method, parameters, operation_id
                        )
                    )

                    # Group by tag
                    for tag in tags:
                        if tag not in tools_by_tag:
                            tools_by_tag[tag] = []
                        tools_by_tag[tag].append("\n".join(tool_code))

        # Write imports (consolidate typing imports)
        typing_imports = set()
        other_imports = []

        for imp in sorted(self.imports):
            if imp.startswith("from typing import"):
                # Extract the imported items
                items = imp.replace("from typing import", "").strip()
                typing_imports.update([item.strip() for item in items.split(",")])
            else:
                other_imports.append(imp)

        # Add consolidated typing import if needed
        if typing_imports:
            output_lines.append(
                f"from typing import {', '.join(sorted(typing_imports))}"
            )

        # Add other imports
        for imp in other_imports:
            output_lines.append(imp)

        if self.needs_query_array_normalizer:
            output_lines.append("")
            output_lines.append("")
            output_lines.append("def _normalize_query_array(value):")
            output_lines.append(
                '    """Normalize query array inputs to comma-separated strings."""'
            )
            output_lines.append("    if isinstance(value, str):")
            output_lines.append("        stripped = value.strip()")
            output_lines.append("        if not stripped:")
            output_lines.append("            return stripped")
            output_lines.append(
                "        if stripped.startswith('[') and stripped.endswith(']'):"
            )
            output_lines.append("            try:")
            output_lines.append("                parsed = json.loads(stripped)")
            output_lines.append("            except json.JSONDecodeError:")
            output_lines.append("                return stripped")
            output_lines.append("            if isinstance(parsed, list):")
            output_lines.append(
                "                return ','.join(str(item) for item in parsed)"
            )
            output_lines.append("        return stripped")
            output_lines.append("    if isinstance(value, (list, tuple, set)):")
            output_lines.append("        return ','.join(str(item) for item in value)")
            output_lines.append(
                "    if hasattr(value, '__iter__') and not isinstance(value, dict):"
            )
            output_lines.append("        try:")
            output_lines.append(
                "            return ','.join(str(item) for item in value)"
            )
            output_lines.append("        except TypeError:")
            output_lines.append("            return str(value)")
            output_lines.append("    if isinstance(value, dict):")
            output_lines.append("        return ','.join(str(key) for key in value)")
            output_lines.append("    return str(value)")

        output_lines.append("")
        output_lines.append("# Initialize MCP server")
        output_lines.append('app_mcp_server = FastMCP("prowler-app")')
        output_lines.append("")

        if self.custom_auth_module:
            output_lines.append("# Initialize authentication manager")
            output_lines.append("auth_manager = ProwlerAppAuth()")
            output_lines.append("")
            output_lines.append("# Initialize HTTP client")
            output_lines.append("prowler_app_client = httpx.AsyncClient(")
            output_lines.append("    timeout=30.0,")
            output_lines.append(")")
            output_lines.append("")

        # Write tools grouped by tag
        for tag, tools in tools_by_tag.items():
            output_lines.append("")
            output_lines.append("# " + "=" * 76)
            output_lines.append(f"# {tag.upper()} ENDPOINTS")
            output_lines.append("# " + "=" * 76)
            output_lines.append("")

            for tool in tools:
                output_lines.append("")
                output_lines.append(tool)

        return "\n".join(output_lines)

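    # Illustrative behavior of the emitted _normalize_query_array helper
    # (hypothetical inputs): it coerces the shapes an LLM may pass for a query
    # array into the comma-separated form the API expects, e.g.
    #   '["a", "b"]'      -> 'a,b'   (JSON-encoded string)
    #   ["a", "b"]        -> 'a,b'   (real list)
    #   'a,b'             -> 'a,b'   (already normalized)
    #   {"a": 1, "b": 2}  -> 'a,b'   (dict keys)
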
    def save_to_file(self, output_file: str):
        """Save the generated code to a file."""
        generated_code = self.generate_tools()
        Path(output_file).write_text(generated_code)


def generate_server_file():
    # Get the spec file from the API directly (https://api.prowler.com/api/v1/schema)
    api_base_url = os.getenv("PROWLER_API_BASE_URL", "https://api.prowler.com")
    spec_file = f"{api_base_url}/api/v1/schema"

    # Download the spec yaml file
    response = requests.get(spec_file)
    response.raise_for_status()
    spec_data = response.text

    # Save the spec data to a file
    with open(str(Path(__file__).parent / "schema.yaml"), "w") as f:
        f.write(spec_data)

    # Configure the generator for the downloaded spec
    generator = OpenAPIToMCPGenerator(
        spec_file=str(Path(__file__).parent / "schema.yaml"),
        custom_auth_module="prowler_mcp_server.prowler_app.utils.auth",
        include_only_tags=[
            "Provider",
            "Scan",
            "Schedule",
            "Finding",
            "Processor",
        ],
        config_file=str(
            Path(__file__).parent / "mcp_config.json"
        ),  # Use custom naming config
    )

    # Generate and save the MCP server
    generator.save_to_file(str(Path(__file__).parent.parent / "server.py"))

@@ -0,0 +1,79 @@
"""Utility for auto-discovering and loading MCP tools.

This module provides functionality to automatically discover and register
all BaseTool subclasses from the tools package.
"""

import importlib
import pkgutil

from fastmcp import FastMCP
from prowler_mcp_server.lib.logger import logger
from prowler_mcp_server.prowler_app.tools.base import BaseTool


def load_all_tools(mcp: FastMCP) -> None:
    """Auto-discover and load all BaseTool subclasses from the tools package.

    This function:
    1. Dynamically imports all Python modules in the tools package
    2. Discovers all concrete BaseTool subclasses
    3. Instantiates each tool class
    4. Registers all tools with the provided FastMCP instance

    Tools are discovered from the prowler_mcp_server.prowler_app.tools package.

    Args:
        mcp: The FastMCP instance to register tools with

    Example:
        from fastmcp import FastMCP
        from prowler_mcp_server.prowler_app.utils.tool_loader import load_all_tools

        app = FastMCP("prowler-app")
        load_all_tools(app)
    """
    TOOLS_PACKAGE = "prowler_mcp_server.prowler_app.tools"
    logger.info(f"Auto-discovering tools from package: {TOOLS_PACKAGE}")

    # Import the tools package
    try:
        tools_module = importlib.import_module(TOOLS_PACKAGE)
    except ImportError as e:
        logger.error(f"Failed to import tools package {TOOLS_PACKAGE}: {e}")
        return

    # Get the package path
    if hasattr(tools_module, "__path__"):
        package_path = tools_module.__path__
    else:
        logger.error(f"Package {TOOLS_PACKAGE} has no __path__ attribute")
        return

    # Import all modules in the package
    for _, module_name, _ in pkgutil.iter_modules(package_path):
        try:
            full_module_name = f"{TOOLS_PACKAGE}.{module_name}"
            importlib.import_module(full_module_name)
            logger.debug(f"Imported module: {full_module_name}")
        except Exception as e:
            logger.error(f"Failed to import module {module_name}: {e}")

    # Discover all concrete BaseTool subclasses
    concrete_tools = [
        tool_class
        for tool_class in BaseTool.__subclasses__()
        if not getattr(tool_class, "__abstractmethods__", None)
    ]

    logger.info(f"Discovered {len(concrete_tools)} tool classes")

    # Instantiate and register each tool
    for tool_class in concrete_tools:
        try:
            tool_instance = tool_class()
            tool_instance.register_tools(mcp)
            logger.info(f"Loaded and registered: {tool_class.__name__}")
        except Exception as e:
            logger.error(f"Failed to load tool {tool_class.__name__}: {e}")

    logger.info("Tool loading complete")

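A minimal sketch (hypothetical tool, not part of this diff) of a concrete BaseTool subclass that load_all_tools would discover and register, assuming BaseTool declares an abstract register_tools(mcp) hook as the loader's call above implies:

    from fastmcp import FastMCP

    from prowler_mcp_server.prowler_app.tools.base import BaseTool


    class PingTool(BaseTool):
        """Hypothetical example tool living in the tools package."""

        def register_tools(self, mcp: FastMCP) -> None:
            @mcp.tool()
            def prowler_app_ping() -> str:
                """Liveness check for the Prowler App MCP server."""
                return "pong"

Dropping such a module into prowler_mcp_server/prowler_app/tools/ is enough; no manual registration is needed.
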
@@ -1,5 +1,4 @@
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
from fastmcp import FastMCP
|
||||
from prowler_mcp_server import __version__
|
||||
@@ -24,17 +23,6 @@ async def setup_main_server():
|
||||
# Import Prowler App tools with prowler_app_ prefix
|
||||
try:
|
||||
logger.info("Importing Prowler App server...")
|
||||
|
||||
if not os.path.exists(
|
||||
os.path.join(os.path.dirname(__file__), "prowler_app", "server.py")
|
||||
):
|
||||
from prowler_mcp_server.prowler_app.utils.server_generator import (
|
||||
generate_server_file,
|
||||
)
|
||||
|
||||
logger.info("Prowler App server not found, generating...")
|
||||
generate_server_file()
|
||||
|
||||
from prowler_mcp_server.prowler_app.server import app_mcp_server
|
||||
|
||||
await prowler_mcp_server.import_server(app_mcp_server, prefix="prowler_app")
|
||||
|
||||
@@ -4,8 +4,8 @@ requires = ["setuptools>=61.0", "wheel"]

 [project]
 dependencies = [
-    "fastmcp>=2.11.3",
-    "httpx>=0.27.0",
+    "fastmcp==2.13.1",
+    "httpx>=0.28.0"
 ]
 description = "MCP server for Prowler ecosystem"
 name = "prowler-mcp"

@@ -12,6 +12,18 @@ files = [
    {file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"},
]

[[package]]
name = "aiofiles"
version = "24.1.0"
description = "File support for asyncio."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
    {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
    {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
]

[[package]]
name = "aiohappyeyeballs"
version = "2.6.1"
@@ -149,6 +161,480 @@ files = [
frozenlist = ">=1.1.0"
typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""}

[[package]]
name = "alibabacloud-actiontrail20200706"
version = "2.4.1"
description = "Alibaba Cloud ActionTrail (20200706) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_actiontrail20200706-2.4.1-py3-none-any.whl", hash = "sha256:5dee0009db9b7cba182fbac742820f6a949287a8faafb843b5107f7dc89136da"},
    {file = "alibabacloud_actiontrail20200706-2.4.1.tar.gz", hash = "sha256:b65c6b37a96443fbe625dd5a4dd1be52a7476006a411db75206908b11588ffa8"},
]

[package.dependencies]
alibabacloud-endpoint-util = ">=0.0.4,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.16,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alibabacloud-credentials"
version = "1.0.3"
description = "The alibabacloud credentials module of alibabaCloud Python SDK."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "alibabacloud-credentials-1.0.3.tar.gz", hash = "sha256:9d8707e96afc6f348e23f5677ed15a21c2dfce7cfe6669776548ee4c80e1dfaf"},
    {file = "alibabacloud_credentials-1.0.3-py3-none-any.whl", hash = "sha256:30c8302f204b663c655d97e1c283ee9f9f84a6257d7901b931477d6cf34445a8"},
]

[package.dependencies]
aiofiles = ">=22.1.0,<25.0.0"
alibabacloud-credentials-api = ">=1.0.0,<2.0.0"
alibabacloud-tea = ">=0.4.0"
APScheduler = ">=3.10.0,<4.0.0"

[[package]]
name = "alibabacloud-credentials-api"
version = "1.0.0"
description = "Alibaba Cloud Gateway SPI SDK Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "alibabacloud-credentials-api-1.0.0.tar.gz", hash = "sha256:8c340038d904f0218d7214a8f4088c31912bfcf279af2cbc7d9be4897a97dd2f"},
]

[[package]]
name = "alibabacloud-cs20151215"
version = "6.1.0"
description = "Alibaba Cloud CS (20151215) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_cs20151215-6.1.0-py3-none-any.whl", hash = "sha256:75e90b1bb9acca2236244bb0e44234ca4805d456ea4303ba4225ac15152a458e"},
    {file = "alibabacloud_cs20151215-6.1.0.tar.gz", hash = "sha256:5b3d99306701bf499ddd57cd9f2905b7721cb1bb4bb38ffe4d051f7b4e80e355"},
]

[package.dependencies]
alibabacloud-endpoint-util = ">=0.0.4,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.16,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alibabacloud-darabonba-array"
version = "0.1.0"
description = "Alibaba Cloud Darabonba Array SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_darabonba_array-0.1.0.tar.gz", hash = "sha256:7f9a7c632518ff4f0cebb0d4e825a48c12e7cf0b9016ea25054dd73732e155aa"},
]

[[package]]
name = "alibabacloud-darabonba-encode-util"
version = "0.0.2"
description = "Darabonba Util Library for Alibaba Cloud Python SDK"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_darabonba_encode_util-0.0.2.tar.gz", hash = "sha256:f1c484f276d60450fa49b4b2987194e741fcb2f7faae7f287c0ae65abc85fd4d"},
]

[[package]]
name = "alibabacloud-darabonba-map"
version = "0.0.1"
description = "Alibaba Cloud Darabonba Map SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_darabonba_map-0.0.1.tar.gz", hash = "sha256:adb17384658a1a8f72418f1838d4b6a5fd2566bfd392a3ef06d9dbb0a595a23f"},
]

[[package]]
name = "alibabacloud-darabonba-signature-util"
version = "0.0.4"
description = "Darabonba Util Library for Alibaba Cloud Python SDK"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_darabonba_signature_util-0.0.4.tar.gz", hash = "sha256:71d79b2ae65957bcfbf699ced894fda782b32f9635f1616635533e5a90d5feb0"},
]

[package.dependencies]
cryptography = ">=3.0.0"

[[package]]
name = "alibabacloud-darabonba-string"
version = "0.0.4"
description = "Alibaba Cloud Darabonba String Library for Python"
optional = false
python-versions = "*"
groups = ["main"]
files = [
    {file = "alibabacloud-darabonba-string-0.0.4.tar.gz", hash = "sha256:ec6614c0448dadcbc5e466485838a1f8cfdd911135bea739e20b14511270c6f7"},
]

[[package]]
name = "alibabacloud-darabonba-time"
version = "0.0.1"
description = "Alibaba Cloud Darabonba Time SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_darabonba_time-0.0.1.tar.gz", hash = "sha256:0ad9c7b0696570d1a3f40106cc7777f755fd92baa0d1dcab5b7df78dde5b922d"},
]

[[package]]
name = "alibabacloud-ecs20140526"
version = "7.2.5"
description = "Alibaba Cloud Elastic Compute Service (20140526) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_ecs20140526-7.2.5-py3-none-any.whl", hash = "sha256:10bda5e185f6ba899e7d51477373595c629d66db7530a8a37433fb4e9034a96f"},
    {file = "alibabacloud_ecs20140526-7.2.5.tar.gz", hash = "sha256:2abbe630ce42d69061821f38950b938c5982cc31902ccd7132d05be328765a55"},
]

[package.dependencies]
alibabacloud-endpoint-util = ">=0.0.4,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.16,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alibabacloud-endpoint-util"
version = "0.0.4"
description = "The endpoint-util module of alibabaCloud Python SDK."
optional = false
python-versions = "*"
groups = ["main"]
files = [
    {file = "alibabacloud_endpoint_util-0.0.4.tar.gz", hash = "sha256:a593eb8ddd8168d5dc2216cd33111b144f9189fcd6e9ca20e48f358a739bbf90"},
]

[[package]]
name = "alibabacloud-gateway-oss"
version = "0.0.17"
description = "Alibaba Cloud OSS SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_gateway_oss-0.0.17.tar.gz", hash = "sha256:8c4b66c8c7dd285fc210ee232ab3f062b5573258752804d19382000746531e29"},
]

[package.dependencies]
alibabacloud_credentials = ">=0.3.5"
alibabacloud_darabonba_array = ">=0.1.0,<1.0.0"
alibabacloud_darabonba_encode_util = ">=0.0.2,<1.0.0"
alibabacloud_darabonba_map = ">=0.0.1,<1.0.0"
alibabacloud_darabonba_signature_util = ">=0.0.4,<1.0.0"
alibabacloud_darabonba_string = ">=0.0.4,<1.0.0"
alibabacloud_darabonba_time = ">=0.0.1,<1.0.0"
alibabacloud_gateway_oss_util = ">=0.0.3,<1.0.0"
alibabacloud_gateway_spi = ">=0.0.1,<1.0.0"
alibabacloud_openapi_util = ">=0.2.1,<1.0.0"
alibabacloud_oss_util = ">=0.0.5,<1.0.0"
alibabacloud_tea_util = ">=0.3.11,<1.0.0"
alibabacloud_tea_xml = ">=0.0.2,<1.0.0"

[[package]]
name = "alibabacloud-gateway-oss-util"
version = "0.0.3"
description = "Alibaba Cloud OSS Util Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_gateway_oss_util-0.0.3.tar.gz", hash = "sha256:5eb7fa450dc7350d5c71577974b9d7f489479e5c5ec7efc1c5376385e8c1c0a5"},
]

[[package]]
name = "alibabacloud-gateway-sls"
version = "0.4.0"
description = "Alibaba Cloud SLS Gateway Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "alibabacloud_gateway_sls-0.4.0-py3-none-any.whl", hash = "sha256:a0299a83a5528025983b42b7533a28028461bced5e180a66f97999e0134760a6"},
    {file = "alibabacloud_gateway_sls-0.4.0.tar.gz", hash = "sha256:9d2aceb377c9b3ed0558149fda16fe39fa114cc0a22e22a88dc76efdda34633b"},
]

[package.dependencies]
alibabacloud-credentials = ">=1.0.2,<2.0.0"
alibabacloud-darabonba-array = ">=0.1.0,<1.0.0"
alibabacloud-darabonba-encode-util = ">=0.0.2,<1.0.0"
alibabacloud-darabonba-map = ">=0.0.1,<1.0.0"
alibabacloud-darabonba-signature-util = ">=0.0.4,<1.0.0"
alibabacloud-darabonba-string = ">=0.0.4,<1.0.0"
alibabacloud-gateway-sls-util = ">=0.4.0,<1.0.0"
alibabacloud-gateway-spi = ">=0.0.2,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alibabacloud-gateway-sls-util"
version = "0.4.0"
description = "Alibaba Cloud SLS Util Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "alibabacloud_gateway_sls_util-0.4.0-py3-none-any.whl", hash = "sha256:c91ab7fe55af526a01d25b0d431088c4d241b160db055da3d8cb7330bd74595a"},
    {file = "alibabacloud_gateway_sls_util-0.4.0.tar.gz", hash = "sha256:f8b683a36a2ae3fe9a8225d3d97773ea769bdf9cdf4f4d033eab2eb6062ddd1f"},
]

[package.dependencies]
aliyun-log-fastpb = ">=0.2.0"
lz4 = ">=4.3.2"
zstd = ">=1.5.5.1"

[[package]]
name = "alibabacloud-gateway-spi"
version = "0.0.3"
description = "Alibaba Cloud Gateway SPI SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_gateway_spi-0.0.3.tar.gz", hash = "sha256:10d1c53a3fc5f87915fbd6b4985b98338a776e9b44a0263f56643c5048223b8b"},
]

[package.dependencies]
alibabacloud_credentials = ">=0.3.4"

[[package]]
name = "alibabacloud-openapi-util"
version = "0.2.2"
description = "Aliyun Tea OpenApi Library for Python"
optional = false
python-versions = "*"
groups = ["main"]
files = [
    {file = "alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8"},
]

[package.dependencies]
alibabacloud_tea_util = ">=0.0.2"
cryptography = ">=3.0.0"

[[package]]
name = "alibabacloud-oss-util"
version = "0.0.6"
description = "The oss util module of alibabaCloud Python SDK."
optional = false
python-versions = "*"
groups = ["main"]
files = [
    {file = "alibabacloud_oss_util-0.0.6.tar.gz", hash = "sha256:d3ecec36632434bd509a113e8cf327dc23e830ac8d9dd6949926f4e334c8b5d6"},
]

[package.dependencies]
alibabacloud-tea = "*"

[[package]]
name = "alibabacloud-oss20190517"
version = "1.0.6"
description = "Alibaba Cloud Object Storage Service (20190517) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_oss20190517-1.0.6-py3-none-any.whl", hash = "sha256:365fda353de6658a1a289f4d70dcd0394e2a8e2921b6b5834ba6d9772121d2f6"},
    {file = "alibabacloud_oss20190517-1.0.6.tar.gz", hash = "sha256:7cd0fb16af613ceb38d2e0e529aa1f58038c7cf59eb67c8c8775ae44ea717852"},
]

[package.dependencies]
alibabacloud-gateway-oss = ">=0.0.9,<1.0.0"
alibabacloud-gateway-spi = ">=0.0.1,<1.0.0"
alibabacloud-openapi-util = ">=0.2.1,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.6,<1.0.0"
alibabacloud-tea-util = ">=0.3.11,<1.0.0"

[[package]]
name = "alibabacloud-ram20150501"
version = "1.2.0"
description = "Alibaba Cloud Resource Access Management (20150501) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_ram20150501-1.2.0-py3-none-any.whl", hash = "sha256:03a0f2a0259848787c1f74e802b486184a88e04183486bd9398766971e5eb00a"},
    {file = "alibabacloud_ram20150501-1.2.0.tar.gz", hash = "sha256:6253513c8880769f4fd5b36fedddb362a9ca628ad9ae9c05c0eeacf5fbc95b42"},
]

[package.dependencies]
alibabacloud-endpoint-util = ">=0.0.4,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.15,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alibabacloud-rds20140815"
version = "12.0.0"
description = "Alibaba Cloud rds (20140815) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_rds20140815-12.0.0-py3-none-any.whl", hash = "sha256:0bd7e2018a428d86b1b0681087336e74665b48fc3eb0a13c4f4377ed5eab2b08"},
    {file = "alibabacloud_rds20140815-12.0.0.tar.gz", hash = "sha256:e7421d94f18a914c0a06b0e7fad0daff557713f1c97d415d463a78c1270e9b98"},
]

[package.dependencies]
alibabacloud-endpoint-util = ">=0.0.4,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.15,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alibabacloud-sas20181203"
version = "6.1.0"
description = "Alibaba Cloud Threat Detection (20181203) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_sas20181203-6.1.0-py3-none-any.whl", hash = "sha256:1ad735332c50c7961be036b17420d56b5ec3b5557e3aea1daa19491e8b75da20"},
    {file = "alibabacloud_sas20181203-6.1.0.tar.gz", hash = "sha256:e49ffd53e630274a8bf5a8299ca753023ad118510c80f6d9c6fb018b7479bf37"},
]

[package.dependencies]
alibabacloud-endpoint-util = ">=0.0.4,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.16,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alibabacloud-sls20201230"
version = "5.9.0"
description = "Alibaba Cloud Log Service (20201230) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_sls20201230-5.9.0-py3-none-any.whl", hash = "sha256:c4ae14096817a9686af5a0ae2389f1f6a8781e60b9edb8643445250cf15c26f1"},
    {file = "alibabacloud_sls20201230-5.9.0.tar.gz", hash = "sha256:bea830b64fbc7ed1719ba386ceeefb120f08d705f03eb0e02409dc6f12a291da"},
]

[package.dependencies]
alibabacloud-gateway-sls = ">=0.3.0,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.16,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alibabacloud-sts20150401"
version = "1.1.6"
description = "Alibaba Cloud Sts (20150401) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_sts20150401-1.1.6-py3-none-any.whl", hash = "sha256:627f5ca1f86e19b0bf8ce0e99071a36fb65579fad9256fbee38fdc8d500598e9"},
    {file = "alibabacloud_sts20150401-1.1.6.tar.gz", hash = "sha256:c2529b41e0e4531e21cb393e4df346e19fd6d54cc6337d1138dbcd2191438d4c"},
]

[package.dependencies]
alibabacloud-endpoint-util = ">=0.0.4,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.15,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alibabacloud-tea"
version = "0.4.3"
description = "The tea module of alibabaCloud Python SDK."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "alibabacloud-tea-0.4.3.tar.gz", hash = "sha256:ec8053d0aa8d43ebe1deb632d5c5404339b39ec9a18a0707d57765838418504a"},
]

[package.dependencies]
aiohttp = ">=3.7.0,<4.0.0"
requests = ">=2.21.0,<3.0.0"

[[package]]
name = "alibabacloud-tea-openapi"
version = "0.4.1"
description = "Alibaba Cloud openapi SDK Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "alibabacloud_tea_openapi-0.4.1-py3-none-any.whl", hash = "sha256:e46bfa3ca34086d2c357d217a0b7284ecbd4b3bab5c88e075e73aec637b0e4a0"},
    {file = "alibabacloud_tea_openapi-0.4.1.tar.gz", hash = "sha256:2384b090870fdb089c3c40f3fb8cf0145b8c7d6c14abbac521f86a01abb5edaf"},
]

[package.dependencies]
alibabacloud-credentials = ">=1.0.2,<2.0.0"
alibabacloud-gateway-spi = ">=0.0.2,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"
cryptography = ">=3.0.0,<45.0.0"
darabonba-core = ">=1.0.3,<2.0.0"

[[package]]
name = "alibabacloud-tea-util"
version = "0.3.14"
description = "The tea-util module of alibabaCloud Python SDK."
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_tea_util-0.3.14-py3-none-any.whl", hash = "sha256:10d3e5c340d8f7ec69dd27345eb2fc5a1dab07875742525edf07bbe86db93bfe"},
    {file = "alibabacloud_tea_util-0.3.14.tar.gz", hash = "sha256:708e7c9f64641a3c9e0e566365d2f23675f8d7c2a3e2971d9402ceede0408cdb"},
]

[package.dependencies]
alibabacloud-tea = ">=0.3.3"

[[package]]
name = "alibabacloud-tea-xml"
version = "0.0.3"
description = "The tea-xml module of alibabaCloud Python SDK."
optional = false
python-versions = "*"
groups = ["main"]
files = [
    {file = "alibabacloud_tea_xml-0.0.3.tar.gz", hash = "sha256:979cb51fadf43de77f41c69fc69c12529728919f849723eb0cd24eb7b048a90c"},
]

[package.dependencies]
alibabacloud-tea = ">=0.4.0"

[[package]]
name = "alibabacloud-vpc20160428"
version = "6.13.0"
description = "Alibaba Cloud Virtual Private Cloud (20160428) SDK Library for Python"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
    {file = "alibabacloud_vpc20160428-6.13.0-py3-none-any.whl", hash = "sha256:933cf1e74322a20a2df27ca6323760d857744a4246eeadc9fb3eae01322fb1c6"},
    {file = "alibabacloud_vpc20160428-6.13.0.tar.gz", hash = "sha256:daf00679a83d422799f9fcf263739fe1f360641675843cbfbe623833fc8b1681"},
]

[package.dependencies]
alibabacloud-endpoint-util = ">=0.0.4,<1.0.0"
alibabacloud-openapi-util = ">=0.2.2,<1.0.0"
alibabacloud-tea-openapi = ">=0.3.16,<1.0.0"
alibabacloud-tea-util = ">=0.3.13,<1.0.0"

[[package]]
name = "alive-progress"
version = "3.3.0"
@@ -165,6 +651,32 @@ files = [
about-time = "4.2.1"
graphemeu = "0.7.2"

[[package]]
name = "aliyun-log-fastpb"
version = "0.2.0"
description = "Fast protobuf serialization for Aliyun Log using PyO3 and quick-protobuf"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:51633d92d2b349aed4843c0b503454fb4f7d73eeaaa54f82aa5a36c10c064ef5"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d2984aafc61ccbbf1db2589ce90b6d5a26e72dba137fb1fdf7f61ce3faa967c0"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:181fc61ac9934f58b0880fa5617a4a4dc709dba09f8be95b5a71e828f2e48053"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12b8bfddf0bc5450f16f1954c6387a73da124fae10d1205a17a0117e66bb56db"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8fbc83cbaa51d332e5e68871c1200014f1f3de54a8cba4fb55a634ee145cd4e4"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a86a6e11dd227d595fa23f69d30588446af19d045d1003bd1b66b5c9a55485"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd92c0b84ba300c1d1c227204c5f2fff243cea80bc3f9399293385e87c82ee3e"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c07a6d81a3eab6666949240da305236ed2350c305154d7e39fcc121fc52291"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2cff4fbdd0edff94adcee1dcabf16daacb5d336a12fc897887aa6e4f0ad25152"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5a451809e2a062accbb8dae8750e507e58806e4a8da48d69215cdeef428e9d63"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:61f09df30232f1f5628d13310cf0e175171399ea1c75a8470e9f9d97b045bfb5"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:a5fbf0d41d8c0c964a3dc8dd0ee2e732f876b803e0ed3432550ef3b84dde84f1"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ae2f84ed0777e00045791044a56413f370afbd5b061505f5ded540c04b19c58e"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-win32.whl", hash = "sha256:967f9656c805602fd9be07d8c2756ad89204c852c99689c3c71aa035416ef42a"},
    {file = "aliyun_log_fastpb-0.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:bbdcf7b85f0f3437c2a8e8a1db0ef5584d21468b7c7a358269a4c651c84f4a54"},
    {file = "aliyun_log_fastpb-0.2.0.tar.gz", hash = "sha256:91c714e76fb941c9a0db6b1aa1f4c56cb1626254ff5444c1179860f5e5b63d93"},
]

[[package]]
name = "annotated-types"
version = "0.7.0"
@@ -212,6 +724,34 @@ doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)",
test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""]
trio = ["trio (>=0.26.1)"]

[[package]]
name = "apscheduler"
version = "3.11.1"
description = "In-process task scheduler with Cron-like capabilities"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
    {file = "apscheduler-3.11.1-py3-none-any.whl", hash = "sha256:6162cb5683cb09923654fa9bdd3130c4be4bfda6ad8990971c9597ecd52965d2"},
    {file = "apscheduler-3.11.1.tar.gz", hash = "sha256:0db77af6400c84d1747fe98a04b8b58f0080c77d11d338c4f507a9752880f221"},
]

[package.dependencies]
tzlocal = ">=3.0"

[package.extras]
doc = ["packaging", "sphinx", "sphinx-rtd-theme (>=1.3.0)"]
etcd = ["etcd3", "protobuf (<=3.21.0)"]
gevent = ["gevent"]
mongodb = ["pymongo (>=3.0)"]
redis = ["redis (>=3.0)"]
rethinkdb = ["rethinkdb (>=2.4.0)"]
sqlalchemy = ["sqlalchemy (>=1.4)"]
test = ["APScheduler[etcd,mongodb,redis,rethinkdb,sqlalchemy,tornado,zookeeper]", "PySide6 ; platform_python_implementation == \"CPython\" and python_version < \"3.14\"", "anyio (>=4.5.2)", "gevent ; python_version < \"3.14\"", "pytest", "pytz", "twisted ; python_version < \"3.14\""]
tornado = ["tornado (>=4.3)"]
twisted = ["twisted"]
zookeeper = ["kazoo"]

[[package]]
name = "astroid"
version = "3.3.11"
@@ -1444,6 +1984,23 @@ ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]

[[package]]
name = "darabonba-core"
version = "1.0.4"
description = "The darabonba module of alibabaCloud Python SDK."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "darabonba_core-1.0.4-py3-none-any.whl", hash = "sha256:4c3bc1d76d5af1087297b6afde8e960ea2f54f93e725e2df8453f0b4bb27dd24"},
    {file = "darabonba_core-1.0.4.tar.gz", hash = "sha256:6ede4e9bfd458148bab19ab2331716ae9b5c226ba5f6d221de6f88ee65704137"},
]

[package.dependencies]
aiohttp = ">=3.7.0,<4.0.0"
alibabacloud-tea = "*"
requests = ">=2.21.0,<3.0.0"

[[package]]
name = "dash"
version = "3.1.1"
@@ -2490,6 +3047,78 @@ files = [
    {file = "lazy_object_proxy-1.11.0.tar.gz", hash = "sha256:18874411864c9fbbbaa47f9fc1dd7aea754c86cfde21278ef427639d1dd78e9c"},
]

[[package]]
name = "lz4"
version = "4.4.5"
description = "LZ4 Bindings for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "lz4-4.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d221fa421b389ab2345640a508db57da36947a437dfe31aeddb8d5c7b646c22d"},
    {file = "lz4-4.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7dc1e1e2dbd872f8fae529acd5e4839efd0b141eaa8ae7ce835a9fe80fbad89f"},
    {file = "lz4-4.4.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e928ec2d84dc8d13285b4a9288fd6246c5cde4f5f935b479f50d986911f085e3"},
    {file = "lz4-4.4.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:daffa4807ef54b927451208f5f85750c545a4abbff03d740835fc444cd97f758"},
    {file = "lz4-4.4.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2a2b7504d2dffed3fd19d4085fe1cc30cf221263fd01030819bdd8d2bb101cf1"},
    {file = "lz4-4.4.5-cp310-cp310-win32.whl", hash = "sha256:0846e6e78f374156ccf21c631de80967e03cc3c01c373c665789dc0c5431e7fc"},
    {file = "lz4-4.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:7c4e7c44b6a31de77d4dc9772b7d2561937c9588a734681f70ec547cfbc51ecd"},
    {file = "lz4-4.4.5-cp310-cp310-win_arm64.whl", hash = "sha256:15551280f5656d2206b9b43262799c89b25a25460416ec554075a8dc568e4397"},
    {file = "lz4-4.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d6da84a26b3aa5da13a62e4b89ab36a396e9327de8cd48b436a3467077f8ccd4"},
    {file = "lz4-4.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61d0ee03e6c616f4a8b69987d03d514e8896c8b1b7cc7598ad029e5c6aedfd43"},
    {file = "lz4-4.4.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:33dd86cea8375d8e5dd001e41f321d0a4b1eb7985f39be1b6a4f466cd480b8a7"},
    {file = "lz4-4.4.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:609a69c68e7cfcfa9d894dc06be13f2e00761485b62df4e2472f1b66f7b405fb"},
    {file = "lz4-4.4.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:75419bb1a559af00250b8f1360d508444e80ed4b26d9d40ec5b09fe7875cb989"},
    {file = "lz4-4.4.5-cp311-cp311-win32.whl", hash = "sha256:12233624f1bc2cebc414f9efb3113a03e89acce3ab6f72035577bc61b270d24d"},
    {file = "lz4-4.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:8a842ead8ca7c0ee2f396ca5d878c4c40439a527ebad2b996b0444f0074ed004"},
    {file = "lz4-4.4.5-cp311-cp311-win_arm64.whl", hash = "sha256:83bc23ef65b6ae44f3287c38cbf82c269e2e96a26e560aa551735883388dcc4b"},
    {file = "lz4-4.4.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:df5aa4cead2044bab83e0ebae56e0944cc7fcc1505c7787e9e1057d6d549897e"},
    {file = "lz4-4.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d0bf51e7745484d2092b3a51ae6eb58c3bd3ce0300cf2b2c14f76c536d5697a"},
    {file = "lz4-4.4.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7b62f94b523c251cf32aa4ab555f14d39bd1a9df385b72443fd76d7c7fb051f5"},
    {file = "lz4-4.4.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2c3ea562c3af274264444819ae9b14dbbf1ab070aff214a05e97db6896c7597e"},
    {file = "lz4-4.4.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24092635f47538b392c4eaeff14c7270d2c8e806bf4be2a6446a378591c5e69e"},
    {file = "lz4-4.4.5-cp312-cp312-win32.whl", hash = "sha256:214e37cfe270948ea7eb777229e211c601a3e0875541c1035ab408fbceaddf50"},
    {file = "lz4-4.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:713a777de88a73425cf08eb11f742cd2c98628e79a8673d6a52e3c5f0c116f33"},
    {file = "lz4-4.4.5-cp312-cp312-win_arm64.whl", hash = "sha256:a88cbb729cc333334ccfb52f070463c21560fca63afcf636a9f160a55fac3301"},
    {file = "lz4-4.4.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6bb05416444fafea170b07181bc70640975ecc2a8c92b3b658c554119519716c"},
    {file = "lz4-4.4.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b424df1076e40d4e884cfcc4c77d815368b7fb9ebcd7e634f937725cd9a8a72a"},
    {file = "lz4-4.4.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:216ca0c6c90719731c64f41cfbd6f27a736d7e50a10b70fad2a9c9b262ec923d"},
    {file = "lz4-4.4.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:533298d208b58b651662dd972f52d807d48915176e5b032fb4f8c3b6f5fe535c"},
    {file = "lz4-4.4.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:451039b609b9a88a934800b5fc6ee401c89ad9c175abf2f4d9f8b2e4ef1afc64"},
    {file = "lz4-4.4.5-cp313-cp313-win32.whl", hash = "sha256:a5f197ffa6fc0e93207b0af71b302e0a2f6f29982e5de0fbda61606dd3a55832"},
    {file = "lz4-4.4.5-cp313-cp313-win_amd64.whl", hash = "sha256:da68497f78953017deb20edff0dba95641cc86e7423dfadf7c0264e1ac60dc22"},
    {file = "lz4-4.4.5-cp313-cp313-win_arm64.whl", hash = "sha256:c1cfa663468a189dab510ab231aad030970593f997746d7a324d40104db0d0a9"},
    {file = "lz4-4.4.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67531da3b62f49c939e09d56492baf397175ff39926d0bd5bd2d191ac2bff95f"},
    {file = "lz4-4.4.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a1acbbba9edbcbb982bc2cac5e7108f0f553aebac1040fbec67a011a45afa1ba"},
    {file = "lz4-4.4.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a482eecc0b7829c89b498fda883dbd50e98153a116de612ee7c111c8bcf82d1d"},
    {file = "lz4-4.4.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e099ddfaa88f59dd8d36c8a3c66bd982b4984edf127eb18e30bb49bdba68ce67"},
    {file = "lz4-4.4.5-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a2af2897333b421360fdcce895c6f6281dc3fab018d19d341cf64d043fc8d90d"},
    {file = "lz4-4.4.5-cp313-cp313t-win32.whl", hash = "sha256:66c5de72bf4988e1b284ebdd6524c4bead2c507a2d7f172201572bac6f593901"},
    {file = "lz4-4.4.5-cp313-cp313t-win_amd64.whl", hash = "sha256:cdd4bdcbaf35056086d910d219106f6a04e1ab0daa40ec0eeef1626c27d0fddb"},
    {file = "lz4-4.4.5-cp313-cp313t-win_arm64.whl", hash = "sha256:28ccaeb7c5222454cd5f60fcd152564205bcb801bd80e125949d2dfbadc76bbd"},
    {file = "lz4-4.4.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c216b6d5275fc060c6280936bb3bb0e0be6126afb08abccde27eed23dead135f"},
    {file = "lz4-4.4.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c8e71b14938082ebaf78144f3b3917ac715f72d14c076f384a4c062df96f9df6"},
    {file = "lz4-4.4.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b5e6abca8df9f9bdc5c3085f33ff32cdc86ed04c65e0355506d46a5ac19b6e9"},
    {file = "lz4-4.4.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b84a42da86e8ad8537aabef062e7f661f4a877d1c74d65606c49d835d36d668"},
    {file = "lz4-4.4.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bba042ec5a61fa77c7e380351a61cb768277801240249841defd2ff0a10742f"},
    {file = "lz4-4.4.5-cp314-cp314-win32.whl", hash = "sha256:bd85d118316b53ed73956435bee1997bd06cc66dd2fa74073e3b1322bd520a67"},
    {file = "lz4-4.4.5-cp314-cp314-win_amd64.whl", hash = "sha256:92159782a4502858a21e0079d77cdcaade23e8a5d252ddf46b0652604300d7be"},
    {file = "lz4-4.4.5-cp314-cp314-win_arm64.whl", hash = "sha256:d994b87abaa7a88ceb7a37c90f547b8284ff9da694e6afcfaa8568d739faf3f7"},
    {file = "lz4-4.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f6538aaaedd091d6e5abdaa19b99e6e82697d67518f114721b5248709b639fad"},
    {file = "lz4-4.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13254bd78fef50105872989a2dc3418ff09aefc7d0765528adc21646a7288294"},
    {file = "lz4-4.4.5-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e64e61f29cf95afb43549063d8433b46352baf0c8a70aa45e2585618fcf59d86"},
    {file = "lz4-4.4.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff1b50aeeec64df5603f17984e4b5be6166058dcf8f1e26a3da40d7a0f6ab547"},
    {file = "lz4-4.4.5-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1dd4d91d25937c2441b9fc0f4af01704a2d09f30a38c5798bc1d1b5a15ec9581"},
    {file = "lz4-4.4.5-cp39-cp39-win32.whl", hash = "sha256:d64141085864918392c3159cdad15b102a620a67975c786777874e1e90ef15ce"},
    {file = "lz4-4.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:f32b9e65d70f3684532358255dc053f143835c5f5991e28a5ac4c93ce94b9ea7"},
    {file = "lz4-4.4.5-cp39-cp39-win_arm64.whl", hash = "sha256:f9b8bde9909a010c75b3aea58ec3910393b758f3c219beed67063693df854db0"},
    {file = "lz4-4.4.5.tar.gz", hash = "sha256:5f0b9e53c1e82e88c10d7c180069363980136b9d7a8306c4dca4f760d60c39f0"},
]

[package.extras]
docs = ["sphinx (>=1.6.0)", "sphinx_bootstrap_theme"]
flake8 = ["flake8"]
tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"]

[[package]]
name = "markdown"
version = "3.9"
@@ -5685,7 +6314,143 @@ enabler = ["pytest-enabler (>=2.2)"]
|
||||
test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
|
||||
type = ["pytest-mypy"]
|
||||
|
||||
[[package]]
|
||||
name = "zstd"
|
||||
version = "1.5.7.2"
|
||||
description = "ZSTD Bindings for Python"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "zstd-1.5.7.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e17104d0e88367a7571dde4286e233126c8551691ceff11f9ae2e3a3ac1bb483"},
|
||||
{file = "zstd-1.5.7.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:d6ee5dfada4c8fa32f43cc092fcf7d8482da6ad242c22fdf780f7eebd0febcc7"},
|
||||
{file = "zstd-1.5.7.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:ae1100776cb400100e2d2f427b50dc983c005c38cd59502eb56d2cfea3402ad5"},
|
||||
{file = "zstd-1.5.7.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:489a0ff15caf7640851e63f85b680c4279c99094cd500a29c7ed3ab82505fce0"},
|
||||
{file = "zstd-1.5.7.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:92590cf54318849d492445c885f1a42b9dbb47cdc070659c7cb61df6e8531047"},
|
||||
{file = "zstd-1.5.7.2-cp27-cp27mu-manylinux_2_4_i686.whl", hash = "sha256:2bc21650f7b9c058a3c4cb503e906fe9cce293941ec1b48bc5d005c3b4422b42"},
|
||||
{file = "zstd-1.5.7.2-cp27-cp27mu-manylinux_2_4_x86_64.whl", hash = "sha256:7b13e7eef9aa192804d38bf413924d347c6f6c6ac07f5a0c1ae4a6d7b3af70f0"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d3f14c5c405ea353b68fe105236780494eb67c756ecd346fd295498f5eab6d24"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07d2061df22a3efc06453089e6e8b96e58f5bb7a0c4074dcfd0b0ce243ddde72"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:27e55aa2043ba7d8a08aba0978c652d4d5857338a8188aa84522569f3586c7bb"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e97933addfd71ea9608306f18dc18e7d2a5e64212ba2bb9a4ccb6d714f9f280"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_4_i686.whl", hash = "sha256:27e2ed58b64001c9ef0a8e028625477f1a6ed4ca949412ff6548544945cc59c2"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_4_x86_64.whl", hash = "sha256:92f072819fc0c7e8445f51a232c9ad76642027c069d2f36470cdb5e663839cdb"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:2a653cdd2c52d60c28e519d44bde8d759f2c1837f0ff8e8e1b0045ca62fcf70e"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:047803d87d910f4905f48d99aeff1e0539ec2e4f4bf17d077701b5d0b2392a95"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0d8c1dc947e5ccea3bd81043080213685faf1d43886c27c51851fabf325f05c0"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8291d393321fac30604c6bbf40067103fee315aa476647a5eaecf877ee53496f"},
|
||||
{file = "zstd-1.5.7.2-cp310-cp310-win32.whl", hash = "sha256:6922ceac5f2d60bb57a7875168c8aa442477b83e8951f2206cf1e9be788b0a6e"},
{file = "zstd-1.5.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:346d1e4774d89a77d67fc70d53964bfca57c0abecfd885a4e00f87fd7c71e074"},
{file = "zstd-1.5.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f799c1e9900ad77e7a3d994b9b5146d7cfd1cbd1b61c3db53a697bf21ffcc57b"},
{file = "zstd-1.5.7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ff4c667f29101566a7b71f06bbd677a63192818396003354131f586383db042"},
{file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8526a32fa9f67b07fd09e62474e345f8ca1daf3e37a41137643d45bd1bc90773"},
{file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:2cec2472760d48a7a3445beaba509d3f7850e200fed65db15a1a66e315baec6a"},
{file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_4_i686.whl", hash = "sha256:a200c479ee1bb661bc45518e016a1fdc215a1d8f7e4bf6c7de0af254976cfdf6"},
{file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_4_x86_64.whl", hash = "sha256:f5d159e57a13147aa8293c0f14803a75e9039fd8afdf6cf1c8c2289fb4d2333a"},
{file = "zstd-1.5.7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:7206934a2bd390080e972a1fed5a897e184dfd71dbb54e978dc11c6b295e1806"},
{file = "zstd-1.5.7.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e0027b20f296d1c9a8e85b8436834cf46560240a29d623aa8eaa8911832eb58"},
{file = "zstd-1.5.7.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d6b17e5581dd1a13437079bd62838d2635db8eb8aca9c0e9251faa5d4d40a6d7"},
{file = "zstd-1.5.7.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b13285c99cc710f60dd270785ec75233018870a1831f5655d862745470a0ca29"},
{file = "zstd-1.5.7.2-cp311-cp311-win32.whl", hash = "sha256:cdb5ec80da299f63f8aeccec0bff3247e96252d4c8442876363ff1b438d8049b"},
{file = "zstd-1.5.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:4f6861c8edceb25fda37cdaf422fc5f15dcc88ced37c6a5b3c9011eda51aa218"},
{file = "zstd-1.5.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2ebe3e60dbace52525fa7aa604479e231dc3e4fcc76d0b4c54d8abce5e58734"},
{file = "zstd-1.5.7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ef201b6f7d3a6751d85cc52f9e6198d4d870e83d490172016b64a6dd654a9583"},
{file = "zstd-1.5.7.2-cp312-cp312-manylinux_2_14_x86_64.whl", hash = "sha256:ac7bdfedda51b1fcdcf0ab69267d01256fc97ddf666ce894fde0fae9f3630eac"},
{file = "zstd-1.5.7.2-cp312-cp312-manylinux_2_4_i686.whl", hash = "sha256:b835405cc4080b378e45029f2fe500e408d1eaedfba7dd7402aba27af16955f9"},
{file = "zstd-1.5.7.2-cp312-cp312-win32.whl", hash = "sha256:e4cf97bb97ed6dbb62d139d68fd42fa1af51fd26fd178c501f7b62040e897c50"},
{file = "zstd-1.5.7.2-cp312-cp312-win_amd64.whl", hash = "sha256:55e2edc4560a5cf8ee9908595e90a15b1f47536ea9aad4b2889f0e6165890a38"},
{file = "zstd-1.5.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6e684e27064b6550aa2e7dc85d171ea1b62cb5930a2c99b3df9b30bf620b5c06"},
{file = "zstd-1.5.7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd6262788a98807d6b2befd065d127db177c1cd76bb8e536e0dded419eb7c7fb"},
{file = "zstd-1.5.7.2-cp313-cp313-manylinux_2_14_x86_64.whl", hash = "sha256:53948be45f286a1b25c07a6aa2aca5c902208eb3df9fe36cf891efa0394c8b71"},
{file = "zstd-1.5.7.2-cp313-cp313-win32.whl", hash = "sha256:edf816c218e5978033b7bb47dcb453dfb71038cb8a9bf4877f3f823e74d58174"},
{file = "zstd-1.5.7.2-cp313-cp313-win_amd64.whl", hash = "sha256:eea9bddf06f3f5e1e450fd647665c86df048a45e8b956d53522387c1dff41b7a"},
{file = "zstd-1.5.7.2-cp313-cp313t-manylinux_2_14_x86_64.whl", hash = "sha256:1d71f9f92b3abe18b06b5f0aefa5b9c42112beef3bff27e36028d147cb4426a6"},
{file = "zstd-1.5.7.2-cp314-cp314-manylinux_2_14_x86_64.whl", hash = "sha256:a6105b8fa21dbc59e05b6113e8e5d5aaf56c5d2886aa5778d61030af3256bbb7"},
{file = "zstd-1.5.7.2-cp314-cp314t-manylinux_2_14_x86_64.whl", hash = "sha256:d0b0ca097efb5f67157c61a744c926848dcccf6e913df2f814e719aa78197a4b"},
{file = "zstd-1.5.7.2-cp34-cp34m-manylinux_2_4_i686.whl", hash = "sha256:a371274668182ae06be2e321089b207fa0a75a58ae2fd4dfb7eafded9e041b2f"},
{file = "zstd-1.5.7.2-cp34-cp34m-manylinux_2_4_x86_64.whl", hash = "sha256:74c3f006c9a3a191ed454183f0fb78172444f5cb431be04d85044a27f1b58c7b"},
{file = "zstd-1.5.7.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:f19a3e658d92b6b52020c4c6d4c159480bcd3b47658773ea0e8d343cee849f33"},
{file = "zstd-1.5.7.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d9d1bcb6441841c599883139c1b0e47bddb262cce04b37dc2c817da5802c1158"},
{file = "zstd-1.5.7.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:bb1cb423fc40468cc9b7ab51a5b33c618eefd2c910a5bffed6ed76fe1cbb20b0"},
{file = "zstd-1.5.7.2-cp35-cp35m-manylinux_2_14_x86_64.whl", hash = "sha256:e2476ba12597e58c5fc7a3ae547ee1bef9dd6b9d5ea80cf8d4034930c5a336e0"},
{file = "zstd-1.5.7.2-cp35-cp35m-manylinux_2_4_i686.whl", hash = "sha256:2bf6447373782a2a9df3015121715f6d0b80a49a884c2d7d4518c9571e9fca16"},
{file = "zstd-1.5.7.2-cp35-cp35m-win32.whl", hash = "sha256:a59a136a9eaa1849d715c004e30344177e85ad6e7bc4a5d0b6ad2495c5402675"},
{file = "zstd-1.5.7.2-cp35-cp35m-win_amd64.whl", hash = "sha256:114115af8c68772a3205414597f626b604c7879f6662a2a79c88312e0f50361f"},
{file = "zstd-1.5.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f576ec00e99db124309dac1e1f34bc320eb69624189f5fdaf9ebe1dc81581a84"},
{file = "zstd-1.5.7.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f97d8593da0e23a47f148a1cb33300dccd513fb0df9f7911c274e228a8c1a300"},
{file = "zstd-1.5.7.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:a130243e875de5aeda6099d12b11bc2fcf548dce618cf6b17f731336ba5338e4"},
{file = "zstd-1.5.7.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:73cec37649fda383348dc8b3b5fba535f1dbb1bbaeb60fd36f4c145820208619"},
{file = "zstd-1.5.7.2-cp36-cp36m-manylinux_2_14_x86_64.whl", hash = "sha256:883e7b77a3124011b8badd0c7c9402af3884700a3431d07877972e157d85afb8"},
{file = "zstd-1.5.7.2-cp36-cp36m-manylinux_2_4_i686.whl", hash = "sha256:b5af6aa041b5515934afef2ef4af08566850875c3c890109088eedbe190eeefb"},
{file = "zstd-1.5.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:53abf577aec7b30afa3c024143f4866676397c846b44f1b30d8097b5e4f5c7d7"},
{file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:660945ba16c16957c94dafc40aff1db02a57af0489aa3a896866239d47bb44b0"},
{file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:3e220d2d7005822bb72a52e76410ca4634f941d8062c08e8e3285733c63b1db7"},
{file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_4_i686.whl", hash = "sha256:7e998f86a9d1e576c0158bf0b0a6a5c4685679d74ba0053a2e87f684f9bdc8eb"},
{file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_4_x86_64.whl", hash = "sha256:70d0c4324549073e05aa72e9eb6a593f89cba59da804b946d325d68467b93ad5"},
{file = "zstd-1.5.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:b9518caabf59405eddd667bbb161d9ae7f13dbf96967fd998d095589c8d41c86"},
{file = "zstd-1.5.7.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:30d339d8e5c4b14c2015b50371fcdb8a93b451ca6d3ef813269ccbb8b3b3ef7d"},
{file = "zstd-1.5.7.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:6f5539a10b838ee576084870eed65b63c13845e30a5b552cfe40f7e6b621e61a"},
{file = "zstd-1.5.7.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:5540ce1c99fa0b59dad2eff771deb33872754000da875be50ac8c2beab42b433"},
{file = "zstd-1.5.7.2-cp37-cp37m-win32.whl", hash = "sha256:56c4b8cd0a88fd721213661c28b87b64fbd14b6019df39b21b0117a68162b0f2"},
{file = "zstd-1.5.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:594f256fa72852ade60e3acb909f983d5cf6839b9fc79728dd4b48b31112058f"},
{file = "zstd-1.5.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9dc05618eb0abceb296b77e5f608669c12abc69cbf447d08151bcb14d290ab07"},
{file = "zstd-1.5.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:70231ba799d681b6fc17456c3e39895c493b5dff400aa7842166322a952b7f2a"},
{file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:5a73f0f20f71d4eef970a3fed7baac64d9a2a00b238acc4eca2bd7172bd7effb"},
{file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0a470f8938f69f632b8f88b96578a5e8825c18ddbbea7de63493f74874f963ef"},
{file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_4_i686.whl", hash = "sha256:d104f1cb2a7c142007c29a2a62dfe633155c648317a465674e583c295e5f792d"},
{file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_4_x86_64.whl", hash = "sha256:70f29e0504fc511d4b9f921e69637fca79c050e618ba23732a3f75c044814d89"},
{file = "zstd-1.5.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:a62c2f6f7b8fc69767392084828740bd6faf35ff54d4ccb2e90e199327c64140"},
{file = "zstd-1.5.7.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f2dda0c76f87723fb7f75d7ad3bbd90f7fb47b75051978d22535099325111b41"},
{file = "zstd-1.5.7.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f9cf09c2aa6f67750fe9f33fdd122f021b1a23bf7326064a8e21f7af7e77faee"},
{file = "zstd-1.5.7.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:910bd9eac2488439f597504756b03c74aa63ed71b21e5d0aa2c7e249b3f1c13f"},
{file = "zstd-1.5.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9838ec7eb9f1beb2f611b9bcac7a169cb3de708ccf779aead29787e4482fe232"},
{file = "zstd-1.5.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:83a36bb1fd574422a77b36ccf3315ab687aef9a802b0c3312ca7006b74eeb109"},
{file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6f8189bc58415758bbbd419695012194f5e5e22c34553712d9a3eb009c09808d"},
{file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:632e3c1b7e1ebb0580f6d92b781a8f7901d367cf72725d5642e6d3a32e404e45"},
{file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_4_i686.whl", hash = "sha256:df8083c40fdbfe970324f743f0b5ecc244c37736e5f3ad2670de61dde5e0b024"},
{file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_4_x86_64.whl", hash = "sha256:300db1ede4d10f8b9b3b99ca52b22f0e2303dc4f1cf6994d1f8345ce22dd5a7e"},
{file = "zstd-1.5.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:97b908ccb385047b0c020ce3dc55e6f51078c9790722fdb3620c076be4a69ecf"},
{file = "zstd-1.5.7.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c59218bd36a7431a40591504f299de836ea0d63bc68ea76d58c4cf5262f0fa3c"},
{file = "zstd-1.5.7.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4d5a85344193ec967d05da8e2c10aed400e2d83e16041d2fdfb713cfc8caceeb"},
{file = "zstd-1.5.7.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ebf6c1d7f0ceb0af5a383d2a1edc8ab9ace655e62a41c8a4ed5a031ee2ef8006"},
{file = "zstd-1.5.7.2-cp39-cp39-win32.whl", hash = "sha256:44a5142123d59a0dbbd9ba9720c23521be57edbc24202223a5e17405c3bdd4a6"},
{file = "zstd-1.5.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dc542a9818712a9fb37563fa88cdbbbb2b5f8733111d412b718fa602b83ba45"},
{file = "zstd-1.5.7.2-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:24371a7b0475eef7d933c72067d363c5dc17282d2aa5d4f5837774378718509e"},
{file = "zstd-1.5.7.2-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:c21d44981b068551f13097be3809fadb7f81617d0c21b2c28a7d04653dde958f"},
{file = "zstd-1.5.7.2-pp27-pypy_73-manylinux_2_14_x86_64.whl", hash = "sha256:b011bf4cfad78cdf9116d6731234ff181deb9560645ffdcc8d54861ae5d1edfc"},
{file = "zstd-1.5.7.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:426e5c6b7b3e2401b734bfd08050b071e17c15df5e3b31e63651d1fd9ba4c751"},
{file = "zstd-1.5.7.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:53375b23f2f39359ade944169bbd88f8895eed91290ee608ccbc28810ac360ba"},
{file = "zstd-1.5.7.2-pp310-pypy310_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:1b301b2f9dbb0e848093127fb10cbe6334a697dc3aea6740f0bb726450ee9a34"},
{file = "zstd-1.5.7.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5414c9ae27069ab3ec8420fe8d005cb1b227806cbc874a7b4c73a96b4697a633"},
{file = "zstd-1.5.7.2-pp311-pypy311_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:5fb2ff5718fe89181223c23ce7308bd0b4a427239379e2566294da805d8df68a"},
{file = "zstd-1.5.7.2-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:9714d5642867fceb22e4ab74aebf81a2e62dc9206184d603cb39277b752d5885"},
{file = "zstd-1.5.7.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:6584fd081a6e7d92dffa8e7373d1fced6b3cbf473154b82c17a99438c5e1de51"},
{file = "zstd-1.5.7.2-pp36-pypy36_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:52f27a198e2a72632bae12ec63ebaa31b10e3d5f3dd3df2e01376979b168e2e6"},
{file = "zstd-1.5.7.2-pp36-pypy36_pp73-win32.whl", hash = "sha256:3b14793d2a2cb3a7ddd1cf083321b662dd20bc11143abc719456e9bfd22a32aa"},
{file = "zstd-1.5.7.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:faf3fd38ba26167c5a085c04b8c931a216f1baf072709db7a38e61dea52e316e"},
{file = "zstd-1.5.7.2-pp37-pypy37_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:d17ac6d2584168247796174e599d4adbee00153246287e68881efaf8d48a6970"},
{file = "zstd-1.5.7.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9a24d492c63555b55e6bc73a9e82a38bf7c3e8f7cde600f079210ed19cb061f2"},
{file = "zstd-1.5.7.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c6abf4ab9a9d1feb14bc3cbcc32d723d340ce43b79b1812805916f3ac069b073"},
{file = "zstd-1.5.7.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:d7131bb4e55d075cb7847555a1e17fca5b816a550c9b9ac260c01799b6f8e8d9"},
{file = "zstd-1.5.7.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a03608499794148f39c932c508d4eb3622e79ca2411b1d0438a2ee8cafdc0111"},
{file = "zstd-1.5.7.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:86e64c71b4d00bf28be50e4941586e7874bdfa74858274d9f7571dd5dda92086"},
{file = "zstd-1.5.7.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0f79492bf86aef6e594b11e29c5589ddd13253db3ada0c7a14fb176b132fb65e"},
{file = "zstd-1.5.7.2-pp38-pypy38_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:8c3f4bb8508bc54c00532931da4a5261f08493363da14a5526c986765973e35d"},
{file = "zstd-1.5.7.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:787bcf55cefc08d27aca34c6dcaae1a24940963d1a73d4cec894ee458c541ac4"},
{file = "zstd-1.5.7.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f97f872cb78a4fd60b6c1024a65a4c52a971e9d991f33c7acd833ee73050f85"},
{file = "zstd-1.5.7.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:5e530b75452fdcff4ea67268d9e7cb37a38e7abbac84fa845205f0b36da81aaf"},
{file = "zstd-1.5.7.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7c1cc65fc2789dd97a98202df840537de186ed04fd1804a17fcb15d1232442c4"},
{file = "zstd-1.5.7.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:05604a693fa53b60ca083992324b08dafd15a4ac37ac4cffe4b43b9eb93d4440"},
{file = "zstd-1.5.7.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:baf4e8b46d8934d4e85373f303eb048c63897fc4191d8ab301a1bbdf30b7a3cc"},
{file = "zstd-1.5.7.2-pp39-pypy39_pp73-manylinux_2_14_x86_64.whl", hash = "sha256:8cc35cc25e2d4a0f68020f05cba96912a2881ebaca890d990abe37aa3aa27045"},
{file = "zstd-1.5.7.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:ceae57e369e1b821b8f2b4c59bc08acd27d8e4bf9687bfa5211bc4cdb080fe7b"},
{file = "zstd-1.5.7.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5189fb44c44ab9b6c45f734bd7093a67686193110dc90dcfaf0e3a31b2385f38"},
{file = "zstd-1.5.7.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:f51a965871b25911e06d421212f9be7f7bcd3cedc43ea441a8a73fad9952baa0"},
{file = "zstd-1.5.7.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:624022851c51dd6d6b31dbfd793347c4bd6339095e8383e2f74faf4f990b04c6"},
{file = "zstd-1.5.7.2.tar.gz", hash = "sha256:6d8684c69009be49e1b18ec251a5eb0d7e24f93624990a8a124a1da66a92fc8a"},
]

[metadata]
lock-version = "2.1"
python-versions = ">3.9.1,<3.13"
content-hash = "a367e65bc43c0a16495a3d0f6eab8b356cc49b509e329b61c6641cd87f374ff4"
content-hash = "433468987cb3c4499d094d90e9f8cc9062a25ce115fde991a4e1b39edbfb7815"
@@ -2,6 +2,40 @@

All notable changes to the **Prowler SDK** are documented in this file.

## [v5.15.0] (Prowler UNRELEASED)

### Added
- `cloudstorage_uses_vpc_service_controls` check for GCP provider [(#9256)](https://github.com/prowler-cloud/prowler/pull/9256)
- Alibaba Cloud provider with CIS 2.0 benchmark [(#9329)](https://github.com/prowler-cloud/prowler/pull/9329)
- `repository_immutable_releases_enabled` check for GitHub provider [(#9162)](https://github.com/prowler-cloud/prowler/pull/9162)
- `compute_instance_preemptible_vm_disabled` check for GCP provider [(#9342)](https://github.com/prowler-cloud/prowler/pull/9342)
- `compute_instance_automatic_restart_enabled` check for GCP provider [(#9271)](https://github.com/prowler-cloud/prowler/pull/9271)
- `compute_instance_deletion_protection_enabled` check for GCP provider [(#9358)](https://github.com/prowler-cloud/prowler/pull/9358)

### Changed
- Update AWS Macie service metadata to new format [(#9265)](https://github.com/prowler-cloud/prowler/pull/9265)
- Update AWS Lightsail service metadata to new format [(#9264)](https://github.com/prowler-cloud/prowler/pull/9264)
- Update AWS GuardDuty service metadata to new format [(#9259)](https://github.com/prowler-cloud/prowler/pull/9259)
- Update AWS Network Firewall service metadata to new format [(#9382)](https://github.com/prowler-cloud/prowler/pull/9382)
- Update AWS MQ service metadata to new format [(#9267)](https://github.com/prowler-cloud/prowler/pull/9267)

---

## [v5.14.2] (Prowler UNRELEASED)

### Fixed
- Custom check folder metadata validation [(#9335)](https://github.com/prowler-cloud/prowler/pull/9335)

---

## [v5.14.1] (Prowler v5.14.1)

### Fixed
- `sharepoint_external_sharing_managed` check to handle external sharing disabled at organization level [(#9298)](https://github.com/prowler-cloud/prowler/pull/9298)
- Support multiple Exchange mailbox policies in M365 `exchange_mailbox_policy_additional_storage_restricted` check [(#9241)](https://github.com/prowler-cloud/prowler/pull/9241)

---

## [v5.14.0] (Prowler v5.14.0)

### Added
@@ -64,6 +98,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Update AWS Kinesis service metadata to new format [(#9262)](https://github.com/prowler-cloud/prowler/pull/9262)
- Update AWS DocumentDB service metadata to new format [(#8862)](https://github.com/prowler-cloud/prowler/pull/8862)


### Fixed
- Check `check_name` has no `resource_name` error for GCP provider [(#9169)](https://github.com/prowler-cloud/prowler/pull/9169)
- Depth truncation and parsing error in PowerShell queries [(#9181)](https://github.com/prowler-cloud/prowler/pull/9181)