Compare commits

...

20 Commits

Author SHA1 Message Date
Pepe Fagoaga
e07e45c8e5 chore(api): update lock for SDK 2025-12-23 16:28:14 +01:00
Pepe Fagoaga
a37aea84e7 chore: changelog for v5.16.1 (#9661) 2025-12-23 12:51:47 +01:00
Pedro Martín
8d1d041092 chore(aws): support new eusc partition (#9649)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-12-23 12:28:10 +01:00
Rubén De la Torre Vico
6f018183cd ci(mcp): add GitHub Actions workflow for PyPI release (#9660) 2025-12-23 12:27:08 +01:00
Pedro Martín
8ce56b5ed6 feat(ui): add search bar when adding a provider (#9634)
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2025-12-23 12:09:55 +01:00
lydiavilchez
ad5095595c feat(gcp): add compute check to ensure VM disks have auto-delete disabled (#9604)
Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>
2025-12-23 10:57:11 +01:00
Alejandro Bailo
3fbe157d10 feat(ui): add shadcn Alert component (#9655)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2025-12-23 10:52:48 +01:00
Rubén De la Torre Vico
83d04753ef docs: add resource types for new providers (#9113) 2025-12-23 10:19:53 +01:00
Ulissis Correa
de8e2219c2 fix(ui): add API docs URL build arg for self-hosted deployments (#9388)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-12-23 09:54:04 +01:00
dependabot[bot]
2850c40dd5 build(deps): bump trufflesecurity/trufflehog from 3.90.12 to 3.91.1 (#9395)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-23 09:51:30 +01:00
dependabot[bot]
e213afd4e1 build(deps): bump aws-actions/configure-aws-credentials from 5.1.0 to 5.1.1 (#9392)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-23 09:50:49 +01:00
dependabot[bot]
deada62d66 build(deps): bump peter-evans/repository-dispatch from 4.0.0 to 4.0.1 (#9391)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-23 09:50:36 +01:00
dependabot[bot]
b8d9860a2f build(deps): bump github/codeql-action from 4.31.2 to 4.31.6 (#9393)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-23 09:38:13 +01:00
Pedro Martín
be759216c4 fix(compliance): handle ZeroDivision error from Prowler ThreatScore (#9653) 2025-12-23 09:29:14 +01:00
dependabot[bot]
ca9211b5ed build(deps): bump actions/setup-python from 6.0.0 to 6.1.0 (#9390)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-23 09:26:54 +01:00
dependabot[bot]
3cf7f7845e build(deps): bump actions/checkout from 5.0.0 to 6.0.0 (#9397)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-23 09:20:19 +01:00
Ryan Nolette
81e046ecf6 feat(bedrock): API pagination (#9606)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-12-23 09:06:19 +01:00
Ryan Nolette
0d363e6100 feat(sagemaker): parallelize tag listing for better performance (#9609)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-12-23 08:51:16 +01:00
Pepe Fagoaga
0719e31b58 chore(security-hub): handle SecurityHubNoEnabledRegionsError (#9635) 2025-12-22 16:50:36 +01:00
StylusFrost
19ceb7db88 docs: add end-to-end testing documentation for Prowler App (#9557) 2025-12-22 16:39:53 +01:00
72 changed files with 5454 additions and 2299 deletions

View File

@@ -28,7 +28,7 @@ jobs:
current_api_version: ${{ steps.get_api_version.outputs.current_api_version }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Get current API version
id: get_api_version
@@ -78,7 +78,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Calculate next API minor version
run: |
@@ -129,7 +129,7 @@ jobs:
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Checkout version branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
@@ -192,7 +192,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Calculate next API patch version
run: |

View File

@@ -33,7 +33,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for API changes
id: check-changes

View File

@@ -42,15 +42,15 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/api-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: '/language:${{ matrix.language }}'

View File

@@ -57,7 +57,7 @@ jobs:
message-ts: ${{ steps.slack-notification.outputs.ts }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Notify container push started
id: slack-notification
@@ -93,7 +93,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Login to DockerHub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -170,7 +170,7 @@ jobs:
timeout-minutes: 5
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Determine overall outcome
id: outcome
@@ -207,7 +207,7 @@ jobs:
steps:
- name: Trigger API deployment
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}

View File

@@ -28,7 +28,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check if Dockerfile changed
id: dockerfile-changed
@@ -63,7 +63,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for API changes
id: check-changes

View File

@@ -33,7 +33,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for API changes
id: check-changes

View File

@@ -73,7 +73,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for API changes
id: check-changes

View File

@@ -28,7 +28,7 @@ jobs:
current_docs_version: ${{ steps.get_docs_version.outputs.current_docs_version }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Get current documentation version
id: get_docs_version
@@ -78,7 +78,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Calculate next minor version
run: |
@@ -129,7 +129,7 @@ jobs:
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Checkout version branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
@@ -192,7 +192,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Calculate next patch version
run: |

View File

@@ -23,11 +23,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 0
- name: Scan for secrets with TruffleHog
uses: trufflesecurity/trufflehog@b84c3d14d189e16da175e2c27fa8136603783ffc # v3.90.12
uses: trufflesecurity/trufflehog@aade3bff5594fe8808578dd4db3dfeae9bf2abdc # v3.91.1
with:
extra_args: '--results=verified,unknown'

View File

@@ -56,7 +56,7 @@ jobs:
message-ts: ${{ steps.slack-notification.outputs.ts }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Notify container push started
id: slack-notification
@@ -91,7 +91,7 @@ jobs:
packages: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Login to DockerHub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -176,7 +176,7 @@ jobs:
timeout-minutes: 5
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Determine overall outcome
id: outcome
@@ -213,7 +213,7 @@ jobs:
steps:
- name: Trigger MCP deployment
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}

View File

@@ -28,7 +28,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check if Dockerfile changed
id: dockerfile-changed
@@ -62,7 +62,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for MCP changes
id: check-changes

81
.github/workflows/mcp-pypi-release.yml vendored Normal file
View File

@@ -0,0 +1,81 @@
name: "MCP: PyPI Release"
on:
release:
types:
- "published"
concurrency:
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
cancel-in-progress: false
env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: "3.12"
WORKING_DIRECTORY: ./mcp_server
jobs:
validate-release:
if: github.repository == 'prowler-cloud/prowler'
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read
outputs:
prowler_version: ${{ steps.parse-version.outputs.version }}
major_version: ${{ steps.parse-version.outputs.major }}
steps:
- name: Parse and validate version
id: parse-version
run: |
PROWLER_VERSION="${{ env.RELEASE_TAG }}"
echo "version=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
# Extract major version
MAJOR_VERSION="${PROWLER_VERSION%%.*}"
echo "major=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
# Validate major version (only Prowler 3, 4, 5 supported)
case ${MAJOR_VERSION} in
3|4|5)
echo "✓ Releasing Prowler MCP for tag ${PROWLER_VERSION}"
;;
*)
echo "::error::Unsupported Prowler major version: ${MAJOR_VERSION}"
exit 1
;;
esac
publish-prowler-mcp:
needs: validate-release
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
id-token: write
environment:
name: pypi-prowler-mcp
url: https://pypi.org/project/prowler-mcp/
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: Build prowler-mcp package
working-directory: ${{ env.WORKING_DIRECTORY }}
run: uv build
- name: Publish prowler-mcp package to PyPI
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
with:
packages-dir: ${{ env.WORKING_DIRECTORY }}/dist/
print-hash: true

View File

@@ -29,7 +29,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 0

View File

@@ -25,7 +25,7 @@ jobs:
steps:
- name: Checkout PR head
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0

View File

@@ -29,7 +29,7 @@ jobs:
echo "SHORT_SHA=${SHORT_SHA::7}" >> $GITHUB_ENV
- name: Trigger Cloud repository pull request
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}

View File

@@ -27,13 +27,13 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 0
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: '3.12'

View File

@@ -67,7 +67,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Calculate next minor version
run: |
@@ -110,7 +110,7 @@ jobs:
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Checkout version branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
@@ -167,7 +167,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Calculate next patch version
run: |

View File

@@ -31,7 +31,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for SDK changes
id: check-changes
@@ -62,7 +62,7 @@ jobs:
- name: Set up Python ${{ matrix.python-version }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'

View File

@@ -49,15 +49,15 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/sdk-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: '/language:${{ matrix.language }}'

View File

@@ -61,10 +61,10 @@ jobs:
stable_tag: ${{ steps.get-prowler-version.outputs.stable_tag }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -115,7 +115,7 @@ jobs:
message-ts: ${{ steps.slack-notification.outputs.ts }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Notify container push started
id: slack-notification
@@ -151,7 +151,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Login to DockerHub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -252,7 +252,7 @@ jobs:
timeout-minutes: 5
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Determine overall outcome
id: outcome
@@ -294,7 +294,7 @@ jobs:
- name: Dispatch v3 deployment (latest)
if: github.event_name == 'push'
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}
@@ -303,7 +303,7 @@ jobs:
- name: Dispatch v3 deployment (release)
if: github.event_name == 'release'
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}

View File

@@ -27,7 +27,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check if Dockerfile changed
id: dockerfile-changed
@@ -62,7 +62,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for SDK changes
id: check-changes

View File

@@ -59,13 +59,13 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Install Poetry
run: pipx install poetry==2.1.1
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.PYTHON_VERSION }}
cache: 'poetry'
@@ -91,13 +91,13 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Install Poetry
run: pipx install poetry==2.1.1
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.PYTHON_VERSION }}
cache: 'poetry'

View File

@@ -25,12 +25,12 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: 'master'
- name: Set up Python ${{ env.PYTHON_VERSION }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.PYTHON_VERSION }}
cache: 'pip'
@@ -39,7 +39,7 @@ jobs:
run: pip install boto3
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@00943011d9042930efac3dcd3a170e4273319bc8 # v5.1.0
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 # v5.1.1
with:
aws-region: ${{ env.AWS_REGION }}
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}

View File

@@ -24,7 +24,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for SDK changes
id: check-changes
@@ -55,7 +55,7 @@ jobs:
- name: Set up Python 3.12
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: '3.12'
cache: 'poetry'

View File

@@ -31,7 +31,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for SDK changes
id: check-changes
@@ -62,7 +62,7 @@ jobs:
- name: Set up Python ${{ matrix.python-version }}
if: steps.check-changes.outputs.any_changed == 'true'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ matrix.python-version }}
cache: 'poetry'

View File

@@ -67,7 +67,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Calculate next minor version
run: |
@@ -112,7 +112,7 @@ jobs:
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Checkout version branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
@@ -171,7 +171,7 @@ jobs:
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Calculate next patch version
run: |

View File

@@ -45,15 +45,15 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Initialize CodeQL
uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
languages: ${{ matrix.language }}
config-file: ./.github/codeql/ui-codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
with:
category: '/language:${{ matrix.language }}'

View File

@@ -59,7 +59,7 @@ jobs:
message-ts: ${{ steps.slack-notification.outputs.ts }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Notify container push started
id: slack-notification
@@ -95,7 +95,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Login to DockerHub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -175,7 +175,7 @@ jobs:
timeout-minutes: 5
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Determine overall outcome
id: outcome
@@ -212,7 +212,7 @@ jobs:
steps:
- name: Trigger UI deployment
uses: peter-evans/repository-dispatch@5fc4efd1a4797ddb68ffd0714a238564e4cc0e6f # v4.0.0
uses: peter-evans/repository-dispatch@28959ce8df70de7be546dd1250a005dd32156697 # v4.0.1
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
repository: ${{ secrets.CLOUD_DISPATCH }}

View File

@@ -28,7 +28,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check if Dockerfile changed
id: dockerfile-changed
@@ -63,7 +63,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for UI changes
id: check-changes

View File

@@ -54,7 +54,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Create k8s Kind Cluster
uses: helm/kind-action@v1
with:

View File

@@ -30,7 +30,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Check for UI changes
id: check-changes

View File

@@ -9,7 +9,10 @@ All notable changes to the **Prowler API** are documented in this file.
---
## [1.17.1] (Prowler UNRELEASED)
## [1.17.1] (Prowler v5.16.1)
### Changed
- Security Hub integration error when no regions [(#9635)](https://github.com/prowler-cloud/prowler/pull/9635)
### Fixed
- Orphan scheduled scans caused by transaction isolation during provider creation [(#9633)](https://github.com/prowler-cloud/prowler/pull/9633)

4869
api/poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -19,6 +19,9 @@ from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
from prowler.providers.common.models import Connection
from prowler.providers.aws.lib.security_hub.exceptions.exceptions import (
SecurityHubNoEnabledRegionsError,
)
logger = get_task_logger(__name__)
@@ -222,8 +225,9 @@ def get_security_hub_client_from_integration(
)
return True, security_hub
else:
# Reset regions information if connection fails
# Reset regions information if connection fails and integration is not connected
with rls_transaction(tenant_id, using=MainRouter.default_db):
integration.connected = False
integration.configuration["regions"] = {}
integration.save()
@@ -330,15 +334,18 @@ def upload_security_hub_integration(
)
if not connected:
logger.error(
f"Security Hub connection failed for integration {integration.id}: "
f"{security_hub.error}"
)
with rls_transaction(
tenant_id, using=MainRouter.default_db
if isinstance(
security_hub.error,
SecurityHubNoEnabledRegionsError,
):
integration.connected = False
integration.save()
logger.warning(
f"Security Hub integration {integration.id} has no enabled regions"
)
else:
logger.error(
f"Security Hub connection failed for integration {integration.id}: "
f"{security_hub.error}"
)
break # Skip this integration
security_hub_client = security_hub
@@ -409,22 +416,16 @@ def upload_security_hub_integration(
logger.warning(
f"Failed to archive previous findings: {str(archive_error)}"
)
except Exception as e:
logger.error(
f"Security Hub integration {integration.id} failed: {str(e)}"
)
continue
result = integration_executions == len(integrations)
if result:
logger.info(
f"All Security Hub integrations completed successfully for provider {provider_id}"
)
else:
logger.error(
f"Some Security Hub integrations failed for provider {provider_id}"
)
return result

View File

@@ -1199,9 +1199,6 @@ class TestSecurityHubIntegrationUploads:
)
assert result is False
# Integration should be marked as disconnected
integration.save.assert_called_once()
assert integration.connected is False
@patch("tasks.jobs.integrations.ASFF")
@patch("tasks.jobs.integrations.FindingOutput")

View File

@@ -312,7 +312,8 @@ The type of resource being audited. This field helps categorize and organize fin
- **Azure**: Use types from [Azure Resource Graph](https://learn.microsoft.com/en-us/azure/governance/resource-graph/reference/supported-tables-resources), for example: `Microsoft.Storage/storageAccounts`.
- **Google Cloud**: Use [Cloud Asset Inventory asset types](https://cloud.google.com/asset-inventory/docs/asset-types), for example: `compute.googleapis.com/Instance`.
- **Kubernetes**: Use types shown under `KIND` from `kubectl api-resources`.
- **M365 / GitHub**: Leave empty due to lack of standardized types.
- **Oracle Cloud Infrastructure**: Use types from [Oracle Cloud Infrastructure documentation](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Search/Tasks/queryingresources_topic-Listing_Supported_Resource_Types.htm).
- **M365 / GitHub / MongoDB Atlas**: Leave empty due to lack of standardized types.
#### Description

View File

@@ -0,0 +1,327 @@
---
title: 'End-2-End Tests for Prowler App'
---
End-to-end (E2E) tests validate complete user flows in Prowler App (UI + API). These tests are implemented with [Playwright](https://playwright.dev/) under the `ui/tests` folder and are designed to run against a Prowler App environment.
## General Recommendations
When adding or maintaining E2E tests for Prowler App, follow these guidelines:
1. **Test real user journeys**
Focus on full workflows (for example, sign-up → login → add provider → launch scan) instead of low-level UI details already covered by unit or integration tests.
2. **Group tests by entity or feature area**
- Organize E2E tests by entity or feature area (for example, `providers.spec.ts`, `scans.spec.ts`, `invitations.spec.ts`, `sign-up.spec.ts`).
- Each entity should have its own test file and corresponding page model class (for example, `ProvidersPage`, `ScansPage`, `InvitationsPage`).
- Related tests for the same entity should be grouped together in the same test file to improve maintainability and make it easier to find and update tests for a specific feature.
3. **Use a Page Model (Page Object Model)**
- Encapsulate selectors and common actions in page classes instead of repeating them in each test.
- Leverage and extend the existing Playwright page models in `ui/tests`—such as `ProvidersPage`, `ScansPage`, and others—which are all based on the shared `BasePage`.
- Page models for Prowler App pages should be placed in their respective entity folders (for example, `ui/tests/providers/providers-page.ts`).
- Page models for external pages (not part of Prowler App) should be grouped in the `external` folder (for example, `ui/tests/external/github-page.ts`).
- This approach improves readability, reduces duplication, and makes refactors safer.
4. **Reuse authentication states (StorageState)**
- Multiple authentication setup projects are available that generate pre-authenticated state files stored in `playwright/.auth/`. Each project requires specific environment variables:
- `admin.auth.setup` Admin users with full system permissions (requires `E2E_ADMIN_USER` / `E2E_ADMIN_PASSWORD`)
- `manage-scans.auth.setup` Users with scan management permissions (requires `E2E_MANAGE_SCANS_USER` / `E2E_MANAGE_SCANS_PASSWORD`)
- `manage-integrations.auth.setup` Users with integration management permissions (requires `E2E_MANAGE_INTEGRATIONS_USER` / `E2E_MANAGE_INTEGRATIONS_PASSWORD`)
- `manage-account.auth.setup` Users with account management permissions (requires `E2E_MANAGE_ACCOUNT_USER` / `E2E_MANAGE_ACCOUNT_PASSWORD`)
- `manage-cloud-providers.auth.setup` Users with cloud provider management permissions (requires `E2E_MANAGE_CLOUD_PROVIDERS_USER` / `E2E_MANAGE_CLOUD_PROVIDERS_PASSWORD`)
- `unlimited-visibility.auth.setup` Users with unlimited visibility permissions (requires `E2E_UNLIMITED_VISIBILITY_USER` / `E2E_UNLIMITED_VISIBILITY_PASSWORD`)
- `invite-and-manage-users.auth.setup` Users with user invitation and management permissions (requires `E2E_INVITE_AND_MANAGE_USERS_USER` / `E2E_INVITE_AND_MANAGE_USERS_PASSWORD`)
<Note>
If fixtures have been applied (fixtures are used to populate the database with initial development data), you can use the user `e2e@prowler.com` with password `Thisisapassword123@` to configure the Admin credentials by setting `E2E_ADMIN_USER=e2e@prowler.com` and `E2E_ADMIN_PASSWORD=Thisisapassword123@`.
</Note>
- Within test files, use `test.use({ storageState: "playwright/.auth/admin_user.json" })` to load the pre-authenticated state, avoiding redundant authentication steps in each test. This must be placed at the test level (not inside the test function) to apply the authentication state to all tests in that scope. This approach is preferred over declaring dependencies in `playwright.config.ts` because it provides more control over which authentication states are used in specific tests.
**Example:**
```typescript
// Use admin authentication state for all tests in this scope
test.use({ storageState: "playwright/.auth/admin_user.json" });
test("should perform admin action", async ({ page }) => {
// Test implementation
});
```
5. **Tag and document scenarios**
- Follow the existing naming convention for suites and test cases (for example, `SCANS-E2E-001`, `PROVIDER-E2E-003`) and use tags such as `@e2e`, `@serial`, and feature tags (for example, `@providers`, `@scans`, `@aws`) to filter and organize tests.
**Example:**
```typescript
test(
"should add a new AWS provider with static credentials",
{
tag: [
"@critical",
"@e2e",
"@providers",
"@aws",
"@serial",
"@PROVIDER-E2E-001",
],
},
async ({ page }) => {
// Test implementation
}
);
```
- Document each one in the Markdown files under `ui/tests`, including **Priority**, **Tags**, **Description**, **Preconditions**, **Flow steps**, **Expected results**, **Key verification points**, and **Notes**.
**Example**
```Markdown
## Test Case: `SCANS-E2E-001` - Execute On-Demand Scan
**Priority:** `critical`
**Tags:**
- type → @e2e, @serial
- feature → @scans
**Description/Objective:** Validates the complete flow to execute an on-demand scan selecting a provider by UID and confirming success on the Scans page.
**Preconditions:**
- Admin user authentication required (admin.auth.setup setup)
- Environment variables configured for: E2E_AWS_PROVIDER_ACCOUNT_ID, E2E_AWS_PROVIDER_ACCESS_KEY, and E2E_AWS_PROVIDER_SECRET_KEY
- Remove any existing AWS provider with the same Account ID before starting the test
- This test must be run serially and never in parallel with other tests, as it requires the Account ID Provider to be already registered.
### Flow Steps:
1. Navigate to Scans page
2. Open provider selector and choose the entry whose text contains E2E_AWS_PROVIDER_ACCOUNT_ID
3. Optionally fill scan label (alias)
4. Click "Start now" to launch the scan
5. Verify the success toast appears
6. Verify a row in the Scans table contains the provided scan label (or shows the new scan entry)
### Expected Result:
- Scan is launched successfully
- Success toast is displayed to the user
- Scans table displays the new scan entry (including the alias when provided)
### Key verification points:
- Scans page loads correctly
- Provider select is available and lists the configured provider UID
- "Start now" button is rendered and enabled when form is valid
- Success toast message: "The scan was launched successfully."
- Table contains a row with the scan label or new scan state (queued/available/executing)
### Notes:
- The table may take a short time to reflect the new scan; assertions look for a row containing the alias.
- Provider cleanup performed before each test to ensure clean state
- Tests should run serially to avoid state conflicts.
```
6. **Use environment variables for secrets and dynamic data**
Credentials, provider identifiers, secrets, tokens must come from environment variables (for example, `E2E_AWS_PROVIDER_ACCOUNT_ID`, `E2E_AWS_PROVIDER_ACCESS_KEY`, `E2E_AWS_PROVIDER_SECRET_KEY`, `E2E_GCP_PROJECT_ID`).
<Warning>
Never commit real secrets, tokens, or account IDs to the repository.
</Warning>
7. **Keep tests deterministic and isolated**
- Use Playwright's `test.beforeEach()` and `test.afterEach()` hooks to manage test state:
- **`test.beforeEach()`**: Execute cleanup or setup logic before each test runs (for example, delete existing providers with a specific account ID to ensure a clean state).
- **`test.afterEach()`**: Execute cleanup logic after each test completes (for example, remove test data created during the test execution to prevent interference with subsequent tests).
- Define tests as serial using `test.describe.serial()` when they share state or resources that could interfere with parallel execution (for example, tests that use the same provider account ID or create dependent resources). This ensures tests within the serial group run sequentially, preventing race conditions and data conflicts.
- Use unique identifiers (for example, random suffixes for emails or labels) to prevent data collisions.
8. **Use explicit waiting strategies**
- Avoid using `waitForLoadState('networkidle')` as it is unreliable and can lead to flaky tests or unnecessary delays.
- Leverage Playwright's auto-waiting capabilities by waiting for specific elements to be actionable (for example, `locator.click()`, `locator.fill()`, `locator.waitFor()`).
- **Prioritize selector strategies**: Prefer `page.getByRole()` over other approaches like `page.getByText()`. `getByRole()` is more resilient to UI changes, aligns with accessibility best practices, and better reflects how users interact with the application (by role and accessible name rather than implementation details).
- For dynamic content, wait for specific UI elements that indicate the page is ready (for example, button becoming enabled, a specific text appearing, etc).
- This approach makes tests more reliable, faster, and aligned with how users actually interact with the application.
**Common waiting patterns used in Prowler E2E tests:**
- **Element visibility assertions**: Use `expect(locator).toBeVisible()` or `expect(locator).not.toBeVisible()` to wait for elements to appear or disappear (Playwright automatically waits for these conditions).
- **URL changes**: Use `expect(page).toHaveURL(url)` or `page.waitForURL(url)` to wait for navigation to complete.
- **Element states**: Use `locator.waitFor({ state: "visible" })` or `locator.waitFor({ state: "hidden" })` when you need explicit state control.
- **Text content**: Use `expect(locator).toHaveText(text)` or `expect(locator).toContainText(text)` to wait for specific text to appear.
- **Element attributes**: Use `expect(locator).toHaveAttribute(name, value)` to wait for attributes like `aria-disabled="false"` indicating a button is enabled.
- **Custom conditions**: Use `page.waitForFunction(() => condition)` for complex conditions that cannot be expressed with locators (for example, checking DOM element dimensions or computed styles).
- **Retryable assertions**: Use `expect(async () => { ... }).toPass({ timeout })` for conditions that may take time to stabilize (for example, waiting for table rows to filter after a server request).
- **Scroll into view**: Use `locator.scrollIntoViewIfNeeded()` before interacting with elements that may be outside the viewport.
**Example from Prowler tests:**
```typescript
// Wait for page to load by checking main content is visible
await expect(page.locator("main")).toBeVisible();
// Wait for URL change after form submission
await expect(page).toHaveURL("/providers");
// Wait for button to become enabled
await expect(submitButton).toHaveAttribute("aria-disabled", "false");
// Wait for loading spinner to disappear
await expect(page.getByText("Loading")).not.toBeVisible();
// Wait for custom condition
await page.waitForFunction(() => {
const main = document.querySelector("main");
return main && main.offsetHeight > 0;
});
// Wait for retryable condition (e.g., table filtering)
await expect(async () => {
const rowCount = await tableRows.count();
expect(rowCount).toBeLessThanOrEqual(1);
}).toPass({ timeout: 20000 });
```
## Running Prowler Tests
E2E tests for Prowler App run from the `ui` project using Playwright. The Playwright configuration lives in `ui/playwright.config.ts` and defines:
- `testDir: "./tests"` location of E2E test files (relative to the `ui` project root, so `ui/tests`).
- `webServer` how to start the Next.js development server and connect to Prowler API.
- `use.baseURL` base URL for browser interactions (defaults to `http://localhost:3000` or `AUTH_URL` if set).
- `reporter: [["list"]]` uses the list reporter to display test results in a concise format in the terminal. Other reporter options are available (for example, `html`, `json`, `junit`, `github`), and multiple reporters can be configured simultaneously. See the [Playwright reporter documentation](https://playwright.dev/docs/test-reporters) for all available options.
- `expect.timeout: 20000` timeout for assertions (20 seconds). This is the maximum time Playwright will wait for an assertion to pass before considering it failed.
- **Test artifacts** (in `use` configuration): By default, `trace`, `screenshot`, and `video` are set to `"off"` to minimize resource usage. To review test failures or debug issues, these can be enabled in `playwright.config.ts` by changing them to `"on"`, `"on-first-retry"`, or `"retain-on-failure"` depending on your needs.
- `outputDir: "/tmp/playwright-tests"` directory where Playwright stores test artifacts (screenshots, videos, traces) during test execution.
- **CI-specific configuration**: The configuration uses different settings when running in CI environments (detected via `process.env.CI`):
- **Retries**: `2` retries in CI (to handle flaky tests), `0` retries locally (for faster feedback during development).
- **Workers**: `1` worker in CI (sequential execution for stability), `undefined` locally (parallel execution by default for faster test runs).
### Prerequisites
Before running E2E tests:
- **Install root and UI dependencies**
- Follow the [developer guide introduction](/developer-guide/introduction#getting-the-code-and-installing-all-dependencies) to clone the repository and install core dependencies.
- From the `ui` directory, install frontend dependencies:
```bash
cd ui
pnpm install
pnpm run test:e2e:install # Install Playwright browsers
```
- **Ensure Prowler API is available**
- By default, Playwright uses `NEXT_PUBLIC_API_BASE_URL=http://localhost:8080/api/v1` (configured in `playwright.config.ts`).
- Start Prowler API so it is reachable on that URL (for example, via `docker-compose-dev.yml` or the development orchestration used locally).
- If a different API URL is required, set `NEXT_PUBLIC_API_BASE_URL` accordingly before running the tests.
- **Ensure Prowler App UI is available**
- Playwright automatically starts the Next.js server through the `webServer` block in `playwright.config.ts` (`pnpm run dev` by default).
- If the UI is already running on `http://localhost:3000`, Playwright will reuse the existing server when `reuseExistingServer` is `true`.
- **Configure E2E environment variables**
- Suite-specific variables (for example, provider account IDs, credentials, and E2E user data) must be provided before running tests.
- They can be defined either:
- As exported environment variables in the shell before executing the Playwright commands, or
- In a `.env.local` or `.env` file under `ui/`, and then loaded into the shell before running tests, for example:
```bash
cd ui
set -a
source .env.local # or .env
set +a
```
- Refer to the Markdown documentation files in `ui/tests` for each E2E suite (for example, the `*.md` files that describe sign-up, providers, scans, invitations, and other flows) to see the exact list of required variables and their meaning.
- Each E2E test suite explicitly checks that its required environment variables are defined at runtime and will fail with a clear error message if any mandatory variable is missing, making misconfiguration easy to detect.
### Executing Tests
To execute E2E tests for Prowler App:
1. **Run the full E2E suite (headless)**
From the `ui` directory:
```bash
pnpm run test:e2e
```
This command runs Playwright with the configured projects.
2. **Run E2E tests with the Playwright UI runner**
```bash
pnpm run test:e2e:ui
```
This opens the Playwright test runner UI to inspect, debug, and rerun specific tests or projects.
3. **Debug E2E tests interactively**
```bash
pnpm run test:e2e:debug
```
Use this mode to step through flows, inspect selectors, and adjust timings. It runs tests in headed mode with debugging tools enabled.
4. **Run tests in headed mode without debugger**
```bash
pnpm run test:e2e:headed
```
This is useful to visually confirm flows while still running the full suite.
5. **View previous test reports**
```bash
pnpm run test:e2e:report
```
This opens the latest Playwright HTML report, including traces and screenshots when enabled.
6. **Run specific tests or subsets**
In addition to the predefined scripts, Playwright allows filtering which tests run. These examples use the Playwright CLI directly through `pnpm`:
- **By test ID (`@ID` in the test metadata or description)**
To run a single test case identified by its ID (for example, `@PROVIDER-E2E-001` or `@SCANS-E2E-001`):
```bash
pnpm playwright test --grep @PROVIDER-E2E-001
```
- **By tags**
To run all tests that share a common tag (for example, all provider E2E tests tagged with `@providers`):
```bash
pnpm playwright test --grep @providers
```
This is useful to focus on a specific feature area such as providers, scans, invitations, or sign-up.
- **By Playwright project**
To run only the tests associated with a given project defined in `playwright.config.ts` (for example, `providers` or `scans`):
```bash
pnpm playwright test --project=providers
```
Combining project and grep filters is also supported, enabling very narrow runs (for example, a single test ID within the `providers` project). For additional CLI options and combinations, see the [Playwright command line documentation](https://playwright.dev/docs/test-cli).
<Note>
For detailed flows, preconditions, and environment variable requirements per feature, always refer to the Markdown files in `ui/tests`. Those documents are the single source of truth for business expectations and validation points in each E2E suite.
</Note>

View File

@@ -220,6 +220,7 @@ The function returns a JSON file containing the list of regions for the provider
"sa-east-1", "us-east-1", "us-east-2", "us-west-1", "us-west-2"
],
"aws-cn": ["cn-north-1", "cn-northwest-1"],
"aws-eusc": ["eusc-de-east-1"],
"aws-us-gov": ["us-gov-east-1", "us-gov-west-1"]
}
}

View File

@@ -19,7 +19,9 @@
"groups": [
{
"group": "Welcome",
"pages": ["introduction"]
"pages": [
"introduction"
]
},
{
"group": "Prowler Cloud",
@@ -49,7 +51,9 @@
},
{
"group": "Prowler Lighthouse AI",
"pages": ["getting-started/products/prowler-lighthouse-ai"]
"pages": [
"getting-started/products/prowler-lighthouse-ai"
]
},
{
"group": "Prowler MCP Server",
@@ -156,7 +160,9 @@
"user-guide/cli/tutorials/quick-inventory",
{
"group": "Tutorials",
"pages": ["user-guide/cli/tutorials/parallel-execution"]
"pages": [
"user-guide/cli/tutorials/parallel-execution"
]
}
]
},
@@ -244,7 +250,9 @@
},
{
"group": "LLM",
"pages": ["user-guide/providers/llm/getting-started-llm"]
"pages": [
"user-guide/providers/llm/getting-started-llm"
]
},
{
"group": "Oracle Cloud Infrastructure",
@@ -257,7 +265,9 @@
},
{
"group": "Compliance",
"pages": ["user-guide/compliance/tutorials/threatscore"]
"pages": [
"user-guide/compliance/tutorials/threatscore"
]
}
]
},
@@ -298,7 +308,8 @@
"group": "Testing",
"pages": [
"developer-guide/unit-testing",
"developer-guide/integration-testing"
"developer-guide/integration-testing",
"developer-guide/end2end-testing"
]
},
"developer-guide/debugging",
@@ -311,15 +322,21 @@
},
{
"tab": "Security",
"pages": ["security"]
"pages": [
"security"
]
},
{
"tab": "Contact Us",
"pages": ["contact"]
"pages": [
"contact"
]
},
{
"tab": "Troubleshooting",
"pages": ["troubleshooting"]
"pages": [
"troubleshooting"
]
},
{
"tab": "About Us",

View File

@@ -6,15 +6,16 @@ By default Prowler is able to scan the following AWS partitions:
- Commercial: `aws`
- China: `aws-cn`
- European Sovereign Cloud: `aws-eusc`
- GovCloud (US): `aws-us-gov`
<Note>
To check the available regions for each partition and service, refer to: [aws\_regions\_by\_service.json](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_regions_by_service.json)
</Note>
## Scanning AWS China and GovCloud Partitions in Prowler
## Scanning AWS China, European Sovereign Cloud and GovCloud Partitions in Prowler
When scanning the China (`aws-cn`) or GovCloud (`aws-us-gov`), ensure one of the following:
When scanning the China (`aws-cn`), European Sovereign Cloud (`aws-eusc`) or GovCloud (`aws-us-gov`) partitions, ensure one of the following:
- Your AWS credentials include a valid region within the desired partition.
@@ -83,6 +84,29 @@ To scan an account in the AWS GovCloud (US) partition (`aws-us-gov`):
<Note>
With this configuration, all partition regions will be scanned without needing the `-f/--region` flag
</Note>
### AWS European Sovereign Cloud
To scan an account in the AWS European Sovereign Cloud partition (`aws-eusc`):
- By using the `-f/--region` flag:
```
prowler aws --region eusc-de-east-1
```
- By using the region configured in your AWS profile at `~/.aws/credentials` or `~/.aws/config`:
```
[default]
aws_access_key_id = XXXXXXXXXXXXXXXXXXX
aws_secret_access_key = XXXXXXXXXXXXXXXXXXX
region = eusc-de-east-1
```
<Note>
With this configuration, all partition regions will be scanned without needing the `-f/--region` flag
</Note>
### AWS ISO (US \& Europe)
@@ -99,6 +123,9 @@ The AWS ISO partitions—commonly referred to as "secret partitions"—are air-g
"cn-north-1",
"cn-northwest-1"
],
"aws-eusc": [
"eusc-de-east-1"
],
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"

View File

@@ -5,8 +5,8 @@ This package provides MCP tools for accessing:
- Prowler Hub: All security artifacts (detections, remediations and frameworks) supported by Prowler
"""
__version__ = "0.1.0"
__version__ = "0.3.0"
__author__ = "Prowler Team"
__email__ = "engineering@prowler.com"
__all__ = ["__version__", "prowler_mcp_server"]
__all__ = ["__version__", "__author__", "__email__"]

View File

@@ -6,14 +6,13 @@ across all providers.
from typing import Any
from pydantic import Field
from prowler_mcp_server.prowler_app.models.resources import (
DetailedResource,
ResourcesListResponse,
ResourcesMetadataResponse,
)
from prowler_mcp_server.prowler_app.tools.base import BaseTool
from pydantic import Field
class ResourcesTools(BaseTool):
@@ -188,7 +187,7 @@ class ResourcesTools(BaseTool):
1. Configuration Details:
- metadata: Provider-specific configuration (tags, policies, encryption settings, network rules)
- partition: Provider-specific partition/region grouping (e.g., aws, aws-cn, aws-us-gov for AWS)
- partition: Provider-specific partition/region grouping (e.g., aws, aws-cn, aws-eusc, aws-us-gov for AWS)
2. Temporal Tracking:
- inserted_at: When Prowler first discovered this resource

View File

@@ -14,7 +14,6 @@ requires-python = ">=3.12"
version = "0.3.0"
[project.scripts]
generate-prowler-app-mcp-server = "prowler_mcp_server.prowler_app.utils.server_generator:generate_server_file"
prowler-mcp = "prowler_mcp_server.main:main"
[tool.uv]

View File

@@ -7,6 +7,9 @@ All notable changes to the **Prowler SDK** are documented in this file.
### Added
- Add Prowler ThreatScore for the Alibaba Cloud provider [(#9511)](https://github.com/prowler-cloud/prowler/pull/9511)
- `compute_instance_group_multiple_zones` check for GCP provider [(#9566)](https://github.com/prowler-cloud/prowler/pull/9566)
- Support AWS European Sovereign Cloud [(#9649)](https://github.com/prowler-cloud/prowler/pull/9649)
- `compute_instance_disk_auto_delete_disabled` check for GCP provider [(#9604)](https://github.com/prowler-cloud/prowler/pull/9604)
- Bedrock service pagination [(#9606)](https://github.com/prowler-cloud/prowler/pull/9606)
### Changed
- Update AWS Step Functions service metadata to new format [(#9432)](https://github.com/prowler-cloud/prowler/pull/9432)
@@ -14,18 +17,24 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Update AWS SQS service metadata to new format [(#9429)](https://github.com/prowler-cloud/prowler/pull/9429)
- Update AWS Shield service metadata to new format [(#9427)](https://github.com/prowler-cloud/prowler/pull/9427)
- Update AWS Secrets Manager service metadata to new format [(#9408)](https://github.com/prowler-cloud/prowler/pull/9408)
- Improve SageMaker service tag retrieval with parallel execution [(#9609)](https://github.com/prowler-cloud/prowler/pull/9609)
---
## [5.16.1] (Prowler v5.16.1)
### Fixed
- ZeroDivision error from Prowler ThreatScore [(#9653)](https://github.com/prowler-cloud/prowler/pull/9653)
---
## [5.16.0] (Prowler v5.16.0)
### Added
- `privilege-escalation` and `ec2-imdsv1` categories for AWS checks [(#9537)](https://github.com/prowler-cloud/prowler/pull/9537)
- Supported IaC formats and scanner documentation for the IaC provider [(#9553)](https://github.com/prowler-cloud/prowler/pull/9553)
### Changed
- Update AWS Glue service metadata to new format [(#9258)](https://github.com/prowler-cloud/prowler/pull/9258)
- Update AWS Kafka service metadata to new format [(#9261)](https://github.com/prowler-cloud/prowler/pull/9261)
- Update AWS KMS service metadata to new format [(#9263)](https://github.com/prowler-cloud/prowler/pull/9263)

View File

@@ -103,8 +103,16 @@ def get_prowler_threatscore_table(
for pillar in pillars:
pillar_table["Provider"].append(compliance.Provider)
pillar_table["Pillar"].append(pillar)
if max_score_per_pillar[pillar] == 0:
pillar_score = 100.0
score_color = Fore.GREEN
else:
pillar_score = (
score_per_pillar[pillar] / max_score_per_pillar[pillar]
) * 100
score_color = Fore.RED
pillar_table["Score"].append(
f"{Style.BRIGHT}{Fore.RED}{(score_per_pillar[pillar] / max_score_per_pillar[pillar]) * 100:.2f}%{Style.RESET_ALL}"
f"{Style.BRIGHT}{score_color}{pillar_score:.2f}%{Style.RESET_ALL}"
)
if pillars[pillar]["FAIL"] > 0:
pillar_table["Status"].append(
@@ -148,9 +156,12 @@ def get_prowler_threatscore_table(
print(
f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:"
)
print(
f"\nGeneric Threat Score: {generic_score / max_generic_score * 100:.2f}%"
)
# Handle division by zero when all findings are muted
if max_generic_score == 0:
generic_threat_score = 100.0
else:
generic_threat_score = generic_score / max_generic_score * 100
print(f"\nGeneric Threat Score: {generic_threat_score:.2f}%")
print(
tabulate(
pillar_table,

View File

@@ -984,6 +984,8 @@ class AwsProvider(Provider):
global_region = "us-east-1"
if self._identity.partition == "aws-cn":
global_region = "cn-north-1"
elif self._identity.partition == "aws-eusc":
global_region = "eusc-de-east-1"
elif self._identity.partition == "aws-us-gov":
global_region = "us-gov-east-1"
elif "aws-iso" in self._identity.partition:
@@ -1473,11 +1475,12 @@ class AwsProvider(Provider):
sts_client = create_sts_session(session, 'us-west-2')
"""
try:
sts_endpoint_url = (
f"https://sts.{aws_region}.amazonaws.com"
if not aws_region.startswith("cn-")
else f"https://sts.{aws_region}.amazonaws.com.cn"
)
if aws_region.startswith("cn-"):
sts_endpoint_url = f"https://sts.{aws_region}.amazonaws.com.cn"
elif aws_region.startswith("eusc-"):
sts_endpoint_url = f"https://sts.{aws_region}.amazonaws.eu"
else:
sts_endpoint_url = f"https://sts.{aws_region}.amazonaws.com"
return session.client("sts", aws_region, endpoint_url=sts_endpoint_url)
except Exception as error:
logger.critical(

File diff suppressed because it is too large Load Diff

View File

@@ -59,5 +59,5 @@ def parse_iam_credentials_arn(arn: str) -> ARN:
def is_valid_arn(arn: str) -> bool:
"""is_valid_arn returns True or False whether the given AWS ARN (Amazon Resource Name) is valid or not."""
regex = r"^arn:aws(-cn|-us-gov|-iso|-iso-b)?:[a-zA-Z0-9\-]+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:[a-zA-Z0-9\-_\/:\.\*]+(:\d+)?$"
regex = r"^arn:aws(-cn|-eusc|-us-gov|-iso|-iso-b)?:[a-zA-Z0-9\-]+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:[a-zA-Z0-9\-_\/:\.\*]+(:\d+)?$"
return re.match(regex, arn) is not None

View File

@@ -55,7 +55,7 @@ class SecurityHubConnection(Connection):
Attributes:
enabled_regions (set): Set of regions where Security Hub is enabled.
disabled_regions (set): Set of regions where Security Hub is disabled.
partition (str): AWS partition (e.g., aws, aws-cn, aws-us-gov) where SecurityHub is deployed.
partition (str): AWS partition (e.g., aws, aws-cn, aws-eusc, aws-us-gov) where SecurityHub is deployed.
"""
enabled_regions: set = None
@@ -70,7 +70,7 @@ class SecurityHub:
Attributes:
_session (Session): AWS session object for authentication and communication with AWS services.
_aws_account_id (str): AWS account ID associated with the SecurityHub instance.
_aws_partition (str): AWS partition (e.g., aws, aws-cn, aws-us-gov) where SecurityHub is deployed.
_aws_partition (str): AWS partition (e.g., aws, aws-cn, aws-eusc, aws-us-gov) where SecurityHub is deployed.
_findings_per_region (dict): Dictionary containing findings per region.
_enabled_regions (dict): Dictionary containing enabled regions with SecurityHub clients.
@@ -115,7 +115,7 @@ class SecurityHub:
Args:
- aws_session (Session): AWS session object for authentication and communication with AWS services.
- aws_account_id (str): AWS account ID associated with the SecurityHub instance.
- aws_partition (str): AWS partition (e.g., aws, aws-cn, aws-us-gov) where SecurityHub is deployed.
- aws_partition (str): AWS partition (e.g., aws, aws-cn, aws-eusc, aws-us-gov) where SecurityHub is deployed.
- findings (list[AWSSecurityFindingFormat]): List of findings to filter and send to Security Hub.
- aws_security_hub_available_regions (list[str]): List of regions where Security Hub is available.
- send_only_fails (bool): Flag indicating whether to send only findings with status 'FAIL'.
@@ -477,7 +477,7 @@ class SecurityHub:
Args:
aws_account_id (str): AWS account ID to check for Prowler integration.
aws_partition (str): AWS partition (e.g., aws, aws-cn, aws-us-gov).
aws_partition (str): AWS partition (e.g., aws, aws-cn, aws-eusc, aws-us-gov).
regions (set): Set of regions to check for Security Hub integration.
raise_on_exception (bool): Whether to raise an exception if an error occurs.
profile (str): AWS profile name to use for authentication.

View File

@@ -90,6 +90,7 @@ class Partition(str, Enum):
Attributes:
aws (str): Represents the standard AWS commercial regions.
aws_cn (str): Represents the AWS China regions.
aws_eusc (str): Represents the AWS European Sovereign Cloud regions.
aws_us_gov (str): Represents the AWS GovCloud (US) Regions.
aws_iso (str): Represents the AWS ISO (US) Regions.
aws_iso_b (str): Represents the AWS ISOB (US) Regions.
@@ -99,6 +100,7 @@ class Partition(str, Enum):
aws = "aws"
aws_cn = "aws-cn"
aws_eusc = "aws-eusc"
aws_us_gov = "aws-us-gov"
aws_iso = "aws-iso"
aws_iso_b = "aws-iso-b"

View File

@@ -55,16 +55,18 @@ class Bedrock(AWSService):
def _list_guardrails(self, regional_client):
logger.info("Bedrock - Listing Guardrails...")
try:
for guardrail in regional_client.list_guardrails().get("guardrails", []):
if not self.audit_resources or (
is_resource_filtered(guardrail["arn"], self.audit_resources)
):
self.guardrails[guardrail["arn"]] = Guardrail(
id=guardrail["id"],
name=guardrail["name"],
arn=guardrail["arn"],
region=regional_client.region,
)
paginator = regional_client.get_paginator("list_guardrails")
for page in paginator.paginate():
for guardrail in page.get("guardrails", []):
if not self.audit_resources or (
is_resource_filtered(guardrail["arn"], self.audit_resources)
):
self.guardrails[guardrail["arn"]] = Guardrail(
id=guardrail["id"],
name=guardrail["name"],
arn=guardrail["arn"],
region=regional_client.region,
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -130,20 +132,22 @@ class BedrockAgent(AWSService):
def _list_agents(self, regional_client):
logger.info("Bedrock Agent - Listing Agents...")
try:
for agent in regional_client.list_agents().get("agentSummaries", []):
agent_arn = f"arn:aws:bedrock:{regional_client.region}:{self.audited_account}:agent/{agent['agentId']}"
if not self.audit_resources or (
is_resource_filtered(agent_arn, self.audit_resources)
):
self.agents[agent_arn] = Agent(
id=agent["agentId"],
name=agent["agentName"],
arn=agent_arn,
guardrail_id=agent.get("guardrailConfiguration", {}).get(
"guardrailIdentifier"
),
region=regional_client.region,
)
paginator = regional_client.get_paginator("list_agents")
for page in paginator.paginate():
for agent in page.get("agentSummaries", []):
agent_arn = f"arn:aws:bedrock:{regional_client.region}:{self.audited_account}:agent/{agent['agentId']}"
if not self.audit_resources or (
is_resource_filtered(agent_arn, self.audit_resources)
):
self.agents[agent_arn] = Agent(
id=agent["agentId"],
name=agent["agentName"],
arn=agent_arn,
guardrail_id=agent.get("guardrailConfiguration", {}).get(
"guardrailIdentifier"
),
region=regional_client.region,
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -16,10 +16,14 @@ class SageMaker(AWSService):
self.sagemaker_models = []
self.sagemaker_training_jobs = []
self.endpoint_configs = {}
# Retrieve resources concurrently
self.__threading_call__(self._list_notebook_instances)
self.__threading_call__(self._list_models)
self.__threading_call__(self._list_training_jobs)
self.__threading_call__(self._list_endpoint_configs)
# Describe resources concurrently
self.__threading_call__(self._describe_model, self.sagemaker_models)
self.__threading_call__(
self._describe_notebook_instance, self.sagemaker_notebook_instances
@@ -28,9 +32,21 @@ class SageMaker(AWSService):
self._describe_training_job, self.sagemaker_training_jobs
)
self.__threading_call__(
self._describe_endpoint_config, self.endpoint_configs.values()
self._describe_endpoint_config, list(self.endpoint_configs.values())
)
# List tags concurrently for each resource collection
        # This replaces the previous sequential execution to improve performance
self.__threading_call__(self._list_tags_for_resource, self.sagemaker_models)
self.__threading_call__(
self._list_tags_for_resource, self.sagemaker_notebook_instances
)
self.__threading_call__(
self._list_tags_for_resource, self.sagemaker_training_jobs
)
self.__threading_call__(
self._list_tags_for_resource, list(self.endpoint_configs.values())
)
self._list_tags_for_resource()
def _list_notebook_instances(self, regional_client):
logger.info("SageMaker - listing notebook instances...")
@@ -187,40 +203,16 @@ class SageMaker(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _list_tags_for_resource(self):
def _list_tags_for_resource(self, resource):
"""
Lists tags for a specific SageMaker resource.
This method is designed to be called in parallel threads for each resource.
"""
logger.info("SageMaker - List Tags...")
try:
for model in self.sagemaker_models:
regional_client = self.regional_clients[model.region]
response = regional_client.list_tags(ResourceArn=model.arn)["Tags"]
model.tags = response
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
try:
for instance in self.sagemaker_notebook_instances:
regional_client = self.regional_clients[instance.region]
response = regional_client.list_tags(ResourceArn=instance.arn)["Tags"]
instance.tags = response
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
try:
for job in self.sagemaker_training_jobs:
regional_client = self.regional_clients[job.region]
response = regional_client.list_tags(ResourceArn=job.arn)["Tags"]
job.tags = response
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
try:
for endpoint in self.endpoint_configs.values():
regional_client = self.regional_clients[endpoint.region]
response = regional_client.list_tags(ResourceArn=endpoint.arn)["Tags"]
endpoint.tags = response
regional_client = self.regional_clients[resource.region]
response = regional_client.list_tags(ResourceArn=resource.arn)["Tags"]
resource.tags = response
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -0,0 +1,36 @@
{
"Provider": "gcp",
"CheckID": "compute_instance_disk_auto_delete_disabled",
"CheckTitle": "VM instance attached disks have auto-delete disabled",
"CheckType": [],
"ServiceName": "compute",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "compute.googleapis.com/Instance",
"Description": "This check verifies whether GCP Compute Engine VM instances have **auto-delete** disabled for their attached persistent disks.\n\nWhen auto-delete is enabled, persistent disks are automatically removed when the associated VM instance is deleted, which can lead to unintended data loss.",
"Risk": "With auto-delete enabled, persistent disks are automatically deleted when the associated VM instance is terminated.\n\nThis could result in:\n- **Permanent data loss** if the instance is accidentally or intentionally deleted\n- **Recovery challenges** for mission-critical workloads\n- **Compliance violations** where data retention is required",
"RelatedUrl": "",
"AdditionalURLs": [
"https://cloud.google.com/compute/docs/disks/add-persistent-disk",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/gcp/ComputeEngine/disable-auto-delete.html"
],
"Remediation": {
"Code": {
"CLI": "gcloud compute instances set-disk-auto-delete INSTANCE_NAME --zone=ZONE --no-auto-delete --disk=DISK_NAME",
"NativeIaC": "",
"Other": "1. Open the Google Cloud Console\n2. Navigate to Compute Engine > VM instances\n3. Click the target VM instance name\n4. Click Edit\n5. In the Boot disk section, select 'Keep disk' from the 'When deleting instance' dropdown\n6. For Additional disks, click each disk and select 'Keep disk' under 'Deletion rule'\n7. Click Save",
"Terraform": "```hcl\nresource \"google_compute_instance\" \"example_resource\" {\n name = \"example-instance\"\n machine_type = \"e2-medium\"\n zone = \"us-central1-a\"\n\n boot_disk {\n # Disable auto-delete for the boot disk\n auto_delete = false\n\n initialize_params {\n image = \"debian-cloud/debian-11\"\n }\n }\n\n attached_disk {\n source = google_compute_disk.example_disk.id\n # Disable auto-delete for attached disks\n auto_delete = false\n }\n\n network_interface {\n network = \"default\"\n }\n}\n```"
},
"Recommendation": {
"Text": "Disable `auto-delete` for all persistent disks attached to **production** and **business-critical** VM instances to prevent **accidental data loss**. Regularly review disk configurations to ensure data retention requirements are met.",
"Url": "https://hub.prowler.com/check/compute_instance_disk_auto_delete_disabled"
}
},
"Categories": [
"resilience"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -0,0 +1,34 @@
from prowler.lib.check.models import Check, Check_Report_GCP
from prowler.providers.gcp.services.compute.compute_client import compute_client
class compute_instance_disk_auto_delete_disabled(Check):
    """Ensure that VM instance attached disks have auto-delete disabled.

    Flags Compute Engine instances where one or more attached persistent
    disks would be removed automatically together with the instance,
    which risks accidental data loss.

    - PASS: every attached disk keeps auto-delete off (also when the
      instance has no disks at all).
    - FAIL: at least one attached disk has auto-delete turned on.
    """

    def execute(self) -> list[Check_Report_GCP]:
        findings = []
        for instance in compute_client.instances:
            report = Check_Report_GCP(metadata=self.metadata(), resource=instance)
            # Names of every disk that would be deleted with the instance.
            flagged_disks = [disk.name for disk in instance.disks if disk.auto_delete]
            if flagged_disks:
                report.status = "FAIL"
                report.status_extended = f"VM Instance {instance.name} has auto-delete enabled for the following disks: {', '.join(flagged_disks)}."
            else:
                report.status = "PASS"
                report.status_extended = f"VM Instance {instance.name} has auto-delete disabled for all attached disks."
            findings.append(report)
        return findings

View File

@@ -138,6 +138,19 @@ class Compute(GCPService):
)
for disk in instance.get("disks", [])
],
disks=[
Disk(
name=disk["deviceName"],
auto_delete=disk.get("autoDelete", False),
boot=disk.get("boot", False),
encryption=bool(
disk.get("diskEncryptionKey", {}).get(
"sha256"
)
),
)
for disk in instance.get("disks", [])
],
automatic_restart=instance.get("scheduling", {}).get(
"automaticRestart", False
),
@@ -449,6 +462,13 @@ class Compute(GCPService):
)
class Disk(BaseModel):
    """Persistent disk attached to a Compute Engine VM instance."""

    # Device name of the disk as attached to the instance (`deviceName`).
    name: str
    # True when the disk is deleted automatically with the instance (`autoDelete`).
    auto_delete: bool = False
    # True when this is the instance's boot disk.
    boot: bool
    # True when a customer-supplied encryption key is present
    # (derived from `diskEncryptionKey.sha256` at collection time).
    encryption: bool = False
class Instance(BaseModel):
name: str
id: str
@@ -463,6 +483,7 @@ class Instance(BaseModel):
service_accounts: list
ip_forward: bool
disks_encryption: list
disks: list[Disk] = []
automatic_restart: bool = False
preemptible: bool = False
provisioning_model: str = "STANDARD"

View File

@@ -45,6 +45,7 @@ from tests.providers.aws.utils import (
AWS_ACCOUNT_NUMBER,
AWS_CHINA_PARTITION,
AWS_COMMERCIAL_PARTITION,
AWS_EUSC_PARTITION,
AWS_GOV_CLOUD_ACCOUNT_ARN,
AWS_GOV_CLOUD_PARTITION,
AWS_ISO_PARTITION,
@@ -52,6 +53,7 @@ from tests.providers.aws.utils import (
AWS_REGION_CN_NORTHWEST_1,
AWS_REGION_EU_CENTRAL_1,
AWS_REGION_EU_WEST_1,
AWS_REGION_EUSC_DE_EAST_1,
AWS_REGION_GOV_CLOUD_US_EAST_1,
AWS_REGION_ISO_GLOBAL,
AWS_REGION_US_EAST_1,
@@ -956,6 +958,13 @@ aws:
assert aws_provider.get_global_region() == AWS_REGION_ISO_GLOBAL
@mock_aws
def test_aws_eusc_get_global_region(self):
aws_provider = AwsProvider()
aws_provider._identity.partition = AWS_EUSC_PARTITION
assert aws_provider.get_global_region() == AWS_REGION_EUSC_DE_EAST_1
@mock_aws
def test_get_available_aws_service_regions_with_us_east_1_audited(self):
region = [AWS_REGION_US_EAST_1]
@@ -1506,6 +1515,17 @@ aws:
sts_session._endpoint.host == f"https://sts.{aws_region}.amazonaws.com.cn"
)
@mock_aws
def test_create_sts_session_eusc(self):
current_session = session.Session()
aws_region = AWS_REGION_EUSC_DE_EAST_1
sts_session = AwsProvider.create_sts_session(current_session, aws_region)
assert sts_session._service_model.service_name == "sts"
assert sts_session._client_config.region_name == aws_region
assert sts_session._endpoint._endpoint_prefix == "sts"
assert sts_session._endpoint.host == f"https://sts.{aws_region}.amazonaws.eu"
@mock_aws
@patch(
"prowler.lib.check.utils.recover_checks_from_provider",
@@ -1760,7 +1780,7 @@ aws:
assert len(AwsProvider.get_regions("aws-cn")) == 2
def test_get_regions_aws_count(self):
assert len(AwsProvider.get_regions(partition="aws")) == 35
assert len(AwsProvider.get_regions(partition="aws")) == 34
def test_get_all_regions(self):
with patch(

View File

@@ -19,6 +19,7 @@ IAM_ROLE = "test-role"
IAM_SERVICE = "iam"
COMMERCIAL_PARTITION = "aws"
CHINA_PARTITION = "aws-cn"
EUSC_PARTITION = "aws-eusc"
GOVCLOUD_PARTITION = "aws-us-gov"
@@ -245,6 +246,28 @@ class Test_ARN_Parsing:
"resource": IAM_ROLE,
},
},
{
"input_arn": f"arn:{EUSC_PARTITION}:{IAM_SERVICE}::{ACCOUNT_ID}:{RESOURCE_TYPE_ROLE}/{IAM_ROLE}",
"expected": {
"partition": EUSC_PARTITION,
"service": IAM_SERVICE,
"region": None,
"account_id": ACCOUNT_ID,
"resource_type": RESOURCE_TYPE_ROLE,
"resource": IAM_ROLE,
},
},
{
"input_arn": f"arn:{EUSC_PARTITION}:{IAM_SERVICE}::{ACCOUNT_ID}:{RESOUCE_TYPE_USER}/{IAM_ROLE}",
"expected": {
"partition": EUSC_PARTITION,
"service": IAM_SERVICE,
"region": None,
"account_id": ACCOUNT_ID,
"resource_type": RESOUCE_TYPE_USER,
"resource": IAM_ROLE,
},
},
# Root user
{
"input_arn": f"arn:aws:{IAM_SERVICE}::{ACCOUNT_ID}:root",
@@ -279,6 +302,17 @@ class Test_ARN_Parsing:
"resource": "root",
},
},
{
"input_arn": f"arn:{EUSC_PARTITION}:{IAM_SERVICE}::{ACCOUNT_ID}:root",
"expected": {
"partition": EUSC_PARTITION,
"service": IAM_SERVICE,
"region": None,
"account_id": ACCOUNT_ID,
"resource_type": "root",
"resource": "root",
},
},
{
"input_arn": f"arn:aws:sts::{ACCOUNT_ID}:federated-user/Bob",
"expected": {
@@ -312,6 +346,17 @@ class Test_ARN_Parsing:
"resource": "Bob",
},
},
{
"input_arn": f"arn:{EUSC_PARTITION}:sts::{ACCOUNT_ID}:federated-user/Bob",
"expected": {
"partition": EUSC_PARTITION,
"service": "sts",
"region": None,
"account_id": ACCOUNT_ID,
"resource_type": "federated-user",
"resource": "Bob",
},
},
]
for test in test_cases:
input_arn = test["input_arn"]
@@ -379,6 +424,7 @@ class Test_ARN_Parsing:
def test_is_valid_arn(self):
assert is_valid_arn("arn:aws:iam::012345678910:user/test")
assert is_valid_arn("arn:aws-cn:ec2:us-east-1:123456789012:vpc/vpc-12345678")
assert is_valid_arn("arn:aws-eusc:ec2:us-east-1:123456789012:vpc/vpc-12345678")
assert is_valid_arn("arn:aws-us-gov:s3:::bucket")
assert is_valid_arn("arn:aws-iso:iam::012345678910:user/test")
assert is_valid_arn("arn:aws-iso-b:ec2:us-east-1:123456789012:vpc/vpc-12345678")

View File

@@ -1,4 +1,5 @@
from unittest import mock
from unittest.mock import MagicMock
import botocore
from boto3 import client
@@ -215,3 +216,128 @@ class Test_Bedrock_Agent_Service:
"Key": "test-tag-key",
}
]
class TestBedrockPagination:
    """Tests covering pagination of the Bedrock guardrail listing."""

    @staticmethod
    def _guardrail_page(index):
        """Build a one-item `list_guardrails` page for guardrail g-<index>."""
        return {
            "guardrails": [
                {
                    "id": f"g-{index}",
                    "name": f"guardrail-{index}",
                    "arn": f"arn:aws:bedrock:us-east-1:123456789012:guardrail/g-{index}",
                }
            ]
        }

    def test_list_guardrails_pagination(self):
        """_list_guardrails must collect guardrails from every paginator page."""
        # Provider stub exposing only the attributes the service reads.
        provider = MagicMock()
        provider.audited_partition = "aws"
        provider.audited_account = "123456789012"
        provider.audit_resources = None

        # Regional client whose paginator yields two single-item pages.
        mock_client = MagicMock()
        mock_client.region = "us-east-1"
        mock_paginator = MagicMock()
        mock_paginator.paginate.return_value = [
            self._guardrail_page(1),
            self._guardrail_page(2),
        ]
        mock_client.get_paginator.return_value = mock_paginator

        # Build the service, then swap in the mocked client and reset state
        # that __init__ may have populated.
        service = Bedrock(provider)
        service.regional_clients = {"us-east-1": mock_client}
        service.guardrails = {}

        service._list_guardrails(mock_client)

        # Both pages must contribute one guardrail each.
        assert len(service.guardrails) == 2
        for index in (1, 2):
            assert (
                f"arn:aws:bedrock:us-east-1:123456789012:guardrail/g-{index}"
                in service.guardrails
            )
        # The paginated API path must be used exactly once.
        mock_client.get_paginator.assert_called_once_with("list_guardrails")
        mock_paginator.paginate.assert_called_once()
class TestBedrockAgentPagination:
    """Tests covering pagination of the Bedrock Agent listing."""

    @staticmethod
    def _agent_page(index):
        """Build a one-item `list_agents` page for agent agent-<index>."""
        return {
            "agentSummaries": [
                {
                    "agentId": f"agent-{index}",
                    "agentName": f"agent-name-{index}",
                    "agentStatus": "PREPARED",
                }
            ]
        }

    def test_list_agents_pagination(self):
        """_list_agents must collect agents from every paginator page."""
        # Provider stub exposing only the attributes the service reads.
        provider = MagicMock()
        provider.audited_partition = "aws"
        provider.audited_account = "123456789012"
        provider.audit_resources = None

        # Regional client whose paginator yields two single-item pages.
        mock_client = MagicMock()
        mock_client.region = "us-east-1"
        mock_paginator = MagicMock()
        mock_paginator.paginate.return_value = [
            self._agent_page(1),
            self._agent_page(2),
        ]
        mock_client.get_paginator.return_value = mock_paginator

        # Build the service, then swap in the mocked client and reset state
        # that __init__ may have populated.
        service = BedrockAgent(provider)
        service.regional_clients = {"us-east-1": mock_client}
        service.agents = {}
        service.audited_account = "123456789012"

        service._list_agents(mock_client)

        # Both pages must contribute one agent each.
        assert len(service.agents) == 2
        for index in (1, 2):
            assert (
                f"arn:aws:bedrock:us-east-1:123456789012:agent/agent-{index}"
                in service.agents
            )
        # The paginated API path must be used exactly once.
        mock_client.get_paginator.assert_called_once_with("list_agents")
        mock_paginator.paginate.assert_called_once()

View File

@@ -1,9 +1,12 @@
from unittest.mock import patch
from unittest.mock import MagicMock, patch
from uuid import uuid4
import botocore
from prowler.providers.aws.services.sagemaker.sagemaker_service import SageMaker
from prowler.providers.aws.services.sagemaker.sagemaker_service import (
Model,
SageMaker,
)
from tests.providers.aws.utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_WEST_1,
@@ -245,3 +248,78 @@ class Test_SageMaker_Service:
assert prod_variant.initial_instance_count == 5
else:
assert prod_variant.initial_instance_count == 2
# Test SageMaker _list_tags_for_resource
    def test_list_tags_for_resource_calls_client(self):
        """Test that _list_tags_for_resource calls the correct AWS client and updates the resource."""
        # Provider stub: only the attributes read during service setup are mocked.
        audit_info = MagicMock()
        audit_info.audited_partition = "aws"
        audit_info.audited_account = AWS_ACCOUNT_NUMBER
        audit_info.audit_resources = None
        # Regional client stub that returns one fixed tag for any ARN.
        regional_client = MagicMock()
        regional_client.region = AWS_REGION_EU_WEST_1
        regional_client.list_tags.return_value = {
            "Tags": [{"Key": "foo", "Value": "bar"}]
        }
        # Create service instance (mocking init to avoid full setup)
        with patch.object(SageMaker, "__init__", return_value=None):
            sagemaker_service = SageMaker(audit_info)
            sagemaker_service.regional_clients = {AWS_REGION_EU_WEST_1: regional_client}
            sagemaker_service.audit_info = audit_info
        # Create a mock resource whose region maps to the mocked client.
        resource = Model(
            name="test-model",
            region=AWS_REGION_EU_WEST_1,
            arn=f"arn:aws:sagemaker:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:model/test-model",
        )
        # Execute method under test
        sagemaker_service._list_tags_for_resource(resource)
        # Verification: exactly one list_tags call for this ARN, and the
        # returned tags are stored on the resource itself.
        regional_client.list_tags.assert_called_once_with(ResourceArn=resource.arn)
        assert len(resource.tags) == 1
        assert resource.tags[0]["Key"] == "foo"
        assert resource.tags[0]["Value"] == "bar"
# Test SageMaker parallel tag listing
    def test_init_calls_threading_for_tags(self):
        """Test that __init__ calls __threading_call__ for tag listing for each resource type."""
        audit_info = MagicMock()
        audit_info.audited_partition = "aws"
        audit_info.audited_account = AWS_ACCOUNT_NUMBER
        # We mock __threading_call__ to verify it is called with the right arguments
        with patch(
            "prowler.providers.aws.services.sagemaker.sagemaker_service.SageMaker.__threading_call__"
        ) as mock_threading_call:
            # We also need to mock the other methods called in init to avoid errors
            with (
                patch(
                    "prowler.providers.aws.services.sagemaker.sagemaker_service.SageMaker._list_notebook_instances"
                ),
                patch(
                    "prowler.providers.aws.services.sagemaker.sagemaker_service.SageMaker._list_models"
                ),
                patch(
                    "prowler.providers.aws.services.sagemaker.sagemaker_service.SageMaker._list_training_jobs"
                ),
                patch(
                    "prowler.providers.aws.services.sagemaker.sagemaker_service.SageMaker._list_endpoint_configs"
                ),
            ):
                sagemaker_service = SageMaker(audit_info)
            # Check that __threading_call__ was called for _list_tags_for_resource
            # (exactly 4 calls expected, one per resource type: models, notebook
            # instances, training jobs, and endpoint configs)
            tag_calls = [
                c
                for c in mock_threading_call.call_args_list
                if c[0][0] == sagemaker_service._list_tags_for_resource
            ]
            assert len(tag_calls) == 4

View File

@@ -17,6 +17,7 @@ from prowler.providers.common.models import Audit_Metadata
AWS_COMMERCIAL_PARTITION = "aws"
AWS_GOV_CLOUD_PARTITION = "aws-us-gov"
AWS_CHINA_PARTITION = "aws-cn"
AWS_EUSC_PARTITION = "aws-eusc"
AWS_ISO_PARTITION = "aws-iso"
# Root AWS Account
@@ -52,6 +53,9 @@ AWS_REGION_GOV_CLOUD_US_EAST_1 = "us-gov-east-1"
# Iso Regions
AWS_REGION_ISO_GLOBAL = "aws-iso-global"
# European Sovereign Cloud Regions
AWS_REGION_EUSC_DE_EAST_1 = "eusc-de-east-1"
# EC2
EXAMPLE_AMI_ID = "ami-12c6146b"

View File

@@ -0,0 +1,388 @@
from unittest import mock
from tests.providers.gcp.gcp_fixtures import (
GCP_PROJECT_ID,
GCP_US_CENTER1_LOCATION,
set_mocked_gcp_provider,
)
class TestComputeInstanceDiskAutoDeleteDisabled:
def test_compute_no_instances(self):
compute_client = mock.MagicMock()
compute_client.instances = []
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled.compute_client",
new=compute_client,
),
):
from prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled import (
compute_instance_disk_auto_delete_disabled,
)
check = compute_instance_disk_auto_delete_disabled()
result = check.execute()
assert len(result) == 0
def test_instance_disk_auto_delete_disabled(self):
compute_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled.compute_client",
new=compute_client,
),
):
from prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled import (
compute_instance_disk_auto_delete_disabled,
)
from prowler.providers.gcp.services.compute.compute_service import (
Disk,
Instance,
)
compute_client.project_ids = [GCP_PROJECT_ID]
compute_client.region = GCP_US_CENTER1_LOCATION
compute_client.instances = [
Instance(
name="test-instance",
id="1234567890",
zone=f"{GCP_US_CENTER1_LOCATION}-a",
region=GCP_US_CENTER1_LOCATION,
public_ip=False,
metadata={},
shielded_enabled_vtpm=True,
shielded_enabled_integrity_monitoring=True,
confidential_computing=False,
service_accounts=[
{"email": "123-compute@developer.gserviceaccount.com"}
],
ip_forward=False,
disks_encryption=[],
disks=[
Disk(
name="boot-disk",
auto_delete=False,
boot=True,
encryption=False,
),
Disk(
name="data-disk",
auto_delete=False,
boot=False,
encryption=False,
),
],
project_id=GCP_PROJECT_ID,
)
]
check = compute_instance_disk_auto_delete_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "VM Instance test-instance has auto-delete disabled for all attached disks."
)
assert result[0].resource_id == "1234567890"
assert result[0].resource_name == "test-instance"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_instance_disk_auto_delete_enabled_single_disk(self):
compute_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled.compute_client",
new=compute_client,
),
):
from prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled import (
compute_instance_disk_auto_delete_disabled,
)
from prowler.providers.gcp.services.compute.compute_service import (
Disk,
Instance,
)
compute_client.project_ids = [GCP_PROJECT_ID]
compute_client.region = GCP_US_CENTER1_LOCATION
compute_client.instances = [
Instance(
name="test-instance",
id="1234567890",
zone=f"{GCP_US_CENTER1_LOCATION}-a",
region=GCP_US_CENTER1_LOCATION,
public_ip=False,
metadata={},
shielded_enabled_vtpm=True,
shielded_enabled_integrity_monitoring=True,
confidential_computing=False,
service_accounts=[
{"email": "123-compute@developer.gserviceaccount.com"}
],
ip_forward=False,
disks_encryption=[],
disks=[
Disk(
name="boot-disk",
auto_delete=True,
boot=True,
encryption=False,
),
Disk(
name="data-disk",
auto_delete=False,
boot=False,
encryption=False,
),
],
project_id=GCP_PROJECT_ID,
)
]
check = compute_instance_disk_auto_delete_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "VM Instance test-instance has auto-delete enabled for the following disks: boot-disk."
)
assert result[0].resource_id == "1234567890"
assert result[0].resource_name == "test-instance"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_instance_disk_auto_delete_enabled_multiple_disks(self):
compute_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled.compute_client",
new=compute_client,
),
):
from prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled import (
compute_instance_disk_auto_delete_disabled,
)
from prowler.providers.gcp.services.compute.compute_service import (
Disk,
Instance,
)
compute_client.project_ids = [GCP_PROJECT_ID]
compute_client.region = GCP_US_CENTER1_LOCATION
compute_client.instances = [
Instance(
name="test-instance",
id="1234567890",
zone=f"{GCP_US_CENTER1_LOCATION}-a",
region=GCP_US_CENTER1_LOCATION,
public_ip=False,
metadata={},
shielded_enabled_vtpm=True,
shielded_enabled_integrity_monitoring=True,
confidential_computing=False,
service_accounts=[
{"email": "123-compute@developer.gserviceaccount.com"}
],
ip_forward=False,
disks_encryption=[],
disks=[
Disk(
name="boot-disk",
auto_delete=True,
boot=True,
encryption=False,
),
Disk(
name="data-disk",
auto_delete=True,
boot=False,
encryption=False,
),
],
project_id=GCP_PROJECT_ID,
)
]
check = compute_instance_disk_auto_delete_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== "VM Instance test-instance has auto-delete enabled for the following disks: boot-disk, data-disk."
)
assert result[0].resource_id == "1234567890"
assert result[0].resource_name == "test-instance"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_instance_no_disks(self):
compute_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled.compute_client",
new=compute_client,
),
):
from prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled import (
compute_instance_disk_auto_delete_disabled,
)
from prowler.providers.gcp.services.compute.compute_service import Instance
compute_client.project_ids = [GCP_PROJECT_ID]
compute_client.region = GCP_US_CENTER1_LOCATION
compute_client.instances = [
Instance(
name="test-instance",
id="1234567890",
zone=f"{GCP_US_CENTER1_LOCATION}-a",
region=GCP_US_CENTER1_LOCATION,
public_ip=False,
metadata={},
shielded_enabled_vtpm=True,
shielded_enabled_integrity_monitoring=True,
confidential_computing=False,
service_accounts=[
{"email": "123-compute@developer.gserviceaccount.com"}
],
ip_forward=False,
disks_encryption=[],
disks=[],
project_id=GCP_PROJECT_ID,
)
]
check = compute_instance_disk_auto_delete_disabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "VM Instance test-instance has auto-delete disabled for all attached disks."
)
assert result[0].resource_id == "1234567890"
assert result[0].resource_name == "test-instance"
assert result[0].location == GCP_US_CENTER1_LOCATION
assert result[0].project_id == GCP_PROJECT_ID
def test_multiple_instances_mixed_results(self):
compute_client = mock.MagicMock()
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_gcp_provider(),
),
mock.patch(
"prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled.compute_client",
new=compute_client,
),
):
from prowler.providers.gcp.services.compute.compute_instance_disk_auto_delete_disabled.compute_instance_disk_auto_delete_disabled import (
compute_instance_disk_auto_delete_disabled,
)
from prowler.providers.gcp.services.compute.compute_service import (
Disk,
Instance,
)
compute_client.project_ids = [GCP_PROJECT_ID]
compute_client.region = GCP_US_CENTER1_LOCATION
compute_client.instances = [
Instance(
name="compliant-instance",
id="1111111111",
zone=f"{GCP_US_CENTER1_LOCATION}-a",
region=GCP_US_CENTER1_LOCATION,
public_ip=False,
metadata={},
shielded_enabled_vtpm=True,
shielded_enabled_integrity_monitoring=True,
confidential_computing=False,
service_accounts=[],
ip_forward=False,
disks_encryption=[],
disks=[
Disk(
name="boot-disk",
auto_delete=False,
boot=True,
encryption=False,
),
],
project_id=GCP_PROJECT_ID,
),
Instance(
name="non-compliant-instance",
id="2222222222",
zone=f"{GCP_US_CENTER1_LOCATION}-b",
region=GCP_US_CENTER1_LOCATION,
public_ip=False,
metadata={},
shielded_enabled_vtpm=True,
shielded_enabled_integrity_monitoring=True,
confidential_computing=False,
service_accounts=[],
ip_forward=False,
disks_encryption=[],
disks=[
Disk(
name="auto-delete-disk",
auto_delete=True,
boot=True,
encryption=False,
),
],
project_id=GCP_PROJECT_ID,
),
]
check = compute_instance_disk_auto_delete_disabled()
result = check.execute()
assert len(result) == 2
assert result[0].status == "PASS"
assert result[0].resource_name == "compliant-instance"
assert result[1].status == "FAIL"
assert result[1].resource_name == "non-compliant-instance"
assert "auto-delete-disk" in result[1].status_extended

View File

@@ -2,7 +2,15 @@
All notable changes to the **Prowler UI** are documented in this file.
## [1.16.1] (Prowler v5.17.1)
## [1.17.0] (Prowler UNRELEASED)
### 🚀 Added
- Add search bar when adding a provider [(#9634)](https://github.com/prowler-cloud/prowler/pull/9634)
---
## [1.16.1] (Prowler v5.16.1)
### 🔄 Changed

View File

@@ -34,6 +34,8 @@ ARG NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID
ENV NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID=${NEXT_PUBLIC_GOOGLE_TAG_MANAGER_ID}
ARG NEXT_PUBLIC_API_BASE_URL
ENV NEXT_PUBLIC_API_BASE_URL=${NEXT_PUBLIC_API_BASE_URL}
ARG NEXT_PUBLIC_API_DOCS_URL
ENV NEXT_PUBLIC_API_DOCS_URL=${NEXT_PUBLIC_API_DOCS_URL}
RUN pnpm run build

View File

@@ -1,10 +1,11 @@
"use client";
import { RadioGroup } from "@heroui/radio";
import { FC } from "react";
import { FC, useState } from "react";
import { Control, Controller } from "react-hook-form";
import { z } from "zod";
import { SearchInput } from "@/components/shadcn";
import { cn } from "@/lib/utils";
import { addProviderFormSchema } from "@/types";
import {
@@ -19,9 +20,61 @@ import {
MongoDBAtlasProviderBadge,
OracleCloudProviderBadge,
} from "../icons/providers-badge";
import { CustomRadio } from "../ui/custom";
import { FormMessage } from "../ui/form";
// Static catalog of providers selectable in the "add provider" form.
// `value` is the machine identifier submitted with the form, `label` is the
// human-readable name (both are matched by the search filter), and `badge`
// is the icon component rendered beside the label.
const PROVIDERS = [
  {
    value: "aws",
    label: "Amazon Web Services",
    badge: AWSProviderBadge,
  },
  {
    value: "gcp",
    label: "Google Cloud Platform",
    badge: GCPProviderBadge,
  },
  {
    value: "azure",
    label: "Microsoft Azure",
    badge: AzureProviderBadge,
  },
  {
    value: "m365",
    label: "Microsoft 365",
    badge: M365ProviderBadge,
  },
  {
    value: "mongodbatlas",
    label: "MongoDB Atlas",
    badge: MongoDBAtlasProviderBadge,
  },
  {
    value: "kubernetes",
    label: "Kubernetes",
    badge: KS8ProviderBadge,
  },
  {
    value: "github",
    label: "GitHub",
    badge: GitHubProviderBadge,
  },
  {
    value: "iac",
    label: "Infrastructure as Code",
    badge: IacProviderBadge,
  },
  {
    value: "oraclecloud",
    label: "Oracle Cloud Infrastructure",
    badge: OracleCloudProviderBadge,
  },
  {
    value: "alibabacloud",
    label: "Alibaba Cloud",
    badge: AlibabaCloudProviderBadge,
  },
] as const;
interface RadioGroupProviderProps {
control: Control<z.infer<typeof addProviderFormSchema>>;
isInvalid: boolean;
@@ -33,90 +86,90 @@ export const RadioGroupProvider: FC<RadioGroupProviderProps> = ({
isInvalid,
errorMessage,
}) => {
const [searchTerm, setSearchTerm] = useState("");
const lowerSearch = searchTerm.trim().toLowerCase();
const filteredProviders = lowerSearch
? PROVIDERS.filter(
(provider) =>
provider.label.toLowerCase().includes(lowerSearch) ||
provider.value.toLowerCase().includes(lowerSearch),
)
: PROVIDERS;
return (
<Controller
name="providerType"
control={control}
render={({ field }) => (
<>
<RadioGroup
className="flex flex-wrap"
isInvalid={isInvalid}
{...field}
value={field.value || ""}
>
<div className="flex flex-col gap-4">
<CustomRadio description="Amazon Web Services" value="aws">
<div className="flex items-center">
<AWSProviderBadge size={26} />
<span className="ml-2">Amazon Web Services</span>
</div>
</CustomRadio>
<CustomRadio description="Google Cloud Platform" value="gcp">
<div className="flex items-center">
<GCPProviderBadge size={26} />
<span className="ml-2">Google Cloud Platform</span>
</div>
</CustomRadio>
<CustomRadio description="Microsoft Azure" value="azure">
<div className="flex items-center">
<AzureProviderBadge size={26} />
<span className="ml-2">Microsoft Azure</span>
</div>
</CustomRadio>
<CustomRadio description="Microsoft 365" value="m365">
<div className="flex items-center">
<M365ProviderBadge size={26} />
<span className="ml-2">Microsoft 365</span>
</div>
</CustomRadio>
<CustomRadio description="MongoDB Atlas" value="mongodbatlas">
<div className="flex items-center">
<MongoDBAtlasProviderBadge size={26} />
<span className="ml-2">MongoDB Atlas</span>
</div>
</CustomRadio>
<CustomRadio description="Kubernetes" value="kubernetes">
<div className="flex items-center">
<KS8ProviderBadge size={26} />
<span className="ml-2">Kubernetes</span>
</div>
</CustomRadio>
<CustomRadio description="GitHub" value="github">
<div className="flex items-center">
<GitHubProviderBadge size={26} />
<span className="ml-2">GitHub</span>
</div>
</CustomRadio>
<CustomRadio description="Infrastructure as Code" value="iac">
<div className="flex items-center">
<IacProviderBadge size={26} />
<span className="ml-2">Infrastructure as Code</span>
</div>
</CustomRadio>
<CustomRadio
description="Oracle Cloud Infrastructure"
value="oraclecloud"
>
<div className="flex items-center">
<OracleCloudProviderBadge size={26} />
<span className="ml-2">Oracle Cloud Infrastructure</span>
</div>
</CustomRadio>
<CustomRadio description="Alibaba Cloud" value="alibabacloud">
<div className="flex items-center">
<AlibabaCloudProviderBadge size={26} />
<span className="ml-2">Alibaba Cloud</span>
</div>
</CustomRadio>
<div className="flex h-[calc(100vh-200px)] flex-col px-4">
<div className="relative z-10 shrink-0 pb-4">
<SearchInput
aria-label="Search providers"
placeholder="Search providers..."
value={searchTerm}
onChange={(e) => setSearchTerm(e.target.value)}
onClear={() => setSearchTerm("")}
/>
</div>
<div className="minimal-scrollbar relative flex-1 overflow-y-auto pr-3">
<div
role="listbox"
aria-label="Select a provider"
className="flex flex-col gap-3"
style={{
maskImage:
"linear-gradient(to bottom, transparent, black 24px)",
WebkitMaskImage:
"linear-gradient(to bottom, transparent, black 24px)",
paddingTop: "24px",
marginTop: "-24px",
}}
>
{filteredProviders.length > 0 ? (
filteredProviders.map((provider) => {
const BadgeComponent = provider.badge;
const isSelected = field.value === provider.value;
return (
<button
key={provider.value}
type="button"
role="option"
aria-selected={isSelected}
onClick={() => field.onChange(provider.value)}
className={cn(
"flex w-full cursor-pointer items-center gap-3 rounded-lg border p-4 text-left transition-all",
"hover:border-button-primary",
"focus-visible:border-button-primary focus-visible:ring-button-primary focus:outline-none focus-visible:ring-1",
isSelected
? "border-button-primary bg-bg-neutral-tertiary"
: "border-border-neutral-secondary bg-bg-neutral-secondary",
isInvalid && "border-bg-fail",
)}
>
<BadgeComponent size={26} />
<span className="text-text-neutral-primary text-sm font-medium">
{provider.label}
</span>
</button>
);
})
) : (
<p className="text-text-neutral-tertiary py-4 text-sm">
No providers found matching &quot;{searchTerm}&quot;
</p>
)}
</div>
</RadioGroup>
</div>
{errorMessage && (
<FormMessage className="text-text-error">
{errorMessage}
</FormMessage>
)}
</>
</div>
)}
/>
);

View File

@@ -0,0 +1,92 @@
import { cva, type VariantProps } from "class-variance-authority";
import { X } from "lucide-react";
import { cn } from "@/lib/utils";
// Tailwind class recipe for the <Alert> container.
// Base classes set up a two-column grid: when a leading icon (`>svg`) is
// present the first column is sized to hold it (`has-[>svg]:grid-cols-...`),
// otherwise it collapses to 0 width. Descriptions are targeted via the
// `data-[slot=alert-description]` attribute set by <AlertDescription>.
const alertVariants = cva(
  "relative w-full rounded-lg border px-4 py-3 text-sm grid has-[>svg]:grid-cols-[calc(var(--spacing)*4)_1fr] grid-cols-[0_1fr] has-[>svg]:gap-x-3 gap-y-0.5 items-start [&>svg]:size-4 [&>svg]:translate-y-0.5 [&>svg]:text-current",
  {
    variants: {
      variant: {
        // Neutral, card-styled alert.
        default: "bg-card text-card-foreground",
        destructive:
          "text-destructive bg-card [&>svg]:text-current *:data-[slot=alert-description]:text-destructive/90",
        // Semantic variants: tinted background plus matching title/description
        // colors, each with a dark-mode counterpart.
        error:
          "border-border-error bg-red-50 text-text-error-primary dark:bg-red-950/50 [&>svg]:text-current *:data-[slot=alert-description]:text-red-700 dark:*:data-[slot=alert-description]:text-red-300",
        warning:
          "border-orange-500 bg-orange-50 text-text-warning-primary dark:bg-orange-950/50 [&>svg]:text-current *:data-[slot=alert-description]:text-orange-700 dark:*:data-[slot=alert-description]:text-orange-300",
        info: "border-bg-data-info bg-blue-50 text-bg-data-info dark:bg-blue-950/50 [&>svg]:text-current *:data-[slot=alert-description]:text-blue-700 dark:*:data-[slot=alert-description]:text-blue-300",
        success:
          "border-text-success bg-green-50 text-text-success-primary dark:bg-green-950/50 [&>svg]:text-current *:data-[slot=alert-description]:text-green-700 dark:*:data-[slot=alert-description]:text-green-300",
      },
    },
    defaultVariants: {
      variant: "default",
    },
  },
);
interface AlertProps
  extends React.ComponentProps<"div">,
    VariantProps<typeof alertVariants> {
  // When provided, a dismiss (X) button is rendered and invokes this callback.
  onClose?: () => void;
}

/**
 * Alert container styled by `alertVariants`.
 *
 * Renders a `role="alert"` div; compose with <AlertTitle> and
 * <AlertDescription> as children. Pass `onClose` to show a dismiss button.
 */
function Alert({
  className,
  variant,
  onClose,
  children,
  ...props
}: AlertProps) {
  return (
    <div
      data-slot="alert"
      role="alert"
      className={cn(alertVariants({ variant }), className)}
      {...props}
    >
      {children}
      {onClose && (
        <button
          // Explicit type: without it the button defaults to "submit" and
          // would submit an enclosing <form> instead of just dismissing.
          type="button"
          onClick={onClose}
          className="absolute top-3 right-3 rounded-sm opacity-70 transition-opacity hover:opacity-100"
          aria-label="Close"
        >
          <X className="size-4" />
        </button>
      )}
    </div>
  );
}
function AlertTitle({ className, ...props }: React.ComponentProps<"div">) {
return (
<div
data-slot="alert-title"
className={cn(
"col-start-2 line-clamp-1 min-h-4 font-medium tracking-tight",
className,
)}
{...props}
/>
);
}
/**
 * Body text of an <Alert>. Placed in the second grid column; tagged with
 * `data-slot="alert-description"` so variants can recolor it.
 */
function AlertDescription({
  className,
  ...props
}: React.ComponentProps<"div">) {
  const descriptionClasses = cn(
    "text-muted-foreground col-start-2 grid justify-items-start gap-1 text-sm [&_p]:leading-relaxed",
    className,
  );
  return (
    <div
      data-slot="alert-description"
      className={descriptionClasses}
      {...props}
    />
  );
}
export { Alert, AlertDescription, AlertTitle };

View File

@@ -1,3 +1,4 @@
export * from "./alert";
export * from "./badge/badge";
export * from "./button/button";
export * from "./card/card";
@@ -7,6 +8,8 @@ export * from "./card/resource-stats-card/resource-stats-card-header";
export * from "./checkbox/checkbox";
export * from "./combobox";
export * from "./dropdown/dropdown";
export * from "./input/input";
export * from "./search-input/search-input";
export * from "./select/multiselect";
export * from "./select/select";
export * from "./separator/separator";

View File

@@ -0,0 +1,51 @@
"use client";
import { cva, type VariantProps } from "class-variance-authority";
import { ComponentProps, forwardRef } from "react";
import { cn } from "@/lib/utils";
// Tailwind class recipe for the base <Input>.
// `variant` controls border/background treatment; `inputSize` controls
// height/padding (named `inputSize` to avoid clashing with the native
// `size` attribute, which InputProps omits).
const inputVariants = cva(
  "flex w-full rounded-lg border text-sm transition-all outline-none file:border-0 file:bg-transparent file:text-sm file:font-medium disabled:cursor-not-allowed disabled:opacity-50",
  {
    variants: {
      variant: {
        default:
          "border-border-input-primary bg-bg-input-primary dark:bg-input/30 hover:bg-bg-neutral-secondary dark:hover:bg-input/50 focus:border-border-input-primary-press focus:ring-1 focus:ring-border-input-primary-press focus:ring-offset-1 placeholder:text-text-neutral-tertiary",
        ghost:
          "border-transparent bg-transparent hover:bg-bg-neutral-tertiary focus:bg-bg-neutral-tertiary placeholder:text-text-neutral-tertiary",
      },
      inputSize: {
        default: "h-10 px-4 py-3",
        sm: "h-8 px-3 py-2 text-xs",
        lg: "h-12 px-5 py-4",
      },
    },
    defaultVariants: {
      variant: "default",
      inputSize: "default",
    },
  },
);
export interface InputProps
extends Omit<ComponentProps<"input">, "size">,
VariantProps<typeof inputVariants> {}
/**
 * Base text input styled by `inputVariants`.
 * Forwards its ref to the native <input>; `type` defaults to "text".
 */
const Input = forwardRef<HTMLInputElement, InputProps>(
  ({ className, variant, inputSize, type = "text", ...props }, ref) => (
    // `className` is forwarded through cva's `className` prop and merged by cn.
    <input
      ref={ref}
      type={type}
      data-slot="input"
      className={cn(inputVariants({ variant, inputSize, className }))}
      {...props}
    />
  ),
);
Input.displayName = "Input";
export { Input, inputVariants };

View File

@@ -0,0 +1,125 @@
"use client";
import { cva, type VariantProps } from "class-variance-authority";
import { SearchIcon, XCircle } from "lucide-react";
import { ComponentProps, forwardRef } from "react";
import { cn } from "@/lib/utils";
// Wrapper recipe for the search field. All size variants are currently empty
// placeholders; the wrapper only positions the icon/clear button absolutely.
const searchInputWrapperVariants = cva("relative flex items-center w-full", {
  variants: {
    size: {
      default: "",
      sm: "",
      lg: "",
    },
  },
  defaultVariants: {
    size: "default",
  },
});
// Inner <input> recipe: left padding reserves room for the search icon,
// right padding for the clear button, scaled per `size`.
const searchInputVariants = cva(
  "flex w-full rounded-lg border text-sm transition-all outline-none placeholder:text-text-neutral-tertiary disabled:cursor-not-allowed disabled:opacity-50",
  {
    variants: {
      variant: {
        default:
          "border-border-input-primary bg-bg-input-primary dark:bg-input/30 hover:bg-bg-neutral-secondary dark:hover:bg-input/50 focus:border-border-input-primary-press focus:ring-1 focus:ring-border-input-primary-press focus:ring-offset-1",
        ghost:
          "border-transparent bg-transparent hover:bg-bg-neutral-tertiary focus:bg-bg-neutral-tertiary",
      },
      size: {
        default: "h-10 pl-10 pr-10 py-3",
        sm: "h-8 pl-8 pr-8 py-2 text-xs",
        lg: "h-12 pl-12 pr-12 py-4",
      },
    },
    defaultVariants: {
      variant: "default",
      size: "default",
    },
  },
);
// Per-size pixel size of the lucide icons (search and clear).
const iconSizeMap = {
  default: 16,
  sm: 14,
  lg: 20,
} as const;
// Per-size absolute offset of the leading search icon.
const iconPositionMap = {
  default: "left-3",
  sm: "left-2.5",
  lg: "left-4",
} as const;
// Per-size absolute offset of the trailing clear button.
const clearButtonPositionMap = {
  default: "right-3",
  sm: "right-2.5",
  lg: "right-4",
} as const;
export interface SearchInputProps
  extends Omit<ComponentProps<"input">, "size">,
    VariantProps<typeof searchInputVariants> {
  // When provided (and the input has a value), a clear (X) button is
  // rendered and invokes this callback.
  onClear?: () => void;
}

/**
 * Text input with a leading search icon and an optional trailing clear button.
 *
 * Controlled usage: pass `value`/`onChange`, plus `onClear` to enable the
 * clear affordance. `size` picks coordinated input padding, icon size and
 * icon offsets via the maps defined above. The ref targets the native input.
 */
const SearchInput = forwardRef<HTMLInputElement, SearchInputProps>(
  (
    {
      className,
      variant,
      size = "default",
      value,
      onClear,
      placeholder = "Search...",
      ...props
    },
    ref,
  ) => {
    // VariantProps allows `size` to be null, hence the extra fallback.
    const iconSize = iconSizeMap[size || "default"];
    const iconPosition = iconPositionMap[size || "default"];
    const clearButtonPosition = clearButtonPositionMap[size || "default"];
    // Coerce to a real boolean. The previous `value && ...` form made
    // `hasValue` the number 0 for a numeric value of 0, which React renders
    // as a literal "0" in the `{hasValue && onClear && ...}` expression and
    // also wrongly hid the clear button for that value.
    const hasValue =
      value !== undefined && value !== null && String(value).length > 0;
    return (
      <div className={cn(searchInputWrapperVariants({ size }))}>
        <SearchIcon
          size={iconSize}
          className={cn(
            "text-text-neutral-tertiary pointer-events-none absolute",
            iconPosition,
          )}
        />
        <input
          ref={ref}
          type="text"
          data-slot="search-input"
          value={value}
          placeholder={placeholder}
          className={cn(searchInputVariants({ variant, size, className }))}
          {...props}
        />
        {hasValue && onClear && (
          <button
            type="button"
            aria-label="Clear search"
            onClick={onClear}
            className={cn(
              "text-text-neutral-tertiary hover:text-text-neutral-primary absolute transition-colors focus:outline-none",
              clearButtonPosition,
            )}
          >
            <XCircle size={iconSize} />
          </button>
        )}
      </div>
    );
  },
);
SearchInput.displayName = "SearchInput";
export { SearchInput, searchInputVariants };

View File

@@ -95,6 +95,14 @@
"strategy": "installed",
"generatedAt": "2025-12-10T11:34:11.122Z"
},
{
"section": "dependencies",
"name": "@radix-ui/react-checkbox",
"from": "1.3.3",
"to": "1.3.3",
"strategy": "installed",
"generatedAt": "2025-12-23T09:27:56.109Z"
},
{
"section": "dependencies",
"name": "@radix-ui/react-collapsible",

View File

@@ -119,7 +119,7 @@ export default defineConfig({
],
webServer: {
command: process.env.CI ? "npm run start" : "npm run dev",
command: process.env.CI ? "pnpm run start" : "pnpm run dev",
url: "http://localhost:3000",
reuseExistingServer: !process.env.CI,
timeout: 120 * 1000,

View File

@@ -25,7 +25,7 @@ for page in get_parameters_by_path_paginator.paginate(
for service in page["Parameters"]:
regions_by_service["services"][service["Value"]] = {}
# Get all AWS Regions for the specific service
regions = {"aws": [], "aws-cn": [], "aws-us-gov": []}
regions = {"aws": [], "aws-cn": [], "aws-eusc": [], "aws-us-gov": []}
for page in get_parameters_by_path_paginator.paginate(
Path="/aws/service/global-infrastructure/services/"
+ service["Value"]
@@ -34,6 +34,8 @@ for page in get_parameters_by_path_paginator.paginate(
for region in page["Parameters"]:
if "cn" in region["Value"]:
regions["aws-cn"].append(region["Value"])
elif "eusc" in region["Value"]:
regions["aws-eusc"].append(region["Value"])
elif "gov" in region["Value"]:
regions["aws-us-gov"].append(region["Value"])
else:
@@ -41,6 +43,7 @@ for page in get_parameters_by_path_paginator.paginate(
# Sort regions per partition
regions["aws"] = sorted(regions["aws"])
regions["aws-cn"] = sorted(regions["aws-cn"])
regions["aws-eusc"] = sorted(regions["aws-eusc"])
regions["aws-us-gov"] = sorted(regions["aws-us-gov"])
regions_by_service["services"][service["Value"]]["regions"] = regions
@@ -69,6 +72,7 @@ regions_by_service["services"]["bedrock-agent"] = {
"us-west-2",
],
"aws-cn": [],
"aws-eusc": [],
"aws-us-gov": [
"us-gov-west-1",
],