Compare commits

...

9 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Rubén De la Torre Vico | 35d5f6da4f | chore: add azure/mysql metadata update to changelog | 2025-12-22 09:46:32 +01:00 |
| Rubén De la Torre Vico | 5f8f787b43 | chore(azure/mysql): adapt metadata to new standarized format | 2025-12-22 09:46:26 +01:00 |
| César Arroba | f4a78d64f1 | chore(github): bump version for API, UI and Docs (#9601) | 2025-12-22 09:35:00 +01:00 |
| Alejandro Bailo | e5cd25e60c | docs: simple mutelist added and advanced changed (#9600) | 2025-12-19 16:01:21 +01:00 |
| Rubén De la Torre Vico | 7d963751aa | chore(aws): enhance metadata for sqs service (#9429) (Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>) | 2025-12-19 11:18:50 +01:00 |
| Rubén De la Torre Vico | fa4371bbf6 | chore(aws): enhance metadata for route53 service (#9406) (Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>) | 2025-12-19 11:00:05 +01:00 |
| Rubén De la Torre Vico | ff6fbcbf48 | chore(aws): enhance metadata for stepfunctions service (#9432) (Co-authored-by: Daniel Barranquero <danielbo2001@gmail.com>) | 2025-12-19 10:39:29 +01:00 |
| Pedro Martín | 9bf3702d71 | feat(compliance): add Prowler ThreatScore for the AlibabaCloud provider (#9511) | 2025-12-19 09:36:42 +01:00 |
| Prowler Bot | ec32be2f1d | chore(release): Bump version to v5.17.0 (#9597) (Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>) | 2025-12-18 18:38:31 +01:00 |
33 changed files with 2528 additions and 179 deletions

.env (2 changed lines)

@@ -119,7 +119,7 @@ NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}
#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.12.2
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.16.0
# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"

.github/workflows/api-bump-version.yml (new file, 254 lines)

@@ -0,0 +1,254 @@
name: 'API: Bump Version'
on:
release:
types:
- 'published'
concurrency:
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
cancel-in-progress: false
env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master
jobs:
detect-release-type:
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read
outputs:
is_minor: ${{ steps.detect.outputs.is_minor }}
is_patch: ${{ steps.detect.outputs.is_patch }}
major_version: ${{ steps.detect.outputs.major_version }}
minor_version: ${{ steps.detect.outputs.minor_version }}
patch_version: ${{ steps.detect.outputs.patch_version }}
current_api_version: ${{ steps.get_api_version.outputs.current_api_version }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Get current API version
id: get_api_version
run: |
CURRENT_API_VERSION=$(grep -oP '^version = "\K[^"]+' api/pyproject.toml)
echo "current_api_version=${CURRENT_API_VERSION}" >> "${GITHUB_OUTPUT}"
echo "Current API version: $CURRENT_API_VERSION"
- name: Detect release type and parse version
id: detect
run: |
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
MAJOR_VERSION=${BASH_REMATCH[1]}
MINOR_VERSION=${BASH_REMATCH[2]}
PATCH_VERSION=${BASH_REMATCH[3]}
echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"
if (( MAJOR_VERSION != 5 )); then
echo "::error::Releasing another Prowler major version, aborting..."
exit 1
fi
if (( PATCH_VERSION == 0 )); then
echo "is_minor=true" >> "${GITHUB_OUTPUT}"
echo "is_patch=false" >> "${GITHUB_OUTPUT}"
echo "✓ Minor release detected: $PROWLER_VERSION"
else
echo "is_minor=false" >> "${GITHUB_OUTPUT}"
echo "is_patch=true" >> "${GITHUB_OUTPUT}"
echo "✓ Patch release detected: $PROWLER_VERSION"
fi
else
echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
exit 1
fi
bump-minor-version:
needs: detect-release-type
if: needs.detect-release-type.outputs.is_minor == 'true'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Calculate next API minor version
run: |
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
CURRENT_API_VERSION="${{ needs.detect-release-type.outputs.current_api_version }}"
# API version follows Prowler minor + 1
# For Prowler 5.17.0 -> API 1.18.0
# For next master (Prowler 5.18.0) -> API 1.19.0
NEXT_API_VERSION=1.$((MINOR_VERSION + 2)).0
echo "CURRENT_API_VERSION=${CURRENT_API_VERSION}" >> "${GITHUB_ENV}"
echo "NEXT_API_VERSION=${NEXT_API_VERSION}" >> "${GITHUB_ENV}"
echo "Prowler release version: ${MAJOR_VERSION}.${MINOR_VERSION}.0"
echo "Current API version: $CURRENT_API_VERSION"
echo "Next API minor version (for master): $NEXT_API_VERSION"
- name: Bump API versions in files for master
run: |
set -e
sed -i "s|version = \"${CURRENT_API_VERSION}\"|version = \"${NEXT_API_VERSION}\"|" api/pyproject.toml
sed -i "s|spectacular_settings.VERSION = \"${CURRENT_API_VERSION}\"|spectacular_settings.VERSION = \"${NEXT_API_VERSION}\"|" api/src/backend/api/v1/views.py
sed -i "s| version: ${CURRENT_API_VERSION}| version: ${NEXT_API_VERSION}|" api/src/backend/api/specs/v1.yaml
echo "Files modified:"
git --no-pager diff
- name: Create PR for next API minor version to master
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: master
commit-message: 'chore(api): Bump version to v${{ env.NEXT_API_VERSION }}'
branch: api-version-bump-to-v${{ env.NEXT_API_VERSION }}
title: 'chore(api): Bump version to v${{ env.NEXT_API_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Bump Prowler API version to v${{ env.NEXT_API_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Checkout version branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
- name: Calculate first API patch version
run: |
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
CURRENT_API_VERSION="${{ needs.detect-release-type.outputs.current_api_version }}"
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
# API version follows Prowler minor + 1
# For Prowler 5.17.0 release -> version branch v5.17 should have API 1.18.1
FIRST_API_PATCH_VERSION=1.$((MINOR_VERSION + 1)).1
echo "CURRENT_API_VERSION=${CURRENT_API_VERSION}" >> "${GITHUB_ENV}"
echo "FIRST_API_PATCH_VERSION=${FIRST_API_PATCH_VERSION}" >> "${GITHUB_ENV}"
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
echo "Prowler release version: ${MAJOR_VERSION}.${MINOR_VERSION}.0"
echo "First API patch version (for ${VERSION_BRANCH}): $FIRST_API_PATCH_VERSION"
echo "Version branch: $VERSION_BRANCH"
- name: Bump API versions in files for version branch
run: |
set -e
sed -i "s|version = \"${CURRENT_API_VERSION}\"|version = \"${FIRST_API_PATCH_VERSION}\"|" api/pyproject.toml
sed -i "s|spectacular_settings.VERSION = \"${CURRENT_API_VERSION}\"|spectacular_settings.VERSION = \"${FIRST_API_PATCH_VERSION}\"|" api/src/backend/api/v1/views.py
sed -i "s| version: ${CURRENT_API_VERSION}| version: ${FIRST_API_PATCH_VERSION}|" api/src/backend/api/specs/v1.yaml
echo "Files modified:"
git --no-pager diff
- name: Create PR for first API patch version to version branch
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: 'chore(api): Bump version to v${{ env.FIRST_API_PATCH_VERSION }}'
branch: api-version-bump-to-v${{ env.FIRST_API_PATCH_VERSION }}
title: 'chore(api): Bump version to v${{ env.FIRST_API_PATCH_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Bump Prowler API version to v${{ env.FIRST_API_PATCH_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
bump-patch-version:
needs: detect-release-type
if: needs.detect-release-type.outputs.is_patch == 'true'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Calculate next API patch version
run: |
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
PATCH_VERSION=${{ needs.detect-release-type.outputs.patch_version }}
CURRENT_API_VERSION="${{ needs.detect-release-type.outputs.current_api_version }}"
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
# Extract current API patch to increment it
if [[ $CURRENT_API_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
API_PATCH=${BASH_REMATCH[3]}
# API version follows Prowler minor + 1
# Keep same API minor (based on Prowler minor), increment patch
NEXT_API_PATCH_VERSION=1.$((MINOR_VERSION + 1)).$((API_PATCH + 1))
echo "CURRENT_API_VERSION=${CURRENT_API_VERSION}" >> "${GITHUB_ENV}"
echo "NEXT_API_PATCH_VERSION=${NEXT_API_PATCH_VERSION}" >> "${GITHUB_ENV}"
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
echo "Prowler release version: ${MAJOR_VERSION}.${MINOR_VERSION}.${PATCH_VERSION}"
echo "Current API version: $CURRENT_API_VERSION"
echo "Next API patch version: $NEXT_API_PATCH_VERSION"
echo "Target branch: $VERSION_BRANCH"
else
echo "::error::Invalid API version format: $CURRENT_API_VERSION"
exit 1
fi
- name: Bump API versions in files for version branch
run: |
set -e
sed -i "s|version = \"${CURRENT_API_VERSION}\"|version = \"${NEXT_API_PATCH_VERSION}\"|" api/pyproject.toml
sed -i "s|spectacular_settings.VERSION = \"${CURRENT_API_VERSION}\"|spectacular_settings.VERSION = \"${NEXT_API_PATCH_VERSION}\"|" api/src/backend/api/v1/views.py
sed -i "s| version: ${CURRENT_API_VERSION}| version: ${NEXT_API_PATCH_VERSION}|" api/src/backend/api/specs/v1.yaml
echo "Files modified:"
git --no-pager diff
- name: Create PR for next API patch version to version branch
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: 'chore(api): Bump version to v${{ env.NEXT_API_PATCH_VERSION }}'
branch: api-version-bump-to-v${{ env.NEXT_API_PATCH_VERSION }}
title: 'chore(api): Bump version to v${{ env.NEXT_API_PATCH_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Bump Prowler API version to v${{ env.NEXT_API_PATCH_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.

.github/workflows/docs-bump-version.yml (new file, 247 lines)

@@ -0,0 +1,247 @@
name: 'Docs: Bump Version'
on:
release:
types:
- 'published'
concurrency:
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
cancel-in-progress: false
env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master
jobs:
detect-release-type:
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read
outputs:
is_minor: ${{ steps.detect.outputs.is_minor }}
is_patch: ${{ steps.detect.outputs.is_patch }}
major_version: ${{ steps.detect.outputs.major_version }}
minor_version: ${{ steps.detect.outputs.minor_version }}
patch_version: ${{ steps.detect.outputs.patch_version }}
current_docs_version: ${{ steps.get_docs_version.outputs.current_docs_version }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Get current documentation version
id: get_docs_version
run: |
CURRENT_DOCS_VERSION=$(grep -oP 'PROWLER_UI_VERSION="\K[^"]+' docs/getting-started/installation/prowler-app.mdx)
echo "current_docs_version=${CURRENT_DOCS_VERSION}" >> "${GITHUB_OUTPUT}"
echo "Current documentation version: $CURRENT_DOCS_VERSION"
- name: Detect release type and parse version
id: detect
run: |
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
MAJOR_VERSION=${BASH_REMATCH[1]}
MINOR_VERSION=${BASH_REMATCH[2]}
PATCH_VERSION=${BASH_REMATCH[3]}
echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"
if (( MAJOR_VERSION != 5 )); then
echo "::error::Releasing another Prowler major version, aborting..."
exit 1
fi
if (( PATCH_VERSION == 0 )); then
echo "is_minor=true" >> "${GITHUB_OUTPUT}"
echo "is_patch=false" >> "${GITHUB_OUTPUT}"
echo "✓ Minor release detected: $PROWLER_VERSION"
else
echo "is_minor=false" >> "${GITHUB_OUTPUT}"
echo "is_patch=true" >> "${GITHUB_OUTPUT}"
echo "✓ Patch release detected: $PROWLER_VERSION"
fi
else
echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
exit 1
fi
bump-minor-version:
needs: detect-release-type
if: needs.detect-release-type.outputs.is_minor == 'true'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Calculate next minor version
run: |
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
CURRENT_DOCS_VERSION="${{ needs.detect-release-type.outputs.current_docs_version }}"
NEXT_MINOR_VERSION=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).0
echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
echo "NEXT_MINOR_VERSION=${NEXT_MINOR_VERSION}" >> "${GITHUB_ENV}"
echo "Current documentation version: $CURRENT_DOCS_VERSION"
echo "Current release version: $PROWLER_VERSION"
echo "Next minor version: $NEXT_MINOR_VERSION"
- name: Bump versions in documentation for master
run: |
set -e
# Update prowler-app.mdx with current release version
sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
echo "Files modified:"
git --no-pager diff
- name: Create PR for documentation update to master
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: master
commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
### Files Updated
- `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
- All `*.mdx` files with `<VersionBadge>` components
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Checkout version branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
- name: Calculate first patch version
run: |
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
CURRENT_DOCS_VERSION="${{ needs.detect-release-type.outputs.current_docs_version }}"
FIRST_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.1
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
echo "FIRST_PATCH_VERSION=${FIRST_PATCH_VERSION}" >> "${GITHUB_ENV}"
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
echo "First patch version: $FIRST_PATCH_VERSION"
echo "Version branch: $VERSION_BRANCH"
- name: Bump versions in documentation for version branch
run: |
set -e
# Update prowler-app.mdx with current release version
sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
echo "Files modified:"
git --no-pager diff
- name: Create PR for documentation update to version branch
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}-branch
title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.
### Files Updated
- `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
bump-patch-version:
needs: detect-release-type
if: needs.detect-release-type.outputs.is_patch == 'true'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Calculate next patch version
run: |
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
PATCH_VERSION=${{ needs.detect-release-type.outputs.patch_version }}
CURRENT_DOCS_VERSION="${{ needs.detect-release-type.outputs.current_docs_version }}"
NEXT_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.$((PATCH_VERSION + 1))
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
echo "CURRENT_DOCS_VERSION=${CURRENT_DOCS_VERSION}" >> "${GITHUB_ENV}"
echo "NEXT_PATCH_VERSION=${NEXT_PATCH_VERSION}" >> "${GITHUB_ENV}"
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
echo "Current documentation version: $CURRENT_DOCS_VERSION"
echo "Current release version: $PROWLER_VERSION"
echo "Next patch version: $NEXT_PATCH_VERSION"
echo "Target branch: $VERSION_BRANCH"
- name: Bump versions in documentation for patch version
run: |
set -e
# Update prowler-app.mdx with current release version
sed -i "s|PROWLER_UI_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_UI_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
sed -i "s|PROWLER_API_VERSION=\"${CURRENT_DOCS_VERSION}\"|PROWLER_API_VERSION=\"${PROWLER_VERSION}\"|" docs/getting-started/installation/prowler-app.mdx
echo "Files modified:"
git --no-pager diff
- name: Create PR for documentation update to version branch
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
branch: docs-version-update-to-v${{ env.PROWLER_VERSION }}
title: 'docs: Update version to v${{ env.PROWLER_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Update Prowler documentation version references to v${{ env.PROWLER_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
### Files Updated
- `docs/getting-started/installation/prowler-app.mdx`: `PROWLER_UI_VERSION` and `PROWLER_API_VERSION`
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.


@@ -13,7 +13,10 @@ concurrency:
jobs:
trigger-cloud-pull-request:
if: github.event.pull_request.merged == true && github.repository == 'prowler-cloud/prowler'
if: |
github.event.pull_request.merged == true &&
github.repository == 'prowler-cloud/prowler' &&
!contains(github.event.pull_request.labels.*.name, 'skip-sync')
runs-on: ubuntu-latest
timeout-minutes: 10
permissions:


@@ -86,7 +86,6 @@ jobs:
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${NEXT_MINOR_VERSION}\"|" pyproject.toml
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${NEXT_MINOR_VERSION}\"|" prowler/config/config.py
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_MINOR_VERSION}|" .env
echo "Files modified:"
git --no-pager diff
@@ -100,7 +99,7 @@ jobs:
commit-message: 'chore(release): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
branch: version-bump-to-v${{ env.NEXT_MINOR_VERSION }}
title: 'chore(release): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
labels: no-changelog
labels: no-changelog,skip-sync
body: |
### Description
@@ -135,7 +134,6 @@ jobs:
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${FIRST_PATCH_VERSION}\"|" pyproject.toml
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${FIRST_PATCH_VERSION}\"|" prowler/config/config.py
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${FIRST_PATCH_VERSION}|" .env
echo "Files modified:"
git --no-pager diff
@@ -149,7 +147,7 @@ jobs:
commit-message: 'chore(release): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
branch: version-bump-to-v${{ env.FIRST_PATCH_VERSION }}
title: 'chore(release): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
labels: no-changelog
labels: no-changelog,skip-sync
body: |
### Description
@@ -193,7 +191,6 @@ jobs:
sed -i "s|version = \"${PROWLER_VERSION}\"|version = \"${NEXT_PATCH_VERSION}\"|" pyproject.toml
sed -i "s|prowler_version = \"${PROWLER_VERSION}\"|prowler_version = \"${NEXT_PATCH_VERSION}\"|" prowler/config/config.py
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_PATCH_VERSION}|" .env
echo "Files modified:"
git --no-pager diff
@@ -207,7 +204,7 @@ jobs:
commit-message: 'chore(release): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
branch: version-bump-to-v${{ env.NEXT_PATCH_VERSION }}
title: 'chore(release): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
labels: no-changelog
labels: no-changelog,skip-sync
body: |
### Description

.github/workflows/ui-bump-version.yml (new file, 221 lines)

@@ -0,0 +1,221 @@
name: 'UI: Bump Version'
on:
release:
types:
- 'published'
concurrency:
group: ${{ github.workflow }}-${{ github.event.release.tag_name }}
cancel-in-progress: false
env:
PROWLER_VERSION: ${{ github.event.release.tag_name }}
BASE_BRANCH: master
jobs:
detect-release-type:
runs-on: ubuntu-latest
timeout-minutes: 5
permissions:
contents: read
outputs:
is_minor: ${{ steps.detect.outputs.is_minor }}
is_patch: ${{ steps.detect.outputs.is_patch }}
major_version: ${{ steps.detect.outputs.major_version }}
minor_version: ${{ steps.detect.outputs.minor_version }}
patch_version: ${{ steps.detect.outputs.patch_version }}
steps:
- name: Detect release type and parse version
id: detect
run: |
if [[ $PROWLER_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
MAJOR_VERSION=${BASH_REMATCH[1]}
MINOR_VERSION=${BASH_REMATCH[2]}
PATCH_VERSION=${BASH_REMATCH[3]}
echo "major_version=${MAJOR_VERSION}" >> "${GITHUB_OUTPUT}"
echo "minor_version=${MINOR_VERSION}" >> "${GITHUB_OUTPUT}"
echo "patch_version=${PATCH_VERSION}" >> "${GITHUB_OUTPUT}"
if (( MAJOR_VERSION != 5 )); then
echo "::error::Releasing another Prowler major version, aborting..."
exit 1
fi
if (( PATCH_VERSION == 0 )); then
echo "is_minor=true" >> "${GITHUB_OUTPUT}"
echo "is_patch=false" >> "${GITHUB_OUTPUT}"
echo "✓ Minor release detected: $PROWLER_VERSION"
else
echo "is_minor=false" >> "${GITHUB_OUTPUT}"
echo "is_patch=true" >> "${GITHUB_OUTPUT}"
echo "✓ Patch release detected: $PROWLER_VERSION"
fi
else
echo "::error::Invalid version syntax: '$PROWLER_VERSION' (must be X.Y.Z)"
exit 1
fi
bump-minor-version:
needs: detect-release-type
if: needs.detect-release-type.outputs.is_minor == 'true'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Calculate next minor version
run: |
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
NEXT_MINOR_VERSION=${MAJOR_VERSION}.$((MINOR_VERSION + 1)).0
echo "NEXT_MINOR_VERSION=${NEXT_MINOR_VERSION}" >> "${GITHUB_ENV}"
echo "Current version: $PROWLER_VERSION"
echo "Next minor version: $NEXT_MINOR_VERSION"
- name: Bump UI version in .env for master
run: |
set -e
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_MINOR_VERSION}|" .env
echo "Files modified:"
git --no-pager diff
- name: Create PR for next minor version to master
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: master
commit-message: 'chore(ui): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
branch: ui-version-bump-to-v${{ env.NEXT_MINOR_VERSION }}
title: 'chore(ui): Bump version to v${{ env.NEXT_MINOR_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Bump Prowler UI version to v${{ env.NEXT_MINOR_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
### Files Updated
- `.env`: `NEXT_PUBLIC_PROWLER_RELEASE_VERSION`
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
- name: Checkout version branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: v${{ needs.detect-release-type.outputs.major_version }}.${{ needs.detect-release-type.outputs.minor_version }}
- name: Calculate first patch version
run: |
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
FIRST_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.1
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
echo "FIRST_PATCH_VERSION=${FIRST_PATCH_VERSION}" >> "${GITHUB_ENV}"
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
echo "First patch version: $FIRST_PATCH_VERSION"
echo "Version branch: $VERSION_BRANCH"
- name: Bump UI version in .env for version branch
run: |
set -e
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${FIRST_PATCH_VERSION}|" .env
echo "Files modified:"
git --no-pager diff
- name: Create PR for first patch version to version branch
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: 'chore(ui): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
branch: ui-version-bump-to-v${{ env.FIRST_PATCH_VERSION }}
title: 'chore(ui): Bump version to v${{ env.FIRST_PATCH_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Bump Prowler UI version to v${{ env.FIRST_PATCH_VERSION }} in version branch after releasing Prowler v${{ env.PROWLER_VERSION }}.
### Files Updated
- `.env`: `NEXT_PUBLIC_PROWLER_RELEASE_VERSION`
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
bump-patch-version:
needs: detect-release-type
if: needs.detect-release-type.outputs.is_patch == 'true'
runs-on: ubuntu-latest
timeout-minutes: 15
permissions:
contents: read
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Calculate next patch version
run: |
MAJOR_VERSION=${{ needs.detect-release-type.outputs.major_version }}
MINOR_VERSION=${{ needs.detect-release-type.outputs.minor_version }}
PATCH_VERSION=${{ needs.detect-release-type.outputs.patch_version }}
NEXT_PATCH_VERSION=${MAJOR_VERSION}.${MINOR_VERSION}.$((PATCH_VERSION + 1))
VERSION_BRANCH=v${MAJOR_VERSION}.${MINOR_VERSION}
echo "NEXT_PATCH_VERSION=${NEXT_PATCH_VERSION}" >> "${GITHUB_ENV}"
echo "VERSION_BRANCH=${VERSION_BRANCH}" >> "${GITHUB_ENV}"
echo "Current version: $PROWLER_VERSION"
echo "Next patch version: $NEXT_PATCH_VERSION"
echo "Target branch: $VERSION_BRANCH"
- name: Bump UI version in .env for version branch
run: |
set -e
sed -i "s|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${PROWLER_VERSION}|NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v${NEXT_PATCH_VERSION}|" .env
echo "Files modified:"
git --no-pager diff
- name: Create PR for next patch version to version branch
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
with:
author: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
base: ${{ env.VERSION_BRANCH }}
commit-message: 'chore(ui): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
branch: ui-version-bump-to-v${{ env.NEXT_PATCH_VERSION }}
title: 'chore(ui): Bump version to v${{ env.NEXT_PATCH_VERSION }}'
labels: no-changelog,skip-sync
body: |
### Description
Bump Prowler UI version to v${{ env.NEXT_PATCH_VERSION }} after releasing Prowler v${{ env.PROWLER_VERSION }}.
### Files Updated
- `.env`: `NEXT_PUBLIC_PROWLER_RELEASE_VERSION`
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.


@@ -44,7 +44,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.16.0"
version = "1.18.0"
[project.scripts]
celery = "src.backend.config.settings.celery"


@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.17.0
version: 1.18.0
description: |-
Prowler API specification.


@@ -359,7 +359,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.17.0"
spectacular_settings.VERSION = "1.18.0"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)


@@ -0,0 +1,28 @@
import warnings
from dashboard.common_methods import get_section_containers_threatscore
warnings.filterwarnings("ignore")
def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_DESCRIPTION",
"REQUIREMENTS_ATTRIBUTES_SECTION",
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
].copy()
return get_section_containers_threatscore(
aux,
"REQUIREMENTS_ATTRIBUTES_SECTION",
"REQUIREMENTS_ATTRIBUTES_SUBSECTION",
"REQUIREMENTS_ID",
)


@@ -407,9 +407,11 @@ def display_data(
compliance_module = importlib.import_module(
f"dashboard.compliance.{current}"
)
data = data.drop_duplicates(
subset=["CHECKID", "STATUS", "MUTED", "RESOURCEID", "STATUSEXTENDED"]
)
# Build subset list based on available columns
dedup_columns = ["CHECKID", "STATUS", "RESOURCEID", "STATUSEXTENDED"]
if "MUTED" in data.columns:
dedup_columns.insert(2, "MUTED")
data = data.drop_duplicates(subset=dedup_columns)
if "threatscore" in analytics_input:
data = get_threatscore_mean_by_pillar(data)
@@ -652,6 +654,7 @@ def get_table(current_compliance, table):
def get_threatscore_mean_by_pillar(df):
score_per_pillar = {}
max_score_per_pillar = {}
counted_findings_per_pillar = {}
for _, row in df.iterrows():
pillar = (
@@ -663,6 +666,18 @@ def get_threatscore_mean_by_pillar(df):
if pillar not in score_per_pillar:
score_per_pillar[pillar] = 0
max_score_per_pillar[pillar] = 0
counted_findings_per_pillar[pillar] = set()
# Skip muted findings for score calculation
is_muted = "MUTED" in df.columns and row.get("MUTED") == "True"
if is_muted:
continue
# Create unique finding identifier to avoid counting duplicates
finding_id = f"{row.get('CHECKID', '')}_{row.get('RESOURCEID', '')}"
if finding_id in counted_findings_per_pillar[pillar]:
continue
counted_findings_per_pillar[pillar].add(finding_id)
level_of_risk = pd.to_numeric(
row["REQUIREMENTS_ATTRIBUTES_LEVELOFRISK"], errors="coerce"
@@ -706,6 +721,10 @@ def get_table_prowler_threatscore(df):
score_per_pillar = {}
max_score_per_pillar = {}
pillars = {}
counted_findings_per_pillar = {}
counted_pass = set()
counted_fail = set()
counted_muted = set()
df_copy = df.copy()
@@ -720,6 +739,24 @@ def get_table_prowler_threatscore(df):
pillars[pillar] = {"FAIL": 0, "PASS": 0, "MUTED": 0}
score_per_pillar[pillar] = 0
max_score_per_pillar[pillar] = 0
counted_findings_per_pillar[pillar] = set()
# Create unique finding identifier
finding_id = f"{row.get('CHECKID', '')}_{row.get('RESOURCEID', '')}"
# Check if muted
is_muted = "MUTED" in df_copy.columns and row.get("MUTED") == "True"
# Count muted findings (separate from score calculation)
if is_muted and finding_id not in counted_muted:
counted_muted.add(finding_id)
pillars[pillar]["MUTED"] += 1
continue # Skip muted findings for score calculation
# Skip if already counted for this pillar
if finding_id in counted_findings_per_pillar[pillar]:
continue
counted_findings_per_pillar[pillar].add(finding_id)
level_of_risk = pd.to_numeric(
row["REQUIREMENTS_ATTRIBUTES_LEVELOFRISK"], errors="coerce"
@@ -738,13 +775,14 @@ def get_table_prowler_threatscore(df):
max_score_per_pillar[pillar] += level_of_risk * weight
if row["STATUS"] == "PASS":
pillars[pillar]["PASS"] += 1
if finding_id not in counted_pass:
counted_pass.add(finding_id)
pillars[pillar]["PASS"] += 1
score_per_pillar[pillar] += level_of_risk * weight
elif row["STATUS"] == "FAIL":
pillars[pillar]["FAIL"] += 1
if "MUTED" in row and row["MUTED"] == "True":
pillars[pillar]["MUTED"] += 1
if finding_id not in counted_fail:
counted_fail.add(finding_id)
pillars[pillar]["FAIL"] += 1
result_df = []


@@ -95,7 +95,14 @@
},
"user-guide/tutorials/prowler-app-rbac",
"user-guide/tutorials/prowler-app-api-keys",
"user-guide/tutorials/prowler-app-mute-findings",
{
"group": "Mutelist",
"expanded": true,
"pages": [
"user-guide/tutorials/prowler-app-simple-mutelist",
"user-guide/tutorials/prowler-app-mute-findings"
]
},
{
"group": "Integrations",
"expanded": true,


@@ -115,8 +115,8 @@ To update the environment file:
Edit the `.env` file and change version values:
```env
PROWLER_UI_VERSION="5.15.0"
PROWLER_API_VERSION="5.15.0"
PROWLER_UI_VERSION="5.16.0"
PROWLER_API_VERSION="5.16.0"
```
<Note>


@@ -1,20 +1,26 @@
---
title: 'Mute Findings (Mutelist)'
title: 'Advanced Mutelist (YAML)'
---
import { VersionBadge } from "/snippets/version-badge.mdx"
<VersionBadge version="5.9.0" />
Prowler App allows users to mute specific findings to focus on the most critical security issues. This comprehensive guide demonstrates how to effectively use the Mutelist feature to manage and prioritize security findings.
Prowler App allows users to mute specific findings to focus on the most critical security issues. This guide demonstrates how to use the Advanced Mutelist feature with YAML configuration for complex, pattern-based muting rules.
## What Is the Mutelist Feature?
<Note>
For muting individual findings without YAML configuration, use [Simple Mutelist](/user-guide/tutorials/prowler-app-simple-mutelist) to mute findings directly from the Findings table.
The Mutelist feature enables users to:
</Note>
- **Suppress specific findings** from appearing in future scans
- **Focus on critical issues** by hiding resolved or accepted risks
## What Is Advanced Mutelist?
Advanced Mutelist enables users to create powerful, pattern-based muting rules using YAML configuration:
- **Define complex muting patterns** using regular expressions
- **Mute findings by check, region, resource, or tag** across multiple accounts
- **Apply wildcards** to mute entire categories of findings
- **Create exceptions** within broad muting rules
- **Maintain audit trails** of muted findings for compliance purposes
- **Streamline security workflows** by reducing noise from non-critical findings
## Prerequisites
@@ -28,46 +34,51 @@ Before muting findings, ensure:
Muting findings does not resolve underlying security issues. Review each finding carefully before muting to ensure it represents an acceptable risk or has been properly addressed.
</Warning>
## Step 1: Add a provider
## Step 1: Connect a Provider
To configure Mutelist:
To configure Advanced Mutelist:
1. Log into Prowler App
2. Navigate to the providers page
2. Navigate to the Providers page
![Add provider](/images/mutelist-ui-1.png)
3. Add a provider, then "Configure Muted Findings" button will be enabled in providers page and scans page
3. Connect a provider to enable Mutelist configuration
![Button enabled in providers page](/images/mutelist-ui-2.png)
![Button enabled in scans pages](/images/mutelist-ui-3.png)
## Step 2: Configure Mutelist
## Step 2: Configure Advanced Mutelist
1. Open the modal by clicking "Configure Muted Findings" button
![Open modal](/images/mutelist-ui-4.png)
1. Provide a valid Mutelist in `YAML` format. More details about Mutelist [here](/user-guide/cli/tutorials/mutelist)
1. Navigate to the Mutelist page from the left navigation menu
2. Select the "Advanced" tab
3. Provide a valid Mutelist configuration in `YAML` format
<Note>
The YAML format follows the same specification as Prowler CLI. See [CLI Mutelist documentation](/user-guide/cli/tutorials/mutelist) for detailed syntax reference.
</Note>
![Valid YAML configuration](/images/mutelist-ui-5.png)
If the YAML configuration is invalid, an error message will be displayed
![Wrong YAML configuration](/images/mutelist-ui-7.png)
![Wrong YAML configuration 2](/images/mutelist-ui-8.png)
## Step 3: Review the Mutelist
## Step 3: Review and Update the Configuration
1. Once added, the configuration can be removed or updated
1. Once added, the configuration can be updated or removed from the Advanced tab
![Remove or update configuration](/images/mutelist-ui-6.png)
## Step 4: Check muted findings in the scan results
## Step 4: Verify Muted Findings in Scan Results
1. Run a new scan
2. Check the muted findings in the scan results
![Check muted fidings](/images/mutelist-ui-9.png)
2. Navigate to the Findings page to verify muted findings
![Check muted findings](/images/mutelist-ui-9.png)
<Note>
The Mutelist configuration takes effect on the next scans.
The Advanced Mutelist configuration takes effect on subsequent scans. Existing findings are not retroactively muted.
</Note>
## Mutelist Ready To Use Examples
## YAML Configuration Examples
Below are examples for different cloud providers supported by Prowler App. Check how the mutelist works [here](/user-guide/cli/tutorials/mutelist#how-the-mutelist-works).
Below are ready-to-use examples for different cloud providers. For detailed syntax and logic explanation, see [CLI Mutelist documentation](/user-guide/cli/tutorials/mutelist#how-the-mutelist-works).
### AWS Provider
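To make the advanced format concrete, here is a minimal illustrative sketch of a single mutelist entry, assuming the YAML structure documented for the Prowler CLI mutelist; the account ID, check name, and region below are placeholders rather than values taken from this changeset:

```yaml
Mutelist:
  Accounts:
    "123456789012":                        # account the rule applies to ("*" matches all accounts)
      Checks:
        "iam_user_hardware_mfa_enabled":   # check ID to mute (wildcards/patterns broaden the rule)
          Regions:
            - "us-east-1"                  # regions where the rule applies
          Resources:
            - "*"                          # resource name/ID patterns to match
```

The ability to use wildcards and patterns across accounts, checks, regions, and resources is what distinguishes this advanced YAML format from the UID-based rules created by Simple Mutelist.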


@@ -0,0 +1,180 @@
---
title: "Simple Mutelist"
---
import { VersionBadge } from "/snippets/version-badge.mdx";
<VersionBadge version="5.16.0" />
Prowler App provides Simple Mutelist, an intuitive way to mute findings directly from the Findings page without writing YAML configuration. This feature streamlines the muting workflow by allowing individual or bulk muting with just a few clicks.
## What Is Simple Mutelist?
Simple Mutelist enables users to:
- **Mute findings directly from the Findings table** using checkbox selection
- **Perform bulk muting** of multiple findings at once
- **Manage mute rules** through a dedicated interface
- **Toggle mute rules on and off** without deleting them
- **Edit mute rule justifications** after creation
<Note>
Simple Mutelist creates rules based on the finding's unique identifier (UID). For complex muting patterns based on checks, regions, tags, or regular expressions, use [Advanced Mutelist](/user-guide/tutorials/prowler-app-mute-findings) with YAML configuration.
</Note>
## Accessing the Mutelist Page
To access the Mutelist page:
1. Click "Mutelist" in the left navigation menu
The Mutelist page contains two tabs:
- **Simple:** Displays a table of mute rules created through Simple Mutelist
- **Advanced:** Provides YAML-based configuration for complex muting patterns
## Muting Findings from the Findings Page
### Muting Individual Findings
To mute a single finding:
1. Navigate to the Findings page
2. Locate the finding to mute
3. Click the actions menu (three dots) on the finding row
4. Select "Mute"
5. Enter a justification for muting this finding
6. Click "Confirm" to create the mute rule
### Muting Multiple Findings (Bulk Muting)
To mute multiple findings at once:
1. Navigate to the Findings page
2. Select findings using the checkboxes in the leftmost column
3. Click the floating "Mute" button that appears at the bottom of the screen
4. Enter a justification that applies to all selected findings
5. Click "Confirm" to create mute rules for all selected findings
<Note>
Findings that are already muted display a muted icon instead of a checkbox. These findings cannot be selected for bulk operations.
</Note>
## Managing Mute Rules
### Viewing Mute Rules
To view all mute rules:
1. Navigate to the Mutelist page
2. Select the "Simple" tab
3. The table displays all mute rules with the following information:
- **Finding UID:** The unique identifier of the muted finding
- **Justification:** The reason provided for muting
- **Enabled:** Whether the rule is currently active
- **Created:** When the rule was created
### Enabling and Disabling Mute Rules
To toggle a mute rule without deleting it:
1. Navigate to the Mutelist page
2. Select the "Simple" tab
3. Locate the mute rule
4. Use the toggle switch in the "Enabled" column to enable or disable the rule
<Note>
Disabled mute rules remain in the system but do not affect findings. Findings associated with disabled rules will appear as unmuted in subsequent scans.
</Note>
### Editing Mute Rules
To edit a mute rule's justification:
1. Navigate to the Mutelist page
2. Select the "Simple" tab
3. Click the actions menu (three dots) on the mute rule row
4. Select "Edit"
5. Update the justification
6. Click "Save" to apply changes
### Deleting Mute Rules
To permanently remove a mute rule:
1. Navigate to the Mutelist page
2. Select the "Simple" tab
3. Click the actions menu (three dots) on the mute rule row
4. Select "Delete"
5. Confirm the deletion
<Warning>
Deleting a mute rule is permanent. The finding will appear as unmuted in subsequent scans. To temporarily unmute a finding without losing the rule, disable the rule instead of deleting it.
</Warning>
## How Simple Mutelist Works
Simple Mutelist creates mute rules based on a finding's unique identifier (UID). When a mute rule is created:
- **Existing findings** matching the UID are immediately marked as muted
- **Historical findings** with the same UID are also muted
- **Future findings** from subsequent scans are automatically muted if they match the UID
### Uniqueness Constraint
Each finding UID can only have one mute rule. Attempting to create a duplicate mute rule for the same finding displays an error message indicating the rule already exists.
## Simple Mutelist vs. Advanced Mutelist
| Feature | Simple Mutelist | Advanced Mutelist |
| ------------------------ | ----------------------------------------- | ------------------------------------------------------ |
| **Configuration method** | Point-and-click interface | YAML configuration file |
| **Muting scope** | Individual finding UIDs | Patterns based on checks, regions, resources, and tags |
| **Regular expressions** | Not supported | Fully supported |
| **Bulk operations** | Checkbox selection in Findings table | YAML wildcards and patterns |
| **Best for** | Quick, ad-hoc muting of specific findings | Complex, policy-driven muting rules |
### When to Use Simple Mutelist
- Muting specific findings identified during review
- Quick suppression of known false positives
- Ad-hoc muting without YAML knowledge
### When to Use Advanced Mutelist
- Muting all findings for a specific check across regions
- Pattern-based muting using regular expressions
- Tag-based muting for environment-specific resources
- Complex rules with exceptions
## Best Practices
1. **Provide meaningful justifications:** Document why each finding is muted for audit trails and team communication
2. **Review muted findings regularly:** Periodically audit mute rules to ensure they remain valid
3. **Use disable instead of delete:** When temporarily unmuting findings, disable rules rather than deleting them
4. **Combine with Advanced Mutelist:** Use Simple Mutelist for specific findings and Advanced Mutelist for broad patterns
5. **Limit bulk muting:** Review findings individually when possible to ensure appropriate justification for each
## Troubleshooting
### Duplicate Rule Error
If an error indicates a mute rule already exists for a finding:
1. Navigate to the Mutelist page
2. Search for the existing rule in the Simple tab
3. Edit the existing rule's justification if needed, or
4. Delete the existing rule and create a new one
### Finding Still Appears Unmuted
If a muted finding still appears unmuted:
1. Verify the mute rule exists in the Mutelist page
2. Ensure the mute rule is enabled (toggle is on)
3. Check that the finding UID matches the mute rule
4. Wait for the next scan to see updated muting status on historical findings


@@ -2,6 +2,20 @@
All notable changes to the **Prowler SDK** are documented in this file.
## [5.17.0] (Prowler UNRELEASED)
### Added
- Add Prowler ThreatScore for the Alibaba Cloud provider [(#9511)](https://github.com/prowler-cloud/prowler/pull/9511)
### Changed
- Update AWS Step Functions service metadata to new format [(#9432)](https://github.com/prowler-cloud/prowler/pull/9432)
- Update AWS Route 53 service metadata to new format [(#9406)](https://github.com/prowler-cloud/prowler/pull/9406)
- Update AWS SQS service metadata to new format [(#9429)](https://github.com/prowler-cloud/prowler/pull/9429)
- Update Azure MySQL service metadata to new format [(#9623)](https://github.com/prowler-cloud/prowler/pull/9623)
---
## [5.16.0] (Prowler v5.16.0)
### Added


@@ -83,6 +83,9 @@ from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
AzureMitreAttack,
)
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_gcp import GCPMitreAttack
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_alibaba import (
ProwlerThreatScoreAlibaba,
)
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_aws import (
ProwlerThreatScoreAWS,
)
@@ -1039,6 +1042,18 @@ def prowler():
)
generated_outputs["compliance"].append(cis)
cis.batch_write_data_to_file()
elif compliance_name == "prowler_threatscore_alibabacloud":
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
prowler_threatscore = ProwlerThreatScoreAlibaba(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
file_path=filename,
)
generated_outputs["compliance"].append(prowler_threatscore)
prowler_threatscore.batch_write_data_to_file()
else:
filename = (
f"{output_options.output_directory}/compliance/"

File diff suppressed because it is too large.

@@ -38,7 +38,7 @@ class _MutableTimestamp:
timestamp = _MutableTimestamp(datetime.today())
timestamp_utc = _MutableTimestamp(datetime.now(timezone.utc))
prowler_version = "5.16.0"
prowler_version = "5.17.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://raw.githubusercontent.com/prowler-cloud/prowler/dc7d2d5aeb92fdf12e8604f42ef6472cd3e8e889/docs/img/prowler-logo-black.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"


@@ -146,3 +146,29 @@ class ProwlerThreatScoreKubernetesModel(BaseModel):
Muted: bool
Framework: str
Name: str
class ProwlerThreatScoreAlibabaModel(BaseModel):
"""
ProwlerThreatScoreAlibabaModel generates a finding's output in Alibaba Cloud Prowler ThreatScore Compliance format.
"""
Provider: str
Description: str
AccountId: str
Region: str
AssessmentDate: str
Requirements_Id: str
Requirements_Description: str
Requirements_Attributes_Title: str
Requirements_Attributes_Section: str
Requirements_Attributes_SubSection: Optional[str] = None
Requirements_Attributes_AttributeDescription: str
Requirements_Attributes_AdditionalInformation: str
Requirements_Attributes_LevelOfRisk: int
Requirements_Attributes_Weight: int
Status: str
StatusExtended: str
ResourceId: str
ResourceName: str
CheckId: str


@@ -0,0 +1,98 @@
from prowler.config.config import timestamp
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.prowler_threatscore.models import (
ProwlerThreatScoreAlibabaModel,
)
from prowler.lib.outputs.finding import Finding
class ProwlerThreatScoreAlibaba(ComplianceOutput):
"""
This class represents the Alibaba Cloud Prowler ThreatScore compliance output.
Attributes:
- _data (list): A list to store transformed data from findings.
- _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.
Methods:
- transform: Transforms findings into Alibaba Cloud Prowler ThreatScore compliance format.
"""
def transform(
self,
findings: list[Finding],
compliance: Compliance,
compliance_name: str,
) -> None:
"""
Transforms a list of findings into Alibaba Cloud Prowler ThreatScore compliance format.
Parameters:
- findings (list): A list of findings.
- compliance (Compliance): A compliance model.
- compliance_name (str): The name of the compliance model.
Returns:
- None
"""
for finding in findings:
# Get the compliance requirements for the finding
finding_requirements = finding.compliance.get(compliance_name, [])
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = ProwlerThreatScoreAlibabaModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
Region=finding.region,
AssessmentDate=str(timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Title=attribute.Title,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_AttributeDescription=attribute.AttributeDescription,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
Requirements_Attributes_Weight=attribute.Weight,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_uid,
ResourceName=finding.resource_name,
CheckId=finding.check_id,
Muted=finding.muted,
Framework=compliance.Framework,
Name=compliance.Name,
)
self._data.append(compliance_row)
# Add manual requirements to the compliance output
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = ProwlerThreatScoreAlibabaModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
Region="",
AssessmentDate=str(timestamp),
Requirements_Id=requirement.Id,
Requirements_Description=requirement.Description,
Requirements_Attributes_Title=attribute.Title,
Requirements_Attributes_Section=attribute.Section,
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_AttributeDescription=attribute.AttributeDescription,
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
Requirements_Attributes_Weight=attribute.Weight,
Status="MANUAL",
StatusExtended="Manual check",
ResourceId="manual_check",
ResourceName="Manual check",
CheckId="manual",
Muted=False,
Framework=compliance.Framework,
Name=compliance.Name,
)
self._data.append(compliance_row)


@@ -1,30 +1,39 @@
{
"Provider": "aws",
"CheckID": "route53_dangling_ip_subdomain_takeover",
"CheckTitle": "Check if Route53 Records contains dangling IPs.",
"CheckType": [],
"CheckTitle": "Route53 A record does not point to a dangling IP address",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices/Network Reachability",
"TTPs/Initial Access",
"Effects/Data Exposure"
],
"ServiceName": "route53",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "high",
"ResourceType": "Other",
"Description": "Check if Route53 Records contains dangling IPs.",
"Risk": "When an ephemeral AWS resource such as an Elastic IP (EIP) is released into the Amazon's Elastic IP pool, an attacker may acquire the EIP resource and effectively control the domain/subdomain associated with that EIP in your Route 53 DNS records.",
"ResourceType": "AwsRoute53HostedZone",
"Description": "**Route 53 `A` records** (non-alias) that use literal IPs are evaluated for **public AWS addresses** not currently assigned to resources in the account. Entries that match AWS ranges yet lack ownership are identified as potential **dangling IP targets**.",
"Risk": "**Dangling DNS `A` records** pointing to released AWS IPs enable **subdomain takeover**. An attacker who later obtains that IP can:\n- Redirect or alter content (integrity)\n- Capture credentials/cookies (confidentiality)\n- Disrupt or impersonate services (availability)",
"RelatedUrl": "",
"AdditionalURLs": [
"https://support.icompaas.com/support/solutions/articles/62000233461-ensure-route53-records-contains-dangling-ips-",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Route53/dangling-dns-records.html",
"https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/resource-record-sets-deleting.html"
],
"Remediation": {
"Code": {
"CLI": "aws route53 change-resource-record-sets --hosted-zone-id <resource_id>",
"NativeIaC": "",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Route53/dangling-dns-records.html",
"Terraform": ""
"CLI": "aws route53 change-resource-record-sets --hosted-zone-id <example_resource_id> --change-batch '{\"Changes\":[{\"Action\":\"UPSERT\",\"ResourceRecordSet\":{\"Name\":\"<example_resource_name>\",\"Type\":\"A\",\"AliasTarget\":{\"HostedZoneId\":\"<ALIAS_TARGET_HOSTED_ZONE_ID>\",\"DNSName\":\"<ALIAS_TARGET_DNS_NAME>\",\"EvaluateTargetHealth\":false}}}]}'",
"NativeIaC": "```yaml\n# CloudFormation: convert A record to an Alias so it no longer points to a dangling IP\nResources:\n <example_resource_name>:\n Type: AWS::Route53::RecordSet\n Properties:\n HostedZoneId: <example_resource_id>\n Name: <example_resource_name>\n Type: A\n AliasTarget:\n HostedZoneId: <ALIAS_TARGET_HOSTED_ZONE_ID> # CRITICAL: use Alias to an AWS resource instead of an IP\n DNSName: <ALIAS_TARGET_DNS_NAME> # CRITICAL: target AWS resource DNS (e.g., ALB/CloudFront)\n EvaluateTargetHealth: false\n```",
"Other": "1. Open AWS Console > Route 53 > Hosted zones\n2. Select the hosted zone and locate the failing non-alias A record\n3. If not needed: click Delete and confirm\n4. If needed: select the record, click Edit, enable Alias, choose the correct AWS resource (e.g., ALB/CloudFront), then Save changes\n5. Wait for propagation (~60s) and re-run the check",
"Terraform": "```hcl\n# Terraform: convert A record to Alias to avoid dangling public IPs\nresource \"aws_route53_record\" \"<example_resource_name>\" {\n zone_id = \"<example_resource_id>\"\n name = \"<example_resource_name>\"\n type = \"A\"\n\n alias { # CRITICAL: Alias to AWS resource (no direct IP)\n name = \"<ALIAS_TARGET_DNS_NAME>\" # e.g., dualstack.<alb>.amazonaws.com\n zone_id = \"<ALIAS_TARGET_HOSTED_ZONE_ID>\"\n evaluate_target_health = false\n }\n}\n```"
},
"Recommendation": {
"Text": "Ensure that any dangling DNS records are deleted from your Amazon Route 53 public hosted zones in order to maintain the integrity and authenticity of your domains/subdomains and to protect against domain hijacking attacks.",
"Url": "https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/resource-record-sets-deleting.html"
"Text": "Remove or update any record that points to an unassigned IP. Avoid hard-coding AWS public IPs in `A` records; use **aliases/CNAMEs** to managed endpoints. Enforce **asset lifecycle** decommissioning, routine DNS-asset reconciliation, and **change control** with monitoring to prevent and detect drift.",
"Url": "https://hub.prowler.com/check/route53_dangling_ip_subdomain_takeover"
}
},
"Categories": [
"forensics-ready"
"internet-exposed"
],
"DependsOn": [],
"RelatedTo": [],

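As context for the updated dangling-IP metadata above, a minimal detection sketch in Python/boto3 is shown below. It is not Prowler's actual check: it covers a single region, uses a placeholder hosted zone ID, and only compares record IPs against Elastic IPs and ENI public IPs (the real check also considers published AWS IP ranges).

```python
# Hedged sketch: flag non-alias A records whose literal IPs this account does not own.
# Single region only; placeholders are marked; pagination is omitted.
import boto3

route53 = boto3.client("route53")
ec2 = boto3.client("ec2")

# IPs the account currently owns: Elastic IPs plus ENI-associated public IPs
owned_ips = {addr["PublicIp"] for addr in ec2.describe_addresses()["Addresses"]}
for eni in ec2.describe_network_interfaces()["NetworkInterfaces"]:
    public_ip = (eni.get("Association") or {}).get("PublicIp")
    if public_ip:
        owned_ips.add(public_ip)

hosted_zone_id = "<HOSTED_ZONE_ID>"  # placeholder
records = route53.list_resource_record_sets(HostedZoneId=hosted_zone_id)
for record_set in records["ResourceRecordSets"]:
    if record_set["Type"] != "A" or "AliasTarget" in record_set:
        continue  # only literal (non-alias) A records are in scope
    for record in record_set.get("ResourceRecords", []):
        if record["Value"] not in owned_ips:
            print(f"Potentially dangling: {record_set['Name']} -> {record['Value']}")
```
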
View File

@@ -1,29 +1,40 @@
{
"Provider": "aws",
"CheckID": "route53_domains_privacy_protection_enabled",
"CheckTitle": "Enable Privacy Protection for for a Route53 Domain.",
"CheckType": [],
"CheckTitle": "Route 53 domain has admin contact privacy protection enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"Effects/Data Exposure",
"Sensitive Data Identifications/PII"
],
"ServiceName": "route53",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"Description": "Enable Privacy Protection for for a Route53 Domain.",
"Risk": "Without privacy protection enabled, ones personal information is published to the public WHOIS database.",
"RelatedUrl": "https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-privacy-protection.html",
"Description": "**Route 53 domain** administrative contact has **privacy protection** enabled, so WHOIS queries return redacted or proxy details.\n\nEvaluates whether contact data is hidden instead of publicly listed.",
"Risk": "**Public WHOIS contact data** exposes names, emails, phones, and addresses, enabling:\n- **Phishing/social engineering** of the registrar\n- **SIM-swap** or account takeover\n- **Domain hijacking**, affecting DNS integrity/availability\nIt also increases spam and targeted harassment.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-privacy-protection.html",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Route53/privacy-protection.html",
"https://support.icompaas.com/support/solutions/articles/62000233459-enable-privacy-protection-for-for-a-route53-domain-"
],
"Remediation": {
"Code": {
"CLI": "aws route53domains update-domain-contact-privacy --domain-name domain.com --registrant-privacy",
"CLI": "aws route53domains update-domain-contact-privacy --domain-name <DOMAIN_NAME> --admin-privacy",
"NativeIaC": "",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Route53/privacy-protection.html",
"Terraform": ""
"Other": "1. Open the AWS Console and go to Route 53\n2. Click Registered domains and select <DOMAIN_NAME>\n3. Click Edit in Contact information\n4. Enable Privacy protection (ensures Admin contact privacy is on)\n5. Save changes",
"Terraform": "```hcl\nresource \"aws_route53domains_registered_domain\" \"<example_resource_name>\" {\n domain_name = \"<example_resource_name>\"\n admin_privacy = true # Critical: enables admin contact privacy to pass the check\n}\n```"
},
"Recommendation": {
"Text": "Ensure default Privacy is enabled.",
"Url": "https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-privacy-protection.html"
"Text": "Enable **WHOIS privacy** for all contacts (admin, registrant, tech) to minimize exposure. Apply **defense in depth**: use dedicated, monitored contact emails, enforce **transfer lock** and **MFA** on registrar access, and regularly review settings. *If a TLD lacks privacy*, provide minimal, role-based contact details.",
"Url": "https://hub.prowler.com/check/route53_domains_privacy_protection_enabled"
}
},
"Categories": [],
"Categories": [
"internet-exposed"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

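A minimal boto3 sketch of how the admin-contact privacy state described above could be read follows; it is illustrative only (pagination is omitted, and `route53domains` must be called in `us-east-1`).

```python
# Hedged sketch: report AdminPrivacy for each registered domain (pagination omitted).
import boto3

client = boto3.client("route53domains", region_name="us-east-1")
for domain in client.list_domains()["Domains"]:
    detail = client.get_domain_detail(DomainName=domain["DomainName"])
    admin_privacy = detail.get("AdminPrivacy", False)
    print(f"{'PASS' if admin_privacy else 'FAIL'} {domain['DomainName']} AdminPrivacy={admin_privacy}")
```
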
View File

@@ -1,29 +1,37 @@
{
"Provider": "aws",
"CheckID": "route53_domains_transferlock_enabled",
"CheckTitle": "Enable Transfer Lock for a Route53 Domain.",
"CheckType": [],
"CheckTitle": "Route 53 domain has Transfer Lock enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"TTPs/Initial Access/Unauthorized Access"
],
"ServiceName": "route53",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"Severity": "high",
"ResourceType": "Other",
"Description": "Enable Transfer Lock for a Route53 Domain.",
"Risk": "Without transfer lock enabled, a domain name could be incorrectly moved to a new registrar.",
"RelatedUrl": "https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-lock.html",
"Description": "**Route 53 registered domains** are assessed for a transfer-lock state, indicated by the `clientTransferProhibited` status on the domain.",
"Risk": "Without **transfer lock**, a domain can be illicitly moved to another registrar, enabling **domain hijacking**. Attackers could alter DNS, redirect traffic, harvest credentials, and disrupt email and apps-compromising **confidentiality**, **integrity**, and **availability**.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-lock.html"
],
"Remediation": {
"Code": {
"CLI": "aws route53domains enable-domain-transfer-lock --domain-name DOMAIN",
"CLI": "aws route53domains enable-domain-transfer-lock --domain-name <example_domain_name>",
"NativeIaC": "",
"Other": "",
"Terraform": ""
"Other": "1. Open the AWS Management Console and go to Route 53\n2. In the left pane, select Registered domains\n3. Click the domain name <example_domain_name>\n4. In Actions, choose Turn on transfer lock\n5. Confirm to enable the lock",
"Terraform": "```hcl\nresource \"aws_route53domains_registered_domain\" \"<example_resource_name>\" {\n domain_name = \"<example_domain_name>\"\n transfer_lock = true # Enables transfer lock (sets clientTransferProhibited)\n}\n```"
},
"Recommendation": {
"Text": "Ensure transfer lock is enabled.",
"Url": "https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/domain-lock.html"
"Text": "Enable **transfer lock** on domains to prevent unauthorized registrar moves. Enforce **least privilege** on domain management, require **MFA**, and monitor status changes. *For planned transfers*, remove the lock only under approved change control and re-enable immediately afterward.",
"Url": "https://hub.prowler.com/check/route53_domains_transferlock_enabled"
}
},
"Categories": [],
"Categories": [
"identity-access"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

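For the transfer-lock metadata above, a short illustrative boto3 sketch follows; the pass condition is simply the presence of `clientTransferProhibited` in the domain's status list.

```python
# Hedged sketch: a domain passes when clientTransferProhibited appears in its status list.
import boto3

client = boto3.client("route53domains", region_name="us-east-1")
for domain in client.list_domains()["Domains"]:
    detail = client.get_domain_detail(DomainName=domain["DomainName"])
    locked = "clientTransferProhibited" in detail.get("StatusList", [])
    print(f"{'PASS' if locked else 'FAIL'} {domain['DomainName']} transfer_lock={locked}")
```
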
View File

@@ -1,30 +1,37 @@
{
"Provider": "aws",
"CheckID": "route53_public_hosted_zones_cloudwatch_logging_enabled",
"CheckTitle": "Check if Route53 public hosted zones are logging queries to CloudWatch Logs.",
"CheckType": [],
"CheckTitle": "Route53 public hosted zone has query logging enabled to a CloudWatch Logs log group",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices/Runtime Behavior Analysis",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices"
],
"ServiceName": "route53",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsRoute53HostedZone",
"Description": "Check if Route53 public hosted zones are logging queries to CloudWatch Logs.",
"Risk": "If logs are not enabled, monitoring of service use and threat analysis is not possible.",
"RelatedUrl": "https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/monitoring-hosted-zones-with-cloudwatch.html",
"Description": "**Route 53 public hosted zones** have **DNS query logging** enabled to **CloudWatch Logs**, recording resolver requests for the zone and writing events to an associated log group.",
"Risk": "Missing **DNS query logs** removes visibility into domain use, weakening detection of:\n- **Data exfiltration** via DNS\n- **Malware C2/DGA** patterns\n- **Hijacking or misconfigurations**\nThis degrades **incident response**, threatens data **confidentiality** and **integrity**, and slows **availability** troubleshooting.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/monitoring-hosted-zones-with-cloudwatch.html",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Route53/enable-query-logging.html"
],
"Remediation": {
"Code": {
"CLI": "aws route53 create-query-logging-config --hosted-zone-id <zone_id> --cloud-watch-logs-log-group-arn <log_group_arn>",
"NativeIaC": "",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/Route53/enable-query-logging.html",
"Terraform": ""
"CLI": "aws route53 create-query-logging-config --hosted-zone-id <HOSTED_ZONE_ID> --cloud-watch-logs-log-group-arn <LOG_GROUP_ARN>",
"NativeIaC": "```yaml\n# CloudFormation: Enable query logging for a public hosted zone\nResources:\n <example_resource_name>:\n Type: AWS::Route53::HostedZone\n Properties:\n Name: <example_domain_name>\n QueryLoggingConfig:\n CloudWatchLogsLogGroupArn: <example_log_group_arn> # Critical: enables Route53 query logging to this CloudWatch Logs group\n```",
"Other": "1. Open the AWS Console and go to Route 53 > Hosted zones\n2. Select the public hosted zone\n3. Choose Query logging > Enable\n4. Select the target CloudWatch Logs log group and click Save\n5. If prompted, allow Route 53 to write to the log group (approve the CloudWatch Logs resource policy)",
"Terraform": "```hcl\n# Enable Route53 query logging for a public hosted zone\nresource \"aws_route53_query_log\" \"example\" {\n zone_id = \"<example_resource_id>\" # Critical: target hosted zone\n cloudwatch_log_group_arn = \"<example_log_group_arn>\" # Critical: delivers logs to this CloudWatch Logs group\n}\n```"
},
"Recommendation": {
"Text": "Enable CloudWatch logs and define metrics and uses cases for the events recorded.",
"Url": "https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/monitoring-hosted-zones-with-cloudwatch.html"
"Text": "Enable **Route 53 query logging** for public zones to a centralized **CloudWatch Logs** group. Apply **least privilege** to log delivery, set **retention** and **metric filters/alerts**, and stream to your **SIEM**. Use **defense in depth** by correlating DNS logs with network and endpoint telemetry and regularly review baselines.",
"Url": "https://hub.prowler.com/check/route53_public_hosted_zones_cloudwatch_logging_enabled"
}
},
"Categories": [
"forensics-ready"
"logging"
],
"DependsOn": [],
"RelatedTo": [],

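A brief boto3 sketch of the query-logging evaluation described above follows; it is illustrative only and omits pagination.

```python
# Hedged sketch: a public hosted zone passes when at least one query-logging config exists.
import boto3

route53 = boto3.client("route53")
for zone in route53.list_hosted_zones()["HostedZones"]:
    if zone.get("Config", {}).get("PrivateZone"):
        continue  # only public hosted zones are in scope
    zone_id = zone["Id"].removeprefix("/hostedzone/")
    configs = route53.list_query_logging_configs(HostedZoneId=zone_id)["QueryLoggingConfigs"]
    print(f"{'PASS' if configs else 'FAIL'} {zone['Name']} query_logging_configs={len(configs)}")
```
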
View File

@@ -1,26 +1,35 @@
{
"Provider": "aws",
"CheckID": "sqs_queues_not_publicly_accessible",
"CheckTitle": "Check if SQS queues have policy set as Public",
"CheckType": [],
"CheckTitle": "SQS queue policy does not allow public access",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices/Network Reachability",
"Software and Configuration Checks/Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"TTPs/Initial Access/Unauthorized Access",
"Effects/Data Exposure"
],
"ServiceName": "sqs",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:sqs:region:account-id:queue",
"ResourceIdTemplate": "",
"Severity": "critical",
"ResourceType": "AwsSqsQueue",
"Description": "Check if SQS queues have policy set as Public",
"Risk": "Sensitive information could be disclosed",
"RelatedUrl": "https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-basic-examples-of-sqs-policies.html",
"Description": "Amazon SQS queue policies are assessed for **public access**. The finding highlights queues with `Allow` statements using a wildcard `Principal` without restrictive conditions, compared to queues that only grant access to the owning account or explicitly trusted principals.",
"Risk": "**Public SQS access** can expose message data (**confidentiality**), enable unauthorized send/receive or tampering (**integrity**), and allow purge/delete operations that disrupt processing (**availability**). It may also trigger unbounded message ingestion, causing cost spikes and consumer overload.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/SQS/sqs-queue-exposed.html",
"https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-basic-examples-of-sqs-policies.html"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/SQS/sqs-queue-exposed.html",
"Terraform": "https://docs.prowler.com/checks/aws/general-policies/ensure-sqs-queue-policy-is-not-public-by-only-allowing-specific-services-or-principals-to-access-it#terraform"
"CLI": "aws sqs set-queue-attributes --queue-url <example_queue_url> --attributes Policy='{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"<example_account_id>\"},\"Action\":\"sqs:*\",\"Resource\":\"<example_queue_arn>\"}]}'",
"NativeIaC": "```yaml\n# CloudFormation: Restrict SQS policy to a specific principal (not public)\nResources:\n QueuePolicy:\n Type: AWS::SQS::QueuePolicy\n Properties:\n Queues:\n - \"<example_queue_url>\"\n PolicyDocument:\n Version: \"2012-10-17\"\n Statement:\n - Effect: Allow\n Principal:\n AWS: \"<example_account_id>\" # CRITICAL: restrict access to a specific account (removes public \"*\")\n Action: \"sqs:*\"\n Resource: \"<example_queue_arn>\"\n```",
"Other": "1. Open the Amazon SQS console and select the queue\n2. Go to Permissions (Access policy) and click Edit\n3. In the JSON policy, replace any \"Principal\": \"*\" with \"Principal\": { \"AWS\": \"<your_account_id>\" } or remove those public statements\n4. Save changes",
"Terraform": "```hcl\n# Restrict SQS policy to a specific principal (not public)\nresource \"aws_sqs_queue_policy\" \"<example_resource_name>\" {\n queue_url = \"<example_queue_url>\"\n policy = jsonencode({\n Version = \"2012-10-17\"\n Statement = [{\n Effect = \"Allow\"\n Principal = { AWS = \"<example_account_id>\" } # CRITICAL: restrict to a specific principal (removes public \"*\")\n Action = \"sqs:*\"\n Resource = \"<example_queue_arn>\"\n }]\n })\n}\n```"
},
"Recommendation": {
"Text": "Review service with overly permissive policies. Adhere to Principle of Least Privilege.",
"Url": "https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-basic-examples-of-sqs-policies.html"
"Text": "Apply **least privilege** on SQS resource policies:\n- Avoid `Principal: *`; grant access only to specific accounts, roles, or services\n- Add restrictive conditions to tightly scope access\n- Prefer private connectivity and defense-in-depth controls\n- Review policies and audit activity regularly to prevent drift",
"Url": "https://hub.prowler.com/check/sqs_queues_not_publicly_accessible"
}
},
"Categories": [

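A small illustrative sketch of the public-policy evaluation described above follows (boto3); it treats an `Allow` statement with a wildcard principal and no `Condition` as public, and does not handle every policy shape (for example, list-valued principals).

```python
# Hedged sketch: flag Allow statements with a wildcard Principal and no Condition.
import json

import boto3

sqs = boto3.client("sqs")
for queue_url in sqs.list_queues().get("QueueUrls", []):
    attributes = sqs.get_queue_attributes(
        QueueUrl=queue_url, AttributeNames=["Policy"]
    ).get("Attributes", {})
    policy = json.loads(attributes["Policy"]) if "Policy" in attributes else {}
    statements = policy.get("Statement", [])
    if isinstance(statements, dict):
        statements = [statements]
    for statement in statements:
        principal = statement.get("Principal")
        wildcard = principal == "*" or (
            isinstance(principal, dict) and principal.get("AWS") == "*"
        )
        if statement.get("Effect") == "Allow" and wildcard and not statement.get("Condition"):
            print(f"FAIL {queue_url}: public statement {statement.get('Sid', '<no Sid>')}")
```
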
View File

@@ -1,26 +1,35 @@
{
"Provider": "aws",
"CheckID": "sqs_queues_server_side_encryption_enabled",
"CheckTitle": "Check if SQS queues have Server Side Encryption enabled",
"CheckType": [],
"CheckTitle": "SQS queue has server-side encryption enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices",
"Industry and Regulatory Standards/AWS Foundational Security Best Practices",
"Effects/Data Exposure"
],
"ServiceName": "sqs",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:sqs:region:account-id:queue",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsSqsQueue",
"Description": "Check if SQS queues have Server Side Encryption enabled",
"Risk": "If not enabled sensitive information in transit is not protected.",
"RelatedUrl": "https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-configure-sse-existing-queue.html",
"Description": "**Amazon SQS queues** are evaluated for **server-side encryption** configured with a **KMS key** (`SSE-KMS`) protecting message bodies at rest.\n\nQueues without an associated KMS key are identified.",
"Risk": "Without **KMS-backed SSE**, message bodies lack tenant-controlled keys and detailed audit. Secrets, tokens, or PII in messages become easier to access through **privilege misuse**, misconfiguration, or unintended integrations, reducing **confidentiality** and limiting containment since you cannot revoke access via key disable/rotation.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-server-side-encryption.html",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/SQS/queue-encrypted-with-kms-customer-master-keys.html",
"https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-configure-sse-existing-queue.html"
],
"Remediation": {
"Code": {
"CLI": "aws sqs set-queue-attributes --queue-url <QUEUE_URL> --attributes KmsMasterKeyId=<KEY>",
"NativeIaC": "https://docs.prowler.com/checks/aws/general-policies/general_16-encrypt-sqs-queue#cloudformation",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/SQS/queue-encrypted-with-kms-customer-master-keys.html",
"Terraform": "https://docs.prowler.com/checks/aws/general-policies/general_16-encrypt-sqs-queue#terraform"
"CLI": "aws sqs set-queue-attributes --queue-url <QUEUE_URL> --attributes KmsMasterKeyId=<KMS_KEY_ID_OR_ALIAS>",
"NativeIaC": "```yaml\n# CloudFormation: Enable SSE-KMS for an SQS queue\nResources:\n <example_resource_name>:\n Type: AWS::SQS::Queue\n Properties:\n KmsMasterKeyId: alias/aws/sqs # Critical: sets a KMS key, enabling SSE-KMS so the queue reports a kms_key_id\n```",
"Other": "1. In the AWS Console, go to Amazon SQS > Queues\n2. Select the queue and click Edit\n3. Expand Encryption\n4. Set Server-side encryption to Enabled\n5. For AWS KMS key, select alias/aws/sqs (or choose a specific KMS key)\n6. Click Save",
"Terraform": "```hcl\n# Enable SSE-KMS for an SQS queue\nresource \"aws_sqs_queue\" \"<example_resource_name>\" {\n kms_master_key_id = \"alias/aws/sqs\" # Critical: sets a KMS key, enabling SSE-KMS so the queue reports a kms_key_id\n}\n```"
},
"Recommendation": {
"Text": "Enable Encryption. Use a CMK where possible. It will provide additional management and privacy benefits",
"Url": "https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-configure-sse-existing-queue.html"
"Text": "Enable **SSE-KMS** on all queues using a **customer-managed KMS key**.\n- Apply **least privilege** to key and queue policies; restrict `Encrypt/Decrypt`\n- Enforce key rotation and separation of duties\n- Tune data key reuse for security vs. cost\n- Monitor key and queue access to support **defense in depth**",
"Url": "https://hub.prowler.com/check/sqs_queues_server_side_encryption_enabled"
}
},
"Categories": [

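For the SSE metadata above, a short boto3 sketch follows; it only reports whether a KMS key is associated, matching the SSE-KMS scope described in the updated description.

```python
# Hedged sketch: a queue passes when a KMS key is associated (SSE-KMS).
import boto3

sqs = boto3.client("sqs")
for queue_url in sqs.list_queues().get("QueueUrls", []):
    attributes = sqs.get_queue_attributes(
        QueueUrl=queue_url, AttributeNames=["KmsMasterKeyId"]
    ).get("Attributes", {})
    kms_key = attributes.get("KmsMasterKeyId")
    print(f"{'PASS' if kms_key else 'FAIL'} {queue_url} KmsMasterKeyId={kms_key}")
```
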
View File

@@ -1,28 +1,33 @@
{
"Provider": "aws",
"CheckID": "stepfunctions_statemachine_logging_enabled",
"CheckTitle": "Step Functions state machines should have logging enabled",
"CheckTitle": "Step Functions state machine has logging enabled",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices"
"Software and Configuration Checks/AWS Security Best Practices/Runtime Behavior Analysis"
],
"ServiceName": "stepfunctions",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:states:{region}:{account-id}:stateMachine/{stateMachine-id}",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "AwsStepFunctionStateMachine",
"Description": "This control checks if AWS Step Functions state machines have logging enabled. The control fails if the state machine doesn't have the loggingConfiguration property defined.",
"Risk": "Without logging enabled, important operational data may be lost, making it difficult to troubleshoot issues, monitor performance, and ensure compliance with auditing requirements.",
"RelatedUrl": "https://docs.aws.amazon.com/step-functions/latest/dg/logging.html",
"Description": "**AWS Step Functions state machines** are configured to emit **execution logs** to CloudWatch Logs via a defined `loggingConfiguration` with a `level` set above `OFF`.",
"Risk": "Without **execution logs**, workflow failures and anomalies are **undetectable**, increasing MTTR and risking silent data loss. Missing audit trails weaken **integrity** oversight and complicate **forensics**, enabling misuse of invoked services to go unnoticed and creating **compliance** gaps.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/step-functions/latest/dg/logging.html",
"https://docs.aws.amazon.com/securityhub/latest/userguide/stepfunctions-controls.html#stepfunctions-1",
"https://support.icompaas.com/support/solutions/articles/62000233757-ensure-step-functions-state-machines-should-have-logging-enabled"
],
"Remediation": {
"Code": {
"CLI": "aws stepfunctions update-state-machine --state-machine-arn <state-machine-arn> --logging-configuration file://logging-config.json",
"NativeIaC": "",
"Other": "https://docs.aws.amazon.com/securityhub/latest/userguide/stepfunctions-controls.html#stepfunctions-1",
"Terraform": "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/sfn_state_machine#logging_configuration"
"NativeIaC": "```yaml\nResources:\n <example_resource_name>:\n Type: AWS::StepFunctions::StateMachine\n Properties:\n RoleArn: arn:aws:iam::<account-id>:role/<example_role_name>\n DefinitionString: |\n {\"StartAt\":\"Pass\",\"States\":{\"Pass\":{\"Type\":\"Pass\",\"End\":true}}}\n LoggingConfiguration:\n Destinations:\n - CloudWatchLogsLogGroup:\n LogGroupArn: arn:aws:logs:<region>:<account-id>:log-group:<log-group-name>:* # Critical: target CloudWatch Logs group\n Level: ERROR # Critical: enables logging (not OFF)\n```",
"Other": "1. Open AWS Console > Step Functions > State machines\n2. Select the state machine and click Edit\n3. In Logging, enable logging\n4. Choose an existing CloudWatch Logs log group\n5. Set Level to Error (or All)\n6. Save changes",
"Terraform": "```hcl\nresource \"aws_sfn_state_machine\" \"<example_resource_name>\" {\n name = \"<example_resource_name>\"\n role_arn = \"arn:aws:iam::<account-id>:role/<example_role_name>\"\n definition = jsonencode({ StartAt = \"Pass\", States = { Pass = { Type = \"Pass\", End = true } } })\n\n logging_configuration {\n log_destination = \"arn:aws:logs:<region>:<account-id>:log-group:<log-group-name>:*\" # Critical: CloudWatch Logs destination\n level = \"ERROR\" # Critical: enables logging\n }\n}\n```"
},
"Recommendation": {
"Text": "Configure logging for your Step Functions state machines to ensure that operational data is captured and available for debugging, monitoring, and auditing purposes.",
"Url": "https://docs.aws.amazon.com/step-functions/latest/dg/logging.html"
"Text": "Enable CloudWatch logging on all state machines at an appropriate `level` (e.g., `ERROR` or `ALL`) and send logs to a protected log group. Apply **least privilege** to log write/read, set **retention**, and avoid sensitive data unless required using `includeExecutionData`. Use X-Ray tracing for **defense in depth**.",
"Url": "https://hub.prowler.com/check/stepfunctions_statemachine_logging_enabled"
}
},
"Categories": [

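A minimal boto3 sketch of the logging evaluation described above follows; a state machine passes when its `loggingConfiguration.level` is anything other than `OFF`.

```python
# Hedged sketch: pass when the state machine's logging level is not OFF.
import boto3

sfn = boto3.client("stepfunctions")
for machine in sfn.list_state_machines()["stateMachines"]:
    detail = sfn.describe_state_machine(stateMachineArn=machine["stateMachineArn"])
    level = detail.get("loggingConfiguration", {}).get("level", "OFF")
    print(f"{'PASS' if level != 'OFF' else 'FAIL'} {machine['name']} level={level}")
```
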
View File

@@ -1,29 +1,39 @@
{
"Provider": "azure",
"CheckID": "mysql_flexible_server_audit_log_connection_activated",
"CheckTitle": "Ensure server parameter 'audit_log_events' has 'CONNECTION' set for MySQL Database Server",
"CheckTitle": "MySQL flexible server has audit_log_events including CONNECTION",
"CheckType": [],
"ServiceName": "mysql",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Microsoft.DBforMySQL/flexibleServers",
"Description": "Set audit_log_enabled to include CONNECTION on MySQL Servers.",
"Risk": "Enabling CONNECTION helps MySQL Database to log items such as successful and failed connection attempts to the server. Log data can be used to identify, troubleshoot, and repair configuration errors and suboptimal performance.",
"RelatedUrl": "https://docs.microsoft.com/en-us/azure/mysql/single-server/how-to-configure-audit-logs-portal",
"Severity": "high",
"ResourceType": "microsoft.dbformysql/flexibleservers",
"Description": "**Azure Database for MySQL Flexible Server** audit configuration includes the `CONNECTION` event in `audit_log_events`.",
"Risk": "Without **CONNECTION auditing**, login attempts are invisible, weakening detection of **brute-force**, **credential stuffing**, and anomalous access. This enables unnoticed account takeover and lateral movement, impacting **confidentiality** and **integrity**, and hinders **forensics** and timely response.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-logging-threat-detection#lt-3-enable-logging-for-security-investigation",
"https://docs.microsoft.com/en-us/azure/mysql/single-server/how-to-configure-audit-logs-portal",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/azure/MySQL/configure-audit-log-events-for-mysql-flexible-servers.html",
"https://www.tenable.com/audits/items/CIS_Microsoft_Azure_Foundations_v2.0.0_L2.audit:06ec721d4c0ea9169db2b0c6876c5f38",
"https://orca.security/resources/cloud-risk-encyclopedia/audit_log_events-configuration-does-not-have-connection-set-for-mysql-database-server/",
"https://video2.skills-academy.com/en-us/azure/mysql/flexible-server/scripts/sample-cli-audit-logs"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://www.tenable.com/audits/items/CIS_Microsoft_Azure_Foundations_v2.0.0_L2.audit:06ec721d4c0ea9169db2b0c6876c5f38",
"Terraform": ""
"CLI": "az mysql flexible-server parameter set --resource-group <RESOURCE_GROUP> --server-name <SERVER_NAME> --name audit_log_events --value CONNECTION",
"NativeIaC": "```bicep\n// Set MySQL Flexible Server audit_log_events to include CONNECTION\nresource cfg 'Microsoft.DBforMySQL/flexibleServers/configurations@2021-05-01' = {\n name: '<example_resource_name>/audit_log_events'\n properties: {\n value: 'CONNECTION' // Critical: ensures 'CONNECTION' is logged, making the check PASS\n }\n}\n```",
"Other": "1. In the Azure Portal, go to Azure Database for MySQL flexible server\n2. Select your server, then go to Server parameters\n3. Search for audit_log_events\n4. Set its value to CONNECTION\n5. Click Save",
"Terraform": "```hcl\nresource \"azurerm_mysql_flexible_server_configuration\" \"<example_resource_name>\" {\n name = \"audit_log_events\"\n server_id = \"<example_resource_id>\"\n value = \"CONNECTION\" # Critical: includes CONNECTION in audit logs to pass the check\n}\n```"
},
"Recommendation": {
"Text": "1. From Azure Home select the Portal Menu. 2. Select Azure Database for MySQL servers. 3. Select a database. 4. Under Settings, select Server parameters. 5. Update audit_log_enabled parameter to ON. 6. Update audit_log_events parameter to have at least CONNECTION checked. 7. Click Save. 8. Under Monitoring, select Diagnostic settings. 9. Select + Add diagnostic setting. 10. Provide a diagnostic setting name. 11. Under Categories, select MySQL Audit Logs. 12. Specify destination details. 13. Click Save.",
"Url": "https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-logging-threat-detection#lt-3-enable-logging-for-security-investigation"
"Text": "Include `CONNECTION` in `audit_log_events` to capture login activity. Centralize and retain **audit logs**, restrict access by **least privilege**, and protect logs from tampering. Monitor for anomalous sign-in patterns and alert. Pair with **defense-in-depth** controls (MFA, network allow-listing) to reduce exposure.",
"Url": "https://hub.prowler.com/check/mysql_flexible_server_audit_log_connection_activated"
}
},
"Categories": [],
"Categories": [
"logging"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": "There are further costs incurred for storage of logs. For high traffic databases these logs will be significant. Determine your organization's needs before enabling."

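As an illustration of the parameter read described above, a small Python sketch using the Azure CLI follows; it assumes `az` is installed and authenticated, and the resource group and server name are placeholders.

```python
# Hedged sketch: read audit_log_events via the Azure CLI and pass when CONNECTION is included.
import json
import subprocess

result = subprocess.run(
    ["az", "mysql", "flexible-server", "parameter", "show",
     "--resource-group", "<RESOURCE_GROUP>", "--server-name", "<SERVER_NAME>",
     "--name", "audit_log_events"],
    capture_output=True, text=True, check=True,
)
events = json.loads(result.stdout).get("value") or ""
print(f"{'PASS' if 'CONNECTION' in events.upper() else 'FAIL'} audit_log_events={events}")
```
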
View File

@@ -1,29 +1,41 @@
{
"Provider": "azure",
"CheckID": "mysql_flexible_server_audit_log_enabled",
"CheckTitle": "Ensure server parameter 'audit_log_enabled' is set to 'ON' for MySQL Database Server",
"CheckTitle": "Azure Database for MySQL flexible server has 'audit_log_enabled' set to 'ON'",
"CheckType": [],
"ServiceName": "mysql",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Microsoft.DBforMySQL/flexibleServers",
"Description": "Enable audit_log_enabled on MySQL Servers.",
"Risk": "Enabling audit_log_enabled helps MySQL Database to log items such as connection attempts to the server, DDL/DML access, and more. Log data can be used to identify, troubleshoot, and repair configuration errors and suboptimal performance.",
"RelatedUrl": "https://docs.microsoft.com/en-us/azure/mysql/single-server/how-to-configure-audit-logs-portal",
"ResourceType": "microsoft.dbformysql/flexibleservers",
"Description": "Azure Database for MySQL Flexible Server with `audit_log_enabled` set to `ON` generates **audit logs** for connections, authentication, DDL/DML, and administrative actions.",
"Risk": "Missing **audit logs** reduces **accountability** and obscures activity affecting **confidentiality** and **integrity**. Unauthorized logins, privilege abuse, or suspicious queries may go undetected, impeding **forensics**, slowing incident response, and enabling covert data exfiltration.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://learn.microsoft.com/en-us/azure/mysql/flexible-server/tutorial-configure-audit",
"https://www.tenable.com/audits/items/CIS_Microsoft_Azure_Foundations_v1.5.0_L2.audit:c073639a1ce546b535ba73afbf6542aa",
"https://portal.azure.com.",
"https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-logging-threat-detection#lt-3-enable-logging-for-security-investigation",
"https://www.datasunrise.com/guides/db-audit-trailing-for-ms-azure-mysql/",
"https://docs.microsoft.com/en-us/azure/mysql/single-server/how-to-configure-audit-logs-portal",
"https://learn.microsoft.com/en-us/azure/mysql/flexible-server/how-to-configure-server-parameters-cli",
"https://learn.microsoft.com/en-us/azure/mysql/flexible-server/scripts/sample-cli-audit-logs"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://www.tenable.com/audits/items/CIS_Microsoft_Azure_Foundations_v1.5.0_L2.audit:c073639a1ce546b535ba73afbf6542aa",
"Terraform": ""
"CLI": "az mysql flexible-server parameter set --name audit_log_enabled --resource-group <RESOURCE_GROUP> --server-name <SERVER_NAME> --value ON",
"NativeIaC": "```bicep\n// Enable audit logs on an existing MySQL Flexible Server\nresource server 'Microsoft.DBforMySQL/flexibleServers@2021-12-01' existing = {\n name: '<example_resource_name>'\n}\n\nresource audit 'Microsoft.DBforMySQL/flexibleServers/configurations@2021-12-01' = {\n name: 'audit_log_enabled'\n parent: server\n properties: {\n value: 'ON' // CRITICAL: turns audit_log_enabled ON to pass the check\n }\n}\n```",
"Other": "1. Sign in to the Azure portal\n2. Go to: Azure Database for MySQL flexible server > Your server\n3. Under Settings, select Server parameters\n4. Find audit_log_enabled and set it to ON\n5. Click Save",
"Terraform": "```hcl\n# Enable audit logs on MySQL Flexible Server\nresource \"azurerm_mysql_flexible_server_configuration\" \"<example_resource_name>\" {\n name = \"audit_log_enabled\"\n resource_group_name = \"<example_resource_name>\"\n server_name = \"<example_resource_name>\"\n value = \"ON\" # CRITICAL: enables audit logging to pass the check\n}\n```"
},
"Recommendation": {
"Text": "1. Login to Azure Portal using https://portal.azure.com. 2. Select Azure Database for MySQL Servers. 3. Select a database. 4. Under Settings, select Server parameters. 5. Update audit_log_enabled parameter to ON 6. Under Monitoring, select Diagnostic settings. 7. Select + Add diagnostic setting. 8. Provide a diagnostic setting name. 9. Under Categories, select MySQL Audit Logs. 10. Specify destination details. 11. Click Save.",
"Url": "https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-logging-threat-detection#lt-3-enable-logging-for-security-investigation"
"Text": "Enable **audit logging** (`audit_log_enabled=ON`) and select events that matter. Export `MySqlAuditLogs` to a centralized store, enforce **least privilege** on log access, set retention, and create alerts for anomalies. Regularly review logs as part of **defense in depth**.",
"Url": "https://hub.prowler.com/check/mysql_flexible_server_audit_log_enabled"
}
},
"Categories": [],
"Categories": [
"logging"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

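An equivalent read with the Python management SDK is sketched below; it assumes the `azure-identity` and `azure-mgmt-rdbms` packages and that the flexible-server client exposes `configurations.get` as shown (illustrative only, with placeholder identifiers).

```python
# Hedged sketch: the server passes when audit_log_enabled resolves to ON.
from azure.identity import DefaultAzureCredential
from azure.mgmt.rdbms.mysql_flexibleservers import MySQLManagementClient

client = MySQLManagementClient(DefaultAzureCredential(), "<SUBSCRIPTION_ID>")
config = client.configurations.get("<RESOURCE_GROUP>", "<SERVER_NAME>", "audit_log_enabled")
enabled = (config.value or "").upper() == "ON"
print(f"{'PASS' if enabled else 'FAIL'} audit_log_enabled={config.value}")
```
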
View File

@@ -1,29 +1,38 @@
{
"Provider": "azure",
"CheckID": "mysql_flexible_server_minimum_tls_version_12",
"CheckTitle": "Ensure 'TLS Version' is set to 'TLSV1.2' for MySQL flexible Database Server",
"CheckTitle": "MySQL flexible server enforces TLS 1.2 or higher",
"CheckType": [],
"ServiceName": "mysql",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "high",
"ResourceType": "Microsoft.DBforMySQL/flexibleServers",
"Description": "Ensure TLS version on MySQL flexible servers is set to the default value.",
"Risk": "TLS connectivity helps to provide a new layer of security by connecting database server to client applications using Transport Layer Security (TLS). Enforcing TLS connections between database server and client applications helps protect against 'man in the middle' attacks by encrypting the data stream between the server and application.",
"RelatedUrl": "https://docs.microsoft.com/en-us/azure/mysql/concepts-ssl-connection-security",
"Severity": "medium",
"ResourceType": "microsoft.dbformysql/flexibleservers",
"Description": "**Azure Database for MySQL Flexible Server** uses the `tls_version` setting to permit only **modern TLS** for client connections, requiring `TLSv1.2+` and excluding `TLSv1.0` and `TLSv1.1`.",
"Risk": "Allowing legacy TLS (`TLSv1.0`/`TLSv1.1`) weakens **confidentiality** and **integrity** of data in transit. Attackers can force downgrades and perform **man-in-the-middle** interception, exposing credentials and queries or altering results, leading to unauthorized access and data exfiltration.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://portal.azure.com",
"https://www.trendmicro.com/cloudoneconformity/knowledge-base/azure/MySQL/mysql-flexible-server-tls-version.html",
"https://docs.microsoft.com/en-us/azure/mysql/howto-configure-ssl",
"https://docs.microsoft.com/en-us/azure/mysql/concepts-ssl-connection-security",
"https://docs.prowler.com/checks/azure/azure-general-policies/ensure-mysql-is-using-the-latest-version-of-tls-encryption#terraform"
],
"Remediation": {
"Code": {
"CLI": "az mysql flexible-server parameter set --name tls_version --resource-group <resourceGroupName> --server-name <serverName> --value TLSV1.2",
"NativeIaC": "",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/azure/MySQL/mysql-flexible-server-tls-version.html",
"Terraform": "https://docs.prowler.com/checks/azure/azure-general-policies/ensure-mysql-is-using-the-latest-version-of-tls-encryption#terraform"
"CLI": "az mysql flexible-server parameter set --resource-group <resourceGroupName> --server-name <serverName> --name tls_version --value TLSv1.2",
"NativeIaC": "```bicep\n// Set MySQL Flexible Server to enforce TLS 1.2\nresource tlsVersion 'Microsoft.DBforMySQL/flexibleServers/configurations@2022-01-01' = {\n name: '<example_resource_name>/tls_version'\n properties: {\n value: 'TLSv1.2' // Critical: enforces minimum TLS 1.2 and rejects TLS 1.0/1.1\n }\n}\n```",
"Other": "1. In Azure portal, go to Azure Database for MySQL flexible server <example_resource_name>\n2. Select Server parameters\n3. Search for tls_version\n4. Set the value to TLSv1.2\n5. Click Save",
"Terraform": "```hcl\n# Enforce TLS 1.2 on MySQL Flexible Server\nresource \"azurerm_mysql_flexible_server_configuration\" \"tls\" {\n name = \"tls_version\"\n server_id = \"<example_resource_id>\"\n value = \"TLSv1.2\" # Critical: sets minimum TLS to 1.2 (no 1.0/1.1)\n}\n```"
},
"Recommendation": {
"Text": "1. Login to Azure Portal using https://portal.azure.com 2. Go to Azure Database for MySQL flexible servers 3. For each database, click on Server parameters under Settings 4. In the search box, type in tls_version 5. Click on the VALUE dropdown, and ensure only TLSV1.2 is selected for tls_version",
"Url": "https://docs.microsoft.com/en-us/azure/mysql/howto-configure-ssl"
"Text": "Enforce a **minimum TLS** of `TLSv1.2` (prefer `TLSv1.3`) and disable `TLSv1.0`/`TLSv1.1`. Ensure clients and drivers support modern TLS, deprecate weak cipher suites, and validate in staging. Apply **defense in depth** with private connectivity and restricted network access.",
"Url": "https://hub.prowler.com/check/mysql_flexible_server_minimum_tls_version_12"
}
},
"Categories": [],
"Categories": [
"encryption"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

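A short Python sketch of the TLS-version evaluation described above follows, again via the Azure CLI with placeholder identifiers; any value that still allows TLSv1 or TLSv1.1 fails.

```python
# Hedged sketch: fail when any legacy protocol (TLSv1 / TLSv1.1) is allowed in tls_version.
import json
import subprocess

result = subprocess.run(
    ["az", "mysql", "flexible-server", "parameter", "show",
     "--resource-group", "<RESOURCE_GROUP>", "--server-name", "<SERVER_NAME>",
     "--name", "tls_version"],
    capture_output=True, text=True, check=True,
)
allowed = {
    version.strip().upper()
    for version in (json.loads(result.stdout).get("value") or "").split(",")
    if version.strip()
}
legacy = allowed & {"TLSV1", "TLSV1.1"}
print(f"{'FAIL' if legacy or not allowed else 'PASS'} tls_version={sorted(allowed)}")
```
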
View File

@@ -1,29 +1,43 @@
{
"Provider": "azure",
"CheckID": "mysql_flexible_server_ssl_connection_enabled",
"CheckTitle": "Ensure 'Enforce SSL connection' is set to 'Enabled' for Standard MySQL Database Server",
"CheckTitle": "MySQL Flexible Server enforces SSL connections",
"CheckType": [],
"ServiceName": "mysql",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "high",
"ResourceType": "Microsoft.DBforMySQL/flexibleServers",
"Description": "Enable SSL connection on MYSQL Servers.",
"Risk": "SSL connectivity helps to provide a new layer of security by connecting database server to client applications using Secure Sockets Layer (SSL). Enforcing SSL connections between database server and client applications helps protect against 'man in the middle' attacks by encrypting the data stream between the server and application.",
"RelatedUrl": "https://learn.microsoft.com/en-us/azure/mysql/single-server/concepts-ssl-connection-security",
"ResourceType": "microsoft.dbformysql/flexibleservers",
"Description": "**Azure Database for MySQL Flexible Server** uses the `require_secure_transport` parameter to enforce **encrypted connections**. This evaluation determines whether the server is configured to require **TLS/SSL** for all client sessions.",
"Risk": "Without **TLS enforcement**, credentials and queries may traverse the network in cleartext, enabling **man-in-the-middle**, **credential theft**, tampering, and data exfiltration. This directly impacts **confidentiality** and **integrity** and can lead to compliance violations.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://portal.azure.com",
"https://forum.ghost.org/t/database-connection-tls-issue-with-azure-database-for-mysql-flexible-server/45340",
"https://learn.microsoft.com/en-us/azure/mysql/single-server/concepts-ssl-connection-security",
"https://learn.microsoft.com/en-us/azure/mysql/flexible-server/concepts-networking",
"https://docs.microsoft.com/en-us/azure/mysql/single-server/how-to-configure-ssl",
"https://stackoverflow.com/questions/tagged/azure-mysql-database",
"https://learn.microsoft.com/en-us/azure/mysql/flexible-server/how-to-troubleshoot-common-connection-issues",
"https://www.tenable.com/policies/",
"https://stackoverflow.com/questions/44035588/why-does-connection-to-my-mysql-server-in-azure-fail-if-my-app-does-not-have-ssl",
"https://learn.microsoft.com/en-us/azure/mysql/flexible-server/how-to-connect-tls-ssl"
],
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "https://www.tenable.com/policies/[type]/AC_AZURE_0131",
"Terraform": ""
"CLI": "az mysql flexible-server parameter set --resource-group <RESOURCE_GROUP> --server-name <SERVER_NAME> --name require_secure_transport --value ON",
"NativeIaC": "```bicep\n// Enforce SSL/TLS by enabling require_secure_transport on MySQL Flexible Server\nresource reqSecureTransport 'Microsoft.DBforMySQL/flexibleServers/configurations@2023-12-01' = {\n name: '<example_resource_name>/require_secure_transport'\n properties: {\n value: 'ON' // Critical: turns on SSL enforcement (require_secure_transport)\n }\n}\n```",
"Other": "1. Sign in to the Azure portal\n2. Go to: Azure Database for MySQL Flexible Server > <your server>\n3. Select Server parameters\n4. Find require_secure_transport and set it to ON\n5. Click Save\n6. Verify by refreshing Server parameters and confirming the value is ON",
"Terraform": "```hcl\n# Enforce SSL/TLS on MySQL Flexible Server\nresource \"azurerm_mysql_flexible_server_configuration\" \"secure\" {\n name = \"require_secure_transport\"\n server_id = \"<example_resource_id>\"\n value = \"ON\" # Critical: enables SSL enforcement\n}\n```"
},
"Recommendation": {
"Text": "1. Login to Azure Portal using https://portal.azure.com 2. Go to Azure Database for MySQL servers 3. For each database, click on Connection security 4. In SSL settings, click on ENABLED to Enforce SSL connections",
"Url": "https://docs.microsoft.com/en-us/azure/mysql/single-server/how-to-configure-ssl"
"Text": "Set `require_secure_transport=ON` and permit only **TLS 1.2+**. Ensure clients validate certificates and use FQDNs. Combine with **private access** (Private Link or VNet), restrictive firewall rules, and **least privilege** to reduce exposure. *Avoid legacy TLS or plaintext connections.*",
"Url": "https://hub.prowler.com/check/mysql_flexible_server_ssl_connection_enabled"
}
},
"Categories": [],
"Categories": [
"encryption"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""

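Finally, a small Python/Azure CLI sketch of the `require_secure_transport` evaluation above; placeholders stand in for the resource group and server name.

```python
# Hedged sketch: the server passes when require_secure_transport is ON.
import json
import subprocess

result = subprocess.run(
    ["az", "mysql", "flexible-server", "parameter", "show",
     "--resource-group", "<RESOURCE_GROUP>", "--server-name", "<SERVER_NAME>",
     "--name", "require_secure_transport"],
    capture_output=True, text=True, check=True,
)
value = (json.loads(result.stdout).get("value") or "").upper()
print(f"{'PASS' if value == 'ON' else 'FAIL'} require_secure_transport={value}")
```
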
View File

@@ -90,7 +90,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">3.9.1,<3.13"
version = "5.16.0"
version = "5.17.0"
[project.scripts]
prowler = "prowler.__main__:prowler"