Compare commits

..

1 Commits

Author SHA1 Message Date
dependabot[bot] 4b21170d6d chore(deps): bump docker/build-push-action from 7.0.0 to 7.1.0
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 7.0.0 to 7.1.0.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/d08e5c354a6adb9ed34480a06d141179aa583294...bcafcacb16a39f128d818304e6c9c0c18556b85f)

---
updated-dependencies:
- dependency-name: docker/build-push-action
  dependency-version: 7.1.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-05-05 22:11:17 +00:00
584 changed files with 2213 additions and 16610 deletions
+1 -1
View File
@@ -145,7 +145,7 @@ SENTRY_RELEASE=local
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}
#### Prowler release version ####
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.26.2
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.26.0
# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
-15
View File
@@ -1,15 +0,0 @@
# These are supported funding model platforms
github: [prowler-cloud]
# patreon: # Replace with a single Patreon username
# open_collective: # Replace with a single Open Collective username
# ko_fi: # Replace with a single Ko-fi username
# tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
# community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
# liberapay: # Replace with a single Liberapay username
# issuehunt: # Replace with a single IssueHunt username
# lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
# polar: # Replace with a single Polar username
# buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
# thanks_dev: # Replace with a single thanks.dev username
# custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
+10
View File
@@ -5,10 +5,20 @@ on:
branches:
- 'master'
- 'v5.*'
paths:
- 'api/**'
- '.github/workflows/api-tests.yml'
- '.github/workflows/api-code-quality.yml'
- '.github/actions/setup-python-poetry/**'
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- 'api/**'
- '.github/workflows/api-tests.yml'
- '.github/workflows/api-code-quality.yml'
- '.github/actions/setup-python-poetry/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -150,7 +150,7 @@ jobs:
- name: Build and push API container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
+1 -1
View File
@@ -118,7 +118,7 @@ jobs:
- name: Build container
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: ${{ env.API_WORKING_DIR }}
push: false
+8
View File
@@ -5,10 +5,18 @@ on:
branches:
- 'master'
- 'v5.*'
paths:
- 'api/**'
- '.github/workflows/api-tests.yml'
- '.github/actions/setup-python-poetry/**'
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- 'api/**'
- '.github/workflows/api-tests.yml'
- '.github/actions/setup-python-poetry/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -136,7 +136,7 @@ jobs:
- name: Build and push MCP container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: ${{ env.WORKING_DIRECTORY }}
push: true
+1 -1
View File
@@ -111,7 +111,7 @@ jobs:
- name: Build MCP container
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: ${{ env.MCP_WORKING_DIR }}
push: false
@@ -61,7 +61,7 @@ jobs:
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- name: Build ${{ matrix.component }} container (linux/arm64)
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: ${{ matrix.context }}
file: ${{ matrix.dockerfile }}
+1 -2
View File
@@ -37,8 +37,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 1
# zizmor: ignore[artipacked]
persist-credentials: true # Required by tj-actions/changed-files to fetch PR branch
persist-credentials: false
- name: Fetch PR base ref for tj-actions/changed-files
env:
+16
View File
@@ -5,10 +5,26 @@ on:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/sdk-tests.yml'
- '.github/workflows/sdk-code-quality.yml'
- '.github/actions/setup-python-poetry/**'
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/sdk-tests.yml'
- '.github/workflows/sdk-code-quality.yml'
- '.github/actions/setup-python-poetry/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -197,7 +197,7 @@ jobs:
- name: Build and push SDK container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: .
file: ${{ env.DOCKERFILE_PATH }}
+1 -1
View File
@@ -137,7 +137,7 @@ jobs:
- name: Build SDK container
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: .
push: false
+14
View File
@@ -5,10 +5,24 @@ on:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/sdk-tests.yml'
- '.github/actions/setup-python-poetry/**'
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- 'prowler/**'
- 'tests/**'
- 'pyproject.toml'
- 'poetry.lock'
- '.github/workflows/sdk-tests.yml'
- '.github/actions/setup-python-poetry/**'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
@@ -140,7 +140,7 @@ jobs:
- name: Build and push UI container for ${{ matrix.arch }}
id: container-push
if: github.event_name == 'push' || github.event_name == 'release' || github.event_name == 'workflow_dispatch'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: ${{ env.WORKING_DIRECTORY }}
build-args: |
+1 -1
View File
@@ -113,7 +113,7 @@ jobs:
- name: Build UI container
if: steps.check-changes.outputs.any_changed == 'true'
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 # v7.0.0
uses: docker/build-push-action@bcafcacb16a39f128d818304e6c9c0c18556b85f # v7.1.0
with:
context: ${{ env.UI_WORKING_DIR }}
target: prod
+6
View File
@@ -5,10 +5,16 @@ on:
branches:
- 'master'
- 'v5.*'
paths:
- 'ui/**'
- '.github/workflows/ui-tests.yml'
pull_request:
branches:
- 'master'
- 'v5.*'
paths:
- 'ui/**'
- '.github/workflows/ui-tests.yml'
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
+19 -4
View File
@@ -6,7 +6,7 @@
# P40 — security scanners
# P50 — dependency validation
default_install_hook_types: [pre-commit]
default_install_hook_types: [pre-commit, pre-push]
repos:
## GENERAL (prek built-in — no external repo needed)
@@ -62,7 +62,12 @@ repos:
- id: autoflake
name: "SDK - autoflake"
files: { glob: ["{prowler,tests,dashboard,util,scripts}/**/*.py"] }
args: ["--in-place", "--remove-all-unused-imports", "--remove-unused-variable"]
args:
[
"--in-place",
"--remove-all-unused-imports",
"--remove-unused-variable",
]
priority: 20
- repo: https://github.com/pycqa/isort
@@ -174,7 +179,8 @@ repos:
language: system
types: [python]
files: '.*\.py'
exclude: { glob: ["{contrib,skills}/**", "**/.venv/**", "**/*_test.py"] }
exclude:
{ glob: ["{contrib,skills}/**", "**/.venv/**", "**/*_test.py"] }
priority: 40
- id: safety
@@ -184,7 +190,16 @@ repos:
entry: safety check --policy-file .safety-policy.yml
language: system
pass_filenames: false
files: { glob: ["**/pyproject.toml", "**/poetry.lock", "**/requirements*.txt", ".safety-policy.yml"] }
files:
{
glob:
[
"**/pyproject.toml",
"**/poetry.lock",
"**/requirements*.txt",
".safety-policy.yml",
],
}
priority: 40
- id: vulture
+1 -18
View File
@@ -2,28 +2,11 @@
All notable changes to the **Prowler API** are documented in this file.
## [1.27.2] (Prowler UNRELEASED)
### 🐞 Fixed
- Attack Paths: BEDROCK-001 and BEDROCK-002 now target roles trusting `bedrock-agentcore.amazonaws.com` instead of `bedrock.amazonaws.com`, eliminating false positives against regular Bedrock service roles (Agents, Knowledge Bases, model invocation) [(#11141)](https://github.com/prowler-cloud/prowler/pull/11141)
---
## [1.27.1] (Prowler v5.26.1)
### 🐞 Fixed
- `POST /api/v1/scans` was intermittently failing with `Scan matching query does not exist` in the `scan-perform` worker; the Celery task is now published via `transaction.on_commit` so the worker cannot read the Scan before the dispatch-wide transaction commits [(#11122)](https://github.com/prowler-cloud/prowler/pull/11122)
---
## [1.27.0] (Prowler v5.26.0)
## [1.27.0] (Prowler UNRELEASED)
### 🚀 Added
- `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929)
- ASD Essential Eight (AWS) compliance framework support [(#10982)](https://github.com/prowler-cloud/prowler/pull/10982)
### 🔐 Security
+4 -4
View File
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.3.4 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -6754,8 +6754,8 @@ uuid6 = "2024.7.10"
[package.source]
type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
reference = "v5.26"
resolved_reference = "02cdcb29dbcd8eb5ed442c1cd03830000324fb0f"
reference = "eb1b4190ab2d9c265b46c9ede0298b81bdcf35a8"
resolved_reference = "eb1b4190ab2d9c265b46c9ede0298b81bdcf35a8"
[[package]]
name = "psutil"
@@ -9424,4 +9424,4 @@ files = [
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "24f7a92f6c72a8207ab15f75c813a5a244c018afb0a582a5abf8c96e2c7faf12"
content-hash = "df8a20081fe91c40d071e508dbe19590c8b7ffb5dcc61e71cf30ed016bad5a34"
+3 -3
View File
@@ -25,7 +25,7 @@ dependencies = [
"defusedxml==0.7.1",
"gunicorn==23.0.0",
"lxml==5.3.2",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.26",
"prowler @ git+https://github.com/prowler-cloud/prowler.git@eb1b4190ab2d9c265b46c9ede0298b81bdcf35a8",
"psycopg2-binary==2.9.9",
"pytest-celery[redis] (==1.3.0)",
"sentry-sdk[django] (==2.56.0)",
@@ -50,7 +50,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.27.2"
version = "1.27.0"
[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -63,7 +63,6 @@ docker = "7.1.0"
filelock = "3.20.3"
freezegun = "1.5.1"
mypy = "1.10.1"
prek = "0.3.9"
pylint = "3.2.5"
pytest = "9.0.3"
pytest-cov = "5.0.0"
@@ -75,3 +74,4 @@ ruff = "0.5.0"
safety = "3.7.0"
tqdm = "4.67.1"
vulture = "2.14"
prek = "0.3.9"
@@ -484,8 +484,8 @@ AWS_BEDROCK_PRIVESC_PASSROLE_CODE_INTERPRETER = AttackPathsQueryDefinition(
OR action = '*'
)
// Find roles that trust the Bedrock AgentCore service (can be passed to a code interpreter)
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})
// Find roles that trust Bedrock service (can be passed to Bedrock)
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
WHERE any(resource IN stmt_passrole.resource WHERE
resource = '*'
OR target_role.arn CONTAINS resource
@@ -536,8 +536,8 @@ AWS_BEDROCK_PRIVESC_INVOKE_CODE_INTERPRETER = AttackPathsQueryDefinition(
OR action = '*'
)
// Find roles that trust the Bedrock AgentCore service (already attached to existing code interpreters)
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock-agentcore.amazonaws.com'}})
// Find roles that trust Bedrock service (already attached to existing code interpreters)
MATCH path_target = (aws)--(target_role:AWSRole)-[:TRUSTS_AWS_PRINCIPAL]->(:AWSPrincipal {{arn: 'bedrock.amazonaws.com'}})
WITH collect(path_principal) + collect(path_target) AS paths
UNWIND paths AS p
+1 -1
View File
@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.27.2
version: 1.27.0
description: |-
Prowler API specification.
+19 -38
View File
@@ -4,7 +4,6 @@ import json
import logging
import os
import time
import uuid
from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timedelta, timezone
@@ -17,7 +16,7 @@ from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
from allauth.socialaccount.providers.saml.views import FinishACSView, LoginView
from botocore.exceptions import ClientError, NoCredentialsError, ParamValidationError
from celery import chain, states
from celery import chain
from celery.result import AsyncResult
from config.custom_logging import BackendLogger
from config.env import env
@@ -61,7 +60,6 @@ from django.utils.dateparse import parse_date
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django_celery_beat.models import PeriodicTask
from django_celery_results.models import TaskResult
from drf_spectacular.settings import spectacular_settings
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import (
@@ -424,7 +422,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.27.2"
spectacular_settings.VERSION = "1.27.0"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -2536,45 +2534,28 @@ class ScanViewSet(BaseRLSViewSet):
def create(self, request, *args, **kwargs):
input_serializer = self.get_serializer(data=request.data)
input_serializer.is_valid(raise_exception=True)
# Broker publish is deferred to on_commit so the worker cannot read
# Scan before BaseRLSViewSet's dispatch-wide atomic commits.
pre_task_id = str(uuid.uuid4())
with transaction.atomic():
scan = input_serializer.save()
scan.task_id = pre_task_id
scan.save(update_fields=["task_id"])
attack_paths_db_utils.create_attack_paths_scan(
tenant_id=self.request.tenant_id,
scan_id=str(scan.id),
provider_id=str(scan.provider_id),
with transaction.atomic():
task = perform_scan_task.apply_async(
kwargs={
"tenant_id": self.request.tenant_id,
"scan_id": str(scan.id),
"provider_id": str(scan.provider_id),
# Disabled for now
# checks_to_execute=scan.scanner_args.get("checks_to_execute")
},
)
task_result, _ = TaskResult.objects.get_or_create(
task_id=pre_task_id,
defaults={"status": states.PENDING, "task_name": "scan-perform"},
)
prowler_task, _ = Task.objects.update_or_create(
id=pre_task_id,
tenant_id=self.request.tenant_id,
defaults={"task_runner_task": task_result},
)
attack_paths_db_utils.create_attack_paths_scan(
tenant_id=self.request.tenant_id,
scan_id=str(scan.id),
provider_id=str(scan.provider_id),
)
scan_kwargs = {
"tenant_id": self.request.tenant_id,
"scan_id": str(scan.id),
"provider_id": str(scan.provider_id),
# Disabled for now
# checks_to_execute=scan.scanner_args.get("checks_to_execute")
}
transaction.on_commit(
lambda: perform_scan_task.apply_async(
kwargs=scan_kwargs, task_id=pre_task_id
)
)
prowler_task = Task.objects.get(id=task.id)
scan.task_id = task.id
scan.save(update_fields=["task_id"])
self.response_serializer_class = TaskSerializer
output_serializer = self.get_serializer(prowler_task)
-4
View File
@@ -47,9 +47,6 @@ from prowler.lib.outputs.compliance.csa.csa_oraclecloud import OracleCloudCSA
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.ens.ens_azure import AzureENS
from prowler.lib.outputs.compliance.ens.ens_gcp import GCPENS
from prowler.lib.outputs.compliance.asd_essential_eight.asd_essential_eight_aws import (
ASDEssentialEightAWS,
)
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_azure import AzureISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
@@ -103,7 +100,6 @@ COMPLIANCE_CLASS_MAP = {
(lambda name: name.startswith("ccc_"), CCC_AWS),
(lambda name: name.startswith("c5_"), AWSC5),
(lambda name: name.startswith("csa_"), AWSCSA),
(lambda name: name == "asd_essential_eight_aws", ASDEssentialEightAWS),
],
"azure": [
(lambda name: name.startswith("cis_"), AzureCIS),
-16
View File
@@ -134,22 +134,6 @@ prek installed at `.git/hooks/pre-commit`
If pre-commit hooks were previously installed, run `prek install --overwrite` to replace the existing hook. Otherwise, both tools will run on each commit.
</Warning>
#### Enable TruffleHog as a Pre-Push Hook
By default, only `pre-commit` hooks are installed. To enable [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) secret scanning on every push, install the `pre-push` hook type explicitly:
```shell
prek install --hook-type pre-push
```
Successful installation should produce the following output:
```shell
prek installed at `.git/hooks/pre-push`
```
Once installed, TruffleHog runs before each push and blocks the operation when verified secrets are detected.
### Code Quality and Security Checks
Before merging pull requests, several automated checks and utilities ensure code security and updated dependencies:
-1
View File
@@ -119,7 +119,6 @@
"user-guide/tutorials/prowler-app-multi-tenant",
"user-guide/tutorials/prowler-app-api-keys",
"user-guide/tutorials/prowler-app-import-findings",
"user-guide/tutorials/prowler-app-alerts",
{
"group": "Mutelist",
"expanded": true,
@@ -121,8 +121,8 @@ To update the environment file:
Edit the `.env` file and change version values:
```env
PROWLER_UI_VERSION="5.25.3"
PROWLER_API_VERSION="5.25.3"
PROWLER_UI_VERSION="5.25.2"
PROWLER_API_VERSION="5.25.2"
```
<Note>
Binary file not shown.

Before

Width:  |  Height:  |  Size: 257 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 399 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 425 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 88 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 222 KiB

+8 -13
View File
@@ -1,17 +1,12 @@
export const VersionBadge = ({ version }) => {
return (
<a
href={`https://github.com/prowler-cloud/prowler/releases/tag/${version}`}
target="_blank"
rel="noopener noreferrer"
className="version-badge-link"
>
<span className="version-badge-container">
<span className="version-badge">
<span className="version-badge-label">Added in:</span>&nbsp;
<span className="version-badge-version">{version}</span>
</span>
</span>
</a>
<code className="version-badge-container">
<p className="version-badge">
<span className="version-badge-label">Added in:</span>&nbsp;
<code className="version-badge-version">{version}</code>
</p>
</code>
);
};
-17
View File
@@ -1,21 +1,4 @@
/* Version Badge Styling */
.version-badge-link,
.version-badge-link:hover,
.version-badge-link:focus,
.version-badge-link:active,
.version-badge-link:visited {
display: inline-block;
text-decoration: none !important;
background-image: none !important;
border-bottom: none !important;
color: inherit;
transition: opacity 0.15s ease-in-out;
}
.version-badge-link:hover {
opacity: 0.85;
}
.version-badge-container {
display: inline-block;
margin: 0 0 1rem 0;
@@ -1,146 +0,0 @@
---
title: 'Alerts'
description: 'Create email alerts from Prowler Cloud findings to monitor relevant security changes after scans or in daily digests.'
---
import { VersionBadge } from "/snippets/version-badge.mdx"
<VersionBadge version="5.26.0" />
Alerts notify recipients by email when security findings match saved filter conditions. Use Alerts to track high-priority findings, monitor specific providers or services, and keep teams informed about scan results that match defined criteria.
<Note>
This feature is available exclusively in **Prowler Cloud** with a paid subscription.
</Note>
## Prerequisites
Before creating Alerts, ensure that:
* At least one scan has completed and produced findings.
* The user role includes the `manage_alerts` permission.
The `manage_alerts` permission is required to create, edit, test, enable, disable, and delete Alerts. See [RBAC Administrative Permissions](/user-guide/tutorials/prowler-app-rbac#rbac-administrative-permissions) for details.
## How Alerts Work
Alerts are created from Findings filters. When an Alert runs, Prowler Cloud evaluates the saved conditions against findings and sends an email digest when matching findings exist.
<Note>
Alerts evaluate findings with status `FAIL` only. Findings with status `PASS` or `MANUAL`, and muted findings, never trigger an Alert regardless of the saved filters.
</Note>
Alerts run on one of three schedules:
| Frequency | Description |
|-----------|-------------|
| After each scan | Evaluates the Alert after each completed scan. |
| Daily digest | Evaluates the Alert once per day and sends a digest when findings match. |
| After each scan and daily | Evaluates the Alert after every scan and in the daily digest. |
## Creating an Alert From Findings
To create an Alert:
1. Navigate to **Findings** in Prowler Cloud.
2. Apply at least one [Alert-compatible filter](#alert-compatible-filters) to define the findings that should trigger the Alert.
3. Click **Create Alert**.
![Create Alert From Findings](/images/prowler-app/alerts/create-alert-from-findings.png)
4. Configure the Alert settings:
* **Name:** Add a short, descriptive name.
* **Description:** Add optional context for the Alert.
* **Frequency:** Select when Prowler Cloud should evaluate the Alert.
* **Recipients:** Select the recipients who should receive the email digest.
![Create Alert Modal](/images/prowler-app/alerts/create-alert-modal.png)
5. Click **Create**.
After the Alert is created, Prowler Cloud evaluates it based on the selected frequency.
## Alert-Compatible Filters
An **Alert-compatible filter** is a Findings-page filter that the Alert condition language can evaluate when the Alert runs. The Findings page exposes many filters, but only a specific subset can be saved into an Alert. Filters outside this subset, such as **Status**, free-text search, sort, or pagination, are ignored when seeding an Alert from the current Findings view.
When **Create Alert** is clicked on the Findings page, Prowler Cloud takes the active filters, keeps only the Alert-compatible ones, and uses them to build the Alert condition.
The following filters are Alert-compatible:
* Provider type
* Provider
* Severity
* Delta (new findings since the previous scan)
* Region
* Service
* Resource type
* Category
* Resource group
If only the **Status** filter is applied on the Findings page, Prowler Cloud substitutes all severities as the condition base so the Alert can still be created. Status itself never becomes part of the Alert condition.
## Managing Alerts
Navigate to **Alerts** to review and manage existing Alerts.
![Alerts List](/images/prowler-app/alerts/alerts-list.png)
Each Alert provides these actions:
| Action | Description |
|--------|-------------|
| Edit | Update name, description, recipients, frequency, or filters. |
| Enable/Disable | Start or stop Alert evaluation without deleting the Alert. |
| Delete | Permanently remove the Alert. |
## Testing Alert Filters
When editing an Alert, click **Test** to preview whether the current filters match existing findings.
The test result indicates whether the filters match findings and includes a summary of the matching results.
![Edit Alert Test Result](/images/prowler-app/alerts/edit-alert-test.png)
<Warning>
**The Test result is a snapshot, not a guarantee of future Alert triggers.**
The Test evaluates the current filters against existing findings at the moment **Test** is clicked. It does not predict whether the Alert will trigger on its next evaluation. The Alert trigger depends on the state at evaluation time:
* **After each scan:** The Alert is evaluated against the findings produced by that scan only. If the next scan produces no findings that match the filters, the Alert will not trigger, even if a Test run earlier in the day showed matches.
* **Daily digest:** The Alert is evaluated against the findings present on the digest day. If no matching findings exist for that day, the Alert will not trigger, even if previous days had matches.
The reverse is also true: a Test showing no matches does not guarantee the Alert will stay silent. Future scans may produce matching findings.
Use **Test** to validate that the filters are well-formed and target the intended findings, not to forecast future Alert behavior.
</Warning>
## Recipients
Alert recipients are selected from the email addresses available in the tenant. Recipients receive an email digest each time an Alert evaluates and matches findings.
<Note>
By default, the **organization owner** receives a **daily digest** for **critical findings**. Adjust the recipient, frequency, or filters in the Alert configuration to change this behavior.
</Note>
If a recipient unsubscribes from Alerts, that address stops receiving digests until it is reconfirmed.
## Email Notifications
When an Alert matches findings, Prowler Cloud sends a security alert email that summarizes the matching findings. The email includes:
* The scan name and evaluation time.
* The total number of matching findings.
* The number of Alert rules that triggered.
* A preview of the affected findings, grouped by severity, with resource details and the originating rule.
* A direct link to view all matching findings in Prowler Cloud.
![Alert Email Example](/images/prowler-app/alerts/alert-email-example.png)
## Best Practices
* **Start with focused filters:** Create Alerts for specific high-priority scopes, such as critical findings, production providers, or important services.
* **Use clear names:** Choose names that explain the intent of the Alert.
* **Review recipients regularly:** Keep recipient lists aligned with current ownership.
* **Test before saving edits:** Use **Test** after changing filters to confirm that the Alert matches the expected findings.
* **Disable instead of deleting during tuning:** Disable Alerts temporarily when adjusting filters or recipients.
+3 -3
View File
@@ -1009,7 +1009,7 @@ wheels = [
[[package]]
name = "requests"
version = "2.33.1"
version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
@@ -1017,9 +1017,9 @@ dependencies = [
{ name = "idna" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" },
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]
[[package]]
+3 -31
View File
@@ -2,33 +2,15 @@
All notable changes to the **Prowler SDK** are documented in this file.
## [5.26.2] (Prowler UNRELEASED)
### 🐞 Fixed
- `entra_users_mfa_capable` and `entra_break_glass_account_fido2_security_key_registered` report a preventive FAIL per affected user (with the missing permission named) when the M365 service principal lacks `AuditLog.Read.All`, instead of mass false positives [(#10907)](https://github.com/prowler-cloud/prowler/pull/10907)
---
## [5.26.1] (Prowler v5.26.1)
### 🐞 Fixed
- `entra_users_mfa_capable` no longer flags disabled guest users by requesting `accountEnabled` and `userType` from Microsoft Graph via `$select` and using Graph as the source of truth for `account_enabled` (EXO `Get-User` does not return guest users) [(#11002)](https://github.com/prowler-cloud/prowler/pull/11002)
---
## [5.26.0] (Prowler v5.26.0)
## [5.26.0] (Prowler UNRELEASED)
### 🚀 Added
- `bedrock_guardrails_configured` check for AWS provider [(#10844)](https://github.com/prowler-cloud/prowler/pull/10844)
- Universal compliance with OCSF support [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
- Universal compliance pipeline integrated into the CLI: `--list-compliance` and `--list-compliance-requirements` show universal frameworks, and CSV plus OCSF outputs are generated for any framework declaring a `TableConfig` [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
- ASD Essential Eight Maturity Model compliance framework for AWS (Maturity Level One, Nov 2023) [(#10808)](https://github.com/prowler-cloud/prowler/pull/10808)
- Vercel checks to return personalized finding status extended depending on billing plan and classify them with billing-plan categories [(#10663)](https://github.com/prowler-cloud/prowler/pull/10663)
- Update Vercel checks to return personalized finding status extended depending on billing plan and classify them with billing-plan categories [(#10663)](https://github.com/prowler-cloud/prowler/pull/10663)
- `bedrock_prompt_management_exists` check for AWS provider [(#10878)](https://github.com/prowler-cloud/prowler/pull/10878)
- 8 Gmail attachment safety and spoofing protection checks for Google Workspace provider using the Cloud Identity Policy API [(#10980)](https://github.com/prowler-cloud/prowler/pull/10980)
- `bedrock_prompt_encrypted_with_cmk` check for AWS provider [(#10905)](https://github.com/prowler-cloud/prowler/pull/10905)
### 🔄 Changed
@@ -37,14 +19,12 @@ All notable changes to the **Prowler SDK** are documented in this file.
- AWS CodeBuild service now batches `BatchGetProjects` and `BatchGetBuilds` calls per region (up to 100 items per call) to reduce API call volume and prevent throttling-induced false positives in `codebuild_project_not_publicly_accessible` [(#10639)](https://github.com/prowler-cloud/prowler/pull/10639)
- `display_compliance_table` dispatch switched from substring `in` checks to `startswith` to prevent false matches between similarly named frameworks (e.g. `cisa` vs `cis`) [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
- Restore the `ec2-imdsv1` category for EC2 IMDS checks to keep Attack Surface and findings filters aligned [(#10998)](https://github.com/prowler-cloud/prowler/pull/10998)
- Container image CVE findings and IaC findings now use official CVE, Prowler Hub, or GitHub Security Advisory URLs instead of Aqua advisory URLs in remediation and references; Trivy rule IDs map to Prowler Hub without the `AVD-` prefix so links resolve [(#10853)](https://github.com/prowler-cloud/prowler/pull/10853)
### 🐞 Fixed
- AWS SDK test isolation: autouse `mock_aws` fixture and leak detector in `conftest.py` to prevent tests from hitting real AWS endpoints, with idempotent organization setup for tests calling `set_mocked_aws_provider` multiple times [(#10605)](https://github.com/prowler-cloud/prowler/pull/10605)
- AWS `boto` user agent extra is now applied to every client [(#10944)](https://github.com/prowler-cloud/prowler/pull/10944)
- Image provider connection check no longer fails with a misleading `host='https'` resolution error when the registry URL includes an `http://` or `https://` scheme prefix [(#10950)](https://github.com/prowler-cloud/prowler/pull/10950)
- Azure subscriptions sharing the same display name are no longer collapsed into a single identity entry, so every subscription is scanned [(#10718)](https://github.com/prowler-cloud/prowler/pull/10718)
### 🔐 Security
@@ -53,14 +33,6 @@ All notable changes to the **Prowler SDK** are documented in this file.
---
## [5.25.3] (Prowler v5.25.3)
### 🐞 Fixed
- Oracle Cloud identity scans known or supplied regions to better support non-Ashburn tenancies [(#10529)](https://github.com/prowler-cloud/prowler/pull/10529)
---
## [5.25.2] (Prowler v5.25.2)
### 🐞 Fixed
+7 -7
View File
@@ -57,9 +57,6 @@ from prowler.lib.check.models import CheckMetadata
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.asd_essential_eight.asd_essential_eight_aws import (
ASDEssentialEightAWS,
)
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
AWSWellArchitected,
)
@@ -93,6 +90,9 @@ from prowler.lib.outputs.compliance.csa.csa_oraclecloud import OracleCloudCSA
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.ens.ens_azure import AzureENS
from prowler.lib.outputs.compliance.ens.ens_gcp import GCPENS
from prowler.lib.outputs.compliance.essential_eight.essential_eight_aws import (
EssentialEightAWS,
)
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_azure import AzureISO27001
@@ -676,18 +676,18 @@ def prowler():
)
generated_outputs["compliance"].append(cis)
cis.batch_write_data_to_file()
elif compliance_name.startswith("asd_essential_eight"):
elif compliance_name.startswith("essential_eight"):
filename = (
f"{output_options.output_directory}/compliance/"
f"{output_options.output_filename}_{compliance_name}.csv"
)
asd_essential_eight = ASDEssentialEightAWS(
essential_eight = EssentialEightAWS(
findings=finding_outputs,
compliance=bulk_compliance_frameworks[compliance_name],
file_path=filename,
)
generated_outputs["compliance"].append(asd_essential_eight)
asd_essential_eight.batch_write_data_to_file()
generated_outputs["compliance"].append(essential_eight)
essential_eight.batch_write_data_to_file()
elif compliance_name == "mitre_attack_aws":
# Generate MITRE ATT&CK Finding Object
filename = (
-2
View File
@@ -6473,7 +6473,6 @@
"backup_recovery_point_encrypted",
"backup_vaults_encrypted",
"bedrock_model_invocation_logs_encryption_enabled",
"bedrock_prompt_encrypted_with_cmk",
"cloudfront_distributions_field_level_encryption_enabled",
"cloudfront_distributions_origin_traffic_encrypted",
"cloudtrail_kms_encryption_enabled",
@@ -6731,7 +6730,6 @@
"backup_recovery_point_encrypted",
"backup_vaults_encrypted",
"bedrock_model_invocation_logs_encryption_enabled",
"bedrock_prompt_encrypted_with_cmk",
"cloudfront_distributions_field_level_encryption_enabled",
"cloudfront_distributions_origin_traffic_encrypted",
"cloudtrail_kms_encryption_enabled",
@@ -1311,7 +1311,6 @@
"glue_development_endpoints_job_bookmark_encryption_enabled",
"glue_ml_transform_encrypted_at_rest",
"bedrock_model_invocation_logs_encryption_enabled",
"bedrock_prompt_encrypted_with_cmk",
"codebuild_project_s3_logs_encrypted",
"codebuild_report_group_export_encrypted"
]
@@ -1,5 +1,5 @@
{
"Framework": "ASD-Essential-Eight",
"Framework": "Essential-Eight",
"Name": "ASD Essential Eight Maturity Model - Maturity Level One (AWS)",
"Version": "Nov 2023",
"Provider": "AWS",
@@ -1767,7 +1767,6 @@
"backup_recovery_point_encrypted",
"backup_vaults_encrypted",
"bedrock_model_invocation_logs_encryption_enabled",
"bedrock_prompt_encrypted_with_cmk",
"cloudfront_distributions_field_level_encryption_enabled",
"cloudfront_distributions_origin_traffic_encrypted",
"cloudtrail_kms_encryption_enabled",
@@ -2115,7 +2115,6 @@
"Checks": [
"backup_vaults_encrypted",
"bedrock_model_invocation_logs_encryption_enabled",
"bedrock_prompt_encrypted_with_cmk",
"cloudtrail_kms_encryption_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
@@ -2117,7 +2117,6 @@
"Checks": [
"backup_vaults_encrypted",
"bedrock_model_invocation_logs_encryption_enabled",
"bedrock_prompt_encrypted_with_cmk",
"cloudtrail_kms_encryption_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"dynamodb_tables_kms_cmk_encryption_enabled",
@@ -903,7 +903,6 @@
"Checks": [
"backup_vaults_encrypted",
"backup_recovery_point_encrypted",
"bedrock_prompt_encrypted_with_cmk",
"cloudtrail_kms_encryption_enabled",
"cloudwatch_log_group_kms_encryption_enabled",
"s3_bucket_kms_encryption",
@@ -653,9 +653,7 @@
{
"Id": "3.1.3.4.1.1",
"Description": "Ensure protection against encrypted attachments from untrusted senders is enabled",
"Checks": [
"gmail_encrypted_attachment_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "3 Apps",
@@ -676,9 +674,7 @@
{
"Id": "3.1.3.4.1.2",
"Description": "Ensure protection against attachments with scripts from untrusted senders is enabled",
"Checks": [
"gmail_script_attachment_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "3 Apps",
@@ -699,9 +695,7 @@
{
"Id": "3.1.3.4.1.3",
"Description": "Ensure protection against anomalous attachment types in emails is enabled",
"Checks": [
"gmail_anomalous_attachment_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "3 Apps",
@@ -791,9 +785,7 @@
{
"Id": "3.1.3.4.3.1",
"Description": "Ensure protection against domain spoofing based on similar domain names is enabled",
"Checks": [
"gmail_domain_spoofing_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "3 Apps",
@@ -814,9 +806,7 @@
{
"Id": "3.1.3.4.3.2",
"Description": "Ensure protection against spoofing of employee names is enabled",
"Checks": [
"gmail_employee_name_spoofing_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "3 Apps",
@@ -837,9 +827,7 @@
{
"Id": "3.1.3.4.3.3",
"Description": "Ensure protection against inbound emails spoofing your domain is enabled",
"Checks": [
"gmail_inbound_domain_spoofing_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "3 Apps",
@@ -860,9 +848,7 @@
{
"Id": "3.1.3.4.3.4",
"Description": "Ensure protection against any unauthenticated emails is enabled",
"Checks": [
"gmail_unauthenticated_email_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "3 Apps",
@@ -883,9 +869,7 @@
{
"Id": "3.1.3.4.3.5",
"Description": "Ensure groups are protected from inbound emails spoofing your domain",
"Checks": [
"gmail_groups_spoofing_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "3 Apps",
@@ -649,9 +649,7 @@
{
"Id": "GWS.GMAIL.5.1",
"Description": "Protect against encrypted attachments from untrusted senders SHALL be enabled",
"Checks": [
"gmail_encrypted_attachment_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "Gmail",
@@ -664,9 +662,7 @@
{
"Id": "GWS.GMAIL.5.2",
"Description": "Protect against attachments with scripts from untrusted senders SHALL be enabled",
"Checks": [
"gmail_script_attachment_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "Gmail",
@@ -679,9 +675,7 @@
{
"Id": "GWS.GMAIL.5.3",
"Description": "Protect against anomalous attachment types in emails SHALL be enabled",
"Checks": [
"gmail_anomalous_attachment_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "Gmail",
@@ -804,9 +798,7 @@
{
"Id": "GWS.GMAIL.7.1",
"Description": "Protect against domain spoofing based on similar domain names SHALL be enabled",
"Checks": [
"gmail_domain_spoofing_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "Gmail",
@@ -819,9 +811,7 @@
{
"Id": "GWS.GMAIL.7.2",
"Description": "Protect against spoofing of employee names SHALL be enabled",
"Checks": [
"gmail_employee_name_spoofing_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "Gmail",
@@ -834,9 +824,7 @@
{
"Id": "GWS.GMAIL.7.3",
"Description": "Protect against inbound emails spoofing your domain SHALL be enabled",
"Checks": [
"gmail_inbound_domain_spoofing_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "Gmail",
@@ -849,9 +837,7 @@
{
"Id": "GWS.GMAIL.7.4",
"Description": "Protect against any unauthenticated emails SHALL be enabled",
"Checks": [
"gmail_unauthenticated_email_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "Gmail",
@@ -864,9 +850,7 @@
{
"Id": "GWS.GMAIL.7.5",
"Description": "Protect your Groups from inbound emails spoofing your domain SHALL be enabled",
"Checks": [
"gmail_groups_spoofing_protection_enabled"
],
"Checks": [],
"Attributes": [
{
"Section": "Gmail",
+1 -1
View File
@@ -48,7 +48,7 @@ class _MutableTimestamp:
timestamp = _MutableTimestamp(datetime.today())
timestamp_utc = _MutableTimestamp(datetime.now(timezone.utc))
prowler_version = "5.26.2"
prowler_version = "5.26.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://raw.githubusercontent.com/prowler-cloud/prowler/dc7d2d5aeb92fdf12e8604f42ef6472cd3e8e889/docs/img/prowler-logo-black.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
+2 -5
View File
@@ -749,11 +749,8 @@ def execute(
if global_provider.type == "cloudflare":
is_finding_muted_args["account_id"] = finding.account_id
if global_provider.type == "azure":
is_finding_muted_args["subscription_id"] = finding.subscription
is_finding_muted_args["subscription_name"] = (
global_provider.identity.subscriptions.get(
finding.subscription, finding.subscription
)
is_finding_muted_args["subscription_id"] = (
global_provider.identity.subscriptions.get(finding.subscription)
)
is_finding_muted_args["finding"] = finding
finding.muted = global_provider.mutelist.is_finding_muted(
+8 -8
View File
@@ -102,7 +102,7 @@ class CIS_Requirement_Attribute(BaseModel):
References: str
class ASDEssentialEight_Requirement_Attribute_MaturityLevel(str, Enum):
class EssentialEight_Requirement_Attribute_MaturityLevel(str, Enum):
"""ASD Essential Eight Maturity Level"""
ML1 = "ML1"
@@ -110,14 +110,14 @@ class ASDEssentialEight_Requirement_Attribute_MaturityLevel(str, Enum):
ML3 = "ML3"
class ASDEssentialEight_Requirement_Attribute_AssessmentStatus(str, Enum):
class EssentialEight_Requirement_Attribute_AssessmentStatus(str, Enum):
"""Essential Eight Requirement Attribute Assessment Status"""
Manual = "Manual"
Automated = "Automated"
class ASDEssentialEight_Requirement_Attribute_CloudApplicability(str, Enum):
class EssentialEight_Requirement_Attribute_CloudApplicability(str, Enum):
"""How well the ASD control maps to AWS cloud infrastructure."""
Full = "full"
@@ -127,13 +127,13 @@ class ASDEssentialEight_Requirement_Attribute_CloudApplicability(str, Enum):
# Essential Eight Requirement Attribute
class ASDEssentialEight_Requirement_Attribute(BaseModel):
class EssentialEight_Requirement_Attribute(BaseModel):
"""ASD Essential Eight Requirement Attribute"""
Section: str
MaturityLevel: ASDEssentialEight_Requirement_Attribute_MaturityLevel
AssessmentStatus: ASDEssentialEight_Requirement_Attribute_AssessmentStatus
CloudApplicability: ASDEssentialEight_Requirement_Attribute_CloudApplicability
MaturityLevel: EssentialEight_Requirement_Attribute_MaturityLevel
AssessmentStatus: EssentialEight_Requirement_Attribute_AssessmentStatus
CloudApplicability: EssentialEight_Requirement_Attribute_CloudApplicability
MitigatedThreats: list[str]
Description: str
RationaleStatement: str
@@ -292,7 +292,7 @@ class Compliance_Requirement(BaseModel):
Name: Optional[str] = None
Attributes: list[
Union[
ASDEssentialEight_Requirement_Attribute,
EssentialEight_Requirement_Attribute,
CIS_Requirement_Attribute,
ENS_Requirement_Attribute,
ISO27001_2013_Requirement_Attribute,
+5 -5
View File
@@ -1,9 +1,6 @@
import sys
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance.asd_essential_eight.asd_essential_eight import (
get_asd_essential_eight_table,
)
from prowler.lib.outputs.compliance.c5.c5 import get_c5_table
from prowler.lib.outputs.compliance.ccc.ccc import get_ccc_table
from prowler.lib.outputs.compliance.cis.cis import get_cis_table
@@ -12,6 +9,9 @@ from prowler.lib.outputs.compliance.compliance_check import ( # noqa: F401 - re
)
from prowler.lib.outputs.compliance.csa.csa import get_csa_table
from prowler.lib.outputs.compliance.ens.ens import get_ens_table
from prowler.lib.outputs.compliance.essential_eight.essential_eight import (
get_essential_eight_table,
)
from prowler.lib.outputs.compliance.generic.generic_table import (
get_generic_compliance_table,
)
@@ -233,8 +233,8 @@ def display_compliance_table(
output_directory,
compliance_overview,
)
elif "asd_essential_eight" in compliance_framework:
get_asd_essential_eight_table(
elif "essential_eight" in compliance_framework:
get_essential_eight_table(
findings,
bulk_checks_metadata,
compliance_framework,
@@ -4,7 +4,7 @@ from tabulate import tabulate
from prowler.config.config import orange_color
def get_asd_essential_eight_table(
def get_essential_eight_table(
findings: list,
bulk_checks_metadata: dict,
compliance_framework: str,
@@ -13,7 +13,7 @@ def get_asd_essential_eight_table(
compliance_overview: bool,
):
sections = {}
asd_essential_eight_compliance_table = {
essential_eight_compliance_table = {
"Provider": [],
"Section": [],
"Status": [],
@@ -26,7 +26,7 @@ def get_asd_essential_eight_table(
check = bulk_checks_metadata[finding.check_metadata.CheckID]
check_compliances = check.Compliance
for compliance in check_compliances:
if compliance.Framework == "ASD-Essential-Eight":
if compliance.Framework == "Essential-Eight":
for requirement in compliance.Requirements:
for attribute in requirement.Attributes:
section = attribute.Section
@@ -50,19 +50,19 @@ def get_asd_essential_eight_table(
sections = dict(sorted(sections.items()))
for section in sections:
asd_essential_eight_compliance_table["Provider"].append(compliance.Provider)
asd_essential_eight_compliance_table["Section"].append(section)
essential_eight_compliance_table["Provider"].append(compliance.Provider)
essential_eight_compliance_table["Section"].append(section)
if sections[section]["FAIL"] > 0:
asd_essential_eight_compliance_table["Status"].append(
essential_eight_compliance_table["Status"].append(
f"{Fore.RED}FAIL({sections[section]['FAIL']}){Style.RESET_ALL}"
)
elif sections[section]["PASS"] > 0:
asd_essential_eight_compliance_table["Status"].append(
essential_eight_compliance_table["Status"].append(
f"{Fore.GREEN}PASS({sections[section]['PASS']}){Style.RESET_ALL}"
)
else:
asd_essential_eight_compliance_table["Status"].append("-")
asd_essential_eight_compliance_table["Muted"].append(
essential_eight_compliance_table["Status"].append("-")
essential_eight_compliance_table["Muted"].append(
f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}"
)
if len(fail_count) + len(pass_count) + len(muted_count) > 1:
@@ -84,7 +84,7 @@ def get_asd_essential_eight_table(
)
print(
tabulate(
asd_essential_eight_compliance_table,
essential_eight_compliance_table,
headers="keys",
tablefmt="rounded_grid",
)
@@ -1,13 +1,13 @@
from prowler.config.config import timestamp
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.asd_essential_eight.models import (
ASDEssentialEightAWSModel,
)
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.essential_eight.models import (
EssentialEightAWSModel,
)
from prowler.lib.outputs.finding import Finding
class ASDEssentialEightAWS(ComplianceOutput):
class EssentialEightAWS(ComplianceOutput):
"""
This class represents the AWS ASD Essential Eight compliance output.
@@ -41,7 +41,7 @@ class ASDEssentialEightAWS(ComplianceOutput):
for requirement in compliance.Requirements:
if requirement.Id in finding_requirements:
for attribute in requirement.Attributes:
compliance_row = ASDEssentialEightAWSModel(
compliance_row = EssentialEightAWSModel(
Provider=finding.provider,
Description=compliance.Description,
AccountId=finding.account_uid,
@@ -77,7 +77,7 @@ class ASDEssentialEightAWS(ComplianceOutput):
for requirement in compliance.Requirements:
if not requirement.Checks:
for attribute in requirement.Attributes:
compliance_row = ASDEssentialEightAWSModel(
compliance_row = EssentialEightAWSModel(
Provider=compliance.Provider.lower(),
Description=compliance.Description,
AccountId="",
@@ -1,9 +1,9 @@
from pydantic.v1 import BaseModel
class ASDEssentialEightAWSModel(BaseModel):
class EssentialEightAWSModel(BaseModel):
"""
ASDEssentialEightAWSModel generates a finding's output in AWS ASD Essential Eight Compliance format.
EssentialEightAWSModel generates a finding's output in AWS ASD Essential Eight Compliance format.
"""
Provider: str
+2 -4
View File
@@ -187,11 +187,9 @@ class Finding(BaseModel):
output_data["account_uid"] = (
output_data["account_organization_uid"]
if "Tenant:" in check_output.subscription
else check_output.subscription
)
output_data["account_name"] = provider.identity.subscriptions.get(
check_output.subscription, check_output.subscription
else provider.identity.subscriptions[check_output.subscription]
)
output_data["account_name"] = check_output.subscription
output_data["resource_name"] = check_output.resource_name
output_data["resource_uid"] = check_output.resource_id
output_data["region"] = check_output.location
+2 -5
View File
@@ -492,11 +492,8 @@ class HTML(Output):
"""
try:
printed_subscriptions = []
for (
subscription_id,
display_name,
) in provider.identity.subscriptions.items():
intermediate = f"{display_name} : {subscription_id}"
for key, value in provider.identity.subscriptions.items():
intermediate = f"{key} : {value}"
printed_subscriptions.append(intermediate)
# check if identity is str(coming from SP) or dict(coming from browser or)
+2 -5
View File
@@ -82,11 +82,8 @@ class Slack:
logo = gcp_logo
elif provider.type == "azure":
printed_subscriptions = []
for (
subscription_id,
display_name,
) in provider.identity.subscriptions.items():
intermediate = f"- *{subscription_id}: {display_name}*\n"
for key, value in provider.identity.subscriptions.items():
intermediate = f"- *{key}: {value}*\n"
printed_subscriptions.append(intermediate)
identity = f"Azure Subscriptions:\n{''.join(printed_subscriptions)}"
logo = azure_logo
+2 -6
View File
@@ -185,13 +185,9 @@ def display_summary_table(
print(
f"\n{entity_type} {Fore.YELLOW}{audited_entities}{Style.RESET_ALL} Scan Results (severity columns are for fails only):"
)
if provider.type == "azure":
scanned_subscriptions = ", ".join(
f"{display_name} ({subscription_id})"
for subscription_id, display_name in provider.identity.subscriptions.items()
)
if provider == "azure":
print(
f"\nSubscriptions scanned: {Fore.YELLOW}{scanned_subscriptions}{Style.RESET_ALL}"
f"\nSubscriptions scanned: {Fore.YELLOW}{' '.join(provider.identity.subscriptions.keys())}{Style.RESET_ALL}"
)
print(tabulate(findings_table, headers="keys", tablefmt="rounded_grid"))
print(
@@ -1,90 +0,0 @@
import re
from urllib.parse import parse_qs, urlparse
# Aqua advisory host whose links are filtered out of findings references.
AQUA_REFERENCE_HOST = "avd.aquasec.com"
# Template for GitHub Security Advisory (GHSA) reference URLs.
GITHUB_ADVISORY_URL = "https://github.com/advisories/{advisory_id}"
# Template for Prowler Hub check pages (keyed by the non-"AVD-" rule ID).
PROWLER_HUB_CHECK_URL = "https://hub.prowler.com/check/{check_id}"
# Matches CVE identifiers, e.g. "CVE-2024-12345" (case-insensitive).
_CVE_ID_PATTERN = re.compile(r"^CVE-\d{4}-\d+$", re.IGNORECASE)
# Matches GHSA identifiers, e.g. "GHSA-xxxx-yyyy-zzzz" (case-insensitive).
_GHSA_ID_PATTERN = re.compile(r"^GHSA(?:-[a-z0-9]{4}){3}$", re.IGNORECASE)
def _dedupe_preserve_order(urls: list[str]) -> list[str]:
seen: set[str] = set()
ordered_urls: list[str] = []
for url in urls:
if not url or not url.strip():
continue
normalized_url = url.strip()
if normalized_url in seen:
continue
seen.add(normalized_url)
ordered_urls.append(normalized_url)
return ordered_urls
def _is_aqua_reference(url: str) -> bool:
    """Return True when *url*'s host is the Aqua advisory host."""
    host = urlparse(url).netloc.lower()
    return AQUA_REFERENCE_HOST in host
def _build_cve_org_url(vulnerability_id: str) -> str:
return f"https://www.cve.org/CVERecord?id={vulnerability_id.upper()}"
def build_finding_reference_url(finding_id: str) -> str:
    """Map a Trivy finding ID to a stable, real reference URL.

    - CVE-XXXX-NNNN -> cve.org record
    - GHSA-*        -> github.com/advisories
    - everything else -> hub.prowler.com/check/<id>, stripping a leading
      "AVD-" prefix because Prowler Hub indexes Trivy rules by the
      non-prefixed ID (e.g., "AWS-0001" not "AVD-AWS-0001").
    """
    candidate = finding_id.strip().upper()
    if _CVE_ID_PATTERN.match(candidate):
        return _build_cve_org_url(candidate)
    if _GHSA_ID_PATTERN.match(candidate):
        return GITHUB_ADVISORY_URL.format(advisory_id=candidate)
    # Prowler Hub does not use the "AVD-" prefix in its check IDs.
    check_id = candidate.removeprefix("AVD-")
    return PROWLER_HUB_CHECK_URL.format(check_id=check_id)
def _is_cve_org_url(url: str, vulnerability_id: str) -> bool:
parsed_url = urlparse(url)
if parsed_url.netloc.lower() != "www.cve.org":
return False
query_value = parse_qs(parsed_url.query).get("id", [""])[0]
return query_value.upper() == vulnerability_id.upper()
def resolve_vulnerability_reference_urls(
    vulnerability_id: str,
    references: list[str] | None = None,
    primary_url: str = "",
) -> tuple[str, list[str]]:
    """Resolve non-Aqua vulnerability URLs, prioritizing official CVE destinations.

    Args:
        vulnerability_id: Finding identifier (e.g. "CVE-2024-0001").
        references: Optional list of candidate reference URLs.
        primary_url: Extra candidate appended when not already present.

    Returns:
        A ``(recommendation_url, reference_urls)`` tuple. For non-CVE IDs the
        recommendation is empty and all non-Aqua, deduplicated URLs are kept;
        for CVE IDs both values collapse to the single cve.org record URL.
    """
    candidates = [] if references is None else list(references)
    if primary_url and primary_url not in candidates:
        candidates.append(primary_url)

    # Discard Aqua advisory links, then normalize and dedupe the remainder.
    non_aqua = [url for url in candidates if not _is_aqua_reference(url)]
    filtered = _dedupe_preserve_order(non_aqua)

    if not _CVE_ID_PATTERN.match(vulnerability_id):
        return "", filtered

    # Prefer an existing cve.org record URL; otherwise synthesize one.
    for url in filtered:
        if _is_cve_org_url(url, vulnerability_id):
            return url, [url]
    fallback = _build_cve_org_url(vulnerability_id)
    return fallback, [fallback]
@@ -1,43 +0,0 @@
{
"Provider": "aws",
"CheckID": "bedrock_prompt_encrypted_with_cmk",
"CheckTitle": "Amazon Bedrock prompt is encrypted at rest with a customer-managed KMS key",
"CheckType": [
"Software and Configuration Checks/AWS Security Best Practices"
],
"ServiceName": "bedrock",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "medium",
"ResourceType": "Other",
"ResourceGroup": "ai_ml",
"Description": "Bedrock prompts should be encrypted at rest with a **customer-managed KMS key (CMK)** rather than the AWS-owned default key. Prompts can contain sensitive instructions, business logic, and references to downstream tooling that warrant tenant-controlled key material and auditable access via AWS KMS.",
"Risk": "A prompt encrypted only with the AWS-owned default key offers limited tenant control over key access and lifecycle: no customer KMS key policy to govern decrypt permissions, no control over rotation cadence or scheduled deletion, and gaps against frameworks (ISO 27001 A.8.24, NIST CSF PR.DS, KISA-ISMS-P 2.7.2) that require customer-managed keys for sensitive data at rest.",
"RelatedUrl": "",
"AdditionalURLs": [
"https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management.html",
"https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent_CreatePrompt.html",
"https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent_UpdatePrompt.html"
],
"Remediation": {
"Code": {
"CLI": "# Retrieve the current DRAFT prompt first and note the existing fields you want to preserve, such as description, defaultVariant, and variants:\naws bedrock-agent get-prompt --prompt-identifier <prompt_id> --prompt-version DRAFT --output json\n# Then update the prompt and include the existing fields you want to keep alongside the CMK change:\naws bedrock-agent update-prompt --prompt-identifier <prompt_id> --name <prompt_name> --description <current_or_new_description> --default-variant <current_default_variant> --variants <current_or_updated_variants_json> --customer-encryption-key-arn <kms_key_arn>",
"NativeIaC": "",
"Other": "1. Open the Amazon Bedrock console\n2. Navigate to Prompt management\n3. Select the prompt\n4. Edit the prompt and choose a customer-managed KMS key for encryption\n5. Save the prompt",
"Terraform": ""
},
"Recommendation": {
"Text": "Encrypt every Bedrock prompt with a **customer-managed KMS key** to retain control over key access, rotation, and lifecycle. When using `update-prompt`, first retrieve the current draft and carry forward the fields you want to preserve, such as the existing description, `defaultVariant`, and `variants`, so the encryption change does not unintentionally overwrite prompt configuration.",
"Url": "https://hub.prowler.com/check/bedrock_prompt_encrypted_with_cmk"
}
},
"Categories": [
"gen-ai",
"encryption"
],
"DependsOn": [],
"RelatedTo": [
"bedrock_prompt_management_exists"
],
"Notes": ""
}
@@ -1,32 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.bedrock.bedrock_agent_client import (
bedrock_agent_client,
)
class bedrock_prompt_encrypted_with_cmk(Check):
    """Ensure that Bedrock prompts are encrypted with a customer-managed KMS key.

    This check evaluates whether each Bedrock prompt is encrypted at rest using
    a customer-managed KMS key (CMK) rather than the AWS-owned default key.

    - PASS: The Bedrock prompt is encrypted with a customer-managed KMS key.
    - FAIL: The Bedrock prompt is not encrypted with a customer-managed KMS key.
    """

    def execute(self) -> list[Check_Report_AWS]:
        """Execute the Bedrock prompt CMK encryption check.

        Returns:
            A list of reports containing the result of the check.
        """
        reports: list[Check_Report_AWS] = []
        for prompt in bedrock_agent_client.prompts.values():
            report = Check_Report_AWS(metadata=self.metadata(), resource=prompt)
            # A non-empty customerEncryptionKeyArn means a CMK is in use.
            if prompt.customer_encryption_key_arn:
                report.status = "PASS"
                report.status_extended = f"Bedrock Prompt {prompt.name} is encrypted with a customer-managed KMS key."
            else:
                report.status = "FAIL"
                report.status_extended = f"Bedrock Prompt {prompt.name} is not encrypted with a customer-managed KMS key."
            reports.append(report)
        return reports
@@ -34,8 +34,6 @@
"gen-ai"
],
"DependsOn": [],
"RelatedTo": [
"bedrock_prompt_encrypted_with_cmk"
],
"RelatedTo": [],
"Notes": "Results are generated per scanned region. Regions where `ListPrompts` cannot be queried are omitted from the findings."
}
@@ -136,10 +136,7 @@ class Guardrail(BaseModel):
class BedrockAgent(AWSService):
"""Bedrock Agent service class for managing agents and prompts."""
def __init__(self, provider):
"""Initialize the BedrockAgent service."""
# Call AWSService's __init__
super().__init__("bedrock-agent", provider)
self.agents = {}
@@ -147,7 +144,6 @@ class BedrockAgent(AWSService):
self.prompt_scanned_regions: set = set()
self.__threading_call__(self._list_agents)
self.__threading_call__(self._list_prompts)
self.__threading_call__(self._get_prompt, self.prompts.values())
self.__threading_call__(self._list_tags_for_resource, self.agents.values())
def _list_agents(self, regional_client):
@@ -175,43 +171,29 @@ class BedrockAgent(AWSService):
)
def _list_prompts(self, regional_client):
"""List all prompts in a region."""
"""List all prompts in a region.
Prompt Management is evaluated as a region-level adoption signal, so
prompt collection is intentionally not filtered by audit_resources.
"""
logger.info("Bedrock Agent - Listing Prompts...")
try:
paginator = regional_client.get_paginator("list_prompts")
for page in paginator.paginate():
for prompt in page.get("promptSummaries", []):
prompt_arn = prompt.get("arn", "")
if not self.audit_resources or (
is_resource_filtered(prompt_arn, self.audit_resources)
):
self.prompts[prompt_arn] = Prompt(
id=prompt.get("id", ""),
name=prompt.get("name", ""),
arn=prompt_arn,
region=regional_client.region,
)
self.prompts[prompt_arn] = Prompt(
id=prompt.get("id", ""),
name=prompt.get("name", ""),
arn=prompt_arn,
region=regional_client.region,
)
self.prompt_scanned_regions.add(regional_client.region)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _get_prompt(self, prompt):
"""Get detailed prompt information including encryption configuration."""
logger.info("Bedrock Agent - Getting Prompt...")
try:
prompt_info = self.regional_clients[prompt.region].get_prompt(
promptIdentifier=prompt.id
)
prompt.customer_encryption_key_arn = prompt_info.get(
"customerEncryptionKeyArn"
)
except Exception as error:
logger.error(
f"{prompt.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def _list_tags_for_resource(self, resource):
"""List tags for a Bedrock Agent resource."""
logger.info("Bedrock Agent - Listing Tags for Resource...")
@@ -230,8 +212,6 @@ class BedrockAgent(AWSService):
class Agent(BaseModel):
"""Model for a Bedrock Agent resource."""
id: str
name: str
arn: str
@@ -247,4 +227,3 @@ class Prompt(BaseModel):
name: str
arn: str
region: str
customer_encryption_key_arn: Optional[str] = None
+18 -29
View File
@@ -441,8 +441,8 @@ class AzureProvider(Provider):
None
"""
printed_subscriptions = []
for subscription_id, display_name in self._identity.subscriptions.items():
intermediate = display_name + ": " + subscription_id
for key, value in self._identity.subscriptions.items():
intermediate = key + ": " + value
printed_subscriptions.append(intermediate)
report_lines = [
f"Azure Tenant Domain: {Fore.YELLOW}{self._identity.tenant_domain}{Style.RESET_ALL} Azure Tenant ID: {Fore.YELLOW}{self._identity.tenant_ids[0]}{Style.RESET_ALL}",
@@ -969,30 +969,19 @@ class AzureProvider(Provider):
)
if not subscription_ids:
logger.info("Scanning all the Azure subscriptions...")
# TODO: get tags or labels
# TODO: fill with AzureSubscription
subscription_pairs = [
(subscription.display_name, subscription.subscription_id)
for subscription in subscriptions_client.subscriptions.list()
]
for subscription in subscriptions_client.subscriptions.list():
# TODO: get tags or labels
# TODO: fill with AzureSubscription
identity.subscriptions.update(
{subscription.display_name: subscription.subscription_id}
)
else:
logger.info("Scanning the subscriptions passed as argument ...")
subscription_pairs = [
(
subscriptions_client.subscriptions.get(
subscription_id=id
).display_name,
id,
for id in subscription_ids:
subscription = subscriptions_client.subscriptions.get(
subscription_id=id
)
for id in subscription_ids
]
# Key the subscriptions dict by subscription ID (which is
# guaranteed unique) and store the display name as the value.
# This avoids collisions when multiple subscriptions share
# the same display name.
for display_name, subscription_id in subscription_pairs:
identity.subscriptions[subscription_id] = display_name
identity.subscriptions.update({subscription.display_name: id})
# If there are no subscriptions listed -> checks are not going to be run against any resource
if not identity.subscriptions:
@@ -1028,28 +1017,28 @@ class AzureProvider(Provider):
Returns:
A dictionary containing the locations available for each subscription. The dictionary
has subscription IDs as keys and lists of location names as values.
has subscription display names as keys and lists of location names as values.
Examples:
>>> provider = AzureProvider(...)
>>> provider.get_locations()
{
'sub-id-1': ['eastus', 'eastus2', 'westus', 'westus2'],
'sub-id-2': ['eastus', 'eastus2', 'westus', 'westus2']
'Subscription 1': ['eastus', 'eastus2', 'westus', 'westus2'],
'Subscription 2': ['eastus', 'eastus2', 'westus', 'westus2']
}
"""
credentials = self.session
subscription_client = SubscriptionClient(credentials)
locations = {}
for subscription_id, display_name in self._identity.subscriptions.items():
locations[subscription_id] = []
for display_name, subscription_id in self._identity.subscriptions.items():
locations[display_name] = []
# List locations for each subscription
for location in subscription_client.subscriptions.list_locations(
subscription_id
):
locations[subscription_id].append(location.name)
locations[display_name].append(location.name)
return locations
@@ -8,23 +8,17 @@ class AzureMutelist(Mutelist):
self,
finding: Check_Report_Azure,
subscription_id: str,
subscription_name: str = "",
) -> bool:
account_names = [subscription_id]
for account_name in (subscription_name, finding.subscription):
if account_name and account_name not in account_names:
account_names.append(account_name)
tags = unroll_dict(unroll_tags(finding.resource_tags))
for account_name in account_names:
if self.is_muted(
account_name,
finding.check_metadata.CheckID,
finding.location,
finding.resource_name,
tags,
):
return True
return False
return self.is_muted(
subscription_id, # support Azure Subscription ID in mutelist
finding.check_metadata.CheckID,
finding.location,
finding.resource_name,
unroll_dict(unroll_tags(finding.resource_tags)),
) or self.is_muted(
finding.subscription, # support Azure Subscription Name in mutelist
finding.check_metadata.CheckID,
finding.location,
finding.resource_name,
unroll_dict(unroll_tags(finding.resource_tags)),
)
@@ -49,15 +49,15 @@ class AzureService:
if "GraphServiceClient" in str(service):
clients.update({identity.tenant_domain: service(credentials=session)})
elif "LogsQueryClient" in str(service):
for subscription_id, display_name in identity.subscriptions.items():
clients.update({subscription_id: service(credential=session)})
for display_name, id in identity.subscriptions.items():
clients.update({display_name: service(credential=session)})
else:
for subscription_id, display_name in identity.subscriptions.items():
for display_name, id in identity.subscriptions.items():
clients.update(
{
subscription_id: service(
display_name: service(
credential=session,
subscription_id=subscription_id,
subscription_id=id,
base_url=region_config.base_url,
credential_scopes=region_config.credential_scopes,
)
@@ -36,7 +36,7 @@ class AISearch(AzureService):
)
except Exception as error:
logger.error(
f"Subscription ID: {subscription} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
f"Subscription name: {subscription} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return aisearch_services
@@ -9,23 +9,20 @@ class aisearch_service_not_publicly_accessible(Check):
findings = []
for (
subscription_id,
subscription_name,
aisearch_services,
) in aisearch_client.aisearch_services.items():
subscription_name = aisearch_client.subscriptions.get(
subscription_id, subscription_id
)
for aisearch_service in aisearch_services.values():
report = Check_Report_Azure(
metadata=self.metadata(), resource=aisearch_service
)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"AISearch Service {aisearch_service.name} from subscription {subscription_name} ({subscription_id}) allows public access."
report.status_extended = f"AISearch Service {aisearch_service.name} from subscription {subscription_name} allows public access."
if not aisearch_service.public_network_access:
report.status = "PASS"
report.status_extended = f"AISearch Service {aisearch_service.name} from subscription {subscription_name} ({subscription_id}) does not allows public access."
report.status_extended = f"AISearch Service {aisearch_service.name} from subscription {subscription_name} does not allows public access."
findings.append(report)
@@ -6,19 +6,16 @@ class aks_cluster_rbac_enabled(Check):
def execute(self) -> Check_Report_Azure:
findings = []
for subscription_id, clusters in aks_client.clusters.items():
subscription_name = aks_client.subscriptions.get(
subscription_id, subscription_id
)
for subscription_name, clusters in aks_client.clusters.items():
for cluster in clusters.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=cluster)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "PASS"
report.status_extended = f"RBAC is enabled for cluster '{cluster.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"RBAC is enabled for cluster '{cluster.name}' in subscription '{subscription_name}'."
if not cluster.rbac_enabled:
report.status = "FAIL"
report.status_extended = f"RBAC is not enabled for cluster '{cluster.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"RBAC is not enabled for cluster '{cluster.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -6,20 +6,17 @@ class aks_clusters_created_with_private_nodes(Check):
def execute(self) -> Check_Report_Azure:
findings = []
for subscription_id, clusters in aks_client.clusters.items():
subscription_name = aks_client.subscriptions.get(
subscription_id, subscription_id
)
for subscription_name, clusters in aks_client.clusters.items():
for cluster in clusters.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=cluster)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "PASS"
report.status_extended = f"Cluster '{cluster.name}' was created with private nodes in subscription '{subscription_name} ({subscription_id})'"
report.status_extended = f"Cluster '{cluster.name}' was created with private nodes in subscription '{subscription_name}'"
for agent_pool in cluster.agent_pool_profiles:
if getattr(agent_pool, "enable_node_public_ip", True):
report.status = "FAIL"
report.status_extended = f"Cluster '{cluster.name}' was not created with private nodes in subscription '{subscription_name} ({subscription_id})'"
report.status_extended = f"Cluster '{cluster.name}' was not created with private nodes in subscription '{subscription_name}'"
break
findings.append(report)
@@ -6,21 +6,18 @@ class aks_clusters_public_access_disabled(Check):
def execute(self) -> Check_Report_Azure:
findings = []
for subscription_id, clusters in aks_client.clusters.items():
subscription_name = aks_client.subscriptions.get(
subscription_id, subscription_id
)
for subscription_name, clusters in aks_client.clusters.items():
for cluster in clusters.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=cluster)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"Public access to nodes is enabled for cluster '{cluster.name}' in subscription '{subscription_name} ({subscription_id})'"
report.status_extended = f"Public access to nodes is enabled for cluster '{cluster.name}' in subscription '{subscription_name}'"
if cluster.private_fqdn:
for agent_pool in cluster.agent_pool_profiles:
if not getattr(agent_pool, "enable_node_public_ip", False):
report.status = "PASS"
report.status_extended = f"Public access to nodes is disabled for cluster '{cluster.name}' in subscription '{subscription_name} ({subscription_id})'"
report.status_extended = f"Public access to nodes is disabled for cluster '{cluster.name}' in subscription '{subscription_name}'"
findings.append(report)
@@ -6,19 +6,16 @@ class aks_network_policy_enabled(Check):
def execute(self) -> Check_Report_Azure:
findings = []
for subscription_id, clusters in aks_client.clusters.items():
subscription_name = aks_client.subscriptions.get(
subscription_id, subscription_id
)
for subscription_name, clusters in aks_client.clusters.items():
for cluster_id, cluster in clusters.items():
report = Check_Report_Azure(metadata=self.metadata(), resource=cluster)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "PASS"
report.status_extended = f"Network policy is enabled for cluster '{cluster.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Network policy is enabled for cluster '{cluster.name}' in subscription '{subscription_name}'."
if not getattr(cluster, "network_policy", False):
report.status = "FAIL"
report.status_extended = f"Network policy is not enabled for cluster '{cluster.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Network policy is not enabled for cluster '{cluster.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -17,14 +17,14 @@ class AKS(AzureService):
logger.info("AKS - Getting clusters...")
clusters = {}
for subscription_id, client in self.clients.items():
for subscription_name, client in self.clients.items():
try:
clusters_list = client.managed_clusters.list()
clusters.update({subscription_id: {}})
clusters.update({subscription_name: {}})
for cluster in clusters_list:
if getattr(cluster, "kubernetes_version", None):
clusters[subscription_id].update(
clusters[subscription_name].update(
{
cluster.id: Cluster(
id=cluster.id,
@@ -60,7 +60,7 @@ class AKS(AzureService):
)
except Exception as error:
logger.error(
f"Subscription ID: {subscription_id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
f"Subscription name: {subscription_name} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return clusters
@@ -147,7 +147,7 @@ class APIM(AzureService):
)
except Exception as error:
logger.error(
f"Subscription ID: {subscription} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
f"Subscription name: {subscription} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return instances
@@ -50,11 +50,9 @@ class apim_threat_detection_llm_jacking(Check):
],
)
# 1. Aggregate logs from all APIM instances first
all_llm_logs: List[LogsQueryLogEntry] = []
for subscription, instances in apim_client.instances.items():
subscription_name = apim_client.subscriptions.get(
subscription, subscription
)
all_llm_logs: List[LogsQueryLogEntry] = []
for instance in instances:
if instance.log_analytics_workspace_id:
logs = apim_client.get_llm_operations_logs(
@@ -62,8 +60,7 @@ class apim_threat_detection_llm_jacking(Check):
)
all_llm_logs.extend(logs)
# Analyze logs only within the current subscription to avoid
# cross-subscription attribution when scanning multiple subscriptions.
# 2. Perform a single, global analysis on all collected logs
potential_llm_jacking_attackers = {}
for log in all_llm_logs:
operation_name = log.operation_id
@@ -94,17 +91,19 @@ class apim_threat_detection_llm_jacking(Check):
report = Check_Report_Azure(self.metadata(), resource=resource)
report.subscription = subscription
report.status = "FAIL"
report.status_extended = f"Potential LLM Jacking attack detected from IP address {principal_ip} in subscription {subscription_name} ({subscription}) with an action ratio of {action_ratio}, above the configured threshold of {threshold}."
report.status_extended = f"Potential LLM Jacking attack detected from IP address {principal_ip} with a threshold of {action_ratio}."
findings.append(report)
# If no threats were found after checking all principals, create a single PASS report.
# 4. If no threats were found after checking all principals, create a single PASS report
if not found_potential_llm_jacking_attackers:
report = Check_Report_Azure(self.metadata(), resource={})
report.resource_name = subscription_name
report.resource_id = f"/subscriptions/{subscription}"
report.resource_name = subscription
report.resource_id = (
f"/subscriptions/{apim_client.subscriptions[subscription]}"
)
report.subscription = subscription
report.status = "PASS"
report.status_extended = f"No potential LLM Jacking attacks detected across monitored APIM instances in subscription {subscription_name} ({subscription}) in the last {threat_detection_minutes} minutes."
report.status_extended = f"No potential LLM Jacking attacks detected across all monitored APIM instances in the last {threat_detection_minutes} minutes."
findings.append(report)
return findings
@@ -7,21 +7,18 @@ class app_client_certificates_on(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "PASS"
report.status_extended = f"Clients are required to present a certificate for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Clients are required to present a certificate for app '{app.name}' in subscription '{subscription_name}'."
if app.client_cert_mode != "Required":
report.status = "FAIL"
report.status_extended = f"Clients are not required to present a certificate for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Clients are not required to present a certificate for app '{app.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -7,21 +7,18 @@ class app_ensure_auth_is_set_up(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "PASS"
report.status_extended = f"Authentication is set up for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Authentication is set up for app '{app.name}' in subscription '{subscription_name}'."
if not app.auth_enabled:
report.status = "FAIL"
report.status_extended = f"Authentication is not set up for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Authentication is not set up for app '{app.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -7,21 +7,18 @@ class app_ensure_http_is_redirected_to_https(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "PASS"
report.status_extended = f"HTTP is redirected to HTTPS for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"HTTP is redirected to HTTPS for app '{app.name}' in subscription '{subscription_name}'."
if not app.https_only:
report.status = "FAIL"
report.status_extended = f"HTTP is not redirected to HTTPS for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"HTTP is not redirected to HTTPS for app '{app.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -7,12 +7,9 @@ class app_ensure_java_version_is_latest(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
linux_framework = getattr(app.configurations, "linux_fx_version", "")
windows_framework_version = getattr(
@@ -21,19 +18,19 @@ class app_ensure_java_version_is_latest(Check):
if "java" in linux_framework.lower() or windows_framework_version:
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
java_latest_version = app_client.audit_config.get(
"java_latest_version", "17"
)
report.status_extended = f"Java version is set to '{f'java{windows_framework_version}' if windows_framework_version else linux_framework}', but should be set to 'java {java_latest_version}' for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Java version is set to '{f'java{windows_framework_version}' if windows_framework_version else linux_framework}', but should be set to 'java {java_latest_version}' for app '{app.name}' in subscription '{subscription_name}'."
if (
f"java{java_latest_version}" in linux_framework
or java_latest_version == windows_framework_version
):
report.status = "PASS"
report.status_extended = f"Java version is set to 'java {java_latest_version}' for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Java version is set to 'java {java_latest_version}' for app '{app.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -7,12 +7,9 @@ class app_ensure_php_version_is_latest(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
framework = getattr(app.configurations, "linux_fx_version", "")
@@ -20,14 +17,14 @@ class app_ensure_php_version_is_latest(Check):
app.configurations, "php_version", ""
):
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
php_latest_version = app_client.audit_config.get(
"php_latest_version", "8.2"
)
report.status_extended = f"PHP version is set to '{framework}', the latest version that you could use is the '{php_latest_version}' version, for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"PHP version is set to '{framework}', the latest version that you could use is the '{php_latest_version}' version, for app '{app.name}' in subscription '{subscription_name}'."
if (
php_latest_version in framework
@@ -35,7 +32,7 @@ class app_ensure_php_version_is_latest(Check):
== php_latest_version
):
report.status = "PASS"
report.status_extended = f"PHP version is set to '{php_latest_version}' for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"PHP version is set to '{php_latest_version}' for app '{app.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -7,12 +7,9 @@ class app_ensure_python_version_is_latest(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
framework = getattr(app.configurations, "linux_fx_version", "")
@@ -20,12 +17,12 @@ class app_ensure_python_version_is_latest(Check):
app.configurations, "python_version", ""
):
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
python_latest_version = app_client.audit_config.get(
"python_latest_version", "3.12"
)
report.status_extended = f"Python version is '{framework}', the latest version that you could use is the '{python_latest_version}' version, for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Python version is '{framework}', the latest version that you could use is the '{python_latest_version}' version, for app '{app.name}' in subscription '{subscription_name}'."
if (
python_latest_version in framework
@@ -33,7 +30,7 @@ class app_ensure_python_version_is_latest(Check):
== python_latest_version
):
report.status = "PASS"
report.status_extended = f"Python version is set to '{python_latest_version}' for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Python version is set to '{python_latest_version}' for app '{app.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -7,23 +7,20 @@ class app_ensure_using_http20(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"HTTP/2.0 is not enabled for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"HTTP/2.0 is not enabled for app '{app.name}' in subscription '{subscription_name}'."
if app.configurations and getattr(
app.configurations, "http20_enabled", False
):
report.status = "PASS"
report.status_extended = f"HTTP/2.0 is enabled for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"HTTP/2.0 is enabled for app '{app.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -7,24 +7,21 @@ class app_ftp_deployment_disabled(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"FTP is enabled for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"FTP is enabled for app '{app.name}' in subscription '{subscription_name}'."
if (
app.configurations
and getattr(app.configurations, "ftps_state", "AllAllowed")
!= "AllAllowed"
):
report.status = "PASS"
report.status_extended = f"FTP is disabled for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"FTP is disabled for app '{app.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -7,24 +7,23 @@ class app_function_access_keys_configured(Check):
findings = []
for (
subscription_id,
subscription_name,
functions,
) in app_client.functions.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for function in functions.values():
if function.function_keys is not None:
report = Check_Report_Azure(
metadata=self.metadata(), resource=function
)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) does not have function keys configured."
report.status_extended = f"Function {function.name} does not have function keys configured."
if len(function.function_keys) > 0:
report.status = "PASS"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) has function keys configured."
report.status_extended = (
f"Function {function.name} has function keys configured."
)
findings.append(report)
@@ -7,20 +7,19 @@ class app_function_application_insights_enabled(Check):
findings = []
for (
subscription_id,
subscription_name,
functions,
) in app_client.functions.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for function in functions.values():
if function.enviroment_variables is not None:
report = Check_Report_Azure(
metadata=self.metadata(), resource=function
)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) is not using Application Insights."
report.status_extended = (
f"Function {function.name} is not using Application Insights."
)
if function.enviroment_variables.get(
"APPINSIGHTS_INSTRUMENTATIONKEY", None
@@ -28,7 +27,9 @@ class app_function_application_insights_enabled(Check):
"APPLICATIONINSIGHTS_CONNECTION_STRING", None
):
report.status = "PASS"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) is using Application Insights."
report.status_extended = (
f"Function {function.name} is using Application Insights."
)
findings.append(report)
@@ -7,20 +7,19 @@ class app_function_ftps_deployment_disabled(Check):
findings = []
for (
subscription_id,
subscription_name,
functions,
) in app_client.functions.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for function in functions.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=function)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) has {'FTP' if function.ftps_state == 'AllAllowed' else 'FTPS' if function.ftps_state == 'FtpsOnly' else 'FTP or FTPS'} deployment enabled."
report.status_extended = f"Function {function.name} has {'FTP' if function.ftps_state == 'AllAllowed' else 'FTPS' if function.ftps_state == 'FtpsOnly' else 'FTP or FTPS'} deployment enabled"
if function.ftps_state == "Disabled":
report.status = "PASS"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) has FTP and FTPS deployment disabled."
report.status_extended = (
f"Function {function.name} has FTP and FTPS deployment disabled"
)
findings.append(report)
@@ -7,26 +7,18 @@ class app_function_identity_is_configured(Check):
findings = []
for (
subscription_id,
subscription_name,
functions,
) in app_client.functions.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for function in functions.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=function)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) does not have a managed identity enabled."
report.status_extended = f"Function {function.name} does not have a managed identity enabled."
if function.identity:
identity_type = (
function.identity.type
if getattr(function.identity, "type", "")
else "managed"
)
report.status = "PASS"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) has a {identity_type} identity enabled."
report.status_extended = f"Function {function.name} has a {function.identity.type if getattr(function.identity, 'type', '') else 'managed'} identity enabled."
findings.append(report)
@@ -14,25 +14,22 @@ class app_function_identity_without_admin_privileges(Check):
findings = []
for (
subscription_id,
subscription_name,
functions,
) in app_client.functions.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for function in functions.values():
if function.identity:
report = Check_Report_Azure(
metadata=self.metadata(), resource=function
)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "PASS"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) has a managed identity enabled but without admin privileges."
report.status_extended = f"Function {function.name} has a managed identity enabled but without admin privileges."
admin_roles_assigned = []
for role_assignment in iam_client.role_assignments[
subscription_id
subscription_name
].values():
if (
role_assignment.agent_id == function.identity.principal_id
@@ -46,8 +43,8 @@ class app_function_identity_without_admin_privileges(Check):
):
admin_roles_assigned.append(
getattr(
iam_client.roles[subscription_id].get(
f"/subscriptions/{subscription_id}/providers/Microsoft.Authorization/roleDefinitions/{role_assignment.role_id}"
iam_client.roles[subscription_name].get(
f"/subscriptions/{iam_client.subscriptions[subscription_name]}/providers/Microsoft.Authorization/roleDefinitions/{role_assignment.role_id}"
),
"name",
"",
@@ -56,7 +53,7 @@ class app_function_identity_without_admin_privileges(Check):
if admin_roles_assigned:
report.status = "FAIL"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) has a managed identity enabled and it is configure with admin privileges using {'roles: ' + ', '.join(admin_roles_assigned) if len(admin_roles_assigned) > 1 else 'role ' + admin_roles_assigned[0]}."
report.status_extended = f"Function {function.name} has a managed identity enabled and it is configure with admin privileges using {'roles: ' + ', '.join(admin_roles_assigned) if len(admin_roles_assigned) > 1 else 'role ' + admin_roles_assigned[0]}."
findings.append(report)
@@ -7,20 +7,19 @@ class app_function_latest_runtime_version(Check):
findings = []
for (
subscription_id,
subscription_name,
functions,
) in app_client.functions.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for function in functions.values():
if function.enviroment_variables is not None:
report = Check_Report_Azure(
metadata=self.metadata(), resource=function
)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "PASS"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) is using the latest runtime."
report.status_extended = (
f"Function {function.name} is using the latest runtime."
)
if (
function.enviroment_variables.get(
@@ -29,7 +28,7 @@ class app_function_latest_runtime_version(Check):
!= "~4"
):
report.status = "FAIL"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) is not using the latest runtime. The current runtime is '{function.enviroment_variables.get('FUNCTIONS_EXTENSION_VERSION', '')}' and should be '~4'."
report.status_extended = f"Function {function.name} is not using the latest runtime. The current runtime is '{function.enviroment_variables.get('FUNCTIONS_EXTENSION_VERSION', '')}' and should be '~4'."
findings.append(report)
@@ -7,21 +7,22 @@ class app_function_not_publicly_accessible(Check):
findings = []
for (
subscription_id,
subscription_name,
functions,
) in app_client.functions.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for function in functions.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=function)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) is publicly accessible."
report.status_extended = (
f"Function {function.name} is publicly accessible."
)
if not function.public_access:
report.status = "PASS"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) is not publicly accessible."
report.status_extended = (
f"Function {function.name} is not publicly accessible."
)
findings.append(report)
@@ -7,21 +7,18 @@ class app_function_vnet_integration_enabled(Check):
findings = []
for (
subscription_id,
subscription_name,
functions,
) in app_client.functions.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for function in functions.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=function)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) does not have virtual network integration enabled."
report.status_extended = f"Function {function.name} does not have virtual network integration enabled."
if function.vnet_subnet_id:
report.status = "PASS"
report.status_extended = f"Function {function.name} from subscription {subscription_name} ({subscription_id}) has Virtual Network integration enabled with subnet '{function.vnet_subnet_id}' enabled."
report.status_extended = f"Function {function.name} has Virtual Network integration enabled with subnet '{function.vnet_subnet_id}' enabled."
findings.append(report)
@@ -6,28 +6,25 @@ class app_http_logs_enabled(Check):
def execute(self) -> Check_Report_Azure:
findings = []
for subscription_id, apps in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for subscription_name, apps in app_client.apps.items():
for app in apps.values():
if "functionapp" not in app.kind:
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
if not app.monitor_diagnostic_settings:
report.status_extended = f"App {app.name} does not have a diagnostic setting in subscription {subscription_name} ({subscription_id})."
report.status_extended = f"App {app.name} does not have a diagnostic setting in subscription {subscription_name}."
else:
for diagnostic_setting in app.monitor_diagnostic_settings:
report.status_extended = f"App {app.name} does not have HTTP Logs enabled in diagnostic setting {diagnostic_setting.name} in subscription {subscription_name} ({subscription_id})"
report.status_extended = f"App {app.name} does not have HTTP Logs enabled in diagnostic setting {diagnostic_setting.name} in subscription {subscription_name}"
for log in diagnostic_setting.logs:
if log.category == "AppServiceHTTPLogs" and log.enabled:
report.status = "PASS"
report.status_extended = f"App {app.name} has HTTP Logs enabled in diagnostic setting {diagnostic_setting.name} in subscription {subscription_name} ({subscription_id})"
report.status_extended = f"App {app.name} has HTTP Logs enabled in diagnostic setting {diagnostic_setting.name} in subscription {subscription_name}"
break
elif log.category_group == "allLogs" and log.enabled:
report.status = "PASS"
report.status_extended = f"App {app.name} has allLogs category group which includes HTTP Logs enabled in diagnostic setting {diagnostic_setting.name} in subscription {subscription_name} ({subscription_id})"
report.status_extended = f"App {app.name} has allLogs category group which includes HTTP Logs enabled in diagnostic setting {diagnostic_setting.name} in subscription {subscription_name}"
break
findings.append(report)
@@ -7,23 +7,20 @@ class app_minimum_tls_version_12(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "FAIL"
report.status_extended = f"Minimum TLS version is not set to 1.2 for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Minimum TLS version is not set to 1.2 for app '{app.name}' in subscription '{subscription_name}'."
if app.configurations and getattr(
app.configurations, "min_tls_version", ""
) in ["1.2", "1.3"]:
report.status = "PASS"
report.status_extended = f"Minimum TLS version is set to {app.configurations.min_tls_version} for app '{app.name}' in subscription '{subscription_name} ({subscription_id})'."
report.status_extended = f"Minimum TLS version is set to {app.configurations.min_tls_version} for app '{app.name}' in subscription '{subscription_name}'."
findings.append(report)
@@ -7,21 +7,18 @@ class app_register_with_identity(Check):
findings = []
for (
subscription_id,
subscription_name,
apps,
) in app_client.apps.items():
subscription_name = app_client.subscriptions.get(
subscription_id, subscription_id
)
for app in apps.values():
report = Check_Report_Azure(metadata=self.metadata(), resource=app)
report.subscription = subscription_id
report.subscription = subscription_name
report.status = "PASS"
report.status_extended = f"App '{app.name}' in subscription '{subscription_name} ({subscription_id})' has an identity configured."
report.status_extended = f"App '{app.name}' in subscription '{subscription_name}' has an identity configured."
if not app.identity:
report.status = "FAIL"
report.status_extended = f"App '{app.name}' in subscription '{subscription_name} ({subscription_id})' does not have an identity configured."
report.status_extended = f"App '{app.name}' in subscription '{subscription_name}' does not have an identity configured."
findings.append(report)
@@ -20,10 +20,10 @@ class App(AzureService):
logger.info("App - Getting apps...")
apps = {}
for subscription_id, client in self.clients.items():
for subscription_name, client in self.clients.items():
try:
apps_list = client.web_apps.list()
apps.update({subscription_id: {}})
apps.update({subscription_name: {}})
for app in apps_list:
# Filter function apps
@@ -41,7 +41,7 @@ class App(AzureService):
resource_group_name=app.resource_group, name=app.name
)
apps[subscription_id].update(
apps[subscription_name].update(
{
app.id: WebApp(
resource_id=app.id,
@@ -81,7 +81,7 @@ class App(AzureService):
getattr(app, "client_cert_mode", "Ignore"),
),
monitor_diagnostic_settings=self._get_app_monitor_settings(
app.name, app.resource_group, subscription_id
app.name, app.resource_group, subscription_name
),
https_only=getattr(app, "https_only", False),
identity=ManagedServiceIdentity(
@@ -106,7 +106,7 @@ class App(AzureService):
)
except Exception as error:
logger.error(
f"Subscription ID: {subscription_id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
f"Subscription name: {subscription_name} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return apps
@@ -115,17 +115,17 @@ class App(AzureService):
logger.info("Function - Getting functions...")
functions = {}
for subscription_id, client in self.clients.items():
for subscription_name, client in self.clients.items():
try:
functions_list = client.web_apps.list()
functions.update({subscription_id: {}})
functions.update({subscription_name: {}})
for function in functions_list:
# Filter function apps
if getattr(function, "kind", "").startswith("functionapp"):
# List host keys
host_keys = self._get_function_host_keys(
subscription_id, function.resource_group, function.name
subscription_name, function.resource_group, function.name
)
if host_keys is not None:
function_keys = getattr(host_keys, "function_keys", {})
@@ -133,16 +133,16 @@ class App(AzureService):
function_keys = None
application_settings = self._list_application_settings(
subscription_id, function.resource_group, function.name
subscription_name, function.resource_group, function.name
)
function_config = self._get_function_config(
subscription_id,
subscription_name,
function.resource_group,
function.name,
)
functions[subscription_id].update(
functions[subscription_name].update(
{
function.id: FunctionApp(
id=function.id,
@@ -175,7 +175,7 @@ class App(AzureService):
)
except Exception as error:
logger.error(
f"Subscription ID: {subscription_id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
f"Subscription name: {subscription_name} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return functions
@@ -200,13 +200,13 @@ class App(AzureService):
monitor_diagnostics_settings = []
try:
monitor_diagnostics_settings = monitor_client.diagnostic_settings_with_uri(
subscription,
f"subscriptions/{subscription}/resourceGroups/{resource_group}/providers/Microsoft.Web/sites/{app_name}",
self.subscriptions[subscription],
f"subscriptions/{self.subscriptions[subscription]}/resourceGroups/{resource_group}/providers/Microsoft.Web/sites/{app_name}",
monitor_client.clients[subscription],
)
except Exception as error:
logger.error(
f"Subscription ID: {self.subscription} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
f"Subscription name: {self.subscription} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return monitor_diagnostics_settings
@@ -8,20 +8,19 @@ class appinsights_ensure_is_configured(Check):
def execute(self) -> Check_Report_Azure:
findings = []
for subscription_id, components in appinsights_client.components.items():
subscription_name = appinsights_client.subscriptions.get(
subscription_id, subscription_id
)
for subscription_name, components in appinsights_client.components.items():
report = Check_Report_Azure(metadata=self.metadata(), resource={})
report.status = "PASS"
report.subscription = subscription_id
report.resource_name = subscription_id
report.resource_id = f"/subscriptions/{subscription_id}"
report.status_extended = f"There is at least one AppInsight configured in subscription {subscription_name} ({subscription_id})."
report.subscription = subscription_name
report.resource_name = subscription_name
report.resource_id = (
f"/subscriptions/{appinsights_client.subscriptions[subscription_name]}"
)
report.status_extended = f"There is at least one AppInsight configured in subscription {subscription_name}."
if len(components) < 1:
report.status = "FAIL"
report.status_extended = f"There are no AppInsight configured in subscription {subscription_name} ({subscription_id})."
report.status_extended = f"There are no AppInsight configured in subscription {subscription_name}."
findings.append(report)
@@ -15,13 +15,13 @@ class AppInsights(AzureService):
logger.info("AppInsights - Getting components...")
components = {}
for subscription_id, client in self.clients.items():
for subscription_name, client in self.clients.items():
try:
components_list = client.components.list()
components.update({subscription_id: {}})
components.update({subscription_name: {}})
for component in components_list:
components[subscription_id].update(
components[subscription_name].update(
{
component.app_id: Component(
resource_id=component.id,
@@ -35,7 +35,7 @@ class AppInsights(AzureService):
)
except Exception as error:
logger.error(
f"Subscription ID: {subscription_id} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
f"Subscription name: {subscription_name} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return components
@@ -9,20 +9,17 @@ class containerregistry_admin_user_disabled(Check):
findings = []
for subscription, registries in containerregistry_client.registries.items():
subscription_name = containerregistry_client.subscriptions.get(
subscription, subscription
)
for container_registry_info in registries.values():
report = Check_Report_Azure(
metadata=self.metadata(), resource=container_registry_info
)
report.subscription = subscription
report.status = "FAIL"
report.status_extended = f"Container Registry {container_registry_info.name} from subscription {subscription_name} ({subscription}) has its admin user enabled."
report.status_extended = f"Container Registry {container_registry_info.name} from subscription {subscription} has its admin user enabled."
if not container_registry_info.admin_user_enabled:
report.status = "PASS"
report.status_extended = f"Container Registry {container_registry_info.name} from subscription {subscription_name} ({subscription}) has its admin user disabled."
report.status_extended = f"Container Registry {container_registry_info.name} from subscription {subscription} has its admin user disabled."
findings.append(report)

Some files were not shown because too many files have changed in this diff Show More