Compare commits
43 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 5e2962f753 | |||
| ee71d26c8b | |||
| 0591ea0c8d | |||
| 481b44e606 | |||
| b2ed9ee221 | |||
| def2d3d188 | |||
| 1090ed59b7 | |||
| 67e4b1a082 | |||
| 7478ec9420 | |||
| a30b6623ed | |||
| 15bc317ec4 | |||
| 1536102784 | |||
| 1b99550572 | |||
| 6dfa135755 | |||
| 80482da1cb | |||
| 9cedbd3582 | |||
| c3d1c5c5f7 | |||
| 1fd6c51af6 | |||
| adbfc0bcd1 | |||
| 8f041f6f52 | |||
| 1b0e12ec51 | |||
| 759f7b84d6 | |||
| 0b26c1a39c | |||
| fc7fbddfe7 | |||
| 500b395125 | |||
| a1961d6d5f | |||
| a7e988c361 | |||
| 02cdcb29db | |||
| 6e0d7866cd | |||
| 4b71f37c91 | |||
| cdfbe5b2e3 | |||
| 1b6a459df4 | |||
| 73c0305dc4 | |||
| 0e01e67257 | |||
| 1ad329f9cf | |||
| d03d1d2393 | |||
| 832516be2a | |||
| 34727a7237 | |||
| 4216a3e23a | |||
| a59192e6f5 | |||
| 592bc6f6a8 | |||
| 962ebac8e4 | |||
| 2c5d47a8cd |
@@ -145,7 +145,7 @@ SENTRY_RELEASE=local
|
||||
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT}
|
||||
|
||||
#### Prowler release version ####
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.26.0
|
||||
NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.27.0
|
||||
|
||||
# Social login credentials
|
||||
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: [prowler-cloud]
|
||||
# patreon: # Replace with a single Patreon username
|
||||
# open_collective: # Replace with a single Open Collective username
|
||||
# ko_fi: # Replace with a single Ko-fi username
|
||||
# tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||
# community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
# liberapay: # Replace with a single Liberapay username
|
||||
# issuehunt: # Replace with a single IssueHunt username
|
||||
# lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
|
||||
# polar: # Replace with a single Polar username
|
||||
# buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
|
||||
# thanks_dev: # Replace with a single thanks.dev username
|
||||
# custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
||||
@@ -1,14 +1,14 @@
|
||||
name: 'UI: Tests'
|
||||
name: "UI: Tests"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
- "master"
|
||||
- "v5.*"
|
||||
pull_request:
|
||||
branches:
|
||||
- 'master'
|
||||
- 'v5.*'
|
||||
- "master"
|
||||
- "v5.*"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
@@ -16,7 +16,7 @@ concurrency:
|
||||
|
||||
env:
|
||||
UI_WORKING_DIR: ./ui
|
||||
NODE_VERSION: '24.13.0'
|
||||
NODE_VERSION: "24.13.0"
|
||||
|
||||
permissions: {}
|
||||
|
||||
@@ -42,6 +42,9 @@ jobs:
|
||||
fonts.gstatic.com:443
|
||||
api.github.com:443
|
||||
release-assets.githubusercontent.com:443
|
||||
cdn.playwright.dev:443
|
||||
objects.githubusercontent.com:443
|
||||
playwright.download.prss.microsoft.com:443
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
@@ -133,7 +136,7 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed == 'true'
|
||||
run: |
|
||||
echo "Critical paths changed - running ALL unit tests"
|
||||
pnpm run test:run
|
||||
pnpm run test:unit
|
||||
|
||||
- name: Run unit tests (related to changes only)
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed != 'true' && steps.changed-source.outputs.all_changed_files != ''
|
||||
@@ -142,7 +145,7 @@ jobs:
|
||||
echo "${STEPS_CHANGED_SOURCE_OUTPUTS_ALL_CHANGED_FILES}"
|
||||
# Convert space-separated to vitest related format (remove ui/ prefix for relative paths)
|
||||
CHANGED_FILES=$(echo "${STEPS_CHANGED_SOURCE_OUTPUTS_ALL_CHANGED_FILES}" | tr ' ' '\n' | sed 's|^ui/||' | tr '\n' ' ')
|
||||
pnpm exec vitest related $CHANGED_FILES --run
|
||||
pnpm exec vitest related $CHANGED_FILES --run --project unit
|
||||
env:
|
||||
STEPS_CHANGED_SOURCE_OUTPUTS_ALL_CHANGED_FILES: ${{ steps.changed-source.outputs.all_changed_files }}
|
||||
|
||||
@@ -150,7 +153,25 @@ jobs:
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.critical-changes.outputs.any_changed != 'true' && steps.changed-source.outputs.all_changed_files == ''
|
||||
run: |
|
||||
echo "Only test files changed - running ALL unit tests"
|
||||
pnpm run test:run
|
||||
pnpm run test:unit
|
||||
|
||||
- name: Cache Playwright browsers
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
id: playwright-cache
|
||||
uses: actions/cache@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4
|
||||
with:
|
||||
path: ~/.cache/ms-playwright
|
||||
key: ${{ runner.os }}-playwright-chromium-${{ hashFiles('ui/pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-playwright-chromium-
|
||||
|
||||
- name: Install Playwright Chromium browser
|
||||
if: steps.check-changes.outputs.any_changed == 'true' && steps.playwright-cache.outputs.cache-hit != 'true'
|
||||
run: pnpm exec playwright install chromium
|
||||
|
||||
- name: Run browser tests
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
run: pnpm run test:browser
|
||||
|
||||
- name: Build application
|
||||
if: steps.check-changes.outputs.any_changed == 'true'
|
||||
|
||||
@@ -44,7 +44,9 @@ repos:
|
||||
rev: v1.24.1
|
||||
hooks:
|
||||
- id: zizmor
|
||||
files: ^\.github/
|
||||
# zizmor only audits workflows, composite actions and dependabot
|
||||
# config; broader paths trip exit 3 ("no audit was performed").
|
||||
files: ^\.github/(workflows|actions)/.+\.ya?ml$|^\.github/dependabot\.ya?ml$
|
||||
priority: 30
|
||||
|
||||
## BASH
|
||||
|
||||
@@ -1,11 +1,34 @@
|
||||
# Do you want to learn on how to...
|
||||
|
||||
- Contribute with your code or fixes to Prowler
|
||||
- Create a new check for a provider
|
||||
- Create a new security compliance framework
|
||||
- Add a custom output format
|
||||
- Add a new integration
|
||||
- Contribute with documentation
|
||||
- [Contribute with your code or fixes to Prowler](https://docs.prowler.com/developer-guide/introduction)
|
||||
- [Create a new provider](https://docs.prowler.com/developer-guide/provider)
|
||||
- [Create a new service](https://docs.prowler.com/developer-guide/services)
|
||||
- [Create a new check for a provider](https://docs.prowler.com/developer-guide/checks)
|
||||
- [Create a new security compliance framework](https://docs.prowler.com/developer-guide/security-compliance-framework)
|
||||
- [Add a custom output format](https://docs.prowler.com/developer-guide/outputs)
|
||||
- [Add a new integration](https://docs.prowler.com/developer-guide/integrations)
|
||||
- [Contribute with documentation](https://docs.prowler.com/developer-guide/documentation)
|
||||
- [Write unit tests](https://docs.prowler.com/developer-guide/unit-testing)
|
||||
- [Write integration tests](https://docs.prowler.com/developer-guide/integration-testing)
|
||||
- [Write end-to-end tests](https://docs.prowler.com/developer-guide/end2end-testing)
|
||||
- [Debug Prowler](https://docs.prowler.com/developer-guide/debugging)
|
||||
- [Configure checks](https://docs.prowler.com/developer-guide/configurable-checks)
|
||||
- [Rename checks](https://docs.prowler.com/developer-guide/renaming-checks)
|
||||
- [Follow the check metadata guidelines](https://docs.prowler.com/developer-guide/check-metadata-guidelines)
|
||||
- [Extend the MCP server](https://docs.prowler.com/developer-guide/mcp-server)
|
||||
- [Extend Lighthouse AI](https://docs.prowler.com/developer-guide/lighthouse-architecture)
|
||||
- [Add AI skills](https://docs.prowler.com/developer-guide/ai-skills)
|
||||
|
||||
Provider-specific developer notes:
|
||||
|
||||
- [AWS](https://docs.prowler.com/developer-guide/aws-details)
|
||||
- [Azure](https://docs.prowler.com/developer-guide/azure-details)
|
||||
- [Google Cloud](https://docs.prowler.com/developer-guide/gcp-details)
|
||||
- [Alibaba Cloud](https://docs.prowler.com/developer-guide/alibabacloud-details)
|
||||
- [Kubernetes](https://docs.prowler.com/developer-guide/kubernetes-details)
|
||||
- [Microsoft 365](https://docs.prowler.com/developer-guide/m365-details)
|
||||
- [GitHub](https://docs.prowler.com/developer-guide/github-details)
|
||||
- [LLM](https://docs.prowler.com/developer-guide/llm-details)
|
||||
|
||||
Want some swag as appreciation for your contribution?
|
||||
|
||||
|
||||
@@ -2,14 +2,36 @@
|
||||
|
||||
All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
## [1.27.0] (Prowler UNRELEASED)
|
||||
## [1.28.0] (Prowler UNRELEASED)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- GIN index on `findings(categories, resource_services, resource_regions, resource_types)` to speed up `/api/v1/finding-groups` array filters [(#11001)](https://github.com/prowler-cloud/prowler/pull/11001)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- Remove orphaned `gin_resources_search_idx` declaration from `Resource.Meta.indexes` (DB index dropped in `0072_drop_unused_indexes`) [(#11001)](https://github.com/prowler-cloud/prowler/pull/11001)
|
||||
|
||||
---
|
||||
|
||||
## [1.27.1] (Prowler v5.26.1)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- `POST /api/v1/scans` was intermittently failing with `Scan matching query does not exist` in the `scan-perform` worker; the Celery task is now published via `transaction.on_commit` so the worker cannot read the Scan before the dispatch-wide transaction commits [(#11122)](https://github.com/prowler-cloud/prowler/pull/11122)
|
||||
|
||||
---
|
||||
|
||||
## [1.27.0] (Prowler v5.26.0)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- New `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929)
|
||||
- ASD Essential Eight (AWS) compliance framework support [(#10982)](https://github.com/prowler-cloud/prowler/pull/10982)
|
||||
- `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- ASD Essential Eight (AWS) compliance framework support [(#10982)](https://github.com/prowler-cloud/prowler/pull/10982)
|
||||
|
||||
### 🔐 Security
|
||||
|
||||
- `trivy` binary from 0.69.2 to 0.70.0 and `cryptography` from 46.0.6 to 46.0.7 (transitive via prowler SDK) in the API image for CVE-2026-33186 and CVE-2026-39892 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)
|
||||
|
||||
@@ -50,7 +50,7 @@ name = "prowler-api"
|
||||
package-mode = false
|
||||
# Needed for the SDK compatibility
|
||||
requires-python = ">=3.11,<3.13"
|
||||
version = "1.27.0"
|
||||
version = "1.28.0"
|
||||
|
||||
[project.scripts]
|
||||
celery = "src.backend.config.settings.celery"
|
||||
|
||||
@@ -0,0 +1,31 @@
|
||||
from functools import partial

from django.db import migrations

from api.db_utils import create_index_on_partitions, drop_index_on_partitions


class Migration(migrations.Migration):
    """Create a composite GIN index on every existing ``findings`` partition.

    Delegates the per-partition work to ``api.db_utils`` helpers via
    ``RunPython``; the reverse operation drops the same index from the
    partitions.
    """

    # NOTE(review): presumably disabled so the helper can manage its own
    # per-partition transactions / avoid one long-running transaction while
    # indexing every partition — confirm against create_index_on_partitions.
    atomic = False

    dependencies = [
        ("api", "0090_attack_paths_cleanup_priority"),
    ]

    operations = [
        migrations.RunPython(
            # Forward: build the GIN index on all partitions of "findings".
            partial(
                create_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_arrays_idx",
                columns="categories, resource_services, resource_regions, resource_types",
                method="GIN",
                all_partitions=True,
            ),
            # Reverse: drop that same index from the partitions.
            reverse_code=partial(
                drop_index_on_partitions,
                parent_table="findings",
                index_name="gin_find_arrays_idx",
            ),
        )
    ]
|
||||
@@ -0,0 +1,73 @@
|
||||
import django.contrib.postgres.indexes
from django.db import migrations

# Name of the composite array GIN index and the partitioned table it targets.
INDEX_NAME = "gin_find_arrays_idx"
PARENT_TABLE = "findings"


def create_parent_and_attach(apps, schema_editor):
    """Create the parent (partitioned) GIN index and attach each partition's index.

    Uses raw SQL because Django's schema editor cannot express
    ``CREATE INDEX ... ON ONLY`` (a parent-only index on a partitioned table)
    nor ``ALTER INDEX ... ATTACH PARTITION``.
    """
    with schema_editor.connection.cursor() as cursor:
        # Idempotent: the parent index may already exist if it was created
        # manually on an environment before this migration ran.
        cursor.execute(
            f"CREATE INDEX IF NOT EXISTS {INDEX_NAME} ON ONLY {PARENT_TABLE} "
            f"USING gin (categories, resource_services, resource_regions, resource_types)"
        )
        # Enumerate every partition of the parent table via pg_inherits.
        cursor.execute(
            "SELECT inhrelid::regclass::text "
            "FROM pg_inherits "
            "WHERE inhparent = %s::regclass",
            [PARENT_TABLE],
        )
        for (partition,) in cursor.fetchall():
            # Assumes the per-partition indexes were named
            # "<partition>_<INDEX_NAME>" (schema-qualified dots flattened to
            # underscores) — TODO confirm against the naming used by the
            # previous migration's create_index_on_partitions helper.
            child_idx = f"{partition.replace('.', '_')}_{INDEX_NAME}"
            # ALTER INDEX ... ATTACH PARTITION has no IF NOT ATTACHED clause,
            # so check pg_inherits first to keep the migration re-runnable.
            cursor.execute(
                """
                SELECT 1
                FROM pg_inherits i
                JOIN pg_class p ON p.oid = i.inhparent
                JOIN pg_class c ON c.oid = i.inhrelid
                WHERE p.relname = %s AND c.relname = %s
                """,
                [INDEX_NAME, child_idx],
            )
            if cursor.fetchone() is None:
                cursor.execute(f"ALTER INDEX {INDEX_NAME} ATTACH PARTITION {child_idx}")


def drop_parent_index(apps, schema_editor):
    """Reverse operation: drop the parent partitioned index if it exists."""
    with schema_editor.connection.cursor() as cursor:
        cursor.execute(f"DROP INDEX IF EXISTS {INDEX_NAME}")


class Migration(migrations.Migration):
    """Register the findings array GIN index in Django state.

    ``SeparateDatabaseAndState`` is used so Django's model state records the
    ``AddIndex`` while the database side only creates the parent index shell
    and attaches the partition indexes built by the previous migration —
    avoiding a full index rebuild.
    """

    dependencies = [
        ("api", "0091_findings_arrays_gin_index_partitions"),
    ]

    operations = [
        migrations.SeparateDatabaseAndState(
            # State: tell Django the Finding model now carries this GinIndex.
            state_operations=[
                migrations.AddIndex(
                    model_name="finding",
                    index=django.contrib.postgres.indexes.GinIndex(
                        fields=[
                            "categories",
                            "resource_services",
                            "resource_regions",
                            "resource_types",
                        ],
                        name=INDEX_NAME,
                    ),
                ),
            ],
            # Database: create the parent index and attach existing children.
            database_operations=[
                migrations.RunPython(
                    create_parent_and_attach,
                    reverse_code=drop_parent_index,
                ),
            ],
        ),
    ]
|
||||
@@ -946,7 +946,6 @@ class Resource(RowLevelSecurityProtectedModel):
|
||||
OpClass(Upper("name"), name="gin_trgm_ops"),
|
||||
name="res_name_trgm_idx",
|
||||
),
|
||||
GinIndex(fields=["text_search"], name="gin_resources_search_idx"),
|
||||
models.Index(fields=["tenant_id", "id"], name="resources_tenant_id_idx"),
|
||||
models.Index(
|
||||
fields=["tenant_id", "provider_id"],
|
||||
@@ -1152,6 +1151,15 @@ class Finding(PostgresPartitionedModel, RowLevelSecurityProtectedModel):
|
||||
fields=["tenant_id", "scan_id", "check_id"],
|
||||
name="find_tenant_scan_check_idx",
|
||||
),
|
||||
GinIndex(
|
||||
fields=[
|
||||
"categories",
|
||||
"resource_services",
|
||||
"resource_regions",
|
||||
"resource_types",
|
||||
],
|
||||
name="gin_find_arrays_idx",
|
||||
),
|
||||
]
|
||||
|
||||
class JSONAPIMeta:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
openapi: 3.0.3
|
||||
info:
|
||||
title: Prowler API
|
||||
version: 1.27.0
|
||||
version: 1.28.0
|
||||
description: |-
|
||||
Prowler API specification.
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import uuid
|
||||
from collections import defaultdict
|
||||
from copy import deepcopy
|
||||
from datetime import datetime, timedelta, timezone
|
||||
@@ -16,7 +17,7 @@ from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
|
||||
from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
|
||||
from allauth.socialaccount.providers.saml.views import FinishACSView, LoginView
|
||||
from botocore.exceptions import ClientError, NoCredentialsError, ParamValidationError
|
||||
from celery import chain
|
||||
from celery import chain, states
|
||||
from celery.result import AsyncResult
|
||||
from config.custom_logging import BackendLogger
|
||||
from config.env import env
|
||||
@@ -60,6 +61,7 @@ from django.utils.dateparse import parse_date
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.cache import cache_control
|
||||
from django_celery_beat.models import PeriodicTask
|
||||
from django_celery_results.models import TaskResult
|
||||
from drf_spectacular.settings import spectacular_settings
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import (
|
||||
@@ -422,7 +424,7 @@ class SchemaView(SpectacularAPIView):
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
spectacular_settings.TITLE = "Prowler API"
|
||||
spectacular_settings.VERSION = "1.27.0"
|
||||
spectacular_settings.VERSION = "1.28.0"
|
||||
spectacular_settings.DESCRIPTION = (
|
||||
"Prowler API specification.\n\nThis file is auto-generated."
|
||||
)
|
||||
@@ -2534,28 +2536,45 @@ class ScanViewSet(BaseRLSViewSet):
|
||||
def create(self, request, *args, **kwargs):
|
||||
input_serializer = self.get_serializer(data=request.data)
|
||||
input_serializer.is_valid(raise_exception=True)
|
||||
|
||||
# Broker publish is deferred to on_commit so the worker cannot read
|
||||
# Scan before BaseRLSViewSet's dispatch-wide atomic commits.
|
||||
pre_task_id = str(uuid.uuid4())
|
||||
|
||||
with transaction.atomic():
|
||||
scan = input_serializer.save()
|
||||
with transaction.atomic():
|
||||
task = perform_scan_task.apply_async(
|
||||
kwargs={
|
||||
"tenant_id": self.request.tenant_id,
|
||||
"scan_id": str(scan.id),
|
||||
"provider_id": str(scan.provider_id),
|
||||
# Disabled for now
|
||||
# checks_to_execute=scan.scanner_args.get("checks_to_execute")
|
||||
},
|
||||
scan.task_id = pre_task_id
|
||||
scan.save(update_fields=["task_id"])
|
||||
|
||||
attack_paths_db_utils.create_attack_paths_scan(
|
||||
tenant_id=self.request.tenant_id,
|
||||
scan_id=str(scan.id),
|
||||
provider_id=str(scan.provider_id),
|
||||
)
|
||||
|
||||
attack_paths_db_utils.create_attack_paths_scan(
|
||||
tenant_id=self.request.tenant_id,
|
||||
scan_id=str(scan.id),
|
||||
provider_id=str(scan.provider_id),
|
||||
)
|
||||
task_result, _ = TaskResult.objects.get_or_create(
|
||||
task_id=pre_task_id,
|
||||
defaults={"status": states.PENDING, "task_name": "scan-perform"},
|
||||
)
|
||||
prowler_task, _ = Task.objects.update_or_create(
|
||||
id=pre_task_id,
|
||||
tenant_id=self.request.tenant_id,
|
||||
defaults={"task_runner_task": task_result},
|
||||
)
|
||||
|
||||
prowler_task = Task.objects.get(id=task.id)
|
||||
scan.task_id = task.id
|
||||
scan.save(update_fields=["task_id"])
|
||||
scan_kwargs = {
|
||||
"tenant_id": self.request.tenant_id,
|
||||
"scan_id": str(scan.id),
|
||||
"provider_id": str(scan.provider_id),
|
||||
# Disabled for now
|
||||
# checks_to_execute=scan.scanner_args.get("checks_to_execute")
|
||||
}
|
||||
|
||||
transaction.on_commit(
|
||||
lambda: perform_scan_task.apply_async(
|
||||
kwargs=scan_kwargs, task_id=pre_task_id
|
||||
)
|
||||
)
|
||||
|
||||
self.response_serializer_class = TaskSerializer
|
||||
output_serializer = self.get_serializer(prowler_task)
|
||||
|
||||
@@ -2,47 +2,378 @@
|
||||
title: 'Creating a New Security Compliance Framework in Prowler'
|
||||
---
|
||||
|
||||
This guide explains how to add a new security compliance framework to Prowler, end to end. It covers directory layout, the JSON schema, check mapping conventions, the Pydantic models that validate each framework, the CSV output formatter, local validation, testing, and the pull request process.
|
||||
|
||||
## Introduction
|
||||
|
||||
To create or contribute a custom security framework for Prowler—or to integrate a public framework—you must ensure the necessary checks are available. If they are missing, they must be implemented before proceeding.
|
||||
A compliance framework in Prowler maps a public or custom control catalog (for example CIS, NIST 800-53, PCI DSS, HIPAA, ENS, CCC) to the security checks that Prowler already runs. Each requirement links to zero, one or more Prowler checks. When a scan executes, findings are aggregated per requirement to produce the compliance report rendered by Prowler CLI and Prowler Cloud.
|
||||
|
||||
Each framework is defined in a compliance file per provider. The file should follow the structure used in `prowler/compliance/<provider>/` and be named `<framework>_<version>_<provider>.json`. Follow the format below to create your own.
|
||||
Prowler ships with 85+ compliance frameworks across all providers. The catalog lives under `prowler/compliance/<provider>/` (or `prowler/compliance/` for universal compliance frameworks).
|
||||
|
||||
## Compliance Framework
|
||||
<Warning>
|
||||
A compliance framework must represent the **complete state** of the source catalog. Every requirement defined by the framework has to be present in the JSON file, even when none of the existing Prowler checks can automate it. In that case, leave `Checks` as an empty array, but do not omit the requirement.
|
||||
|
||||
### Compliance Framework Structure
|
||||
Requirement coverage feeds the compliance percentage calculations and the metadata surfaces (dashboards, widgets, exports). Missing requirements skew those metrics and break the report as a faithful snapshot of the framework.
|
||||
</Warning>
|
||||
|
||||
Each compliance framework file consists of structured metadata that identifies the framework and maps security checks to requirements or controls. Please note that a single requirement can be linked to multiple Prowler checks:
|
||||
### Prerequisites
|
||||
|
||||
- `Framework`: string – The distinguished name of the framework (e.g., CIS).
|
||||
- `Provider`: string – The cloud provider where the framework applies (AWS, Azure, OCI).
|
||||
- `Version`: string – The framework version (e.g., 1.4 for CIS).
|
||||
- `Requirements`: array of objects. – Defines security requirements and their mapping to Prowler checks. All requirements or controls are to be included with the mapping to Prowler.
|
||||
- `Requirements_Id`: string – A unique identifier for each requirement within the framework
|
||||
- `Requirements_Description`: string – The requirement description as specified in the framework.
|
||||
- `Requirements_Attributes`: array of objects. – Contains relevant metadata such as security levels, sections, and any additional data needed for reporting with the result of the findings. Attributes should be derived directly from the framework’s own terminology, ensuring consistency with its established definitions.
|
||||
- `Requirements_Checks`: array. The Prowler checks that are needed to prove this requirement. It can be one or multiple checks. In case automation is not feasible, this can be empty.
|
||||
Before adding a new framework, complete the following checks:
|
||||
|
||||
- **Verify the framework is not already supported.** Inspect `prowler/compliance/<provider>/` for an existing JSON file matching the name and version.
|
||||
- **Confirm the required checks exist.** Every requirement that can be automated must point to one or more existing Prowler checks. For each missing check, implement it first by following the [Prowler Checks](/developer-guide/checks) guide.
|
||||
- **Review a reference framework.** Use an existing framework with a similar structure as your template. `cis_2.0_aws.json` is the canonical reference for CIS-style frameworks. `ccc_aws.json`, `ens_rd2022_aws.json`, and `nist_800_53_revision_5_aws.json` illustrate other attribute shapes.
|
||||
|
||||
## Four-Layer Architecture
|
||||
|
||||
A compliance framework spans four layers. A complete contribution must touch each layer that applies.
|
||||
|
||||
- **Layer 1 – Schema validation:** The Pydantic models in `prowler/lib/check/compliance_models.py` define the canonical schema for each attribute shape (CIS, ENS, Mitre, CCC, C5, CSA CCM, ISO 27001, KISA ISMS-P, AWS Well-Architected, Prowler ThreatScore, and a generic fallback).
|
||||
- **Layer 2 – JSON catalog:** The framework JSON file in `prowler/compliance/<provider>/` lists every requirement and maps it to checks.
|
||||
- **Layer 3 – Output formatter:** The Python module in `prowler/lib/outputs/compliance/<framework>/` builds the CSV row model, the per-provider transformer, and the CLI summary table.
|
||||
- **Layer 4 – Output dispatchers:** The dispatchers in `prowler/lib/outputs/compliance/compliance.py` and `prowler/lib/outputs/compliance/compliance_output.py` route findings to the right formatter based on the framework identifier.
|
||||
|
||||
The rest of this guide walks each layer in order.
|
||||
|
||||
## Directory Structure and File Naming
|
||||
|
||||
Compliance frameworks live at:
|
||||
|
||||
```
|
||||
prowler/compliance/<provider>/<framework>_<version>_<provider>.json
|
||||
```
|
||||
|
||||
The filename conventions are:
|
||||
|
||||
- All lowercase, words separated with underscores.
|
||||
- `<provider>` is a supported provider identifier: `aws`, `azure`, `gcp`, `kubernetes`, `m365`, `github`, `googleworkspace`, `alibabacloud`, `oraclecloud`, `cloudflare`, `mongodbatlas`, `nhn`, `openstack`, `iac`, `llm`.
|
||||
- `<version>` is optional. Omit it when the framework has no versioning, as in `ccc_aws.json`.
|
||||
- The file basename (without `.json`) is the framework key that Prowler CLI accepts via `--compliance`.
|
||||
|
||||
Examples:
|
||||
|
||||
- `prowler/compliance/aws/cis_2.0_aws.json`
|
||||
- `prowler/compliance/aws/nist_800_53_revision_5_aws.json`
|
||||
- `prowler/compliance/azure/ens_rd2022_azure.json`
|
||||
- `prowler/compliance/kubernetes/cis_1.10_kubernetes.json`
|
||||
- `prowler/compliance/aws/ccc_aws.json`
|
||||
|
||||
The output formatter directory mirrors the framework name:
|
||||
|
||||
```
|
||||
prowler/lib/outputs/compliance/<framework>/
|
||||
├── <framework>.py # CLI summary-table dispatcher
|
||||
├── <framework>_<provider>.py # Per-provider transformer class
|
||||
├── models.py # Pydantic CSV row model
|
||||
└── __init__.py
|
||||
```
|
||||
|
||||
## JSON Schema Reference
|
||||
|
||||
Every compliance file is a JSON document with the following top-level keys.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|---|---|---|---|
|
||||
| `Framework` | string | Yes | Canonical framework identifier, for example `CIS`, `NIST-800-53-Revision-5`, `ENS`, `CCC`. |
|
||||
| `Name` | string | Yes | Human-readable framework name displayed by Prowler App. |
|
||||
| `Version` | string | Yes | Framework version, for example `2.0`. Use an empty string only for frameworks without versioning. See [Version Handling](#version-handling). |
|
||||
| `Provider` | string | Yes | Upper-cased provider identifier: `AWS`, `AZURE`, `GCP`, `KUBERNETES`, `M365`, `GITHUB`, `GOOGLEWORKSPACE`, and so on. |
|
||||
| `Description` | string | Yes | Short description of the framework's scope and purpose. |
|
||||
| `Requirements` | array | Yes | List of [requirement objects](#requirement-object). |
|
||||
|
||||
### Requirement Object
|
||||
|
||||
Each entry in `Requirements` describes one control or requirement.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|---|---|---|---|
|
||||
| `Id` | string | Yes | Unique identifier within the framework, for example `1.10` or `CCC.Core.CN01.AR01`. |
|
||||
| `Name` | string | No | Optional human-readable name used by frameworks that distinguish control name from description, such as NIST. |
|
||||
| `Description` | string | Yes | Verbatim description from the source framework. |
|
||||
| `Attributes` | array | Yes | List of [attribute objects](#attribute-objects). The shape depends on the framework. |
|
||||
| `Checks` | array of strings | Yes | Prowler check identifiers that automate the requirement. Leave the list empty when the control cannot be automated. |
|
||||
|
||||
### Attribute Objects
|
||||
|
||||
Attributes carry the metadata that Prowler App and the CSV output display for each requirement. The object shape is framework-specific and is validated by a dedicated Pydantic model in `prowler/lib/check/compliance_models.py`. The most common shapes are summarized below.
|
||||
|
||||
#### CIS_Requirement_Attribute
|
||||
|
||||
Used by every CIS benchmark.
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `Section` | string | Yes | Top-level section, for example `1 Identity and Access Management`. |
|
||||
| `SubSection` | string | No | Optional second-level grouping. |
|
||||
| `Profile` | enum | Yes | One of `Level 1`, `Level 2`, `E3 Level 1`, `E3 Level 2`, `E5 Level 1`, `E5 Level 2`. |
|
||||
| `AssessmentStatus` | enum | Yes | `Manual` or `Automated`. |
|
||||
| `Description` | string | Yes | Control description. |
|
||||
| `RationaleStatement` | string | Yes | Reason the control exists. |
|
||||
| `ImpactStatement` | string | Yes | Impact of non-compliance. |
|
||||
| `RemediationProcedure` | string | Yes | Remediation steps. |
|
||||
| `AuditProcedure` | string | Yes | Audit steps. |
|
||||
| `AdditionalInformation` | string | Yes | Free-form notes. |
|
||||
| `DefaultValue` | string | No | Default configuration value, when relevant. |
|
||||
| `References` | string | Yes | Colon-separated list of reference URLs. |
|
||||
|
||||
#### ENS_Requirement_Attribute
|
||||
|
||||
Used by the Spanish ENS (Esquema Nacional de Seguridad) frameworks.
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `IdGrupoControl` | string | Yes | Control group identifier. |
|
||||
| `Marco` | string | Yes | Framework block (`operacional`, `organizativo`, `proteccion`). |
|
||||
| `Categoria` | string | Yes | Control category. |
|
||||
| `DescripcionControl` | string | Yes | Control description in Spanish. |
|
||||
| `Tipo` | enum | Yes | `refuerzo`, `requisito`, `recomendacion`, `medida`. |
|
||||
| `Nivel` | enum | Yes | `opcional`, `bajo`, `medio`, `alto`. |
|
||||
| `Dimensiones` | array of enum | Yes | Subset of `confidencialidad`, `integridad`, `trazabilidad`, `autenticidad`, `disponibilidad`. |
|
||||
| `ModoEjecucion` | string | Yes | Execution mode (`manual`, `automático`, `híbrido`). |
|
||||
| `Dependencias` | array of strings | Yes | Ids of prerequisite controls. Empty list when none. |
|
||||
|
||||
#### CCC_Requirement_Attribute
|
||||
|
||||
Used by the Common Cloud Controls Catalog.
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `FamilyName` | string | Yes | Control family, for example `Data`. |
|
||||
| `FamilyDescription` | string | Yes | Description of the family. |
|
||||
| `Section` | string | Yes | Section title. |
|
||||
| `SubSection` | string | Yes | Subsection title, or empty string. |
|
||||
| `SubSectionObjective` | string | Yes | Stated objective for the subsection. |
|
||||
| `Applicability` | array of strings | Yes | Applicability tags such as `tlp-green`, `tlp-amber`, `tlp-red`. |
|
||||
| `Recommendation` | string | Yes | Implementation recommendation. |
|
||||
| `SectionThreatMappings` | array of objects | Yes | Each entry has `ReferenceId` and `Identifiers`. |
|
||||
| `SectionGuidelineMappings` | array of objects | Yes | Each entry has `ReferenceId` and `Identifiers`. |
|
||||
|
||||
#### Generic_Compliance_Requirement_Attribute
|
||||
|
||||
The fallback attribute model used when no framework-specific schema applies (for example NIST 800-53, PCI DSS, GDPR, HIPAA).
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `ItemId` | string | No | Item identifier. |
|
||||
| `Section` | string | No | Section name. |
|
||||
| `SubSection` | string | No | Subsection name. |
|
||||
| `SubGroup` | string | No | Subgroup name. |
|
||||
| `Service` | string | No | Affected service, for example `aws`, `iam`. |
|
||||
| `Type` | string | No | Control type. |
|
||||
| `Comment` | string | No | Free-form comment. |
|
||||
|
||||
Additional per-framework attribute models exist for `AWS_Well_Architected_Requirement_Attribute`, `ISO27001_2013_Requirement_Attribute`, `Mitre_Requirement_Attribute_<Provider>`, `KISA_ISMSP_Requirement_Attribute`, `Prowler_ThreatScore_Requirement_Attribute`, `C5Germany_Requirement_Attribute`, and `CSA_CCM_Requirement_Attribute`. Consult `prowler/lib/check/compliance_models.py` for their full field sets.
|
||||
|
||||
<Note>
|
||||
The `Attributes` field is a Pydantic `Union`. The generic attribute model must remain the last element of that Union, otherwise Pydantic v1 silently coerces every framework into the generic shape and your specialized fields are dropped.
|
||||
</Note>
|
||||
|
||||
## Minimal Working Example
|
||||
|
||||
The following snippet is a complete, valid framework file named `my_framework_1.0_aws.json`, saved at `prowler/compliance/aws/my_framework_1.0_aws.json`. It uses the generic attribute shape for simplicity.
|
||||
|
||||
```json title="prowler/compliance/aws/my_framework_1.0_aws.json"
{
  "Framework": "My-Framework",
  "Name": "My Framework 1.0 for AWS",
  "Version": "1.0",
  "Provider": "AWS",
  "Description": "Internal baseline for AWS accounts.",
  "Requirements": [
    {
      "Id": "MF-1.1",
      "Description": "Root account must have multi-factor authentication enabled.",
      "Attributes": [
        {
          "ItemId": "MF-1.1",
          "Section": "Identity and Access Management",
          "SubSection": "Root Account",
          "Service": "iam"
        }
      ],
      "Checks": [
        "iam_root_mfa_enabled",
        "iam_root_hardware_mfa_enabled"
      ]
    },
    {
      "Id": "MF-2.1",
      "Description": "S3 buckets must block public access at the account level.",
      "Attributes": [
        {
          "ItemId": "MF-2.1",
          "Section": "Data Protection",
          "Service": "s3"
        }
      ],
      "Checks": [
        "s3_account_level_public_access_blocks"
      ]
    }
  ]
}
```
|
||||
|
||||
Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`. Also, you need to add a new conditional in `prowler/lib/outputs/file_descriptors.py` if creating a new CSV model.
|
||||
## Mapping Checks to Requirements
|
||||
|
||||
Each requirement links to the Prowler checks that, together, produce a PASS or FAIL verdict for that control.
|
||||
|
||||
- **Include every requirement from the source catalog.** The framework file must mirror the full control list, one-to-one. Compliance percentages, dashboards, and exported metadata are computed against the total requirement count, so omitting an unmappable control inflates coverage and misrepresents the framework.
|
||||
- List every check by its canonical identifier, the value of `CheckID` inside the check's `.metadata.json` file.
|
||||
- One requirement can reference multiple checks. The requirement is evaluated as FAIL when any referenced check produces a FAIL finding for a resource in scope.
|
||||
- Leave `Checks` as an empty array when the requirement cannot be automated. The requirement still appears in the report, contributes to the total, and resolves to `MANUAL`. An empty mapping is valid; a missing requirement is not.
|
||||
- Reuse checks across requirements when the same control applies in multiple places. Do not duplicate check logic to match framework structure.
|
||||
- Avoid referencing checks from a different provider. A compliance file is bound to one provider, and cross-provider checks will never match findings in the scan.
|
||||
|
||||
To discover available checks, run:
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py <provider> --list-checks
|
||||
```
|
||||
|
||||
## Supporting Multiple Providers
|
||||
|
||||
Each compliance file targets a single provider. To cover several providers with the same framework (for example CIS across AWS, Azure, and GCP), ship one JSON file per provider:
|
||||
|
||||
```
|
||||
prowler/compliance/aws/cis_2.0_aws.json
|
||||
prowler/compliance/azure/cis_2.0_azure.json
|
||||
prowler/compliance/gcp/cis_2.0_gcp.json
|
||||
```
|
||||
|
||||
Keep the `Framework` and `Version` values identical across the files so the dispatcher matches them, and change only the `Provider`, `Checks`, and provider-specific metadata.
|
||||
|
||||
The CIS output formatter already supports every provider listed above. For a brand-new framework that spans several providers, add one transformer per provider in `prowler/lib/outputs/compliance/<framework>/` and extend the summary-table dispatcher accordingly. See [Output Formatter](#output-formatter).
|
||||
|
||||
## Output Formatter
|
||||
|
||||
Prowler renders every compliance framework in two forms: a detailed CSV report written to disk, and a summary table printed in the CLI. Both are produced by the output formatter package for the framework.
|
||||
|
||||
For a new framework named `my_framework`, create:
|
||||
|
||||
```
|
||||
prowler/lib/outputs/compliance/my_framework/
|
||||
├── __init__.py
|
||||
├── my_framework.py # CLI summary table dispatcher
|
||||
├── my_framework_aws.py # Per-provider transformer
|
||||
└── models.py # CSV row Pydantic model
|
||||
```
|
||||
|
||||
### Step 1 – Define the CSV Row Model
|
||||
|
||||
In `models.py`, declare a Pydantic v1 model with one field per CSV column. Use existing models such as `AWSCISModel` in `prowler/lib/outputs/compliance/cis/models.py` as the reference. Fields typically include `Provider`, `Description`, `AccountId`, `Region`, `AssessmentDate`, `Requirements_Id`, `Requirements_Description`, one `Requirements_Attributes_*` field per attribute key, plus the finding fields `Status`, `StatusExtended`, `ResourceId`, `ResourceName`, `CheckId`, `Muted`, `Framework`, `Name`.
|
||||
|
||||
### Step 2 – Implement the Transformer Class
|
||||
|
||||
In `my_framework_aws.py`, subclass `ComplianceOutput` from `prowler.lib.outputs.compliance.compliance_output` and implement `transform(findings, compliance, compliance_name)`. Iterate over `findings`, match each finding to the requirements it satisfies through `finding.compliance.get(compliance_name, [])`, and append one row per attribute to `self._data`.
|
||||
|
||||
### Step 3 – Add the Summary-Table Dispatcher
|
||||
|
||||
In `my_framework.py`, implement `get_my_framework_table(findings, bulk_checks_metadata, compliance_framework, output_filename, output_directory, compliance_overview)` following the pattern in `prowler/lib/outputs/compliance/cis/cis.py`.
|
||||
|
||||
### Step 4 – Register the Framework in the Dispatchers
|
||||
|
||||
- Add the dispatcher call in `prowler/lib/outputs/compliance/compliance.py`, inside `display_compliance_table`, with a branch such as `elif "my_framework" in compliance_framework:`.
|
||||
- Register the CSV model and transformer in `prowler/lib/outputs/compliance/compliance_output.py` so the CSV file is emitted during the scan.
|
||||
|
||||
<Note>
|
||||
For NIST-style catalogs that use `Generic_Compliance_Requirement_Attribute`, no custom formatter is needed. The generic formatter in `prowler/lib/outputs/compliance/generic/` handles them automatically, provided the JSON validates against the generic attribute schema.
|
||||
</Note>
|
||||
|
||||
## Version Handling
|
||||
|
||||
Prowler matches frameworks by concatenating `Framework` and `Version`. A missing or empty `Version` collapses several frameworks to the same key and breaks CLI filtering with `--compliance`.
|
||||
|
||||
- Always set `Version` to a non-empty string, even for frameworks that rename editions rather than version them. Use the edition identifier (for example `RD2022`, `v2025.10`, `4.0`).
|
||||
- When the source catalog has no version, use the first year of adoption or the release date.
|
||||
- Make sure the version substring embedded in the filename matches `Version`, because the CLI dispatcher reads `compliance_framework.split("_")[1]` to select the correct version.
|
||||
|
||||
## Validating the Framework Locally
|
||||
|
||||
Follow the steps below before opening a pull request.
|
||||
|
||||
### 1. Run the Compliance Model Validator
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py <provider> --list-compliance
|
||||
```
|
||||
|
||||
The framework must appear in the output. A validation error indicates a schema mismatch between the JSON file and the attribute model.
|
||||
|
||||
### 2. Run a Scan Filtered by the Framework
|
||||
|
||||
```bash
|
||||
poetry run python prowler-cli.py <provider> \
|
||||
--compliance <framework>_<version>_<provider> \
|
||||
--log-level ERROR
|
||||
```
|
||||
|
||||
Verify that:
|
||||
|
||||
- Prowler produces a CSV file under `output/compliance/` with the expected name.
|
||||
- The CLI summary table lists every section in the framework.
|
||||
- Findings roll up under the expected requirements.
|
||||
|
||||
### 3. Inspect the CSV Output
|
||||
|
||||
Open the generated CSV and confirm:
|
||||
|
||||
- All columns defined in `models.py` appear.
|
||||
- Every requirement has at least one row per scanned resource.
|
||||
- Values such as `Requirements_Attributes_Section` reflect the JSON content.
|
||||
|
||||
### 4. Verify the Framework in Prowler App
|
||||
|
||||
Launch Prowler App locally (`docker compose up` from the repository root) and run a scan with the new compliance framework. Confirm the compliance page renders the requirements, sections, and status widgets correctly.
|
||||
|
||||
## Testing
|
||||
|
||||
Compliance contributions require two layers of tests.
|
||||
|
||||
- **Schema tests** exercise the Pydantic models. Extend `tests/lib/check/universal_compliance_models_test.py` with a case that loads the new JSON file and asserts the attribute type matches the expected model.
|
||||
- **Output tests** exercise the transformer. Mirror the structure under `tests/lib/outputs/compliance/<framework>/` with fixtures that feed synthetic findings through the transformer and assert the resulting CSV rows.
|
||||
|
||||
Run the suite with:
|
||||
|
||||
```bash
|
||||
poetry run pytest -n auto tests/lib/check/universal_compliance_models_test.py \
|
||||
tests/lib/outputs/compliance/
|
||||
```
|
||||
|
||||
For guidance on writing Prowler SDK tests, refer to [Unit Testing](/developer-guide/unit-testing).
|
||||
|
||||
## Submitting the Pull Request
|
||||
|
||||
Before opening the pull request:
|
||||
|
||||
1. Run the complete QA pipeline:
|
||||
```bash
|
||||
poetry run pre-commit run --all-files
|
||||
poetry run pytest -n auto
|
||||
```
|
||||
2. Add a changelog entry under the `### 🚀 Added` section of `prowler/CHANGELOG.md`, describing the new framework and the providers it covers.
|
||||
3. Follow the [Pull Request Template](https://github.com/prowler-cloud/prowler/blob/master/.github/pull_request_template.md) and set the PR title using Conventional Commits, for example `feat(compliance): add My Framework 1.0 for AWS`.
|
||||
4. Request review from the compliance codeowners listed in `.github/CODEOWNERS`.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
The following issues are the most common when contributing a compliance framework.
|
||||
|
||||
- **`ValidationError: field required` during scan.** The JSON is missing a required attribute field. Re-check the matching Pydantic model in `prowler/lib/check/compliance_models.py`.
|
||||
- **All attributes collapse to `Generic_Compliance_Requirement_Attribute` values.** The Pydantic `Union` is ordered incorrectly, or the JSON matches only the generic shape. Move the generic model to the last Union position and ensure every required field is present in the JSON.
|
||||
- **`--compliance` filter does not find the framework.** The filename does not match the expected pattern `<framework>_<version>_<provider>.json`, the version is empty, or the file lives outside `prowler/compliance/<provider>/`.
|
||||
- **CLI summary table is empty but the CSV is populated.** The dispatcher branch in `prowler/lib/outputs/compliance/compliance.py` is missing or its substring match does not catch the framework key.
|
||||
- **CSV file is missing after the scan.** The transformer class is not registered in `prowler/lib/outputs/compliance/compliance_output.py`, or `transform()` raises silently. Run the scan with `--log-level DEBUG`.
|
||||
- **Findings do not roll up under a requirement.** A check listed in `Checks` either does not exist for that provider or is spelled incorrectly. Run `--list-checks | grep <check_name>` to confirm.
|
||||
|
||||
## Reference Examples
|
||||
|
||||
Use the following files as templates when modeling a new contribution.
|
||||
|
||||
- `prowler/compliance/aws/cis_2.0_aws.json` – CIS attribute shape.
|
||||
- `prowler/compliance/aws/nist_800_53_revision_5_aws.json` – Generic attribute shape.
|
||||
- `prowler/compliance/aws/ccc_aws.json` – CCC attribute shape.
|
||||
- `prowler/compliance/azure/ens_rd2022_azure.json` – ENS attribute shape.
|
||||
- `prowler/lib/check/compliance_models.py` – Canonical Pydantic schemas.
|
||||
- `prowler/lib/outputs/compliance/cis/` – Reference implementation of a multi-provider output formatter.
|
||||
- `prowler/lib/outputs/compliance/generic/` – Reference implementation of a generic output formatter.
|
||||
|
||||
@@ -119,6 +119,7 @@
|
||||
"user-guide/tutorials/prowler-app-multi-tenant",
|
||||
"user-guide/tutorials/prowler-app-api-keys",
|
||||
"user-guide/tutorials/prowler-app-import-findings",
|
||||
"user-guide/tutorials/prowler-app-alerts",
|
||||
{
|
||||
"group": "Mutelist",
|
||||
"expanded": true,
|
||||
@@ -176,7 +177,6 @@
|
||||
"pages": [
|
||||
"user-guide/cli/tutorials/misc",
|
||||
"user-guide/cli/tutorials/reporting",
|
||||
"user-guide/cli/tutorials/compliance",
|
||||
"user-guide/cli/tutorials/dashboard",
|
||||
"user-guide/cli/tutorials/configuration_file",
|
||||
"user-guide/cli/tutorials/logging",
|
||||
@@ -338,6 +338,7 @@
|
||||
{
|
||||
"group": "Compliance",
|
||||
"pages": [
|
||||
"user-guide/compliance/tutorials/compliance",
|
||||
"user-guide/compliance/tutorials/threatscore"
|
||||
]
|
||||
},
|
||||
@@ -503,6 +504,10 @@
|
||||
}
|
||||
},
|
||||
"redirects": [
|
||||
{
|
||||
"source": "/user-guide/cli/tutorials/compliance",
|
||||
"destination": "/user-guide/compliance/tutorials/compliance"
|
||||
},
|
||||
{
|
||||
"source": "/projects/prowler-open-source/en/latest/tutorials/prowler-app-lighthouse",
|
||||
"destination": "/user-guide/tutorials/prowler-app-lighthouse"
|
||||
|
||||
@@ -10,7 +10,7 @@ Complete reference guide for all tools available in the Prowler MCP Server. Tool
|
||||
|----------|------------|------------------------|
|
||||
| Prowler Hub | 10 tools | No |
|
||||
| Prowler Documentation | 2 tools | No |
|
||||
| Prowler Cloud/App | 32 tools | Yes |
|
||||
|
||||
## Tool Naming Convention
|
||||
|
||||
@@ -36,6 +36,14 @@ Tools for searching, viewing, and analyzing security findings across all cloud p
|
||||
- **`prowler_app_get_finding_details`** - Get comprehensive details about a specific finding including remediation guidance, check metadata, and resource relationships
|
||||
- **`prowler_app_get_findings_overview`** - Get aggregate statistics and trends about security findings as a markdown report
|
||||
|
||||
### Finding Groups Management
|
||||
|
||||
Tools for listing finding groups aggregated by check ID, viewing complete group counters, and drilling down into affected resources.
|
||||
|
||||
- **`prowler_app_list_finding_groups`** - List latest or historical finding groups with filters for provider, region, service, resource, category, check, severity, status, muted state, delta, date range, and sorting
|
||||
- **`prowler_app_get_finding_group_details`** - Get complete details for a specific finding group including counters, description, timestamps, and impacted providers
|
||||
- **`prowler_app_list_finding_group_resources`** - List actionable unmuted resources affected by a finding group by default, including nested resource and provider data plus the `finding_id` for remediation details. Set `include_muted` to include suppressed resources
|
||||
|
||||
### Provider Management
|
||||
|
||||
Tools for managing cloud provider connections in Prowler.
|
||||
|
||||
@@ -121,8 +121,8 @@ To update the environment file:
|
||||
Edit the `.env` file and change version values:
|
||||
|
||||
```env
|
||||
PROWLER_UI_VERSION="5.26.1"
PROWLER_API_VERSION="5.26.1"
|
||||
```
|
||||
|
||||
<Note>
|
||||
|
||||
|
After Width: | Height: | Size: 38 KiB |
|
After Width: | Height: | Size: 48 KiB |
|
After Width: | Height: | Size: 534 KiB |
|
After Width: | Height: | Size: 659 KiB |
|
After Width: | Height: | Size: 759 KiB |
|
After Width: | Height: | Size: 62 KiB |
|
After Width: | Height: | Size: 534 KiB |
|
After Width: | Height: | Size: 257 KiB |
|
After Width: | Height: | Size: 399 KiB |
|
After Width: | Height: | Size: 425 KiB |
|
After Width: | Height: | Size: 88 KiB |
|
After Width: | Height: | Size: 222 KiB |
@@ -1,12 +1,17 @@
|
||||
export const VersionBadge = ({ version }) => {
|
||||
return (
|
||||
<a
  href={`https://github.com/prowler-cloud/prowler/releases/tag/${version}`}
  target="_blank"
  rel="noopener noreferrer"
  className="version-badge-link"
>
  <span className="version-badge-container">
    <span className="version-badge">
      <span className="version-badge-label">Added in:</span>
      <span className="version-badge-version">{version}</span>
    </span>
  </span>
</a>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -1,4 +1,21 @@
|
||||
/* Version Badge Styling */
|
||||
.version-badge-link,
|
||||
.version-badge-link:hover,
|
||||
.version-badge-link:focus,
|
||||
.version-badge-link:active,
|
||||
.version-badge-link:visited {
|
||||
display: inline-block;
|
||||
text-decoration: none !important;
|
||||
background-image: none !important;
|
||||
border-bottom: none !important;
|
||||
color: inherit;
|
||||
transition: opacity 0.15s ease-in-out;
|
||||
}
|
||||
|
||||
.version-badge-link:hover {
|
||||
opacity: 0.85;
|
||||
}
|
||||
|
||||
.version-badge-container {
|
||||
display: inline-block;
|
||||
margin: 0 0 1rem 0;
|
||||
|
||||
@@ -1,80 +0,0 @@
|
||||
---
|
||||
title: 'Compliance'
|
||||
---
|
||||
|
||||
Prowler allows you to execute checks based on requirements defined in compliance frameworks. By default, it will execute and give you an overview of the status of each compliance framework:
|
||||
|
||||
<img src="/images/cli/compliance/compliance.png" />
|
||||
|
||||
You can find CSVs containing detailed compliance results in the compliance folder within Prowler's output folder.
|
||||
|
||||
## Execute Prowler based on Compliance Frameworks
|
||||
|
||||
Prowler can analyze your environment based on a specific compliance framework and provide more detail. To do this, use the `--compliance` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --compliance <compliance_framework>
|
||||
```
|
||||
|
||||
Standard results are shown along with the framework information, as in the sample below for CIS AWS 2.0. A CSV file with the full details is generated as well.
|
||||
|
||||
<img src="/images/cli/compliance/compliance-cis-sample1.png" />
|
||||
|
||||
<Note>
|
||||
**If Prowler can't find a resource related to a check from a compliance requirement, this requirement won't appear in the output**
|
||||
</Note>
|
||||
|
||||
## List Available Compliance Frameworks
|
||||
|
||||
To see which compliance frameworks are covered by Prowler, use the `--list-compliance` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-compliance
|
||||
```
|
||||
|
||||
Or you can visit [Prowler Hub](https://hub.prowler.com/compliance).
|
||||
|
||||
## List Requirements of Compliance Frameworks
|
||||
To list requirements for a compliance framework, use the `--list-compliance-requirements` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-compliance-requirements <compliance_framework(s)>
|
||||
```
|
||||
|
||||
Example for the first requirements of CIS 1.5 for AWS:
|
||||
|
||||
```
|
||||
Listing CIS 1.5 AWS Compliance Requirements:
|
||||
|
||||
Requirement Id: 1.1
|
||||
- Description: Maintain current contact details
|
||||
- Checks:
|
||||
account_maintain_current_contact_details
|
||||
|
||||
Requirement Id: 1.2
|
||||
- Description: Ensure security contact information is registered
|
||||
- Checks:
|
||||
account_security_contact_information_is_registered
|
||||
|
||||
Requirement Id: 1.3
|
||||
- Description: Ensure security questions are registered in the AWS account
|
||||
- Checks:
|
||||
account_security_questions_are_registered_in_the_aws_account
|
||||
|
||||
Requirement Id: 1.4
|
||||
- Description: Ensure no 'root' user account access key exists
|
||||
- Checks:
|
||||
iam_no_root_access_key
|
||||
|
||||
Requirement Id: 1.5
|
||||
- Description: Ensure MFA is enabled for the 'root' user account
|
||||
- Checks:
|
||||
iam_root_mfa_enabled
|
||||
|
||||
[redacted]
|
||||
|
||||
```
|
||||
|
||||
## Create and contribute adding other Security Frameworks
|
||||
|
||||
This information is part of the Developer Guide and can be found [here](/developer-guide/security-compliance-framework).
|
||||
@@ -56,6 +56,7 @@ The following list includes all the AWS checks with configurable variables that
|
||||
| `elb_is_in_multiple_az` | `elb_min_azs` | Integer |
|
||||
| `elbv2_is_in_multiple_az` | `elbv2_min_azs` | Integer |
|
||||
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `iam_user_access_not_stale_to_sagemaker` | `max_unused_sagemaker_access_days` | Integer |
|
||||
| `iam_user_accesskey_unused` | `max_unused_access_keys_days` | Integer |
|
||||
| `iam_user_console_access_unused` | `max_console_access_days` | Integer |
|
||||
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
|
||||
@@ -186,6 +187,8 @@ aws:
|
||||
max_unused_access_keys_days: 45
|
||||
# aws.iam_user_console_access_unused --> CIS recommends 45 days
|
||||
max_console_access_days: 45
|
||||
# aws.iam_user_access_not_stale_to_sagemaker --> default 90 days
|
||||
max_unused_sagemaker_access_days: 90
|
||||
|
||||
# AWS EC2 Configuration
|
||||
# aws.ec2_elastic_ip_shodan
|
||||
|
||||
@@ -0,0 +1,259 @@
|
||||
---
|
||||
title: 'Compliance'
|
||||
description: 'Run security checks against compliance frameworks, review posture across providers, and download CSV or PDF reports from Prowler Cloud, Prowler App, and Prowler CLI.'
|
||||
---
|
||||
|
||||
Prowler maps every security check to one or more industry-standard compliance frameworks, so a single scan produces both technical findings and framework-aligned evidence. The same evaluation runs identically whether scans are launched from Prowler Cloud, Prowler App, or Prowler CLI.
|
||||
|
||||
Out of the box, Prowler covers frameworks such as CIS Benchmarks, NIST 800-53, NIST CSF, NIS2, ENS RD2022, ISO 27001, PCI-DSS, SOC 2, GDPR, HIPAA, AWS Well-Architected, BSI C5, CSA CCM, MITRE ATT&CK, KISA ISMS-P, FedRAMP, and Prowler ThreatScore. The full catalog is available at [Prowler Hub](https://hub.prowler.com/compliance).
|
||||
|
||||
<Note>
|
||||
For the unified compliance score methodology used across frameworks, see [Prowler ThreatScore Documentation](/user-guide/compliance/tutorials/threatscore).
|
||||
</Note>
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card title="Prowler Cloud" icon="cloud" href="#prowler-cloud">
|
||||
Review compliance posture using Prowler Cloud
|
||||
</Card>
|
||||
<Card title="Prowler CLI" icon="terminal" href="#prowler-cli">
|
||||
Run compliance scans using Prowler CLI
|
||||
</Card>
|
||||
</CardGroup>
|
||||
|
||||
## Prowler Cloud
|
||||
|
||||
The Compliance section in Prowler Cloud and Prowler App centralizes compliance posture across every connected provider. It aggregates scan results, surfaces Prowler ThreatScore, and exposes detailed requirement-level evidence for each supported framework.
|
||||
|
||||
### Accessing the Compliance Section
|
||||
|
||||
To open the compliance overview, follow these steps:
|
||||
|
||||
1. Sign in to Prowler Cloud at [cloud.prowler.com](https://cloud.prowler.com/sign-in) or to a self-hosted Prowler App instance.
|
||||
2. Select **Compliance** from the left navigation.
|
||||
|
||||
The page lists every framework evaluated by the most recent completed scan of the selected provider.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-overview.png" alt="Compliance overview page in Prowler Cloud and App showing filters, the Prowler ThreatScore card, and the framework grid" width="900" />
|
||||
|
||||
<Note>
|
||||
Compliance results require at least one completed scan. If no scan has finished yet, Prowler Cloud and App display a notice prompting to launch or wait for a scan to complete.
|
||||
</Note>
|
||||
|
||||
### Filtering Compliance Results
|
||||
|
||||
The filters bar at the top of the overview controls which scan and which regions feed every card on the page.
|
||||
|
||||
#### Scan Selector
|
||||
|
||||
The scan selector lists completed scans across all connected providers. Each entry includes the provider type, alias, and completion timestamp. Selecting a scan updates the entire page, including ThreatScore and every framework card.
|
||||
|
||||
#### Region Filter
|
||||
|
||||
The region multi-select narrows results to one or more regions detected in the selected scan. Use it to evaluate compliance posture for a specific geography or account boundary. The filter applies to:
|
||||
|
||||
* The framework grid scores and pass/fail counts.
|
||||
* The detailed requirement view inside each framework.
|
||||
|
||||
<Note>
|
||||
Region filters apply only to providers that report a region attribute (for example, AWS, Azure, and Google Cloud). Providers without regions ignore the filter.
|
||||
</Note>
|
||||
|
||||
#### Clearing Filters
|
||||
|
||||
Select **Clear filters** to reset the region filter and any other applied filters to their default state. The scan selector is preserved.
|
||||
|
||||
### Reviewing the Prowler ThreatScore Card
|
||||
|
||||
When the selected scan includes Prowler ThreatScore data, a dedicated card appears at the top of the overview, showing:
|
||||
|
||||
* The overall ThreatScore (0–100) with a color-coded indicator.
|
||||
* A progress bar reflecting current posture.
|
||||
* Per-pillar bars for IAM, Attack Surface, and Logging and Monitoring.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-threatscore-card.png" alt="Prowler ThreatScore badge on the Compliance overview showing the overall score and per-pillar bars" width="900" />
|
||||
|
||||
Selecting the card opens the ThreatScore framework detail page, covered in [Working With the Framework Detail Page](#working-with-the-framework-detail-page).
|
||||
|
||||
For a complete explanation of the methodology, formula, and weighting, see [Prowler ThreatScore Documentation](/user-guide/compliance/tutorials/threatscore).
|
||||
|
||||
### Exploring the Framework Grid
|
||||
|
||||
Below ThreatScore, the framework grid shows one card per supported compliance framework. Each card includes:
|
||||
|
||||
* **Framework logo and name:** Identifies the standard (CIS, NIST, ENS, ISO 27001, PCI-DSS, SOC 2, NIS2, CSA CCM, MITRE ATT&CK, and more).
|
||||
* **Version:** Indicates the framework version applied to the scan.
|
||||
* **Score:** The percentage of passing requirements over the total evaluated.
|
||||
* **Passing Requirements:** A `passed / total` counter for additional context.
|
||||
* **Download dropdown:** Quick access to the CSV report and, when supported, the PDF report.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-card-download.png" alt="Download dropdown on a framework card showing CSV and PDF report options" width="500" />
|
||||
|
||||
Select any card to open the framework detail page.
|
||||
|
||||
<Note>
|
||||
Score color coding follows three thresholds: red for severely low compliance, amber for partial compliance, and green for healthy posture. Hover over the score for the exact percentage.
|
||||
</Note>
|
||||
|
||||
### Working With the Framework Detail Page
|
||||
|
||||
The detail page provides everything needed to evaluate a single framework: aggregate metrics, top failure sections, and a requirement-by-requirement view.
|
||||
|
||||
#### Header, Summary Cards, and Download Actions
|
||||
|
||||
The header shows the framework name, version, the provider scan being reviewed, and CSV / PDF download buttons. Below the header, summary cards condense the framework state at a glance:
|
||||
|
||||
* **Requirements Status:** Donut chart with `Pass`, `Fail`, and `Manual` counts plus the total number of requirements.
|
||||
* **Top Failed Sections:** Ranks the sections or pillars with the highest number of failing requirements.
|
||||
* **ThreatScore Breakdown:** Appears only on the ThreatScore framework. It shows the overall score and per-pillar scores aligned with the ThreatScore pillars (IAM, Attack Surface, Logging and Monitoring, Encryption).
|
||||
|
||||
The same layout applies to every compliance framework. ThreatScore is the only framework that includes the extra Breakdown card on the left; for any other framework, the Requirements Status and Top Failed Sections cards span the full row.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-threatscore-detail.png" alt="Prowler ThreatScore detail page including the extra Breakdown card alongside Requirements Status and Top Failed Sections" width="900" />
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-detail-header.png" alt="CIS framework detail page showing only the Requirements Status donut and the Top Failed Sections card, without the ThreatScore Breakdown" width="900" />
|
||||
|
||||
#### Requirements Accordion
|
||||
|
||||
Below the summary cards, an accordion organizes every requirement of the framework. Expand a section to see:
|
||||
|
||||
* **Requirement ID and title:** Reflect the official identifier from the framework.
|
||||
* **Pass / Fail / Manual badges:** Indicate the status of each requirement based on the underlying checks.
|
||||
* **Custom details panel:** Opens additional context tailored to the framework. For frameworks with custom layouts, the panel surfaces fields such as control objectives, severity, attack tactics, regulatory references, or required evidence.
|
||||
|
||||
Select a requirement to open the detail panel and review the failing checks, the resources affected, and remediation guidance.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-requirements-accordion.png" alt="Expanded CIS requirement showing description, rationale, remediation procedure, audit procedure, profile and assessment tags, references, and the underlying check" width="900" />
|
||||
|
||||
##### Frameworks With Custom Detail Layouts
|
||||
|
||||
Several frameworks include enriched detail panels that highlight fields specific to the standard:
|
||||
|
||||
* ASD Essential Eight
|
||||
* AWS Well-Architected Framework
|
||||
* BSI C5
|
||||
* Cloud Controls Matrix (CSA CCM)
|
||||
* CIS Benchmarks
|
||||
* CCC (Common Cloud Controls)
|
||||
* ENS RD2022
|
||||
* ISO 27001
|
||||
* KISA ISMS-P
|
||||
* MITRE ATT&CK
|
||||
* Prowler ThreatScore
|
||||
|
||||
Frameworks without a custom layout fall back to the generic details panel, which still exposes the official requirement metadata captured by Prowler.
|
||||
|
||||
### Downloading Compliance Reports
|
||||
|
||||
Prowler Cloud and App expose two formats:
|
||||
|
||||
* **CSV report:** Every requirement, every check, and every finding for the selected scan and filters. Available for all supported frameworks.
|
||||
* **PDF report:** Curated executive-style report. Currently supported for Prowler ThreatScore, ENS RD2022, NIS2, and CSA CCM. Additional PDF reports are added in subsequent Prowler releases.
|
||||
|
||||
#### Downloading From the Detail Page
|
||||
|
||||
Inside any framework detail page, the **CSV** and **PDF** buttons in the header trigger the same downloads as the overview dropdown. The PDF button only appears for frameworks that support it.
|
||||
|
||||
<img src="/images/compliance/prowler-app-compliance-detail-download.png" alt="Top of a framework detail page showing the CSV and PDF download buttons in the header" width="900" />
|
||||
|
||||
<Note>
|
||||
Region filters disable the per-card download dropdown to avoid generating partial reports. Open the framework detail page when downloads scoped to a region are required, or remove the region filter to download the full report.
|
||||
</Note>
|
||||
|
||||
#### Downloading the Full Scan Output
|
||||
|
||||
To export every framework, finding, and resource at once, use the **Scan Jobs** section instead. The ZIP archive contains the CSV, JSON-OCSF, and HTML reports plus a `compliance/` subfolder with one CSV per framework. See [Prowler App — Getting Started](/user-guide/tutorials/prowler-app) for details.
|
||||
|
||||
### API Access
|
||||
|
||||
Every report available in the UI is also reachable through the Prowler API. The following endpoints are the most relevant:
|
||||
|
||||
* [Retrieve a scan compliance report as CSV](https://api.prowler.com/api/v1/docs#tag/Scan/operation/scans_compliance_retrieve)
|
||||
* [Download a complete scan output (ZIP)](https://api.prowler.com/api/v1/docs#tag/Scan/operation/scans_report_retrieve)
|
||||
|
||||
Use the API to integrate compliance evidence into ticketing systems, executive dashboards, or downstream pipelines.
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
Prowler CLI evaluates the same compliance frameworks as Prowler Cloud and App, and produces detailed CSV outputs alongside the standard scan results. By default, it runs every supported framework and prints a status summary at the end of the scan:
|
||||
|
||||
<img src="/images/cli/compliance/compliance.png" />
|
||||
|
||||
Detailed compliance results are stored as CSV files under the `compliance/` subfolder of Prowler's output directory.
|
||||
|
||||
### Scan a Specific Compliance Framework
|
||||
|
||||
To scope a scan to a single framework and get the framework-specific summary, use the `--compliance` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --compliance <compliance_framework>
|
||||
```
|
||||
|
||||
Standard results plus the framework breakdown are printed to the terminal. A dedicated CSV is also generated under the `compliance/` output folder. Sample output for CIS AWS 2.0:
|
||||
|
||||
<img src="/images/cli/compliance/compliance-cis-sample1.png" />
|
||||
|
||||
<Note>
|
||||
If Prowler cannot find a resource related to a check from a compliance requirement, that requirement is omitted from the output.
|
||||
</Note>
|
||||
|
||||
### List Available Compliance Frameworks
|
||||
|
||||
To see which compliance frameworks are covered by a given provider, use the `--list-compliance` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-compliance
|
||||
```
|
||||
|
||||
The full catalog is also browsable at [Prowler Hub](https://hub.prowler.com/compliance).
|
||||
|
||||
### List Requirements of a Compliance Framework
|
||||
|
||||
To inspect the requirements that compose a specific framework, use the `--list-compliance-requirements` option:
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-compliance-requirements <compliance_framework(s)>
|
||||
```
|
||||
|
||||
Sample output for the first requirements of CIS 1.5 for AWS:
|
||||
|
||||
```
|
||||
Listing CIS 1.5 AWS Compliance Requirements:
|
||||
|
||||
Requirement Id: 1.1
|
||||
- Description: Maintain current contact details
|
||||
- Checks:
|
||||
account_maintain_current_contact_details
|
||||
|
||||
Requirement Id: 1.2
|
||||
- Description: Ensure security contact information is registered
|
||||
- Checks:
|
||||
account_security_contact_information_is_registered
|
||||
|
||||
Requirement Id: 1.3
|
||||
- Description: Ensure security questions are registered in the AWS account
|
||||
- Checks:
|
||||
account_security_questions_are_registered_in_the_aws_account
|
||||
|
||||
Requirement Id: 1.4
|
||||
- Description: Ensure no 'root' user account access key exists
|
||||
- Checks:
|
||||
iam_no_root_access_key
|
||||
|
||||
Requirement Id: 1.5
|
||||
- Description: Ensure MFA is enabled for the 'root' user account
|
||||
- Checks:
|
||||
iam_root_mfa_enabled
|
||||
|
||||
[redacted]
|
||||
|
||||
```
|
||||
|
||||
## Contributing New Compliance Frameworks
|
||||
|
||||
To request a new framework or contribute one, see [Creating a New Security Compliance Framework in Prowler](/developer-guide/security-compliance-framework). The developer guide covers the Pydantic schema, JSON catalog, output formatter, and PR submission steps required to ship a new framework end to end.
|
||||
|
||||
## Related Documentation
|
||||
|
||||
* [Prowler ThreatScore Documentation](/user-guide/compliance/tutorials/threatscore)
|
||||
* [Creating a New Security Compliance Framework in Prowler](/developer-guide/security-compliance-framework)
|
||||
* [Prowler App — Getting Started](/user-guide/tutorials/prowler-app)
|
||||
@@ -4,7 +4,7 @@ title: 'Check Mapping Prowler v4/v3 to v2'
|
||||
|
||||
Prowler v3 and v4 introduce distinct identifiers while preserving the checks originally implemented in v2. This change was made because, in previous versions, check names were primarily derived from the CIS Benchmark for AWS. Starting with v3 and v4, all checks are independent of any security framework and have unique names and IDs.
|
||||
|
||||
For more details on the updated compliance implementation in Prowler v4 and v3, refer to the [Compliance](/user-guide/cli/tutorials/compliance) section.
|
||||
For more details on the updated compliance implementation in Prowler v4 and v3, refer to the [Compliance](/user-guide/compliance/tutorials/compliance) section.
|
||||
|
||||
```
|
||||
checks_v4_v3_to_v2_mapping = {
|
||||
|
||||
@@ -398,7 +398,7 @@ prowler oci --severity critical high
|
||||
|
||||
### Next Steps
|
||||
|
||||
- Learn about [Compliance Frameworks](/user-guide/cli/tutorials/compliance) in Prowler
|
||||
- Learn about [Compliance Frameworks](/user-guide/compliance/tutorials/compliance) in Prowler
|
||||
- Review [Prowler Output Formats](/user-guide/cli/tutorials/reporting)
|
||||
- Explore [Integrations](/user-guide/cli/tutorials/integrations) with SIEM and ticketing systems
|
||||
|
||||
|
||||
@@ -0,0 +1,146 @@
|
||||
---
|
||||
title: 'Alerts'
|
||||
description: 'Create email alerts from Prowler Cloud findings to monitor relevant security changes after scans or in daily digests.'
|
||||
---
|
||||
|
||||
import { VersionBadge } from "/snippets/version-badge.mdx"
|
||||
|
||||
<VersionBadge version="5.26.0" />
|
||||
|
||||
Alerts notify recipients by email when security findings match saved filter conditions. Use Alerts to track high-priority findings, monitor specific providers or services, and keep teams informed about scan results that match defined criteria.
|
||||
|
||||
<Note>
|
||||
This feature is available exclusively in **Prowler Cloud** with a paid subscription.
|
||||
</Note>
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before creating Alerts, ensure that:
|
||||
|
||||
* At least one scan has completed and produced findings.
|
||||
* The user role includes the `manage_alerts` permission.
|
||||
|
||||
The `manage_alerts` permission is required to create, edit, test, enable, disable, and delete Alerts. See [RBAC Administrative Permissions](/user-guide/tutorials/prowler-app-rbac#rbac-administrative-permissions) for details.
|
||||
|
||||
## How Alerts Work
|
||||
|
||||
Alerts are created from Findings filters. When an Alert runs, Prowler Cloud evaluates the saved conditions against findings and sends an email digest when matching findings exist.
|
||||
|
||||
<Note>
|
||||
Alerts evaluate findings with status `FAIL` only. Findings with status `PASS` or `MANUAL`, and muted findings, never trigger an Alert regardless of the saved filters.
|
||||
</Note>
|
||||
|
||||
Alerts run on one of three schedules:
|
||||
|
||||
| Frequency | Description |
|
||||
|-----------|-------------|
|
||||
| After each scan | Evaluates the Alert after each completed scan. |
|
||||
| Daily digest | Evaluates the Alert once per day and sends a digest when findings match. |
|
||||
| After each scan and daily | Evaluates the Alert after every scan and in the daily digest. |
|
||||
|
||||
## Creating an Alert From Findings
|
||||
|
||||
To create an Alert:
|
||||
|
||||
1. Navigate to **Findings** in Prowler Cloud.
|
||||
2. Apply at least one [Alert-compatible filter](#alert-compatible-filters) to define the findings that should trigger the Alert.
|
||||
3. Click **Create Alert**.
|
||||
|
||||

|
||||
|
||||
4. Configure the Alert settings:
|
||||
* **Name:** Add a short, descriptive name.
|
||||
* **Description:** Add optional context for the Alert.
|
||||
* **Frequency:** Select when Prowler Cloud should evaluate the Alert.
|
||||
* **Recipients:** Select the recipients who should receive the email digest.
|
||||
|
||||

|
||||
|
||||
5. Click **Create**.
|
||||
|
||||
After the Alert is created, Prowler Cloud evaluates it based on the selected frequency.
|
||||
|
||||
## Alert-Compatible Filters
|
||||
|
||||
An **Alert-compatible filter** is a Findings-page filter that the Alert condition language can evaluate when the Alert runs. The Findings page exposes many filters, but only a specific subset can be saved into an Alert. Filters outside this subset, such as **Status**, free-text search, sort, or pagination, are ignored when seeding an Alert from the current Findings view.
|
||||
|
||||
When **Create Alert** is clicked on the Findings page, Prowler Cloud takes the active filters, keeps only the Alert-compatible ones, and uses them to build the Alert condition.
|
||||
|
||||
The following filters are Alert-compatible:
|
||||
|
||||
* Provider type
|
||||
* Provider
|
||||
* Severity
|
||||
* Delta (new findings since the previous scan)
|
||||
* Region
|
||||
* Service
|
||||
* Resource type
|
||||
* Category
|
||||
* Resource group
|
||||
|
||||
If only the **Status** filter is applied on the Findings page, Prowler Cloud substitutes all severities as the condition base so the Alert can still be created. Status itself never becomes part of the Alert condition.
|
||||
|
||||
## Managing Alerts
|
||||
|
||||
Navigate to **Alerts** to review and manage existing Alerts.
|
||||
|
||||

|
||||
|
||||
Each Alert provides these actions:
|
||||
|
||||
| Action | Description |
|
||||
|--------|-------------|
|
||||
| Edit | Update name, description, recipients, frequency, or filters. |
|
||||
| Enable/Disable | Start or stop Alert evaluation without deleting the Alert. |
|
||||
| Delete | Permanently remove the Alert. |
|
||||
|
||||
## Testing Alert Filters
|
||||
|
||||
When editing an Alert, click **Test** to preview whether the current filters match existing findings.
|
||||
|
||||
The test result indicates whether the filters match findings and includes a summary of the matching results.
|
||||
|
||||

|
||||
|
||||
<Warning>
|
||||
**The Test result is a snapshot, not a guarantee of future Alert triggers.**
|
||||
|
||||
The Test evaluates the current filters against existing findings at the moment **Test** is clicked. It does not predict whether the Alert will trigger on its next evaluation. The Alert trigger depends on the state at evaluation time:
|
||||
|
||||
* **After each scan:** The Alert is evaluated against the findings produced by that scan only. If the next scan produces no findings that match the filters, the Alert will not trigger, even if a Test run earlier in the day showed matches.
|
||||
* **Daily digest:** The Alert is evaluated against the findings present on the digest day. If no matching findings exist for that day, the Alert will not trigger, even if previous days had matches.
|
||||
|
||||
The reverse is also true: a Test showing no matches does not guarantee the Alert will stay silent. Future scans may produce matching findings.
|
||||
|
||||
Use **Test** to validate that the filters are well-formed and target the intended findings, not to forecast future Alert behavior.
|
||||
</Warning>
|
||||
|
||||
## Recipients
|
||||
|
||||
Alert recipients are selected from the email addresses available in the tenant. Recipients receive an email digest each time an Alert evaluates and matches findings.
|
||||
|
||||
<Note>
|
||||
By default, the **organization owner** receives a **daily digest** for **critical findings**. Adjust the recipient, frequency, or filters in the Alert configuration to change this behavior.
|
||||
</Note>
|
||||
|
||||
If a recipient unsubscribes from Alerts, that address stops receiving digests until it is reconfirmed.
|
||||
|
||||
## Email Notifications
|
||||
|
||||
When an Alert matches findings, Prowler Cloud sends a security alert email that summarizes the matching findings. The email includes:
|
||||
|
||||
* The scan name and evaluation time.
|
||||
* The total number of matching findings.
|
||||
* The number of Alert rules that triggered.
|
||||
* A preview of the affected findings, grouped by severity, with resource details and the originating rule.
|
||||
* A direct link to view all matching findings in Prowler Cloud.
|
||||
|
||||

|
||||
|
||||
## Best Practices
|
||||
|
||||
* **Start with focused filters:** Create Alerts for specific high-priority scopes, such as critical findings, production providers, or important services.
|
||||
* **Use clear names:** Choose names that explain the intent of the Alert.
|
||||
* **Review recipients regularly:** Keep recipient lists aligned with current ownership.
|
||||
* **Test before saving edits:** Use **Test** after changing filters to confirm that the Alert matches the expected findings.
|
||||
* **Disable instead of deleting during tuning:** Disable Alerts temporarily when adjusting filters or recipients.
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
Prowler v3 and v4 introduce distinct identifiers while preserving the checks originally implemented in v2. This change was made because, in previous versions, check names were primarily derived from the CIS Benchmark for AWS. Starting with v3 and v4, all checks are independent of any security framework and have unique names and IDs.
|
||||
|
||||
For more details on the updated compliance implementation in Prowler v4 and v3, refer to the [Compliance](/user-guide/cli/tutorials/compliance) section.
|
||||
For more details on the updated compliance implementation in Prowler v4 and v3, refer to the [Compliance](/user-guide/compliance/tutorials/compliance) section.
|
||||
|
||||
```
|
||||
checks_v4_v3_to_v2_mapping = {
|
||||
|
||||
@@ -4,6 +4,10 @@ All notable changes to the **Prowler MCP Server** are documented in this file.
|
||||
|
||||
## [0.7.0] (Prowler UNRELEASED)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- MCP Server tools for Prowler Finding Groups Management [(#11140)](https://github.com/prowler-cloud/prowler/pull/11140)
|
||||
|
||||
### 🔐 Security
|
||||
|
||||
- `cryptography` from 46.0.1 to 47.0.0 (transitive) for CVE-2026-39892 and CVE-2026-26007 / CVE-2026-34073 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
|
||||
Full access to Prowler Cloud platform and self-managed Prowler App for:
|
||||
- **Findings Analysis**: Query, filter, and analyze security findings across all your cloud environments
|
||||
- **Finding Groups Analysis**: Triage findings grouped by check ID and drill down into affected resources
|
||||
- **Provider Management**: Create, configure, and manage your configured Prowler providers (AWS, Azure, GCP, etc.)
|
||||
- **Scan Orchestration**: Trigger on-demand scans and schedule recurring security assessments
|
||||
- **Resource Inventory**: Search and view detailed information about your audited resources
|
||||
|
||||
@@ -0,0 +1,300 @@
|
||||
"""Pydantic models for Prowler Finding Groups responses."""
|
||||
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from prowler_mcp_server.prowler_app.models.base import MinimalSerializerMixin
|
||||
|
||||
|
||||
# Aggregated status values a finding group can report.
FindingStatus = Literal["FAIL", "PASS", "MANUAL"]
# Severity labels accepted for finding groups.
FindingSeverity = Literal["critical", "high", "medium", "low", "informational"]
# Delta values for findings ("new" or "changed") — presumably relative to the
# previous scan; confirm against the Findings API documentation.
FindingDelta = Literal["new", "changed"]
|
||||
|
||||
|
||||
def _attributes(data: dict) -> dict:
|
||||
return data.get("attributes", {})
|
||||
|
||||
|
||||
def _counter(attributes: dict, key: str) -> int:
|
||||
return attributes.get(key) or 0
|
||||
|
||||
|
||||
def _simplified_group_kwargs(data: dict) -> dict:
|
||||
attributes = _attributes(data)
|
||||
return {
|
||||
"check_id": attributes.get("check_id", data.get("id", "")),
|
||||
"check_title": attributes.get("check_title"),
|
||||
"severity": attributes.get("severity", "informational"),
|
||||
"status": attributes.get("status", "MANUAL"),
|
||||
"muted": attributes.get("muted", False),
|
||||
"impacted_providers": attributes.get("impacted_providers") or [],
|
||||
"resources_fail": _counter(attributes, "resources_fail"),
|
||||
"resources_total": _counter(attributes, "resources_total"),
|
||||
"pass_count": _counter(attributes, "pass_count"),
|
||||
"fail_count": _counter(attributes, "fail_count"),
|
||||
"manual_count": _counter(attributes, "manual_count"),
|
||||
"muted_count": _counter(attributes, "muted_count"),
|
||||
"new_count": _counter(attributes, "new_count"),
|
||||
"changed_count": _counter(attributes, "changed_count"),
|
||||
"first_seen_at": attributes.get("first_seen_at"),
|
||||
"last_seen_at": attributes.get("last_seen_at"),
|
||||
"failing_since": attributes.get("failing_since"),
|
||||
}
|
||||
|
||||
|
||||
class SimplifiedFindingGroup(MinimalSerializerMixin):
    """Finding group summary optimized for browsing many checks.

    Instances are normally built from a JSON:API resource object via
    ``from_api_response``, which delegates to ``_simplified_group_kwargs``
    and normalizes missing or null counters to 0.
    """

    # Identity and aggregate state of the group.
    check_id: str = Field(description="Public check ID that identifies this group")
    check_title: str | None = Field(
        default=None, description="Human-readable check title"
    )
    severity: FindingSeverity = Field(description="Highest severity in the group")
    status: FindingStatus = Field(description="Aggregated finding group status")
    muted: bool = Field(
        description="Whether all findings in this group are muted or accepted"
    )
    impacted_providers: list[str] = Field(
        default_factory=list,
        description="Provider types impacted by this finding group",
    )
    # Aggregate counters; all are non-negative (ge=0) and default to 0 when
    # the API omits them (see _counter / _simplified_group_kwargs).
    resources_fail: int = Field(
        description="Number of non-muted failing resources in this group", ge=0
    )
    resources_total: int = Field(
        description="Total number of resources in this group", ge=0
    )
    pass_count: int = Field(
        description="Number of non-muted PASS findings in this group", ge=0
    )
    fail_count: int = Field(
        description="Number of non-muted FAIL findings in this group", ge=0
    )
    manual_count: int = Field(
        description="Number of non-muted MANUAL findings in this group", ge=0
    )
    muted_count: int = Field(description="Total muted findings in this group", ge=0)
    new_count: int = Field(description="Number of new non-muted findings", ge=0)
    changed_count: int = Field(
        description="Number of changed non-muted findings", ge=0
    )
    # Timestamps are kept as raw strings from the API; no format is enforced
    # here — presumably ISO-8601, confirm against the Findings API.
    first_seen_at: str | None = Field(
        default=None, description="First time this group was detected"
    )
    last_seen_at: str | None = Field(
        default=None, description="Last time this group was detected"
    )
    failing_since: str | None = Field(
        default=None, description="First time this group started failing"
    )

    @classmethod
    def from_api_response(cls, data: dict) -> "SimplifiedFindingGroup":
        """Transform JSON:API finding group response to simplified format."""
        return cls(**_simplified_group_kwargs(data))
|
||||
|
||||
|
||||
class DetailedFindingGroup(SimplifiedFindingGroup):
    """Finding group with complete counters and descriptive context.

    Extends the simplified summary with the check description plus the full
    muted/new/changed counter breakdown returned by the API.
    """

    check_description: str | None = Field(
        default=None, description="Description of the check behind this group"
    )
    pass_muted_count: int = Field(description="Muted PASS findings", ge=0)
    fail_muted_count: int = Field(description="Muted FAIL findings", ge=0)
    manual_muted_count: int = Field(description="Muted MANUAL findings", ge=0)
    new_fail_count: int = Field(description="New non-muted FAIL findings", ge=0)
    new_fail_muted_count: int = Field(description="New muted FAIL findings", ge=0)
    new_pass_count: int = Field(description="New non-muted PASS findings", ge=0)
    new_pass_muted_count: int = Field(description="New muted PASS findings", ge=0)
    new_manual_count: int = Field(description="New non-muted MANUAL findings", ge=0)
    new_manual_muted_count: int = Field(description="New muted MANUAL findings", ge=0)
    changed_fail_count: int = Field(description="Changed non-muted FAIL findings", ge=0)
    changed_fail_muted_count: int = Field(description="Changed muted FAIL findings", ge=0)
    changed_pass_count: int = Field(description="Changed non-muted PASS findings", ge=0)
    changed_pass_muted_count: int = Field(description="Changed muted PASS findings", ge=0)
    changed_manual_count: int = Field(description="Changed non-muted MANUAL findings", ge=0)
    changed_manual_muted_count: int = Field(description="Changed muted MANUAL findings", ge=0)

    @classmethod
    def from_api_response(cls, data: dict) -> "DetailedFindingGroup":
        """Transform JSON:API finding group response to detailed format."""
        attributes = _attributes(data)
        # Every extra counter field maps 1:1 to an attribute of the same name,
        # so all of them can be read through _counter in one pass.
        detail_counters = (
            "pass_muted_count",
            "fail_muted_count",
            "manual_muted_count",
            "new_fail_count",
            "new_fail_muted_count",
            "new_pass_count",
            "new_pass_muted_count",
            "new_manual_count",
            "new_manual_muted_count",
            "changed_fail_count",
            "changed_fail_muted_count",
            "changed_pass_count",
            "changed_pass_muted_count",
            "changed_manual_count",
            "changed_manual_muted_count",
        )
        return cls(
            **_simplified_group_kwargs(data),
            check_description=attributes.get("check_description"),
            **{name: _counter(attributes, name) for name in detail_counters},
        )
|
||||
|
||||
|
||||
class FindingGroupsListResponse(MinimalSerializerMixin):
    """Paginated response for finding group list queries."""

    groups: list[SimplifiedFindingGroup] = Field(
        description="Finding groups matching the query"
    )
    total_num_groups: int = Field(
        description="Total groups matching the query across all pages", ge=0
    )
    total_num_pages: int = Field(description="Total pages available", ge=0)
    current_page: int = Field(description="Current page number", ge=1)

    @classmethod
    def from_api_response(cls, response: dict) -> "FindingGroupsListResponse":
        """Transform JSON:API list response to simplified format."""
        items = response.get("data", [])
        parsed = list(map(SimplifiedFindingGroup.from_api_response, items))
        # Pagination metadata may be absent; fall back to single-page values.
        meta = response.get("meta", {}).get("pagination", {})
        return cls(
            groups=parsed,
            total_num_groups=meta.get("count", len(parsed)),
            total_num_pages=meta.get("pages", 1),
            current_page=meta.get("page", 1),
        )
|
||||
|
||||
|
||||
class FindingGroupResourceInfo(MinimalSerializerMixin):
    """Nested resource information for a finding group row."""

    uid: str = Field(description="Provider-native resource UID")
    name: str = Field(description="Resource name")
    service: str = Field(description="Cloud service")
    region: str = Field(description="Cloud region")
    type: str = Field(description="Resource type")
    resource_group: str | None = Field(
        default=None, description="Provider resource group or equivalent"
    )

    @classmethod
    def from_api_response(cls, data: dict) -> "FindingGroupResourceInfo":
        """Transform nested resource data to simplified format."""
        # Required string fields default to "" when missing; resource_group
        # is the only optional one and stays None when absent.
        required = {
            key: data.get(key, "")
            for key in ("uid", "name", "service", "region", "type")
        }
        return cls(resource_group=data.get("resource_group"), **required)
|
||||
|
||||
|
||||
class FindingGroupProviderInfo(MinimalSerializerMixin):
    """Nested provider information for a finding group resource row."""

    type: str = Field(description="Provider type")
    uid: str = Field(description="Provider-native account or subscription ID")
    alias: str | None = Field(default=None, description="Provider alias")

    @classmethod
    def from_api_response(cls, data: dict) -> "FindingGroupProviderInfo":
        """Transform nested provider data to simplified format."""
        # type/uid default to empty strings; alias may legitimately be None.
        kwargs = {key: data.get(key, "") for key in ("type", "uid")}
        kwargs["alias"] = data.get("alias")
        return cls(**kwargs)
|
||||
|
||||
|
||||
class FindingGroupResource(MinimalSerializerMixin):
    """Resource row affected by a finding group."""

    id: str = Field(description="Row identifier for this finding group resource")
    resource: FindingGroupResourceInfo = Field(description="Affected resource")
    provider: FindingGroupProviderInfo = Field(description="Affected provider")
    finding_id: str = Field(
        description="Finding UUID to use with prowler_app_get_finding_details"
    )
    status: FindingStatus = Field(description="Finding status for this resource")
    severity: FindingSeverity = Field(description="Finding severity")
    muted: bool = Field(description="Whether the finding is muted")
    delta: FindingDelta | None = Field(default=None, description="Change status")
    first_seen_at: str | None = Field(default=None, description="First seen time")
    last_seen_at: str | None = Field(default=None, description="Last seen time")
    muted_reason: str | None = Field(default=None, description="Mute reason")

    @classmethod
    def from_api_response(cls, data: dict) -> "FindingGroupResource":
        """Transform JSON:API finding group resource response."""
        attrs = _attributes(data)
        # `or {}` guards against explicit null nested objects in the payload.
        resource_info = FindingGroupResourceInfo.from_api_response(
            attrs.get("resource") or {}
        )
        provider_info = FindingGroupProviderInfo.from_api_response(
            attrs.get("provider") or {}
        )
        return cls(
            id=data.get("id", ""),
            resource=resource_info,
            provider=provider_info,
            finding_id=str(attrs.get("finding_id", "")),
            status=attrs.get("status", "MANUAL"),
            severity=attrs.get("severity", "informational"),
            muted=attrs.get("muted", False),
            delta=attrs.get("delta"),
            first_seen_at=attrs.get("first_seen_at"),
            last_seen_at=attrs.get("last_seen_at"),
            muted_reason=attrs.get("muted_reason"),
        )
|
||||
|
||||
|
||||
class FindingGroupResourcesListResponse(MinimalSerializerMixin):
    """Paginated response for finding group resource queries."""

    resources: list[FindingGroupResource] = Field(
        description="Resources matching the finding group query"
    )
    total_num_resources: int = Field(
        description="Total resources matching the query across all pages", ge=0
    )
    total_num_pages: int = Field(description="Total pages available", ge=0)
    current_page: int = Field(description="Current page number", ge=1)

    @classmethod
    def from_api_response(cls, response: dict) -> "FindingGroupResourcesListResponse":
        """Transform JSON:API resource list response to simplified format."""
        rows = list(
            map(FindingGroupResource.from_api_response, response.get("data", []))
        )
        # Pagination metadata may be absent; fall back to single-page values.
        meta = response.get("meta", {}).get("pagination", {})
        return cls(
            resources=rows,
            total_num_resources=meta.get("count", len(rows)),
            total_num_pages=meta.get("pages", 1),
            current_page=meta.get("page", 1),
        )
|
||||
@@ -0,0 +1,473 @@
|
||||
"""Finding Groups tools for Prowler App MCP Server.
|
||||
|
||||
This module provides read-only tools for finding group triage and drill-downs.
|
||||
"""
|
||||
|
||||
from typing import Any, Literal
|
||||
from urllib.parse import quote
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
from prowler_mcp_server.prowler_app.models.finding_groups import (
|
||||
DetailedFindingGroup,
|
||||
FindingGroupResourcesListResponse,
|
||||
FindingGroupsListResponse,
|
||||
)
|
||||
from prowler_mcp_server.prowler_app.tools.base import BaseTool
|
||||
|
||||
|
||||
# Literal aliases constraining the filter values the tools below accept.
StatusFilter = Literal["FAIL", "PASS", "MANUAL"]
SeverityFilter = Literal["critical", "high", "medium", "low", "informational"]
DeltaFilter = Literal["new", "changed"]

# Sparse-fieldset value for detail queries (`fields[finding-groups]`):
# requests every counter the finding-groups API exposes so the response
# carries the full pass/fail/manual, new/changed, and muted breakdown.
GROUP_DETAIL_FIELDS = (
    "check_id,check_title,check_description,severity,status,muted,"
    "impacted_providers,resources_fail,resources_total,pass_count,fail_count,"
    "manual_count,pass_muted_count,fail_muted_count,manual_muted_count,"
    "muted_count,new_count,changed_count,new_fail_count,new_fail_muted_count,"
    "new_pass_count,new_pass_muted_count,new_manual_count,new_manual_muted_count,"
    "changed_fail_count,changed_fail_muted_count,changed_pass_count,"
    "changed_pass_muted_count,changed_manual_count,changed_manual_muted_count,"
    "first_seen_at,last_seen_at,failing_since"
)

# Lighter sparse fieldset for list queries: summary counters only, to keep
# paginated list payloads small.
GROUP_LIST_FIELDS = (
    "check_id,check_title,severity,status,muted,impacted_providers,"
    "resources_fail,resources_total,pass_count,fail_count,manual_count,"
    "muted_count,new_count,changed_count,first_seen_at,last_seen_at,failing_since"
)

# Sparse fieldset for `fields[finding-group-resources]` on resource drill-downs.
RESOURCE_FIELDS = (
    "resource,provider,finding_id,status,severity,muted,delta,"
    "first_seen_at,last_seen_at,muted_reason"
)
|
||||
|
||||
|
||||
class FindingGroupsTools(BaseTool):
    """Tools for Finding Groups operations."""

    @staticmethod
    def _bool_value(value: bool | str) -> bool:
        """Normalize bool-like MCP client values.

        Real booleans pass through unchanged; strings are matched
        case-insensitively against "true" (anything else is False).
        """
        return value if isinstance(value, bool) else value.lower() == "true"
|
||||
|
||||
@staticmethod
|
||||
def _group_endpoint(date_range: tuple[str, str] | None) -> str:
|
||||
return "/finding-groups/latest" if date_range is None else "/finding-groups"
|
||||
|
||||
@staticmethod
|
||||
def _resource_endpoint(check_id: str, date_range: tuple[str, str] | None) -> str:
|
||||
escaped_check_id = quote(check_id, safe="")
|
||||
if date_range is None:
|
||||
return f"/finding-groups/latest/{escaped_check_id}/resources"
|
||||
return f"/finding-groups/{escaped_check_id}/resources"
|
||||
|
||||
def _base_date_params(
|
||||
self, date_from: str | None, date_to: str | None
|
||||
) -> tuple[tuple[str, str] | None, dict[str, Any]]:
|
||||
date_range = self.api_client.normalize_date_range(
|
||||
date_from, date_to, max_days=2
|
||||
)
|
||||
if date_range is None:
|
||||
return None, {}
|
||||
|
||||
return date_range, {
|
||||
"filter[inserted_at__gte]": date_range[0],
|
||||
"filter[inserted_at__lte]": date_range[1],
|
||||
}
|
||||
|
||||
def _apply_common_filters(
|
||||
self,
|
||||
params: dict[str, Any],
|
||||
provider: list[str],
|
||||
provider_type: list[str],
|
||||
provider_uid: list[str],
|
||||
provider_alias: str | None,
|
||||
region: list[str],
|
||||
service: list[str],
|
||||
resource_type: list[str],
|
||||
resource_name: str | None,
|
||||
resource_uid: str | None,
|
||||
resource_group: list[str],
|
||||
category: list[str],
|
||||
check_id: list[str],
|
||||
check_title: str | None,
|
||||
severity: list[SeverityFilter],
|
||||
status: list[StatusFilter],
|
||||
muted: bool | str | None,
|
||||
delta: list[DeltaFilter],
|
||||
) -> None:
|
||||
if provider:
|
||||
params["filter[provider__in]"] = provider
|
||||
if provider_type:
|
||||
params["filter[provider_type__in]"] = provider_type
|
||||
if provider_uid:
|
||||
params["filter[provider_uid__in]"] = provider_uid
|
||||
if provider_alias:
|
||||
params["filter[provider_alias__icontains]"] = provider_alias
|
||||
if region:
|
||||
params["filter[region__in]"] = region
|
||||
if service:
|
||||
params["filter[service__in]"] = service
|
||||
if resource_type:
|
||||
params["filter[resource_type__in]"] = resource_type
|
||||
if resource_name:
|
||||
params["filter[resource_name__icontains]"] = resource_name
|
||||
if resource_uid:
|
||||
params["filter[resource_uid__icontains]"] = resource_uid
|
||||
if resource_group:
|
||||
params["filter[resource_groups__in]"] = resource_group
|
||||
if category:
|
||||
params["filter[category__in]"] = category
|
||||
if check_id:
|
||||
params["filter[check_id__in]"] = check_id
|
||||
if check_title:
|
||||
params["filter[check_title__icontains]"] = check_title
|
||||
if severity:
|
||||
params["filter[severity__in]"] = severity
|
||||
if status:
|
||||
params["filter[status__in]"] = status
|
||||
if muted is not None:
|
||||
params["filter[muted]"] = self._bool_value(muted)
|
||||
if delta:
|
||||
params["filter[delta__in]"] = delta
|
||||
|
||||
    async def list_finding_groups(
        self,
        provider: list[str] = Field(
            default=[],
            description="Filter by provider UUIDs. Multiple values allowed. If empty, all visible providers are returned.",
        ),
        provider_type: list[str] = Field(
            default=[],
            description="Filter by provider type. Multiple values allowed, such as aws, azure, gcp, kubernetes, github, or m365.",
        ),
        provider_uid: list[str] = Field(
            default=[],
            description="Filter by provider-native account, subscription, or project IDs. Multiple values allowed.",
        ),
        provider_alias: str | None = Field(
            default=None,
            description="Filter by provider alias/name using partial matching.",
        ),
        region: list[str] = Field(
            default=[],
            description="Filter by cloud regions. Multiple values allowed.",
        ),
        service: list[str] = Field(
            default=[],
            description="Filter by cloud services. Multiple values allowed.",
        ),
        resource_type: list[str] = Field(
            default=[],
            description="Filter by resource types. Multiple values allowed.",
        ),
        resource_name: str | None = Field(
            default=None,
            description="Filter by resource name using partial matching.",
        ),
        resource_uid: str | None = Field(
            default=None,
            description="Filter by resource UID using partial matching.",
        ),
        resource_group: list[str] = Field(
            default=[],
            description="Filter by resource group values. Multiple values allowed.",
        ),
        category: list[str] = Field(
            default=[],
            description="Filter by finding categories. Multiple values allowed.",
        ),
        check_id: list[str] = Field(
            default=[],
            description="Filter by check IDs. Multiple values allowed.",
        ),
        check_title: str | None = Field(
            default=None,
            description="Filter by check title using partial matching.",
        ),
        severity: list[SeverityFilter] = Field(
            default=[],
            description="Filter by aggregated severity. Empty returns all severities.",
        ),
        status: list[StatusFilter] = Field(
            default=["FAIL"],
            description="Filter by aggregated status. Default returns failing groups. Pass [] to return all statuses.",
        ),
        muted: bool | str | None = Field(
            default=None,
            description="Filter by fully muted group state. Accepts true/false.",
        ),
        include_muted: bool | str = Field(
            default=False,
            description="When false, excludes fully muted groups. Set true to include fully muted groups.",
        ),
        delta: list[DeltaFilter] = Field(
            default=[],
            description="Filter by group delta values: new or changed.",
        ),
        date_from: str | None = Field(
            default=None,
            description="Start date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        date_to: str | None = Field(
            default=None,
            description="End date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        sort: str | None = Field(
            default=None,
            description="Optional sort expression supported by the finding-groups API, such as -fail_count,-severity,check_id.",
        ),
        page_size: int = Field(
            default=50, description="Number of groups to return per page"
        ),
        page_number: int = Field(
            default=1, description="Page number to retrieve (1-indexed)"
        ),
    ) -> dict[str, Any]:
        """List finding groups aggregated by check ID.

        Default behavior returns the latest non-muted FAIL groups for fast triage.
        Without dates this uses `/finding-groups/latest`. With `date_from` or
        `date_to`, this uses `/finding-groups` with a maximum 2-day date window.

        Use this tool to find noisy or high-impact checks, then call
        prowler_app_get_finding_group_details for complete counters or
        prowler_app_list_finding_group_resources to drill into affected resources.
        """
        try:
            # Fail fast on oversized pages before any network round-trip.
            self.api_client.validate_page_size(page_size)
            # date_range is None for "latest" queries; otherwise params already
            # carries the inserted_at__gte/__lte window filters.
            date_range, params = self._base_date_params(date_from, date_to)
            endpoint = self._group_endpoint(date_range)

            # NOTE: arguments are positional and must match the helper's
            # parameter order exactly.
            self._apply_common_filters(
                params,
                provider,
                provider_type,
                provider_uid,
                provider_alias,
                region,
                service,
                resource_type,
                resource_name,
                resource_uid,
                resource_group,
                category,
                check_id,
                check_title,
                severity,
                status,
                muted,
                delta,
            )

            # include_muted is always sent (true or false), unlike the optional
            # filters above, so the muted-group behavior is always explicit.
            params["filter[include_muted]"] = self._bool_value(include_muted)
            params["page[size]"] = page_size
            params["page[number]"] = page_number
            # Sparse fieldset keeps the list payload to summary counters only.
            params["fields[finding-groups]"] = GROUP_LIST_FIELDS
            if sort:
                params["sort"] = sort

            clean_params = self.api_client.build_filter_params(params)
            api_response = await self.api_client.get(endpoint, params=clean_params)
            response = FindingGroupsListResponse.from_api_response(api_response)
            return response.model_dump()
        except Exception as e:
            # Tool boundary: surface failures as structured data for the MCP
            # client instead of raising.
            self.logger.error(f"Error listing finding groups: {e}")
            return {"error": str(e), "status": "failed"}
|
||||
|
||||
    async def get_finding_group_details(
        self,
        check_id: str = Field(
            description="Public check ID that identifies the finding group. This is not a UUID."
        ),
        date_from: str | None = Field(
            default=None,
            description="Start date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        date_to: str | None = Field(
            default=None,
            description="End date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
    ) -> dict[str, Any]:
        """Get complete details for one finding group by exact check ID.

        Uses `filter[check_id]` exact matching against latest data by default,
        or historical data when dates are provided. Fully muted groups are
        included by default so accepted risk does not look like a missing group.
        """
        try:
            date_range, params = self._base_date_params(date_from, date_to)
            endpoint = self._group_endpoint(date_range)

            params.update(
                {
                    # Exact match on the public check ID (not __in/__icontains).
                    "filter[check_id]": check_id,
                    "filter[include_muted]": True,
                    # Only the first match is used, so one row suffices.
                    "page[size]": 1,
                    "page[number]": 1,
                    # Full counter breakdown for the detail view.
                    "fields[finding-groups]": GROUP_DETAIL_FIELDS,
                }
            )

            clean_params = self.api_client.build_filter_params(params)
            api_response = await self.api_client.get(endpoint, params=clean_params)
            data = api_response.get("data", [])

            # Empty data means no group matched the exact check_id.
            if not data:
                return {
                    "error": f"Finding group '{check_id}' not found.",
                    "status": "not_found",
                }

            group = DetailedFindingGroup.from_api_response(data[0])
            return group.model_dump()
        except Exception as e:
            # Tool boundary: report failures as structured data, never raise.
            self.logger.error(f"Error getting finding group details: {e}")
            return {"error": str(e), "status": "failed"}
|
||||
|
||||
    async def list_finding_group_resources(
        self,
        check_id: str = Field(
            description="Public check ID that identifies the finding group. This is not a UUID."
        ),
        provider: list[str] = Field(
            default=[],
            description="Filter by provider UUIDs. Multiple values allowed.",
        ),
        provider_type: list[str] = Field(
            default=[],
            description="Filter by provider type. Multiple values allowed.",
        ),
        provider_uid: list[str] = Field(
            default=[],
            description="Filter by provider-native account, subscription, or project IDs. Multiple values allowed.",
        ),
        provider_alias: str | None = Field(
            default=None,
            description="Filter by provider alias/name using partial matching.",
        ),
        region: list[str] = Field(
            default=[],
            description="Filter by cloud regions. Multiple values allowed.",
        ),
        service: list[str] = Field(
            default=[],
            description="Filter by cloud services. Multiple values allowed.",
        ),
        resource_type: list[str] = Field(
            default=[],
            description="Filter by resource types. Multiple values allowed.",
        ),
        resource_name: str | None = Field(
            default=None,
            description="Filter by resource name using partial matching.",
        ),
        resource_uid: str | None = Field(
            default=None,
            description="Filter by resource UID using partial matching.",
        ),
        resource_group: list[str] = Field(
            default=[],
            description="Filter by resource group values. Multiple values allowed.",
        ),
        category: list[str] = Field(
            default=[],
            description="Filter by finding categories. Multiple values allowed.",
        ),
        severity: list[SeverityFilter] = Field(
            default=[],
            description="Filter by severity. Empty returns all severities.",
        ),
        status: list[StatusFilter] = Field(
            default=["FAIL"],
            description="Filter by status. Default returns failing resources. Pass [] to return all statuses.",
        ),
        muted: bool | str | None = Field(
            default=None,
            description="Filter by muted state. Accepts true/false. Overrides include_muted when provided.",
        ),
        include_muted: bool | str = Field(
            default=False,
            description="When false, returns only actionable unmuted resources by applying muted=false. Set true to include muted and unmuted resources.",
        ),
        delta: list[DeltaFilter] = Field(
            default=[], description="Filter by delta values: new or changed."
        ),
        date_from: str | None = Field(
            default=None,
            description="Start date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        date_to: str | None = Field(
            default=None,
            description="End date for historical query in YYYY-MM-DD format. Maximum range is 2 days.",
        ),
        sort: str | None = Field(
            default=None,
            description="Optional sort expression supported by the finding group resources API.",
        ),
        page_size: int = Field(
            default=50, description="Number of resources to return per page"
        ),
        page_number: int = Field(
            default=1, description="Page number to retrieve (1-indexed)"
        ),
    ) -> dict[str, Any]:
        """List resources affected by a finding group.

        Without dates this uses `/finding-groups/latest/{check_id}/resources`.
        With `date_from` or `date_to`, this uses
        `/finding-groups/{check_id}/resources` with a maximum 2-day date window.

        Default behavior returns FAIL, unmuted resources so the result is
        actionable. Set `include_muted=True` to include accepted/suppressed
        resources too. Each row includes nested resource and provider data plus
        `finding_id`. Use `prowler_app_get_finding_details(finding_id)` to
        retrieve complete remediation guidance for a specific resource finding.
        """
        try:
            # Fail fast on oversized pages before any network round-trip.
            self.api_client.validate_page_size(page_size)
            date_range, params = self._base_date_params(date_from, date_to)
            endpoint = self._resource_endpoint(check_id, date_range)

            # Default to actionable (unmuted) rows unless the caller either
            # set `muted` explicitly or opted in via include_muted.
            if muted is None and not self._bool_value(include_muted):
                muted = False

            # NOTE: arguments are positional and must match the helper's
            # parameter order exactly.
            self._apply_common_filters(
                params,
                provider,
                provider_type,
                provider_uid,
                provider_alias,
                region,
                service,
                resource_type,
                resource_name,
                resource_uid,
                resource_group,
                category,
                [],  # check_id filter unused: the check is fixed by the URL path
                None,  # check_title filter unused for resource drill-downs
                severity,
                status,
                muted,
                delta,
            )

            params["page[size]"] = page_size
            params["page[number]"] = page_number
            # Sparse fieldset trims each resource row to the modeled fields.
            params["fields[finding-group-resources]"] = RESOURCE_FIELDS
            if sort:
                params["sort"] = sort

            clean_params = self.api_client.build_filter_params(params)
            api_response = await self.api_client.get(endpoint, params=clean_params)
            response = FindingGroupResourcesListResponse.from_api_response(
                api_response
            )
            return response.model_dump()
        except Exception as e:
            # Tool boundary: surface failures as structured data, never raise.
            self.logger.error(f"Error listing finding group resources: {e}")
            return {"error": str(e), "status": "failed"}
|
||||
@@ -1009,7 +1009,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.32.5"
|
||||
version = "2.33.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "certifi" },
|
||||
@@ -1017,9 +1017,9 @@ dependencies = [
|
||||
{ name = "idna" },
|
||||
{ name = "urllib3" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -2,15 +2,39 @@
|
||||
|
||||
All notable changes to the **Prowler SDK** are documented in this file.
|
||||
|
||||
## [5.26.0] (Prowler UNRELEASED)
|
||||
## [5.27.0] (Prowler UNRELEASED)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- `entra_service_principal_no_secrets_for_permanent_tier0_roles` check for M365 provider [(#10788)](https://github.com/prowler-cloud/prowler/pull/10788)
|
||||
- `iam_user_access_not_stale_to_sagemaker` check for AWS provider with configurable `max_unused_sagemaker_access_days` (default 90) [(#11000)](https://github.com/prowler-cloud/prowler/pull/11000)
|
||||
- `cloudtrail_bedrock_logging_enabled` check for AWS provider [(#10858)](https://github.com/prowler-cloud/prowler/pull/10858)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- `entra_emergency_access_exclusion` check for M365 provider now scopes the exclusion requirement to enabled Conditional Access policies with a `Block` grant control instead of every enabled policy, focusing on the lockout-relevant policy set [(#10849)](https://github.com/prowler-cloud/prowler/pull/10849)
|
||||
|
||||
---
|
||||
|
||||
## [5.26.1] (Prowler v5.26.1)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- `entra_users_mfa_capable` no longer flags disabled guest users by requesting `accountEnabled` and `userType` from Microsoft Graph via `$select` and using Graph as the source of truth for `account_enabled` (EXO `Get-User` does not return guest users) [(#11002)](https://github.com/prowler-cloud/prowler/pull/11002)
|
||||
|
||||
---
|
||||
|
||||
## [5.26.0] (Prowler v5.26.0)
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- `bedrock_guardrails_configured` check for AWS provider [(#10844)](https://github.com/prowler-cloud/prowler/pull/10844)
|
||||
- Universal compliance pipeline integrated into the CLI: `--list-compliance` and `--list-compliance-requirements` show universal frameworks, and CSV plus OCSF outputs are generated for any framework declaring a `TableConfig` [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
|
||||
- Universal compliance with OCSF support [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
|
||||
- ASD Essential Eight Maturity Model compliance framework for AWS (Maturity Level One, Nov 2023) [(#10808)](https://github.com/prowler-cloud/prowler/pull/10808)
|
||||
- Update Vercel checks to return personalized finding status extended depending on billing plan and classify them with billing-plan categories [(#10663)](https://github.com/prowler-cloud/prowler/pull/10663)
|
||||
- Vercel checks to return personalized finding status extended depending on billing plan and classify them with billing-plan categories [(#10663)](https://github.com/prowler-cloud/prowler/pull/10663)
|
||||
- `bedrock_prompt_management_exists` check for AWS provider [(#10878)](https://github.com/prowler-cloud/prowler/pull/10878)
|
||||
- 8 Gmail attachment safety and spoofing protection checks for Google Workspace provider using the Cloud Identity Policy API [(#10980)](https://github.com/prowler-cloud/prowler/pull/10980)
|
||||
- `bedrock_prompt_encrypted_with_cmk` check for AWS provider [(#10905)](https://github.com/prowler-cloud/prowler/pull/10905)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
@@ -19,6 +43,7 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
- AWS CodeBuild service now batches `BatchGetProjects` and `BatchGetBuilds` calls per region (up to 100 items per call) to reduce API call volume and prevent throttling-induced false positives in `codebuild_project_not_publicly_accessible` [(#10639)](https://github.com/prowler-cloud/prowler/pull/10639)
|
||||
- `display_compliance_table` dispatch switched from substring `in` checks to `startswith` to prevent false matches between similarly named frameworks (e.g. `cisa` vs `cis`) [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301)
|
||||
- Restore the `ec2-imdsv1` category for EC2 IMDS checks to keep Attack Surface and findings filters aligned [(#10998)](https://github.com/prowler-cloud/prowler/pull/10998)
|
||||
- Container image CVE findings and IaC findings now use official CVE, Prowler Hub, or GitHub Security Advisory URLs instead of Aqua advisory URLs in remediation and references; Trivy rule IDs map to Prowler Hub without the `AVD-` prefix so links resolve [(#10853)](https://github.com/prowler-cloud/prowler/pull/10853)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
@@ -32,11 +57,13 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
- Parser-mismatch SSRF in image provider registry auth where crafted bearer-token realms and pagination links could force requests to internal addresses and leak credentials cross-origin [(#10945)](https://github.com/prowler-cloud/prowler/pull/10945)
|
||||
- `cryptography` from 46.0.6 to 46.0.7 and `trivy` binary from 0.69.2 to 0.70.0 in the SDK image for CVE-2026-39892 and CVE-2026-33186 [(#10978)](https://github.com/prowler-cloud/prowler/pull/10978)
|
||||
|
||||
## [5.25.3] (Prowler UNRELEASED)
|
||||
---
|
||||
|
||||
## [5.25.3] (Prowler v5.25.3)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- Oracle cloud identity scans now scan known or supplied regions to better support non ashburn tenancies [(#10529)](https://github.com/prowler-cloud/prowler/pull/10529)
|
||||
- Oracle Cloud identity scans known or supplied regions to better support non Ashburn tenancies [(#10529)](https://github.com/prowler-cloud/prowler/pull/10529)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -550,6 +550,7 @@
|
||||
"apigatewayv2_api_access_logging_enabled",
|
||||
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_logs_s3_bucket_access_logging_enabled",
|
||||
"directoryservice_directory_log_forwarding_enabled",
|
||||
|
||||
@@ -3461,6 +3461,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"kinesis_stream_data_retention_period",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events"
|
||||
]
|
||||
},
|
||||
@@ -3669,6 +3670,7 @@
|
||||
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
|
||||
"bedrock_model_invocation_logging_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_logs_s3_bucket_access_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
@@ -5288,6 +5290,7 @@
|
||||
"cognito_user_pool_blocks_compromised_credentials_sign_in_attempts",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"secretsmanager_secret_unused"
|
||||
@@ -6359,6 +6362,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_administrator_access_policy",
|
||||
"iam_user_console_access_unused",
|
||||
@@ -6473,6 +6477,7 @@
|
||||
"backup_recovery_point_encrypted",
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudfront_distributions_field_level_encryption_enabled",
|
||||
"cloudfront_distributions_origin_traffic_encrypted",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
@@ -6730,6 +6735,7 @@
|
||||
"backup_recovery_point_encrypted",
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudfront_distributions_field_level_encryption_enabled",
|
||||
"cloudfront_distributions_origin_traffic_encrypted",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
|
||||
@@ -1958,6 +1958,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
|
||||
@@ -1311,6 +1311,7 @@
|
||||
"glue_development_endpoints_job_bookmark_encryption_enabled",
|
||||
"glue_ml_transform_encrypted_at_rest",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"codebuild_project_s3_logs_encrypted",
|
||||
"codebuild_report_group_export_encrypted"
|
||||
]
|
||||
@@ -3100,6 +3101,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_user_two_active_access_key"
|
||||
@@ -3442,6 +3444,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_user_no_setup_initial_access_key"
|
||||
@@ -3551,6 +3554,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_rotate_access_key_90_days",
|
||||
@@ -5853,6 +5857,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
|
||||
@@ -544,6 +544,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
|
||||
@@ -109,6 +109,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_user_hardware_mfa_enabled",
|
||||
@@ -325,6 +326,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"organizations_delegated_administrators"
|
||||
|
||||
@@ -39,6 +39,7 @@
|
||||
"iam_user_hardware_mfa_enabled",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
|
||||
@@ -32,6 +32,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"securityhub_enabled"
|
||||
@@ -109,6 +110,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -165,6 +167,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -185,6 +188,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -320,6 +324,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -434,6 +439,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
@@ -589,6 +595,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
|
||||
@@ -119,6 +119,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_restapi_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
|
||||
@@ -87,6 +87,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_restapi_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
@@ -632,6 +633,7 @@
|
||||
],
|
||||
"Checks": [
|
||||
"apigateway_restapi_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
|
||||
@@ -869,6 +869,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
|
||||
@@ -247,6 +247,7 @@
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
@@ -1293,6 +1294,7 @@
|
||||
"bedrock_model_invocation_logging_enabled",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
@@ -1767,6 +1769,7 @@
|
||||
"backup_recovery_point_encrypted",
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudfront_distributions_field_level_encryption_enabled",
|
||||
"cloudfront_distributions_origin_traffic_encrypted",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
|
||||
@@ -2115,6 +2115,7 @@
|
||||
"Checks": [
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
@@ -2539,6 +2540,7 @@
|
||||
"bedrock_model_invocation_logging_enabled",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_bucket_requires_mfa_delete",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_insights_exist",
|
||||
|
||||
@@ -2117,6 +2117,7 @@
|
||||
"Checks": [
|
||||
"backup_vaults_encrypted",
|
||||
"bedrock_model_invocation_logs_encryption_enabled",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
|
||||
@@ -171,6 +171,7 @@
|
||||
"iam_no_expired_server_certificates_stored",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_no_root_access_key",
|
||||
|
||||
@@ -1913,6 +1913,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
],
|
||||
|
||||
@@ -32,6 +32,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -76,6 +77,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -164,6 +166,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -589,6 +592,7 @@
|
||||
"iam_password_policy_expires_passwords_within_90_days_or_less",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
|
||||
@@ -23,6 +23,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"securityhub_enabled"
|
||||
@@ -43,6 +44,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -116,6 +118,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"rds_instance_integration_cloudwatch_logs",
|
||||
@@ -240,6 +243,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_url_public",
|
||||
|
||||
@@ -31,6 +31,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"secretsmanager_automatic_rotation_enabled"
|
||||
@@ -53,6 +54,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -74,6 +76,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -95,6 +98,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -116,6 +120,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -136,6 +141,7 @@
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -247,6 +253,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -285,6 +292,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
@@ -861,6 +869,7 @@
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"secretsmanager_automatic_rotation_enabled"
|
||||
@@ -1199,6 +1208,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
@@ -1594,6 +1604,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
@@ -2152,6 +2163,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
@@ -2179,6 +2191,7 @@
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
|
||||
@@ -577,6 +577,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"secretsmanager_automatic_rotation_enabled"
|
||||
@@ -638,6 +639,7 @@
|
||||
"iam_no_root_access_key",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
]
|
||||
|
||||
@@ -707,6 +707,7 @@
|
||||
"iam_user_console_access_unused",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_two_active_access_key",
|
||||
"iam_root_credentials_management_enabled",
|
||||
@@ -903,6 +904,7 @@
|
||||
"Checks": [
|
||||
"backup_vaults_encrypted",
|
||||
"backup_recovery_point_encrypted",
|
||||
"bedrock_prompt_encrypted_with_cmk",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"s3_bucket_kms_encryption",
|
||||
@@ -1310,6 +1312,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"cloudtrail_logs_s3_bucket_access_logging_enabled",
|
||||
@@ -1473,6 +1476,7 @@
|
||||
"cloudtrail_threat_detection_enumeration",
|
||||
"cloudtrail_threat_detection_privilege_escalation",
|
||||
"cloudtrail_threat_detection_llm_jacking",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events"
|
||||
]
|
||||
@@ -1569,6 +1573,7 @@
|
||||
"cloudtrail_threat_detection_llm_jacking",
|
||||
"cloudtrail_threat_detection_enumeration",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudwatch_log_metric_filter_unauthorized_api_calls",
|
||||
"cloudwatch_log_metric_filter_authentication_failures",
|
||||
|
||||
@@ -1563,6 +1563,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_password_policy_reuse_24",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused"
|
||||
|
||||
@@ -295,6 +295,7 @@
|
||||
"Checks": [
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"iam_no_expired_server_certificates_stored"
|
||||
@@ -340,6 +341,7 @@
|
||||
"iam_rotate_access_key_90_days",
|
||||
"iam_role_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_bedrock",
|
||||
"iam_user_access_not_stale_to_sagemaker",
|
||||
"iam_user_accesskey_unused",
|
||||
"iam_user_console_access_unused",
|
||||
"accessanalyzer_enabled_without_findings"
|
||||
@@ -816,6 +818,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
|
||||
@@ -346,6 +346,7 @@
|
||||
}
|
||||
],
|
||||
"Checks": [
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
|
||||
@@ -653,7 +653,9 @@
|
||||
{
|
||||
"Id": "3.1.3.4.1.1",
|
||||
"Description": "Ensure protection against encrypted attachments from untrusted senders is enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_encrypted_attachment_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -674,7 +676,9 @@
|
||||
{
|
||||
"Id": "3.1.3.4.1.2",
|
||||
"Description": "Ensure protection against attachments with scripts from untrusted senders is enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_script_attachment_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -695,7 +699,9 @@
|
||||
{
|
||||
"Id": "3.1.3.4.1.3",
|
||||
"Description": "Ensure protection against anomalous attachment types in emails is enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_anomalous_attachment_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -785,7 +791,9 @@
|
||||
{
|
||||
"Id": "3.1.3.4.3.1",
|
||||
"Description": "Ensure protection against domain spoofing based on similar domain names is enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_domain_spoofing_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -806,7 +814,9 @@
|
||||
{
|
||||
"Id": "3.1.3.4.3.2",
|
||||
"Description": "Ensure protection against spoofing of employee names is enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_employee_name_spoofing_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -827,7 +837,9 @@
|
||||
{
|
||||
"Id": "3.1.3.4.3.3",
|
||||
"Description": "Ensure protection against inbound emails spoofing your domain is enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_inbound_domain_spoofing_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -848,7 +860,9 @@
|
||||
{
|
||||
"Id": "3.1.3.4.3.4",
|
||||
"Description": "Ensure protection against any unauthenticated emails is enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_unauthenticated_email_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -869,7 +883,9 @@
|
||||
{
|
||||
"Id": "3.1.3.4.3.5",
|
||||
"Description": "Ensure groups are protected from inbound emails spoofing your domain",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_groups_spoofing_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
|
||||
@@ -649,7 +649,9 @@
|
||||
{
|
||||
"Id": "GWS.GMAIL.5.1",
|
||||
"Description": "Protect against encrypted attachments from untrusted senders SHALL be enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_encrypted_attachment_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Gmail",
|
||||
@@ -662,7 +664,9 @@
|
||||
{
|
||||
"Id": "GWS.GMAIL.5.2",
|
||||
"Description": "Protect against attachments with scripts from untrusted senders SHALL be enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_script_attachment_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Gmail",
|
||||
@@ -675,7 +679,9 @@
|
||||
{
|
||||
"Id": "GWS.GMAIL.5.3",
|
||||
"Description": "Protect against anomalous attachment types in emails SHALL be enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_anomalous_attachment_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Gmail",
|
||||
@@ -798,7 +804,9 @@
|
||||
{
|
||||
"Id": "GWS.GMAIL.7.1",
|
||||
"Description": "Protect against domain spoofing based on similar domain names SHALL be enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_domain_spoofing_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Gmail",
|
||||
@@ -811,7 +819,9 @@
|
||||
{
|
||||
"Id": "GWS.GMAIL.7.2",
|
||||
"Description": "Protect against spoofing of employee names SHALL be enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_employee_name_spoofing_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Gmail",
|
||||
@@ -824,7 +834,9 @@
|
||||
{
|
||||
"Id": "GWS.GMAIL.7.3",
|
||||
"Description": "Protect against inbound emails spoofing your domain SHALL be enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_inbound_domain_spoofing_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Gmail",
|
||||
@@ -837,7 +849,9 @@
|
||||
{
|
||||
"Id": "GWS.GMAIL.7.4",
|
||||
"Description": "Protect against any unauthenticated emails SHALL be enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_unauthenticated_email_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Gmail",
|
||||
@@ -850,7 +864,9 @@
|
||||
{
|
||||
"Id": "GWS.GMAIL.7.5",
|
||||
"Description": "Protect your Groups from inbound emails spoofing your domain SHALL be enabled",
|
||||
"Checks": [],
|
||||
"Checks": [
|
||||
"gmail_groups_spoofing_protection_enabled"
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Gmail",
|
||||
|
||||
@@ -251,6 +251,7 @@
|
||||
"entra_break_glass_account_fido2_security_key_registered",
|
||||
"entra_conditional_access_policy_mfa_enforced_for_guest_users",
|
||||
"entra_default_app_management_policy_enabled",
|
||||
"entra_emergency_access_exclusion",
|
||||
"entra_all_apps_conditional_access_coverage",
|
||||
"entra_conditional_access_policy_device_registration_mfa_required",
|
||||
"entra_intune_enrollment_sign_in_frequency_every_time",
|
||||
@@ -260,6 +261,7 @@
|
||||
"entra_legacy_authentication_blocked",
|
||||
"entra_managed_device_required_for_authentication",
|
||||
"entra_seamless_sso_disabled",
|
||||
"entra_service_principal_no_secrets_for_permanent_tier0_roles",
|
||||
"entra_users_mfa_enabled",
|
||||
"exchange_organization_modern_authentication_enabled",
|
||||
"exchange_transport_config_smtp_auth_disabled",
|
||||
@@ -282,6 +284,7 @@
|
||||
"entra_admin_portals_access_restriction",
|
||||
"entra_app_registration_no_unused_privileged_permissions",
|
||||
"entra_policy_guest_users_access_restrictions",
|
||||
"entra_service_principal_no_secrets_for_permanent_tier0_roles",
|
||||
"sharepoint_external_sharing_managed",
|
||||
"sharepoint_external_sharing_restricted",
|
||||
"sharepoint_guest_sharing_restricted"
|
||||
@@ -671,10 +674,12 @@
|
||||
"entra_admin_users_phishing_resistant_mfa_enabled",
|
||||
"entra_admin_users_sign_in_frequency_enabled",
|
||||
"entra_break_glass_account_fido2_security_key_registered",
|
||||
"entra_emergency_access_exclusion",
|
||||
"entra_app_registration_no_unused_privileged_permissions",
|
||||
"entra_policy_ensure_default_user_cannot_create_tenants",
|
||||
"entra_policy_guest_invite_only_for_admin_roles",
|
||||
"entra_seamless_sso_disabled"
|
||||
"entra_seamless_sso_disabled",
|
||||
"entra_service_principal_no_secrets_for_permanent_tier0_roles"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -727,9 +732,11 @@
|
||||
"entra_conditional_access_policy_device_code_flow_blocked",
|
||||
"entra_conditional_access_policy_directory_sync_account_excluded",
|
||||
"entra_conditional_access_policy_corporate_device_sign_in_frequency_enforced",
|
||||
"entra_emergency_access_exclusion",
|
||||
"entra_identity_protection_sign_in_risk_enabled",
|
||||
"entra_managed_device_required_for_authentication",
|
||||
"entra_seamless_sso_disabled",
|
||||
"entra_service_principal_no_secrets_for_permanent_tier0_roles",
|
||||
"entra_users_mfa_enabled"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -48,7 +48,7 @@ class _MutableTimestamp:
|
||||
|
||||
timestamp = _MutableTimestamp(datetime.today())
|
||||
timestamp_utc = _MutableTimestamp(datetime.now(timezone.utc))
|
||||
prowler_version = "5.26.0"
|
||||
prowler_version = "5.27.0"
|
||||
html_logo_url = "https://github.com/prowler-cloud/prowler/"
|
||||
square_logo_img = "https://raw.githubusercontent.com/prowler-cloud/prowler/dc7d2d5aeb92fdf12e8604f42ef6472cd3e8e889/docs/img/prowler-logo-black.png"
|
||||
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
|
||||
|
||||
@@ -26,6 +26,8 @@ aws:
|
||||
max_unused_access_keys_days: 45
|
||||
# aws.iam_user_console_access_unused --> CIS recommends 45 days
|
||||
max_console_access_days: 45
|
||||
# aws.iam_user_access_not_stale_to_sagemaker --> default 90 days
|
||||
max_unused_sagemaker_access_days: 90
|
||||
|
||||
# AWS EC2 Configuration
|
||||
# aws.ec2_elastic_ip_shodan
|
||||
|
||||
@@ -0,0 +1,90 @@
|
||||
import re
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
|
||||
# Host of Aqua Security's vulnerability advisory site; references pointing
# here are filtered out in favor of official CVE / GHSA / Prowler Hub links.
AQUA_REFERENCE_HOST = "avd.aquasec.com"
# URL template for GitHub Security Advisory pages (GHSA-... identifiers).
GITHUB_ADVISORY_URL = "https://github.com/advisories/{advisory_id}"
# URL template for Prowler Hub check pages (Trivy rule IDs without "AVD-").
PROWLER_HUB_CHECK_URL = "https://hub.prowler.com/check/{check_id}"
# Matches CVE identifiers such as "CVE-2024-12345" (case-insensitive).
_CVE_ID_PATTERN = re.compile(r"^CVE-\d{4}-\d+$", re.IGNORECASE)
# Matches GitHub advisory identifiers such as "GHSA-abcd-efgh-ijkl" (case-insensitive).
_GHSA_ID_PATTERN = re.compile(r"^GHSA(?:-[a-z0-9]{4}){3}$", re.IGNORECASE)
|
||||
|
||||
|
||||
def _dedupe_preserve_order(urls: list[str]) -> list[str]:
|
||||
seen: set[str] = set()
|
||||
ordered_urls: list[str] = []
|
||||
|
||||
for url in urls:
|
||||
if not url or not url.strip():
|
||||
continue
|
||||
|
||||
normalized_url = url.strip()
|
||||
if normalized_url in seen:
|
||||
continue
|
||||
|
||||
seen.add(normalized_url)
|
||||
ordered_urls.append(normalized_url)
|
||||
|
||||
return ordered_urls
|
||||
|
||||
|
||||
def _is_aqua_reference(url: str) -> bool:
    """Return True when *url* points at the Aqua advisory site.

    Matches the host exactly or any subdomain of it. The previous substring
    test on ``netloc`` also matched unrelated hosts (e.g.
    ``notavd.aquasec.com``) and text in the userinfo/port portion; using
    ``hostname`` with an exact/suffix comparison avoids both.
    """
    host = (urlparse(url).hostname or "").lower()
    return host == AQUA_REFERENCE_HOST or host.endswith("." + AQUA_REFERENCE_HOST)
|
||||
|
||||
|
||||
def _build_cve_org_url(vulnerability_id: str) -> str:
|
||||
return f"https://www.cve.org/CVERecord?id={vulnerability_id.upper()}"
|
||||
|
||||
|
||||
def build_finding_reference_url(finding_id: str) -> str:
|
||||
"""Map a Trivy finding ID to a stable, real reference URL.
|
||||
|
||||
- CVE-XXXX-NNNN → cve.org record
|
||||
- GHSA-… → github.com/advisories
|
||||
- everything else → hub.prowler.com/check/<id>, stripping a leading
|
||||
"AVD-" prefix because Prowler Hub indexes Trivy rules by the
|
||||
non-prefixed ID (e.g., "AWS-0001" not "AVD-AWS-0001").
|
||||
"""
|
||||
normalized = finding_id.strip().upper()
|
||||
if _CVE_ID_PATTERN.match(normalized):
|
||||
return _build_cve_org_url(normalized)
|
||||
if _GHSA_ID_PATTERN.match(normalized):
|
||||
return GITHUB_ADVISORY_URL.format(advisory_id=normalized)
|
||||
hub_id = normalized[4:] if normalized.startswith("AVD-") else normalized
|
||||
return PROWLER_HUB_CHECK_URL.format(check_id=hub_id)
|
||||
|
||||
|
||||
def _is_cve_org_url(url: str, vulnerability_id: str) -> bool:
|
||||
parsed_url = urlparse(url)
|
||||
if parsed_url.netloc.lower() != "www.cve.org":
|
||||
return False
|
||||
|
||||
query_value = parse_qs(parsed_url.query).get("id", [""])[0]
|
||||
return query_value.upper() == vulnerability_id.upper()
|
||||
|
||||
|
||||
def resolve_vulnerability_reference_urls(
|
||||
vulnerability_id: str,
|
||||
references: list[str] | None = None,
|
||||
primary_url: str = "",
|
||||
) -> tuple[str, list[str]]:
|
||||
"""Resolve non-Aqua vulnerability URLs, prioritizing official CVE destinations."""
|
||||
|
||||
candidate_urls = list(references or [])
|
||||
if primary_url and primary_url not in candidate_urls:
|
||||
candidate_urls.append(primary_url)
|
||||
|
||||
filtered_urls = _dedupe_preserve_order(
|
||||
[url for url in candidate_urls if not _is_aqua_reference(url)]
|
||||
)
|
||||
|
||||
if not _CVE_ID_PATTERN.match(vulnerability_id):
|
||||
return "", filtered_urls
|
||||
|
||||
cve_org_urls = [
|
||||
url for url in filtered_urls if _is_cve_org_url(url, vulnerability_id)
|
||||
]
|
||||
|
||||
recommendation_url = (
|
||||
cve_org_urls[0] if cve_org_urls else _build_cve_org_url(vulnerability_id)
|
||||
)
|
||||
|
||||
return recommendation_url, [recommendation_url]
|
||||
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"Provider": "aws",
|
||||
"CheckID": "bedrock_prompt_encrypted_with_cmk",
|
||||
"CheckTitle": "Amazon Bedrock prompt is encrypted at rest with a customer-managed KMS key",
|
||||
"CheckType": [
|
||||
"Software and Configuration Checks/AWS Security Best Practices"
|
||||
],
|
||||
"ServiceName": "bedrock",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Other",
|
||||
"ResourceGroup": "ai_ml",
|
||||
"Description": "Bedrock prompts should be encrypted at rest with a **customer-managed KMS key (CMK)** rather than the AWS-owned default key. Prompts can contain sensitive instructions, business logic, and references to downstream tooling that warrant tenant-controlled key material and auditable access via AWS KMS.",
|
||||
"Risk": "A prompt encrypted only with the AWS-owned default key offers limited tenant control over key access and lifecycle: no customer KMS key policy to govern decrypt permissions, no control over rotation cadence or scheduled deletion, and gaps against frameworks (ISO 27001 A.8.24, NIST CSF PR.DS, KISA-ISMS-P 2.7.2) that require customer-managed keys for sensitive data at rest.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management.html",
|
||||
"https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent_CreatePrompt.html",
|
||||
"https://docs.aws.amazon.com/bedrock/latest/APIReference/API_agent_UpdatePrompt.html"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "# Retrieve the current DRAFT prompt first and note the existing fields you want to preserve, such as description, defaultVariant, and variants:\naws bedrock-agent get-prompt --prompt-identifier <prompt_id> --prompt-version DRAFT --output json\n# Then update the prompt and include the existing fields you want to keep alongside the CMK change:\naws bedrock-agent update-prompt --prompt-identifier <prompt_id> --name <prompt_name> --description <current_or_new_description> --default-variant <current_default_variant> --variants <current_or_updated_variants_json> --customer-encryption-key-arn <kms_key_arn>",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Open the Amazon Bedrock console\n2. Navigate to Prompt management\n3. Select the prompt\n4. Edit the prompt and choose a customer-managed KMS key for encryption\n5. Save the prompt",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Encrypt every Bedrock prompt with a **customer-managed KMS key** to retain control over key access, rotation, and lifecycle. When using `update-prompt`, first retrieve the current draft and carry forward the fields you want to preserve, such as the existing description, `defaultVariant`, and `variants`, so the encryption change does not unintentionally overwrite prompt configuration.",
|
||||
"Url": "https://hub.prowler.com/check/bedrock_prompt_encrypted_with_cmk"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"gen-ai",
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"bedrock_prompt_management_exists"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_AWS
|
||||
from prowler.providers.aws.services.bedrock.bedrock_agent_client import (
|
||||
bedrock_agent_client,
|
||||
)
|
||||
|
||||
|
||||
class bedrock_prompt_encrypted_with_cmk(Check):
|
||||
"""Ensure that Bedrock prompts are encrypted with a customer-managed KMS key.
|
||||
|
||||
This check evaluates whether each Bedrock prompt is encrypted at rest using
|
||||
a customer-managed KMS key (CMK) rather than the AWS-owned default key.
|
||||
- PASS: The Bedrock prompt is encrypted with a customer-managed KMS key.
|
||||
- FAIL: The Bedrock prompt is not encrypted with a customer-managed KMS key.
|
||||
"""
|
||||
|
||||
def execute(self) -> list[Check_Report_AWS]:
|
||||
"""Execute the Bedrock prompt CMK encryption check.
|
||||
|
||||
Returns:
|
||||
A list of reports containing the result of the check.
|
||||
"""
|
||||
findings = []
|
||||
for prompt in bedrock_agent_client.prompts.values():
|
||||
report = Check_Report_AWS(metadata=self.metadata(), resource=prompt)
|
||||
if prompt.customer_encryption_key_arn:
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Bedrock Prompt {prompt.name} is encrypted with a customer-managed KMS key."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Bedrock Prompt {prompt.name} is not encrypted with a customer-managed KMS key."
|
||||
findings.append(report)
|
||||
return findings
|
||||
@@ -34,6 +34,8 @@
|
||||
"gen-ai"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"RelatedTo": [
|
||||
"bedrock_prompt_encrypted_with_cmk"
|
||||
],
|
||||
"Notes": "Results are generated per scanned region. Regions where `ListPrompts` cannot be queried are omitted from the findings."
|
||||
}
|
||||
|
||||
@@ -136,7 +136,10 @@ class Guardrail(BaseModel):
|
||||
|
||||
|
||||
class BedrockAgent(AWSService):
|
||||
"""Bedrock Agent service class for managing agents and prompts."""
|
||||
|
||||
def __init__(self, provider):
|
||||
"""Initialize the BedrockAgent service."""
|
||||
# Call AWSService's __init__
|
||||
super().__init__("bedrock-agent", provider)
|
||||
self.agents = {}
|
||||
@@ -144,6 +147,7 @@ class BedrockAgent(AWSService):
|
||||
self.prompt_scanned_regions: set = set()
|
||||
self.__threading_call__(self._list_agents)
|
||||
self.__threading_call__(self._list_prompts)
|
||||
self.__threading_call__(self._get_prompt, self.prompts.values())
|
||||
self.__threading_call__(self._list_tags_for_resource, self.agents.values())
|
||||
|
||||
def _list_agents(self, regional_client):
|
||||
@@ -171,29 +175,43 @@ class BedrockAgent(AWSService):
|
||||
)
|
||||
|
||||
def _list_prompts(self, regional_client):
|
||||
"""List all prompts in a region.
|
||||
|
||||
Prompt Management is evaluated as a region-level adoption signal, so
|
||||
prompt collection is intentionally not filtered by audit_resources.
|
||||
"""
|
||||
"""List all prompts in a region."""
|
||||
logger.info("Bedrock Agent - Listing Prompts...")
|
||||
try:
|
||||
paginator = regional_client.get_paginator("list_prompts")
|
||||
for page in paginator.paginate():
|
||||
for prompt in page.get("promptSummaries", []):
|
||||
prompt_arn = prompt.get("arn", "")
|
||||
self.prompts[prompt_arn] = Prompt(
|
||||
id=prompt.get("id", ""),
|
||||
name=prompt.get("name", ""),
|
||||
arn=prompt_arn,
|
||||
region=regional_client.region,
|
||||
)
|
||||
if not self.audit_resources or (
|
||||
is_resource_filtered(prompt_arn, self.audit_resources)
|
||||
):
|
||||
self.prompts[prompt_arn] = Prompt(
|
||||
id=prompt.get("id", ""),
|
||||
name=prompt.get("name", ""),
|
||||
arn=prompt_arn,
|
||||
region=regional_client.region,
|
||||
)
|
||||
self.prompt_scanned_regions.add(regional_client.region)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def _get_prompt(self, prompt):
|
||||
"""Get detailed prompt information including encryption configuration."""
|
||||
logger.info("Bedrock Agent - Getting Prompt...")
|
||||
try:
|
||||
prompt_info = self.regional_clients[prompt.region].get_prompt(
|
||||
promptIdentifier=prompt.id
|
||||
)
|
||||
prompt.customer_encryption_key_arn = prompt_info.get(
|
||||
"customerEncryptionKeyArn"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{prompt.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def _list_tags_for_resource(self, resource):
|
||||
"""List tags for a Bedrock Agent resource."""
|
||||
logger.info("Bedrock Agent - Listing Tags for Resource...")
|
||||
@@ -212,6 +230,8 @@ class BedrockAgent(AWSService):
|
||||
|
||||
|
||||
class Agent(BaseModel):
|
||||
"""Model for a Bedrock Agent resource."""
|
||||
|
||||
id: str
|
||||
name: str
|
||||
arn: str
|
||||
@@ -227,3 +247,4 @@ class Prompt(BaseModel):
|
||||
name: str
|
||||
arn: str
|
||||
region: str
|
||||
customer_encryption_key_arn: Optional[str] = None
|
||||
|
||||
@@ -0,0 +1,44 @@
|
||||
{
|
||||
"Provider": "aws",
|
||||
"CheckID": "cloudtrail_bedrock_logging_enabled",
|
||||
"CheckTitle": "CloudTrail logs Amazon Bedrock API calls for security auditing",
|
||||
"CheckType": [
|
||||
"Software and Configuration Checks/AWS Security Best Practices"
|
||||
],
|
||||
"ServiceName": "cloudtrail",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "AwsCloudTrailTrail",
|
||||
"ResourceGroup": "monitoring",
|
||||
"Description": "**At least one actively logging CloudTrail trail** records **Amazon Bedrock API activity** through management events or advanced event selectors targeting Bedrock resources.\n\nThis check covers **control-plane** operations such as configuration changes through CloudTrail management events and can also cover **data-plane** Bedrock events when advanced event selectors target Bedrock resource types.",
|
||||
"Risk": "Without CloudTrail logging for Bedrock control-plane operations, changes to prompts, guardrails, agents, flows, or knowledge bases can become invisible, weakening forensics and incident response. Management events do not capture `InvokeModel`; pair this control with `bedrock_model_invocation_logging_enabled` or Bedrock data event selectors for invocation visibility.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://docs.aws.amazon.com/bedrock/latest/userguide/logging-using-cloudtrail.html",
|
||||
"https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-data-events-with-cloudtrail.html"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "aws cloudtrail put-event-selectors --trail-name <example_resource_name> --advanced-event-selectors '[{\"Name\":\"Bedrock data events\",\"FieldSelectors\":[{\"Field\":\"eventCategory\",\"Equals\":[\"Data\"]},{\"Field\":\"resources.type\",\"Equals\":[\"AWS::Bedrock::Model\",\"AWS::Bedrock::Guardrail\",\"AWS::Bedrock::AgentAlias\",\"AWS::Bedrock::FlowAlias\",\"AWS::Bedrock::InlineAgent\",\"AWS::Bedrock::KnowledgeBase\",\"AWS::Bedrock::Prompt\"]}]}]'",
|
||||
"NativeIaC": "```yaml\n# CloudFormation: enable Bedrock data event logging on an actively logging trail\nResources:\n ExampleTrail:\n Type: AWS::CloudTrail::Trail\n Properties:\n TrailName: <example_resource_name>\n S3BucketName: <example_resource_name>\n IsLogging: true\n AdvancedEventSelectors:\n - Name: Bedrock data events\n FieldSelectors:\n - Field: eventCategory\n Equals:\n - Data\n - Field: resources.type # CRITICAL: target Bedrock resources\n Equals:\n - AWS::Bedrock::Model\n - AWS::Bedrock::Guardrail\n - AWS::Bedrock::AgentAlias\n - AWS::Bedrock::FlowAlias\n - AWS::Bedrock::InlineAgent\n - AWS::Bedrock::KnowledgeBase\n - AWS::Bedrock::Prompt\n```",
|
||||
"Other": "1. In the AWS Console, open CloudTrail and select a trail that is actively logging\n2. Edit the trail and enable Management events to capture Bedrock control-plane operations, or add Bedrock advanced data event selectors for data-plane visibility\n3. If using data events, select the Bedrock resource types you want to log\n4. Save changes and confirm the trail remains in logging state",
|
||||
"Terraform": "```hcl\n# Terraform: enable Bedrock data event logging on an actively logging trail\nresource \"aws_cloudtrail\" \"example_resource\" {\n name = \"example_resource\"\n s3_bucket_name = \"example_resource\"\n\n advanced_event_selector {\n name = \"Bedrock data events\"\n field_selector {\n field = \"eventCategory\"\n equals = [\"Data\"]\n }\n field_selector {\n field = \"resources.type\" # CRITICAL: target Bedrock resources\n equals = [\"AWS::Bedrock::Model\", \"AWS::Bedrock::Guardrail\", \"AWS::Bedrock::AgentAlias\", \"AWS::Bedrock::FlowAlias\", \"AWS::Bedrock::InlineAgent\", \"AWS::Bedrock::KnowledgeBase\", \"AWS::Bedrock::Prompt\"]\n }\n }\n}\n```"
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable CloudTrail logging for Amazon Bedrock on **at least one actively logging trail**. At minimum, enable **management events** to capture Bedrock control-plane operations. For invocation-level and other data-plane visibility, add **advanced event selectors** targeting Bedrock resource types or pair this control with `bedrock_model_invocation_logging_enabled`.\n\nFor broader region coverage, pair this control with a separate multi-region CloudTrail check. Centralize logs in an encrypted bucket or CloudWatch Logs to support **defense in depth** and forensic readiness for AI workloads.",
|
||||
"Url": "https://hub.prowler.com/check/cloudtrail_bedrock_logging_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"logging",
|
||||
"forensics-ready",
|
||||
"gen-ai"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"bedrock_model_invocation_logging_enabled"
|
||||
],
|
||||
"Notes": "This check passes when CloudTrail captures Bedrock control-plane activity via management events or Bedrock data events via advanced selectors. It does not require multi-region coverage, and it does not by itself guarantee `InvokeModel` visibility unless Bedrock data events are selected; use `bedrock_model_invocation_logging_enabled` for model invocation logs. Additional advanced selector filters such as `eventName` or `resources.ARN` can further narrow effective coverage and should be reviewed explicitly."
|
||||
}
|
||||
@@ -0,0 +1,213 @@
|
||||
from prowler.lib.check.models import Check, Check_Report_AWS
|
||||
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
|
||||
cloudtrail_client,
|
||||
)
|
||||
from prowler.providers.aws.services.cloudtrail.cloudtrail_service import (
|
||||
Event_Selector,
|
||||
)
|
||||
|
||||
|
||||
class cloudtrail_bedrock_logging_enabled(Check):
|
||||
"""Ensure CloudTrail is configured to log Amazon Bedrock API calls.
|
||||
|
||||
This check verifies whether at least one CloudTrail trail is configured to
|
||||
capture Amazon Bedrock control-plane API calls through management events or
|
||||
Bedrock data events through advanced event selectors.
|
||||
|
||||
- PASS: A trail logs Bedrock control-plane API calls via management events
|
||||
or Bedrock data events via Bedrock-specific advanced event selectors.
|
||||
- FAIL: No CloudTrail trail is configured to log Bedrock API calls.
|
||||
"""
|
||||
|
||||
# Bedrock resource types supported by CloudTrail advanced event selectors.
|
||||
BEDROCK_RESOURCE_TYPES = frozenset(
|
||||
{
|
||||
"AWS::Bedrock::AgentAlias",
|
||||
"AWS::Bedrock::FlowAlias",
|
||||
"AWS::Bedrock::Guardrail",
|
||||
"AWS::Bedrock::InlineAgent",
|
||||
"AWS::Bedrock::KnowledgeBase",
|
||||
"AWS::Bedrock::Model",
|
||||
"AWS::Bedrock::Prompt",
|
||||
}
|
||||
)
|
||||
# Bedrock control-plane event sources, including Bedrock Data Automation.
|
||||
BEDROCK_EVENT_SOURCES = frozenset(
|
||||
{
|
||||
"bedrock.amazonaws.com",
|
||||
"bedrock-agent.amazonaws.com",
|
||||
"bedrock-runtime.amazonaws.com",
|
||||
"bedrock-agent-runtime.amazonaws.com",
|
||||
"bedrock-data-automation.amazonaws.com",
|
||||
"bedrock-data-automation-runtime.amazonaws.com",
|
||||
}
|
||||
)
|
||||
|
||||
def execute(self) -> list[Check_Report_AWS]:
|
||||
"""Execute the check logic.
|
||||
|
||||
Returns:
|
||||
A list of reports containing the result of the check.
|
||||
"""
|
||||
findings = []
|
||||
if cloudtrail_client.trails is not None:
|
||||
for trail in cloudtrail_client.trails.values():
|
||||
if trail.is_logging:
|
||||
for data_event in trail.data_events:
|
||||
match_type = self._get_bedrock_match_type(data_event)
|
||||
if match_type:
|
||||
report = Check_Report_AWS(
|
||||
metadata=self.metadata(), resource=trail
|
||||
)
|
||||
report.region = trail.home_region
|
||||
report.status = "PASS"
|
||||
if match_type == "classic_management":
|
||||
report.status_extended = (
|
||||
f"Trail {trail.name} from home region "
|
||||
f"{trail.home_region} has management events "
|
||||
"enabled to log Amazon Bedrock control-plane "
|
||||
"API calls."
|
||||
)
|
||||
elif match_type == "advanced_management":
|
||||
report.status_extended = (
|
||||
f"Trail {trail.name} from home region "
|
||||
f"{trail.home_region} has an advanced "
|
||||
"management event selector to log Amazon "
|
||||
"Bedrock control-plane API calls."
|
||||
)
|
||||
else:
|
||||
report.status_extended = (
|
||||
f"Trail {trail.name} from home region "
|
||||
f"{trail.home_region} has an advanced data "
|
||||
"event selector to log Amazon Bedrock API "
|
||||
"calls."
|
||||
)
|
||||
findings.append(report)
|
||||
break
|
||||
if not findings:
|
||||
report = Check_Report_AWS(
|
||||
metadata=self.metadata(), resource=cloudtrail_client.trails
|
||||
)
|
||||
report.region = cloudtrail_client.region
|
||||
report.resource_arn = cloudtrail_client.trail_arn_template
|
||||
report.resource_id = cloudtrail_client.audited_account
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "No CloudTrail trails are configured to log Amazon Bedrock API calls."
|
||||
findings.append(report)
|
||||
return findings
|
||||
|
||||
def _get_bedrock_match_type(self, data_event: Event_Selector) -> str | None:
|
||||
"""Return the Bedrock logging match type for an event selector.
|
||||
|
||||
Args:
|
||||
data_event: An Event_Selector object from the trail.
|
||||
|
||||
Returns:
|
||||
The matching selector type, or None if the selector does not log
|
||||
the Bedrock events covered by this check.
|
||||
"""
|
||||
if not data_event.is_advanced:
|
||||
if self._logs_classic_management_events(data_event.event_selector):
|
||||
return "classic_management"
|
||||
return None
|
||||
|
||||
field_selectors = data_event.event_selector.get("FieldSelectors", [])
|
||||
if self._logs_advanced_management_events(field_selectors):
|
||||
return "advanced_management"
|
||||
if self._logs_advanced_bedrock_data_events(field_selectors):
|
||||
return "advanced_data"
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _logs_classic_management_events(event_selector: dict) -> bool:
|
||||
"""Check whether a classic selector logs Bedrock control-plane events."""
|
||||
return event_selector.get(
|
||||
"IncludeManagementEvents", True
|
||||
) and event_selector.get("ReadWriteType", "All") in ("All", "WriteOnly")
|
||||
|
||||
def _logs_advanced_management_events(self, field_selectors: list[dict]) -> bool:
|
||||
"""Check whether advanced selectors log Bedrock control-plane events."""
|
||||
event_category_selectors = [
|
||||
field for field in field_selectors if field.get("Field") == "eventCategory"
|
||||
]
|
||||
if not self._selectors_match_value("Management", event_category_selectors):
|
||||
return False
|
||||
|
||||
read_only_selectors = [
|
||||
field for field in field_selectors if field.get("Field") == "readOnly"
|
||||
]
|
||||
has_read_only_restriction = bool(read_only_selectors) and not any(
|
||||
self._field_selector_matches_value("false", selector)
|
||||
for selector in read_only_selectors
|
||||
)
|
||||
|
||||
return not has_read_only_restriction and self._logs_bedrock_management_events(
|
||||
field_selectors
|
||||
)
|
||||
|
||||
def _logs_advanced_bedrock_data_events(self, field_selectors: list[dict]) -> bool:
|
||||
"""Check whether advanced selectors log Bedrock data events."""
|
||||
event_category_selectors = [
|
||||
field for field in field_selectors if field.get("Field") == "eventCategory"
|
||||
]
|
||||
if not self._selectors_match_value("Data", event_category_selectors):
|
||||
return False
|
||||
|
||||
resource_type_selectors = [
|
||||
field for field in field_selectors if field.get("Field") == "resources.type"
|
||||
]
|
||||
return any(
|
||||
self._selectors_match_value(resource_type, resource_type_selectors)
|
||||
for resource_type in self.BEDROCK_RESOURCE_TYPES
|
||||
)
|
||||
|
||||
def _logs_bedrock_management_events(self, field_selectors: list[dict]) -> bool:
|
||||
"""Check whether advanced management selectors include Bedrock sources."""
|
||||
event_source_selectors = [
|
||||
field for field in field_selectors if field.get("Field") == "eventSource"
|
||||
]
|
||||
if not event_source_selectors:
|
||||
return True
|
||||
|
||||
return any(
|
||||
self._selectors_match_value(event_source, event_source_selectors)
|
||||
for event_source in self.BEDROCK_EVENT_SOURCES
|
||||
)
|
||||
|
||||
def _selectors_match_value(self, value: str, selectors: list[dict]) -> bool:
|
||||
"""Check whether a candidate value satisfies all selectors for a field."""
|
||||
return bool(selectors) and all(
|
||||
self._field_selector_matches_value(value, selector)
|
||||
for selector in selectors
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _field_selector_matches_value(value: str, selector: dict) -> bool:
|
||||
"""Evaluate a CloudTrail advanced field selector against a candidate value."""
|
||||
conditions = []
|
||||
|
||||
if "Equals" in selector:
|
||||
conditions.append(value in selector["Equals"])
|
||||
if "NotEquals" in selector:
|
||||
conditions.append(value not in selector["NotEquals"])
|
||||
if "StartsWith" in selector:
|
||||
conditions.append(
|
||||
any(value.startswith(prefix) for prefix in selector["StartsWith"])
|
||||
)
|
||||
if "NotStartsWith" in selector:
|
||||
conditions.append(
|
||||
all(
|
||||
not value.startswith(prefix) for prefix in selector["NotStartsWith"]
|
||||
)
|
||||
)
|
||||
if "EndsWith" in selector:
|
||||
conditions.append(
|
||||
any(value.endswith(suffix) for suffix in selector["EndsWith"])
|
||||
)
|
||||
if "NotEndsWith" in selector:
|
||||
conditions.append(
|
||||
all(not value.endswith(suffix) for suffix in selector["NotEndsWith"])
|
||||
)
|
||||
|
||||
return all(conditions) if conditions else True
|
||||
@@ -1,9 +1,10 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from dateutil.parser import parse
|
||||
|
||||
from prowler.lib.check.models import Check, Check_Report_AWS
|
||||
from prowler.providers.aws.services.iam.iam_client import iam_client
|
||||
from prowler.providers.aws.services.iam.lib.policy import (
|
||||
evaluate_bedrock_staleness,
|
||||
find_bedrock_service,
|
||||
)
|
||||
|
||||
|
||||
class iam_role_access_not_stale_to_bedrock(Check):
|
||||
@@ -33,33 +34,73 @@ class iam_role_access_not_stale_to_bedrock(Check):
|
||||
"max_unused_bedrock_access_days", 60
|
||||
)
|
||||
|
||||
for (
|
||||
role_data,
|
||||
last_accessed_services,
|
||||
) in iam_client.role_last_accessed_services.items():
|
||||
role_name = role_data[0]
|
||||
role_arn = role_data[1]
|
||||
if iam_client.roles is None:
|
||||
return findings
|
||||
|
||||
bedrock_service = find_bedrock_service(last_accessed_services)
|
||||
for role in iam_client.roles:
|
||||
last_accessed_services = iam_client.role_last_accessed_services.get(
|
||||
(role.name, role.arn), []
|
||||
)
|
||||
bedrock_service = self._find_bedrock_service(last_accessed_services)
|
||||
if bedrock_service is None:
|
||||
continue
|
||||
|
||||
report = Check_Report_AWS(
|
||||
metadata=self.metadata(),
|
||||
resource={"name": role_name, "arn": role_arn},
|
||||
)
|
||||
report.resource_id = role_name
|
||||
report.resource_arn = role_arn
|
||||
report = Check_Report_AWS(metadata=self.metadata(), resource=role)
|
||||
report.region = iam_client.region
|
||||
if iam_client.roles is not None:
|
||||
for iam_role in iam_client.roles:
|
||||
if iam_role.arn == role_arn:
|
||||
report.resource_tags = iam_role.tags
|
||||
break
|
||||
|
||||
evaluate_bedrock_staleness(
|
||||
report, bedrock_service, max_unused_bedrock_days, role_name, "Role"
|
||||
self._evaluate_bedrock_staleness(
|
||||
report,
|
||||
bedrock_service,
|
||||
max_unused_bedrock_days,
|
||||
role.name,
|
||||
"Role",
|
||||
)
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
|
||||
@staticmethod
|
||||
def _find_bedrock_service(
|
||||
last_accessed_services: list[dict],
|
||||
) -> Optional[dict]:
|
||||
"""Return the Bedrock entry from a service last accessed list."""
|
||||
for service in last_accessed_services:
|
||||
if service.get("ServiceNamespace") == "bedrock":
|
||||
return service
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _evaluate_bedrock_staleness(
|
||||
report: Check_Report_AWS,
|
||||
bedrock_service: dict,
|
||||
max_days: int,
|
||||
identity_name: str,
|
||||
identity_type: str,
|
||||
) -> None:
|
||||
"""Populate a check report based on Bedrock access recency."""
|
||||
last_authenticated = bedrock_service.get("LastAuthenticated")
|
||||
if last_authenticated is None:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"IAM {identity_type} {identity_name} has Bedrock permissions "
|
||||
f"but has never used them."
|
||||
)
|
||||
return
|
||||
|
||||
if isinstance(last_authenticated, str):
|
||||
last_authenticated = parse(last_authenticated)
|
||||
|
||||
days_since_access = (datetime.now(timezone.utc) - last_authenticated).days
|
||||
|
||||
if days_since_access > max_days:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"IAM {identity_type} {identity_name} has not accessed Bedrock "
|
||||
f"in {days_since_access} days (threshold: {max_days} days)."
|
||||
)
|
||||
else:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"IAM {identity_type} {identity_name} accessed Bedrock "
|
||||
f"{days_since_access} days ago (threshold: {max_days} days)."
|
||||
)
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from dateutil.parser import parse
|
||||
|
||||
from prowler.lib.check.models import Check, Check_Report_AWS
|
||||
from prowler.providers.aws.services.iam.iam_client import iam_client
|
||||
from prowler.providers.aws.services.iam.lib.policy import (
|
||||
evaluate_bedrock_staleness,
|
||||
find_bedrock_service,
|
||||
)
|
||||
|
||||
|
||||
class iam_user_access_not_stale_to_bedrock(Check):
|
||||
@@ -33,32 +34,70 @@ class iam_user_access_not_stale_to_bedrock(Check):
|
||||
"max_unused_bedrock_access_days", 60
|
||||
)
|
||||
|
||||
for (
|
||||
user_data,
|
||||
last_accessed_services,
|
||||
) in iam_client.last_accessed_services.items():
|
||||
user_name = user_data[0]
|
||||
user_arn = user_data[1]
|
||||
|
||||
bedrock_service = find_bedrock_service(last_accessed_services)
|
||||
for user in iam_client.users:
|
||||
last_accessed_services = iam_client.last_accessed_services.get(
|
||||
(user.name, user.arn), []
|
||||
)
|
||||
bedrock_service = self._find_bedrock_service(last_accessed_services)
|
||||
if bedrock_service is None:
|
||||
continue
|
||||
|
||||
report = Check_Report_AWS(
|
||||
metadata=self.metadata(),
|
||||
resource={"name": user_name, "arn": user_arn},
|
||||
)
|
||||
report.resource_id = user_name
|
||||
report.resource_arn = user_arn
|
||||
report = Check_Report_AWS(metadata=self.metadata(), resource=user)
|
||||
report.region = iam_client.region
|
||||
for iam_user in iam_client.users:
|
||||
if iam_user.arn == user_arn:
|
||||
report.resource_tags = iam_user.tags
|
||||
break
|
||||
|
||||
evaluate_bedrock_staleness(
|
||||
report, bedrock_service, max_unused_bedrock_days, user_name, "User"
|
||||
self._evaluate_bedrock_staleness(
|
||||
report,
|
||||
bedrock_service,
|
||||
max_unused_bedrock_days,
|
||||
user.name,
|
||||
"User",
|
||||
)
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
|
||||
@staticmethod
|
||||
def _find_bedrock_service(
|
||||
last_accessed_services: list[dict],
|
||||
) -> Optional[dict]:
|
||||
"""Return the Bedrock entry from a service last accessed list."""
|
||||
for service in last_accessed_services:
|
||||
if service.get("ServiceNamespace") == "bedrock":
|
||||
return service
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _evaluate_bedrock_staleness(
|
||||
report: Check_Report_AWS,
|
||||
bedrock_service: dict,
|
||||
max_days: int,
|
||||
identity_name: str,
|
||||
identity_type: str,
|
||||
) -> None:
|
||||
"""Populate a check report based on Bedrock access recency."""
|
||||
last_authenticated = bedrock_service.get("LastAuthenticated")
|
||||
if last_authenticated is None:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"IAM {identity_type} {identity_name} has Bedrock permissions "
|
||||
f"but has never used them."
|
||||
)
|
||||
return
|
||||
|
||||
if isinstance(last_authenticated, str):
|
||||
last_authenticated = parse(last_authenticated)
|
||||
|
||||
days_since_access = (datetime.now(timezone.utc) - last_authenticated).days
|
||||
|
||||
if days_since_access > max_days:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"IAM {identity_type} {identity_name} has not accessed Bedrock "
|
||||
f"in {days_since_access} days (threshold: {max_days} days)."
|
||||
)
|
||||
else:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"IAM {identity_type} {identity_name} accessed Bedrock "
|
||||
f"{days_since_access} days ago (threshold: {max_days} days)."
|
||||
)
|
||||
|
||||
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"Provider": "aws",
|
||||
"CheckID": "iam_user_access_not_stale_to_sagemaker",
|
||||
"CheckTitle": "Regular SageMaker access ensures IAM users retain only actively used permissions",
|
||||
"CheckType": [
|
||||
"Software and Configuration Checks/AWS Security Best Practices"
|
||||
],
|
||||
"ServiceName": "iam",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "AwsIamUser",
|
||||
"ResourceGroup": "IAM",
|
||||
"Description": "IAM users granted **SageMaker** permissions are evaluated for recent service usage.\n\nUsers whose last SageMaker access exceeds the configured threshold (default **90 days**) or that have **never** accessed SageMaker are flagged, indicating stale permissions that should be reviewed.",
|
||||
"Risk": "Stale SageMaker permissions widen the **blast radius** of a credential compromise.\n\nAn attacker who gains access to a user with unused SageMaker permissions can access ML training data, models, endpoints, and notebooks — all without triggering expected usage patterns. Removing or scoping down stale permissions enforces least privilege and limits blast radius.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_access-advisor.html",
|
||||
"https://docs.aws.amazon.com/sagemaker/latest/dg/security-iam.html",
|
||||
"https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#remove-credentials"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Open the IAM console and select the user\n2. Review the **Access Advisor** tab to confirm SageMaker has not been accessed recently\n3. Remove or detach any policies granting SageMaker permissions that are no longer needed\n4. If the user still requires SageMaker access, verify usage and reduce scope to least privilege",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Apply the **principle of least privilege** by regularly reviewing IAM Access Advisor data and revoking SageMaker permissions that are no longer actively used.\n\nEstablish a periodic access review process and automate alerts for stale permissions to maintain a minimal attack surface.",
|
||||
"Url": "https://hub.prowler.com/check/iam_user_access_not_stale_to_sagemaker"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"identity-access"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"iam_user_access_not_stale_to_bedrock"
|
||||
],
|
||||
"Notes": "The staleness threshold is configurable via the `max_unused_sagemaker_access_days` audit config key (default: 90 days)."
|
||||
}
|
||||
@@ -0,0 +1,103 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from dateutil.parser import parse
|
||||
|
||||
from prowler.lib.check.models import Check, Check_Report_AWS
|
||||
from prowler.providers.aws.services.iam.iam_client import iam_client
|
||||
|
||||
|
||||
class iam_user_access_not_stale_to_sagemaker(Check):
    """Flag IAM users whose SageMaker permissions look stale.

    A user with SageMaker permissions passes when the service was used
    within the configured window (default 90 days) and fails when the
    last access is older than the window or never happened.
    """

    def execute(self) -> list[Check_Report_AWS]:
        """Run the staleness evaluation for every IAM user.

        Only users whose service last accessed data contains a
        ``sagemaker`` entry are evaluated; users without SageMaker
        permissions produce no finding.

        Returns:
            A list of reports containing the result of the check.
        """
        threshold_days = iam_client.audit_config.get(
            "max_unused_sagemaker_access_days", 90
        )

        findings = []
        for user in iam_client.users:
            accessed = iam_client.last_accessed_services.get(
                (user.name, user.arn), []
            )
            sagemaker_entry = self._find_sagemaker_service(accessed)
            if sagemaker_entry is None:
                # No SageMaker entry -> nothing to evaluate for this user.
                continue

            report = Check_Report_AWS(metadata=self.metadata(), resource=user)
            report.region = iam_client.region
            self._evaluate_sagemaker_staleness(
                report,
                sagemaker_entry,
                threshold_days,
                user.name,
                "User",
            )
            findings.append(report)

        return findings

    @staticmethod
    def _find_sagemaker_service(
        last_accessed_services: list[dict],
    ) -> Optional[dict]:
        """Return the SageMaker entry from a service last accessed list."""
        return next(
            (
                entry
                for entry in last_accessed_services
                if entry.get("ServiceNamespace") == "sagemaker"
            ),
            None,
        )

    @staticmethod
    def _evaluate_sagemaker_staleness(
        report: Check_Report_AWS,
        sagemaker_service: dict,
        max_days: int,
        identity_name: str,
        identity_type: str,
    ) -> None:
        """Populate a check report based on SageMaker access recency."""
        last_used = sagemaker_service.get("LastAuthenticated")

        # Never authenticated: permissions were granted but never used.
        if last_used is None:
            report.status = "FAIL"
            report.status_extended = (
                f"IAM {identity_type} {identity_name} has SageMaker permissions "
                f"but has never used them."
            )
            return

        # Timestamps may arrive as strings; normalize to datetime first.
        if isinstance(last_used, str):
            last_used = parse(last_used)

        idle_days = (datetime.now(timezone.utc) - last_used).days

        if idle_days > max_days:
            report.status = "FAIL"
            report.status_extended = (
                f"IAM {identity_type} {identity_name} has not accessed SageMaker "
                f"in {idle_days} days (threshold: {max_days} days)."
            )
        else:
            report.status = "PASS"
            report.status_extended = (
                f"IAM {identity_type} {identity_name} accessed SageMaker "
                f"{idle_days} days ago (threshold: {max_days} days)."
            )
|
||||
@@ -1,12 +1,9 @@
|
||||
import re
|
||||
from datetime import datetime, timezone
|
||||
from ipaddress import ip_address, ip_network
|
||||
from typing import Optional, Tuple
|
||||
|
||||
from dateutil.parser import parse
|
||||
from py_iam_expand.actions import InvalidActionHandling, expand_actions
|
||||
|
||||
from prowler.lib.check.models import Check_Report_AWS
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.aws.aws_provider import read_aws_regions_file
|
||||
|
||||
@@ -1121,47 +1118,3 @@ def has_codebuild_trusted_principal(trust_policy: dict) -> bool:
|
||||
)
|
||||
for s in statements
|
||||
)
|
||||
|
||||
|
||||
def find_bedrock_service(last_accessed_services: list[dict]) -> Optional[dict]:
    """Return the Bedrock entry from a service last accessed list.

    Yields the first record whose ``ServiceNamespace`` equals ``bedrock``;
    ``None`` when the list holds no Bedrock record.
    """
    matches = (
        svc
        for svc in last_accessed_services
        if svc.get("ServiceNamespace") == "bedrock"
    )
    return next(matches, None)
||||
|
||||
|
||||
def evaluate_bedrock_staleness(
    report: Check_Report_AWS,
    bedrock_service: dict,
    max_days: int,
    identity_name: str,
    identity_type: str,
) -> None:
    """Populate a check report based on Bedrock access recency.

    Marks the report FAIL when the identity never used Bedrock or the
    last access exceeds ``max_days``; PASS otherwise.
    """
    last_used = bedrock_service.get("LastAuthenticated")

    if last_used is None:
        # Permissions granted but never exercised.
        report.status = "FAIL"
        report.status_extended = (
            f"IAM {identity_type} {identity_name} has Bedrock permissions "
            f"but has never used them."
        )
        return

    # Normalize string timestamps before doing date arithmetic.
    if isinstance(last_used, str):
        last_used = parse(last_used)

    idle_days = (datetime.now(timezone.utc) - last_used).days

    if idle_days > max_days:
        report.status = "FAIL"
        report.status_extended = (
            f"IAM {identity_type} {identity_name} has not accessed Bedrock "
            f"in {idle_days} days (threshold: {max_days} days)."
        )
    else:
        report.status = "PASS"
        report.status_extended = (
            f"IAM {identity_type} {identity_name} accessed Bedrock "
            f"{idle_days} days ago (threshold: {max_days} days)."
        )
|
||||
|
||||
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "gmail_anomalous_attachment_protection_enabled",
|
||||
"CheckTitle": "Protection against anomalous attachment types in emails is enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "gmail",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "Verifies that Gmail is configured to take a protective action (such as moving to spam, quarantining, or showing a warning) when emails contain anomalous attachment types. Unusual file types that are uncommon for the sender or organization may indicate an attempt to deliver malware through less-scrutinized formats.",
|
||||
"Risk": "Without protection against anomalous attachment types, users may receive **emails with unusual file formats** that are designed to bypass standard security filters. Attackers may use **uncommon file extensions or MIME types** to deliver malware that evades signature-based detection.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/7676854",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Gmail**\n3. Click **Safety** > **Attachments**\n4. Check **Protect against anomalous attachment types in emails**\n5. Select the desired action (e.g., Move email to spam)\n6. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable protection against anomalous attachment types in emails and configure an appropriate action such as moving to spam or quarantining.",
|
||||
"Url": "https://hub.prowler.com/check/gmail_anomalous_attachment_protection_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"email-security"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"gmail_encrypted_attachment_protection_enabled",
|
||||
"gmail_script_attachment_protection_enabled"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.gmail.gmail_client import gmail_client
|
||||
|
||||
|
||||
class gmail_anomalous_attachment_protection_enabled(Check):
    """Verify that Gmail acts on anomalous attachment types in emails.

    Fails when the protection is disabled, left unconfigured, or set to
    take no action; passes when it is enabled with a protective action.
    """

    def execute(self) -> List[CheckReportGoogleWorkspace]:
        findings = []

        # Without fetched policies there is nothing to evaluate.
        if not gmail_client.policies_fetched:
            return findings

        report = CheckReportGoogleWorkspace(
            metadata=self.metadata(),
            resource=gmail_client.provider.domain_resource,
        )

        domain = gmail_client.provider.identity.domain
        protection_on = gmail_client.policies.enable_anomalous_attachment_protection
        action = gmail_client.policies.anomalous_attachment_protection_consequence

        if protection_on is False:
            report.status = "FAIL"
            report.status_extended = (
                f"Protection against anomalous attachment types in emails "
                f"is disabled in domain {domain}. "
                f"Enable the protection and configure a protective action."
            )
        elif protection_on is None:
            # An unconfigured policy is treated as insecure here (the
            # failure message states Google's default is disabled).
            report.status = "FAIL"
            report.status_extended = (
                f"Protection against anomalous attachment types in emails "
                f"is not configured and uses Google's insecure default "
                f"(disabled) in domain {domain}. "
                f"Enable the protection and configure a protective action."
            )
        elif action == "NO_ACTION":
            report.status = "FAIL"
            report.status_extended = (
                f"Protection against anomalous attachment types in emails "
                f"is set to take no action in domain {domain}. "
                f"A protective action should be configured."
            )
        elif action is None:
            report.status = "PASS"
            report.status_extended = (
                f"Protection against anomalous attachment types in emails "
                f"is enabled in domain {domain}."
            )
        else:
            report.status = "PASS"
            report.status_extended = (
                f"Protection against anomalous attachment types in emails "
                f"is enabled with consequence '{action}' in domain {domain}."
            )

        findings.append(report)
        return findings
|
||||
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "gmail_domain_spoofing_protection_enabled",
|
||||
"CheckTitle": "Protection against domain spoofing based on similar domain names is enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "gmail",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "Verifies that Gmail is configured to take a protective action (such as moving to spam, quarantining, or showing a warning) when emails appear to come from domain names that look similar to the organization's domain. Lookalike domains are a common phishing technique used to trick users into trusting malicious messages.",
|
||||
"Risk": "Without protection against domain spoofing based on similar domain names, users may receive **phishing emails from lookalike domains** (e.g., examp1e.com instead of example.com) that appear legitimate. This enables **credential theft, malware delivery, and business email compromise** attacks.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/9157861",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Gmail**\n3. Click **Safety** > **Spoofing and authentication**\n4. Check **Protect against domain spoofing based on similar domain names**\n5. Select the desired action (e.g., Move email to spam)\n6. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable protection against domain spoofing based on similar domain names and configure an appropriate action such as moving to spam or quarantining.",
|
||||
"Url": "https://hub.prowler.com/check/gmail_domain_spoofing_protection_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"email-security"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"gmail_employee_name_spoofing_protection_enabled",
|
||||
"gmail_inbound_domain_spoofing_protection_enabled",
|
||||
"gmail_unauthenticated_email_protection_enabled",
|
||||
"gmail_groups_spoofing_protection_enabled"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,62 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.gmail.gmail_client import gmail_client
|
||||
|
||||
|
||||
class gmail_domain_spoofing_protection_enabled(Check):
    """Verify that Gmail acts on lookalike (similar) domain name spoofing.

    Passes when the protection is enabled or left at Google's secure
    default; fails when it is disabled or configured to take no action.
    """

    def execute(self) -> List[CheckReportGoogleWorkspace]:
        findings = []

        # Without fetched policies there is nothing to evaluate.
        if not gmail_client.policies_fetched:
            return findings

        report = CheckReportGoogleWorkspace(
            metadata=self.metadata(),
            resource=gmail_client.provider.domain_resource,
        )

        domain = gmail_client.provider.identity.domain
        detection_on = gmail_client.policies.detect_domain_name_spoofing
        action = gmail_client.policies.domain_spoofing_consequence

        if detection_on is False:
            report.status = "FAIL"
            report.status_extended = (
                f"Protection against domain spoofing based on similar "
                f"domain names is disabled in domain {domain}. "
                f"Enable the protection and configure a protective action."
            )
        elif action == "NO_ACTION":
            report.status = "FAIL"
            report.status_extended = (
                f"Protection against domain spoofing based on similar "
                f"domain names is set to take no action in domain {domain}. "
                f"A protective action should be configured."
            )
        elif action is None:
            # Unset consequence is treated as Google's secure default.
            report.status = "PASS"
            report.status_extended = (
                f"Protection against domain spoofing based on similar "
                f"domain names uses Google's secure default configuration "
                f"(enabled) in domain {domain}."
            )
        else:
            report.status = "PASS"
            report.status_extended = (
                f"Protection against domain spoofing based on similar "
                f"domain names is enabled with consequence "
                f"'{action}' in domain {domain}."
            )

        findings.append(report)
        return findings
|
||||
@@ -0,0 +1,42 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "gmail_employee_name_spoofing_protection_enabled",
|
||||
"CheckTitle": "Protection against spoofing of employee names is enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "gmail",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "Verifies that Gmail is configured to take a protective action (such as moving to spam, quarantining, or showing a warning) when the sender's display name matches an employee's name but the email comes from an external address. This is a common social engineering technique where attackers impersonate colleagues or executives.",
|
||||
"Risk": "Without protection against employee name spoofing, users may receive **emails that appear to come from colleagues or executives** but are actually from external attackers. This enables **business email compromise (BEC)**, **wire fraud**, and **social engineering attacks** that exploit trust relationships.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/9157861",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Gmail**\n3. Click **Safety** > **Spoofing and authentication**\n4. Check **Protect against spoofing of employee names**\n5. Select the desired action (e.g., Move email to spam)\n6. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable protection against spoofing of employee names and configure an appropriate action such as moving to spam or quarantining.",
|
||||
"Url": "https://hub.prowler.com/check/gmail_employee_name_spoofing_protection_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"email-security"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"gmail_domain_spoofing_protection_enabled",
|
||||
"gmail_inbound_domain_spoofing_protection_enabled",
|
||||
"gmail_unauthenticated_email_protection_enabled",
|
||||
"gmail_groups_spoofing_protection_enabled"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.gmail.gmail_client import gmail_client
|
||||
|
||||
|
||||
class gmail_employee_name_spoofing_protection_enabled(Check):
    """Verify that Gmail acts on sender names that impersonate employees.

    Passes when the protection is enabled or left at Google's secure
    default; fails when it is disabled or configured to take no action.
    """

    def execute(self) -> List[CheckReportGoogleWorkspace]:
        findings = []

        # Without fetched policies there is nothing to evaluate.
        if not gmail_client.policies_fetched:
            return findings

        report = CheckReportGoogleWorkspace(
            metadata=self.metadata(),
            resource=gmail_client.provider.domain_resource,
        )

        domain = gmail_client.provider.identity.domain
        detection_on = gmail_client.policies.detect_employee_name_spoofing
        action = gmail_client.policies.employee_name_spoofing_consequence

        if detection_on is False:
            report.status = "FAIL"
            report.status_extended = (
                f"Protection against spoofing of employee names is "
                f"disabled in domain {domain}. "
                f"Enable the protection and configure a protective action."
            )
        elif action == "NO_ACTION":
            report.status = "FAIL"
            report.status_extended = (
                f"Protection against spoofing of employee names is set "
                f"to take no action in domain {domain}. "
                f"A protective action should be configured."
            )
        elif action is None:
            # Unset consequence is treated as Google's secure default.
            report.status = "PASS"
            report.status_extended = (
                f"Protection against spoofing of employee names uses "
                f"Google's secure default configuration (enabled) "
                f"in domain {domain}."
            )
        else:
            report.status = "PASS"
            report.status_extended = (
                f"Protection against spoofing of employee names is "
                f"enabled with consequence '{action}' in domain "
                f"{domain}."
            )

        findings.append(report)
        return findings
|
||||
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "gmail_encrypted_attachment_protection_enabled",
|
||||
"CheckTitle": "Protection against encrypted attachments from untrusted senders is enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "gmail",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "Verifies that Gmail is configured to take a protective action (such as moving to spam, quarantining, or showing a warning) when an encrypted attachment is received from an untrusted sender. Encrypted attachments cannot be scanned for malware by security filters, making them a common vector for delivering malicious payloads.",
|
||||
"Risk": "Without protection against encrypted attachments from untrusted senders, users may receive **password-protected archives containing malware** that bypass standard content scanning. Attackers commonly use encrypted attachments to evade detection and deliver **ransomware, trojans, or other malicious payloads**.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/7676854",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Gmail**\n3. Click **Safety** > **Attachments**\n4. Check **Protect against encrypted attachments from untrusted senders**\n5. Select the desired action (e.g., Move email to spam)\n6. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable protection against encrypted attachments from untrusted senders and configure an appropriate action such as moving to spam or quarantining.",
|
||||
"Url": "https://hub.prowler.com/check/gmail_encrypted_attachment_protection_enabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"email-security"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"gmail_script_attachment_protection_enabled",
|
||||
"gmail_anomalous_attachment_protection_enabled"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.gmail.gmail_client import gmail_client
|
||||
|
||||
|
||||
class gmail_encrypted_attachment_protection_enabled(Check):
    """Verify that Gmail acts on encrypted attachments from untrusted senders.

    Passes when the protection is enabled or left at Google's secure
    default; fails when it is disabled or configured to take no action.
    """

    def execute(self) -> List[CheckReportGoogleWorkspace]:
        findings = []

        # Without fetched policies there is nothing to evaluate.
        if not gmail_client.policies_fetched:
            return findings

        report = CheckReportGoogleWorkspace(
            metadata=self.metadata(),
            resource=gmail_client.provider.domain_resource,
        )

        domain = gmail_client.provider.identity.domain
        protection_on = gmail_client.policies.enable_encrypted_attachment_protection
        action = gmail_client.policies.encrypted_attachment_protection_consequence

        if protection_on is False:
            report.status = "FAIL"
            report.status_extended = (
                f"Protection against encrypted attachments from untrusted "
                f"senders is disabled in domain {domain}. "
                f"Enable the protection and configure a protective action."
            )
        elif action == "NO_ACTION":
            report.status = "FAIL"
            report.status_extended = (
                f"Protection against encrypted attachments from untrusted "
                f"senders is set to take no action in domain {domain}. "
                f"A protective action should be configured."
            )
        elif action is None:
            # Unset consequence is treated as Google's secure default.
            report.status = "PASS"
            report.status_extended = (
                f"Protection against encrypted attachments from untrusted "
                f"senders uses Google's secure default configuration "
                f"(enabled) in domain {domain}."
            )
        else:
            report.status = "PASS"
            report.status_extended = (
                f"Protection against encrypted attachments from untrusted "
                f"senders is enabled with consequence '{action}' "
                f"in domain {domain}."
            )

        findings.append(report)
        return findings
|
||||