From 578186aa40e43d654a1c42680bde1eea1ae35d10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Mart=C3=ADn?= Date: Thu, 30 Apr 2026 13:49:00 +0200 Subject: [PATCH 01/29] feat(sdk): integrate universal compliance into CLI pipeline (#10301) --- prowler/CHANGELOG.md | 2 + prowler/__main__.py | 54 +- prowler/config/config.py | 8 +- prowler/lib/check/check.py | 37 +- prowler/lib/check/checks_loader.py | 20 +- prowler/lib/outputs/compliance/compliance.py | 191 +++-- .../outputs/compliance/compliance_check.py | 48 ++ .../compliance/universal/ocsf_compliance.py | 58 +- .../compliance/universal/universal_output.py | 294 +++++++ prowler/lib/outputs/finding.py | 2 +- tests/config/config_test.py | 27 + tests/lib/check/check_loader_test.py | 174 +++++ .../lib/outputs/compliance/compliance_test.py | 120 +++ .../display_compliance_table_test.py | 244 ++++++ .../compliance/process_universal_test.py | 730 ++++++++++++++++++ .../universal/ocsf_compliance_test.py | 158 ++++ .../universal/universal_output_test.py | 568 ++++++++++++++ 17 files changed, 2634 insertions(+), 101 deletions(-) create mode 100644 prowler/lib/outputs/compliance/compliance_check.py create mode 100644 prowler/lib/outputs/compliance/universal/universal_output.py create mode 100644 tests/lib/outputs/compliance/display_compliance_table_test.py create mode 100644 tests/lib/outputs/compliance/process_universal_test.py create mode 100644 tests/lib/outputs/compliance/universal/universal_output_test.py diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index a66c4fef68..283b9508a9 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -7,12 +7,14 @@ All notable changes to the **Prowler SDK** are documented in this file. 
### 🚀 Added - `bedrock_guardrails_configured` check for AWS provider [(#10844)](https://github.com/prowler-cloud/prowler/pull/10844) +- Universal compliance pipeline integrated into the CLI: `--list-compliance` and `--list-compliance-requirements` show universal frameworks, and CSV plus OCSF outputs are generated for any framework declaring a `TableConfig` [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301) ### 🔄 Changed - `route53_dangling_ip_subdomain_takeover` now also flags `CNAME` records pointing to S3 website endpoints whose buckets are missing from the account [(#10920)](https://github.com/prowler-cloud/prowler/pull/10920) - Azure Network Watcher flow log checks now require workspace-backed Traffic Analytics for `network_flow_log_captured_sent` and align metadata with VNet-compatible flow log guidance [(#10645)](https://github.com/prowler-cloud/prowler/pull/10645) - Azure compliance entries for legacy Network Watcher flow log controls now use retirement-aware guidance and point new deployments to VNet flow logs +- `display_compliance_table` dispatch switched from substring `in` checks to `startswith` to prevent false matches between similarly named frameworks (e.g. 
`cisa` vs `cis`) [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301) ### 🐞 Fixed diff --git a/prowler/__main__.py b/prowler/__main__.py index 119c10dd0e..da2b339b27 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -45,7 +45,10 @@ from prowler.lib.check.check import ( ) from prowler.lib.check.checks_loader import load_checks_to_execute from prowler.lib.check.compliance import update_checks_metadata_with_compliance -from prowler.lib.check.compliance_models import Compliance +from prowler.lib.check.compliance_models import ( + Compliance, + get_bulk_compliance_frameworks_universal, +) from prowler.lib.check.custom_checks_metadata import ( parse_custom_checks_metadata_file, update_checks_metadata, @@ -75,7 +78,10 @@ from prowler.lib.outputs.compliance.cis.cis_oraclecloud import OracleCloudCIS from prowler.lib.outputs.compliance.cisa_scuba.cisa_scuba_googleworkspace import ( GoogleWorkspaceCISASCuBA, ) -from prowler.lib.outputs.compliance.compliance import display_compliance_table +from prowler.lib.outputs.compliance.compliance import ( + display_compliance_table, + process_universal_compliance_frameworks, +) from prowler.lib.outputs.compliance.csa.csa_alibabacloud import AlibabaCloudCSA from prowler.lib.outputs.compliance.csa.csa_aws import AWSCSA from prowler.lib.outputs.compliance.csa.csa_azure import AzureCSA @@ -235,6 +241,8 @@ def prowler(): # Load compliance frameworks logger.debug("Loading compliance frameworks from .json files") + universal_frameworks = {} + # Skip compliance frameworks for external-tool providers if provider not in EXTERNAL_TOOL_PROVIDERS: bulk_compliance_frameworks = Compliance.get_bulk(provider) @@ -242,6 +250,8 @@ def prowler(): bulk_checks_metadata = update_checks_metadata_with_compliance( bulk_compliance_frameworks, bulk_checks_metadata ) + # Load universal compliance frameworks for new rendering pipeline + universal_frameworks = get_bulk_compliance_frameworks_universal(provider) # Update checks metadata if the 
--custom-checks-metadata-file is present custom_checks_metadata = None @@ -254,12 +264,12 @@ def prowler(): ) if args.list_compliance: - print_compliance_frameworks(bulk_compliance_frameworks) + all_frameworks = {**bulk_compliance_frameworks, **universal_frameworks} + print_compliance_frameworks(all_frameworks) sys.exit() if args.list_compliance_requirements: - print_compliance_requirements( - bulk_compliance_frameworks, args.list_compliance_requirements - ) + all_frameworks = {**bulk_compliance_frameworks, **universal_frameworks} + print_compliance_requirements(all_frameworks, args.list_compliance_requirements) sys.exit() # Load checks to execute @@ -276,6 +286,7 @@ def prowler(): provider=provider, list_checks=getattr(args, "list_checks", False) or getattr(args, "list_checks_json", False), + universal_frameworks=universal_frameworks, ) # if --list-checks-json, dump a json file and exit @@ -624,15 +635,29 @@ def prowler(): ) # Compliance Frameworks - # Source the framework listing from `bulk_compliance_frameworks.keys()` - # so it is by construction a subset of what the bulk loader can resolve. - # `get_available_compliance_frameworks(provider)` also discovers top-level - # multi-provider universal JSONs (e.g. `prowler/compliance/csa_ccm_4.0.json`) - # which `Compliance.get_bulk(provider)` does not load, and which the legacy - # output handlers below cannot consume — using it as the source produced + # Source the framework listing from the union of `bulk_compliance_frameworks` + # and `universal_frameworks` so universal-only frameworks (e.g. + # `prowler/compliance/csa_ccm_4.0.json`) — which `Compliance.get_bulk(provider)` + # does not load — still reach `process_universal_compliance_frameworks` below. + # The provider-specific block subtracts the names handled by the universal + # processor so the legacy per-provider handlers only see frameworks that the + # bulk loader actually resolved. 
input_compliance_frameworks = set(output_options.output_modes).intersection( - bulk_compliance_frameworks.keys() + set(bulk_compliance_frameworks.keys()) | set(universal_frameworks.keys()) ) + + # ── Universal compliance frameworks (provider-agnostic) ── + universal_processed = process_universal_compliance_frameworks( + input_compliance_frameworks=input_compliance_frameworks, + universal_frameworks=universal_frameworks, + finding_outputs=finding_outputs, + output_directory=output_options.output_directory, + output_filename=output_options.output_filename, + provider=provider, + generated_outputs=generated_outputs, + ) + input_compliance_frameworks -= universal_processed + if provider == "aws": for compliance_name in input_compliance_frameworks: if compliance_name.startswith("cis_"): @@ -1402,6 +1427,9 @@ def prowler(): output_options.output_filename, output_options.output_directory, compliance_overview, + universal_frameworks=universal_frameworks, + provider=provider, + output_formats=args.output_formats, ) if compliance_overview: print( diff --git a/prowler/config/config.py b/prowler/config/config.py index f80f1f3df9..aa81e81136 100644 --- a/prowler/config/config.py +++ b/prowler/config/config.py @@ -87,8 +87,8 @@ def get_available_compliance_frameworks(provider=None): providers = [p.value for p in Provider] if provider: providers = [provider] - for provider in providers: - compliance_dir = f"{actual_directory}/../compliance/{provider}" + for current_provider in providers: + compliance_dir = f"{actual_directory}/../compliance/{current_provider}" if not os.path.isdir(compliance_dir): continue with os.scandir(compliance_dir) as files: @@ -97,7 +97,9 @@ def get_available_compliance_frameworks(provider=None): available_compliance_frameworks.append( file.name.removesuffix(".json") ) - # Also scan top-level compliance/ for multi-provider JSONs + # Also scan top-level compliance/ for multi-provider (universal) JSONs. 
+ # When a specific provider was requested, only include the framework if it + # declares support for that provider; otherwise include all universal frameworks. compliance_root = f"{actual_directory}/../compliance" if os.path.isdir(compliance_root): with os.scandir(compliance_root) as files: diff --git a/prowler/lib/check/check.py b/prowler/lib/check/check.py index 8e365acedc..b15cf8bfbe 100644 --- a/prowler/lib/check/check.py +++ b/prowler/lib/check/check.py @@ -299,12 +299,22 @@ def print_compliance_frameworks( def print_compliance_requirements( bulk_compliance_frameworks: dict, compliance_frameworks: list ): + from prowler.lib.check.compliance_models import ComplianceFramework + for compliance_framework in compliance_frameworks: for key in bulk_compliance_frameworks.keys(): - framework = bulk_compliance_frameworks[key].Framework - provider = bulk_compliance_frameworks[key].Provider - version = bulk_compliance_frameworks[key].Version - requirements = bulk_compliance_frameworks[key].Requirements + entry = bulk_compliance_frameworks[key] + is_universal = isinstance(entry, ComplianceFramework) + if is_universal: + framework = entry.framework + provider = entry.provider or "Multi-provider" + version = entry.version + requirements = entry.requirements + else: + framework = entry.Framework + provider = entry.Provider or "Multi-provider" + version = entry.Version + requirements = entry.Requirements # We can list the compliance requirements for a given framework using the # bulk_compliance_frameworks keys since they are the compliance specification file name if compliance_framework == key: @@ -313,10 +323,23 @@ def print_compliance_requirements( ) for requirement in requirements: checks = "" - for check in requirement.Checks: - checks += f" {Fore.YELLOW}\t\t{check}\n{Style.RESET_ALL}" + if is_universal: + req_checks = requirement.checks + req_id = requirement.id + req_description = requirement.description + else: + req_checks = requirement.Checks + req_id = 
requirement.Id + req_description = requirement.Description + if isinstance(req_checks, dict): + for prov, check_list in req_checks.items(): + for check in check_list: + checks += f" {Fore.YELLOW}\t\t[{prov}] {check}\n{Style.RESET_ALL}" + else: + for check in req_checks: + checks += f" {Fore.YELLOW}\t\t{check}\n{Style.RESET_ALL}" print( - f"Requirement Id: {Fore.MAGENTA}{requirement.Id}{Style.RESET_ALL}\n\t- Description: {requirement.Description}\n\t- Checks:\n{checks}" + f"Requirement Id: {Fore.MAGENTA}{req_id}{Style.RESET_ALL}\n\t- Description: {req_description}\n\t- Checks:\n{checks}" ) diff --git a/prowler/lib/check/checks_loader.py b/prowler/lib/check/checks_loader.py index 5737c0b232..9ef672df6b 100644 --- a/prowler/lib/check/checks_loader.py +++ b/prowler/lib/check/checks_loader.py @@ -22,6 +22,7 @@ def load_checks_to_execute( categories: set = None, resource_groups: set = None, list_checks: bool = False, + universal_frameworks: dict = None, ) -> set: """Generate the list of checks to execute based on the cloud provider and the input arguments given""" try: @@ -155,12 +156,21 @@ def load_checks_to_execute( if not bulk_compliance_frameworks: bulk_compliance_frameworks = Compliance.get_bulk(provider=provider) for compliance_framework in compliance_frameworks: - checks_to_execute.update( - CheckMetadata.list( - bulk_compliance_frameworks=bulk_compliance_frameworks, - compliance_framework=compliance_framework, + # Try universal frameworks first (snake_case dict-keyed checks) + if ( + universal_frameworks + and compliance_framework in universal_frameworks + ): + fw = universal_frameworks[compliance_framework] + for req in fw.requirements: + checks_to_execute.update(req.checks.get(provider.lower(), [])) + elif compliance_framework in bulk_compliance_frameworks: + checks_to_execute.update( + CheckMetadata.list( + bulk_compliance_frameworks=bulk_compliance_frameworks, + compliance_framework=compliance_framework, + ) ) - ) # Handle if there are categories passed using 
--categories elif categories: diff --git a/prowler/lib/outputs/compliance/compliance.py b/prowler/lib/outputs/compliance/compliance.py index db657063fa..0d743a4d25 100644 --- a/prowler/lib/outputs/compliance/compliance.py +++ b/prowler/lib/outputs/compliance/compliance.py @@ -1,10 +1,12 @@ import sys -from prowler.lib.check.models import Check_Report from prowler.lib.logger import logger from prowler.lib.outputs.compliance.c5.c5 import get_c5_table from prowler.lib.outputs.compliance.ccc.ccc import get_ccc_table from prowler.lib.outputs.compliance.cis.cis import get_cis_table +from prowler.lib.outputs.compliance.compliance_check import ( # noqa: F401 - re-export for backward compatibility + get_check_compliance, +) from prowler.lib.outputs.compliance.csa.csa import get_csa_table from prowler.lib.outputs.compliance.ens.ens import get_ens_table from prowler.lib.outputs.compliance.generic.generic_table import ( @@ -17,6 +19,94 @@ from prowler.lib.outputs.compliance.mitre_attack.mitre_attack import ( from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore import ( get_prowler_threatscore_table, ) +from prowler.lib.outputs.compliance.universal.universal_table import get_universal_table + + +def process_universal_compliance_frameworks( + input_compliance_frameworks: set, + universal_frameworks: dict, + finding_outputs: list, + output_directory: str, + output_filename: str, + provider: str, + generated_outputs: dict, +) -> set: + """Process universal compliance frameworks, generating CSV and OCSF outputs. + + For each framework in *input_compliance_frameworks* that exists in + *universal_frameworks* and has an outputs.table_config, this function + creates both a CSV (UniversalComplianceOutput) and an OCSF JSON + (OCSFComplianceOutput) file. OCSF is always generated regardless of + the user's ``--output-formats`` flag. + + The function is idempotent: it tracks already-created writers via + ``generated_outputs["compliance"]`` keyed by ``file_path``. 
If invoked + again for the same framework (e.g. once per streaming batch), it + reuses the existing writer instead of recreating it. This guarantees + one output writer per framework for the whole execution and keeps + the OCSF JSON array valid across multiple calls. + + Returns the set of framework names that were processed so the caller + can remove them before entering the legacy per-provider output loop. + """ + from prowler.lib.outputs.compliance.universal.ocsf_compliance import ( + OCSFComplianceOutput, + ) + from prowler.lib.outputs.compliance.universal.universal_output import ( + UniversalComplianceOutput, + ) + + existing_writers = { + getattr(out, "file_path", None): out + for out in generated_outputs.get("compliance", []) + if isinstance(out, (UniversalComplianceOutput, OCSFComplianceOutput)) + } + + processed = set() + for compliance_name in input_compliance_frameworks: + if not ( + compliance_name in universal_frameworks + and universal_frameworks[compliance_name].outputs + and universal_frameworks[compliance_name].outputs.table_config + ): + continue + + fw = universal_frameworks[compliance_name] + + # CSV output + csv_path = ( + f"{output_directory}/compliance/" f"{output_filename}_{compliance_name}.csv" + ) + if csv_path not in existing_writers: + output = UniversalComplianceOutput( + findings=finding_outputs, + framework=fw, + file_path=csv_path, + provider=provider, + ) + generated_outputs["compliance"].append(output) + existing_writers[csv_path] = output + output.batch_write_data_to_file() + + # OCSF output (always generated for universal frameworks) + ocsf_path = ( + f"{output_directory}/compliance/" + f"{output_filename}_{compliance_name}.ocsf.json" + ) + if ocsf_path not in existing_writers: + ocsf_output = OCSFComplianceOutput( + findings=finding_outputs, + framework=fw, + file_path=ocsf_path, + provider=provider, + ) + generated_outputs["compliance"].append(ocsf_output) + existing_writers[ocsf_path] = ocsf_output + 
ocsf_output.batch_write_data_to_file() + + processed.add(compliance_name) + + return processed def display_compliance_table( @@ -26,6 +116,9 @@ def display_compliance_table( output_filename: str, output_directory: str, compliance_overview: bool, + universal_frameworks: dict = None, + provider: str = None, + output_formats: list = None, ) -> None: """ display_compliance_table generates the compliance table for the given compliance framework. @@ -37,6 +130,9 @@ def display_compliance_table( output_filename (str): The output filename output_directory (str): The output directory compliance_overview (bool): The compliance + universal_frameworks (dict): Optional universal ComplianceFramework objects + provider (str): The current provider (e.g. "aws") for multi-provider filtering + output_formats (list): The output formats to generate Returns: None @@ -45,16 +141,24 @@ def display_compliance_table( findings = [f for f in findings if f.check_metadata.CheckID in bulk_checks_metadata] try: - if "ens_" in compliance_framework: - get_ens_table( - findings, - bulk_checks_metadata, - compliance_framework, - output_filename, - output_directory, - compliance_overview, - ) - elif "cis_" in compliance_framework: + # Universal path: if the framework has TableConfig, use the universal renderer + if universal_frameworks and compliance_framework in universal_frameworks: + fw = universal_frameworks[compliance_framework] + if fw.outputs and fw.outputs.table_config: + get_universal_table( + findings, + bulk_checks_metadata, + compliance_framework, + output_filename, + output_directory, + compliance_overview, + framework=fw, + provider=provider, + output_formats=output_formats, + ) + return + + if compliance_framework.startswith("cis_"): get_cis_table( findings, bulk_checks_metadata, @@ -63,7 +167,16 @@ def display_compliance_table( output_directory, compliance_overview, ) - elif "mitre_attack" in compliance_framework: + elif compliance_framework.startswith("ens_"): + get_ens_table( + 
findings, + bulk_checks_metadata, + compliance_framework, + output_filename, + output_directory, + compliance_overview, + ) + elif compliance_framework.startswith("mitre_attack"): get_mitre_attack_table( findings, bulk_checks_metadata, @@ -72,7 +185,7 @@ def display_compliance_table( output_directory, compliance_overview, ) - elif "kisa_isms_" in compliance_framework: + elif compliance_framework.startswith("kisa"): get_kisa_ismsp_table( findings, bulk_checks_metadata, @@ -81,7 +194,7 @@ def display_compliance_table( output_directory, compliance_overview, ) - elif "threatscore_" in compliance_framework: + elif compliance_framework.startswith("prowler_threatscore_"): get_prowler_threatscore_table( findings, bulk_checks_metadata, @@ -90,7 +203,7 @@ def display_compliance_table( output_directory, compliance_overview, ) - elif "csa_ccm_" in compliance_framework: + elif compliance_framework.startswith("csa_ccm_"): get_csa_table( findings, bulk_checks_metadata, @@ -99,7 +212,7 @@ def display_compliance_table( output_directory, compliance_overview, ) - elif "c5_" in compliance_framework: + elif compliance_framework.startswith("c5_"): get_c5_table( findings, bulk_checks_metadata, @@ -131,49 +244,3 @@ def display_compliance_table( f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}" ) sys.exit(1) - - -# TODO: this should be in the Check class -def get_check_compliance( - finding: Check_Report, provider_type: str, bulk_checks_metadata: dict -) -> dict: - """get_check_compliance returns a map with the compliance framework as key and the requirements where the finding's check is present. - - Example: - - { - "CIS-1.4": ["2.1.3"], - "CIS-1.5": ["2.1.3"], - } - - Args: - finding (Any): The Check_Report finding - provider_type (str): The provider type - bulk_checks_metadata (dict): The bulk checks metadata - - Returns: - dict: The compliance framework as key and the requirements where the finding's check is present. 
- """ - try: - check_compliance = {} - # We have to retrieve all the check's compliance requirements - if finding.check_metadata.CheckID in bulk_checks_metadata: - for compliance in bulk_checks_metadata[ - finding.check_metadata.CheckID - ].Compliance: - compliance_fw = compliance.Framework - if compliance.Version: - compliance_fw = f"{compliance_fw}-{compliance.Version}" - # compliance.Provider == "Azure" or "Kubernetes" - # provider_type == "azure" or "kubernetes" - if compliance.Provider.upper() == provider_type.upper(): - if compliance_fw not in check_compliance: - check_compliance[compliance_fw] = [] - for requirement in compliance.Requirements: - check_compliance[compliance_fw].append(requirement.Id) - return check_compliance - except Exception as error: - logger.error( - f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}" - ) - return {} diff --git a/prowler/lib/outputs/compliance/compliance_check.py b/prowler/lib/outputs/compliance/compliance_check.py new file mode 100644 index 0000000000..85de5faed0 --- /dev/null +++ b/prowler/lib/outputs/compliance/compliance_check.py @@ -0,0 +1,48 @@ +from prowler.lib.check.models import Check_Report +from prowler.lib.logger import logger + + +# TODO: this should be in the Check class +def get_check_compliance( + finding: Check_Report, provider_type: str, bulk_checks_metadata: dict +) -> dict: + """get_check_compliance returns a map with the compliance framework as key and the requirements where the finding's check is present. + + Example: + + { + "CIS-1.4": ["2.1.3"], + "CIS-1.5": ["2.1.3"], + } + + Args: + finding (Any): The Check_Report finding + provider_type (str): The provider type + bulk_checks_metadata (dict): The bulk checks metadata + + Returns: + dict: The compliance framework as key and the requirements where the finding's check is present. 
+ """ + try: + check_compliance = {} + # We have to retrieve all the check's compliance requirements + if finding.check_metadata.CheckID in bulk_checks_metadata: + for compliance in bulk_checks_metadata[ + finding.check_metadata.CheckID + ].Compliance: + compliance_fw = compliance.Framework + if compliance.Version: + compliance_fw = f"{compliance_fw}-{compliance.Version}" + # compliance.Provider == "Azure" or "Kubernetes" + # provider_type == "azure" or "kubernetes" + if compliance.Provider.upper() == provider_type.upper(): + if compliance_fw not in check_compliance: + check_compliance[compliance_fw] = [] + for requirement in compliance.Requirements: + check_compliance[compliance_fw].append(requirement.Id) + return check_compliance + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}" + ) + return {} diff --git a/prowler/lib/outputs/compliance/universal/ocsf_compliance.py b/prowler/lib/outputs/compliance/universal/ocsf_compliance.py index 3760549cd5..2ce69412e1 100644 --- a/prowler/lib/outputs/compliance/universal/ocsf_compliance.py +++ b/prowler/lib/outputs/compliance/universal/ocsf_compliance.py @@ -1,6 +1,7 @@ +import json import os from datetime import datetime -from typing import List +from typing import TYPE_CHECKING, List from py_ocsf_models.events.base_event import SeverityID from py_ocsf_models.events.base_event import StatusID as EventStatusID @@ -20,11 +21,12 @@ from py_ocsf_models.objects.resource_details import ResourceDetails from prowler.config.config import prowler_version from prowler.lib.check.compliance_models import ComplianceFramework from prowler.lib.logger import logger -from prowler.lib.outputs.finding import Finding -from prowler.lib.outputs.ocsf.ocsf import OCSF from prowler.lib.outputs.utils import unroll_dict_to_list from prowler.lib.utils.utils import open_file +if TYPE_CHECKING: + from prowler.lib.outputs.finding import Finding + PROWLER_TO_COMPLIANCE_STATUS = { "PASS": 
ComplianceStatusID.Pass, "FAIL": ComplianceStatusID.Fail, @@ -32,6 +34,40 @@ PROWLER_TO_COMPLIANCE_STATUS = { } +def _sanitize_resource_data(resource_details, resource_metadata) -> dict: + """Ensure resource data is JSON-serializable. + + Service resource_metadata may carry non-serializable objects (e.g. raw + Pydantic models or service classes such as ``Trail`` / ``LifecyclePolicy``). + Convert them to plain dicts and roundtrip through JSON so the resulting + ComplianceFinding can be serialized without errors. + """ + + def _make_serializable(obj): + if hasattr(obj, "model_dump") and callable(obj.model_dump): + return _make_serializable(obj.model_dump()) + if hasattr(obj, "dict") and callable(obj.dict): + return _make_serializable(obj.dict()) + if isinstance(obj, dict): + return {str(k): _make_serializable(v) for k, v in obj.items()} + if isinstance(obj, (list, tuple)): + return [_make_serializable(v) for v in obj] + return obj + + try: + converted = _make_serializable(resource_metadata) + sanitized_metadata = json.loads(json.dumps(converted, default=str)) + except (TypeError, ValueError, RecursionError) as error: + logger.warning( + f"Failed to serialize resource metadata, defaulting to empty: {error}" + ) + sanitized_metadata = {} + return { + "details": resource_details, + "metadata": sanitized_metadata, + } + + def _to_snake_case(name: str) -> str: """Convert a PascalCase or camelCase string to snake_case.""" import re @@ -108,7 +144,7 @@ class OCSFComplianceOutput: def _transform( self, - findings: List[Finding], + findings: List["Finding"], framework: ComplianceFramework, compliance_name: str, ) -> None: @@ -177,7 +213,7 @@ class OCSFComplianceOutput: def _build_compliance_finding( self, - finding: Finding, + finding: "Finding", framework: ComplianceFramework, requirement, compliance_name: str, @@ -195,7 +231,9 @@ class OCSFComplianceOutput: finding.metadata.Severity.capitalize(), SeverityID.Unknown, ) - event_status = 
OCSF.get_finding_status_id(finding.muted) + event_status = ( + EventStatusID.Suppressed if finding.muted else EventStatusID.New + ) time_value = ( int(finding.timestamp.timestamp()) @@ -268,10 +306,10 @@ class OCSFComplianceOutput: if finding.provider == "kubernetes" else None ), - data={ - "details": finding.resource_details, - "metadata": finding.resource_metadata, - }, + data=_sanitize_resource_data( + finding.resource_details, + finding.resource_metadata, + ), ) ], severity_id=finding_severity.value, diff --git a/prowler/lib/outputs/compliance/universal/universal_output.py b/prowler/lib/outputs/compliance/universal/universal_output.py new file mode 100644 index 0000000000..5f99f05755 --- /dev/null +++ b/prowler/lib/outputs/compliance/universal/universal_output.py @@ -0,0 +1,294 @@ +from csv import DictWriter +from pathlib import Path +from typing import TYPE_CHECKING, Optional + +from pydantic.v1 import create_model + +from prowler.config.config import timestamp +from prowler.lib.check.compliance_models import ComplianceFramework +from prowler.lib.logger import logger +from prowler.lib.utils.utils import open_file + +if TYPE_CHECKING: + from prowler.lib.outputs.finding import Finding + +PROVIDER_HEADER_MAP = { + "aws": ("AccountId", "account_uid", "Region", "region"), + "azure": ("SubscriptionId", "account_uid", "Location", "region"), + "gcp": ("ProjectId", "account_uid", "Location", "region"), + "kubernetes": ("Context", "account_name", "Namespace", "region"), + "m365": ("TenantId", "account_uid", "Location", "region"), + "github": ("Account_Name", "account_name", "Account_Id", "account_uid"), + "oraclecloud": ("TenancyId", "account_uid", "Region", "region"), + "alibabacloud": ("AccountId", "account_uid", "Region", "region"), + "nhn": ("AccountId", "account_uid", "Region", "region"), +} +_DEFAULT_HEADERS = ("AccountId", "account_uid", "Region", "region") + + +class UniversalComplianceOutput: + """Universal compliance CSV output driven by ComplianceFramework 
metadata. + + Dynamically builds a Pydantic row model from attributes_metadata so that + CSV columns match the framework's declared attribute fields. + """ + + def __init__( + self, + findings: list, + framework: ComplianceFramework, + file_path: str = None, + from_cli: bool = True, + provider: str = None, + ) -> None: + self._data = [] + self._file_descriptor = None + self.file_path = file_path + self._from_cli = from_cli + self._provider = provider + self.close_file = False + + if file_path: + path_obj = Path(file_path) + self._file_extension = path_obj.suffix if path_obj.suffix else "" + + if findings: + self._row_model = self._build_row_model(framework) + compliance_name = ( + framework.framework + "-" + framework.version + if framework.version + else framework.framework + ) + self._transform(findings, framework, compliance_name) + if not self._file_descriptor and file_path: + self._create_file_descriptor(file_path) + + @property + def data(self): + return self._data + + def _build_row_model(self, framework: ComplianceFramework): + """Build a dynamic Pydantic model from attributes_metadata.""" + acct_header, acct_field, loc_header, loc_field = PROVIDER_HEADER_MAP.get( + (self._provider or "").lower(), _DEFAULT_HEADERS + ) + self._acct_header = acct_header + self._acct_field = acct_field + self._loc_header = loc_header + self._loc_field = loc_field + + # Base fields present in every compliance CSV + fields = { + "Provider": (str, ...), + "Description": (str, ...), + acct_header: (str, ...), + loc_header: (str, ...), + "AssessmentDate": (str, ...), + "Requirements_Id": (str, ...), + "Requirements_Description": (str, ...), + } + + # Dynamic attribute columns from metadata + if framework.attributes_metadata: + for attr_meta in framework.attributes_metadata: + if not attr_meta.output_formats.csv: + continue + field_name = f"Requirements_Attributes_{attr_meta.key}" + # Map type strings to Python types + type_map = { + "str": Optional[str], + "int": Optional[int], + 
"float": Optional[float], + "bool": Optional[bool], + "list_str": Optional[str], # Serialized as joined string + "list_dict": Optional[str], # Serialized as string + } + py_type = type_map.get(attr_meta.type, Optional[str]) + fields[field_name] = (py_type, None) + + # Check if any requirement has MITRE fields + has_mitre = any(req.tactics for req in framework.requirements if req.tactics) + if has_mitre: + fields["Requirements_Tactics"] = (Optional[str], None) + fields["Requirements_SubTechniques"] = (Optional[str], None) + fields["Requirements_Platforms"] = (Optional[str], None) + fields["Requirements_TechniqueURL"] = (Optional[str], None) + + # Trailing fields + fields["Status"] = (str, ...) + fields["StatusExtended"] = (str, ...) + fields["ResourceId"] = (str, ...) + fields["ResourceName"] = (str, ...) + fields["CheckId"] = (str, ...) + fields["Muted"] = (bool, ...) + fields["Framework"] = (str, ...) + fields["Name"] = (str, ...) + + return create_model("UniversalComplianceRow", **fields) + + def _serialize_attr_value(self, value): + """Serialize attribute values for CSV.""" + if isinstance(value, list): + if value and isinstance(value[0], dict): + return str(value) + return " | ".join(str(v) for v in value) + return value + + def _build_row(self, finding, framework, requirement, is_manual=False): + """Build a single row dict for a finding + requirement combination.""" + row = { + "Provider": ( + finding.provider + if not is_manual + else (framework.provider or self._provider or "").lower() + ), + "Description": framework.description, + self._acct_header: ( + getattr(finding, self._acct_field, "") if not is_manual else "" + ), + self._loc_header: ( + getattr(finding, self._loc_field, "") if not is_manual else "" + ), + "AssessmentDate": str(timestamp), + "Requirements_Id": requirement.id, + "Requirements_Description": requirement.description, + } + + # Add dynamic attribute columns + if framework.attributes_metadata: + for attr_meta in 
framework.attributes_metadata: + if not attr_meta.output_formats.csv: + continue + field_name = f"Requirements_Attributes_{attr_meta.key}" + raw_val = requirement.attributes.get(attr_meta.key) + row[field_name] = ( + self._serialize_attr_value(raw_val) if raw_val is not None else None + ) + + # MITRE fields + if requirement.tactics: + row["Requirements_Tactics"] = ( + " | ".join(requirement.tactics) if requirement.tactics else None + ) + row["Requirements_SubTechniques"] = ( + " | ".join(requirement.sub_techniques) + if requirement.sub_techniques + else None + ) + row["Requirements_Platforms"] = ( + " | ".join(requirement.platforms) if requirement.platforms else None + ) + row["Requirements_TechniqueURL"] = requirement.technique_url + + row["Status"] = finding.status if not is_manual else "MANUAL" + row["StatusExtended"] = ( + finding.status_extended if not is_manual else "Manual check" + ) + row["ResourceId"] = finding.resource_uid if not is_manual else "manual_check" + row["ResourceName"] = finding.resource_name if not is_manual else "Manual check" + row["CheckId"] = finding.check_id if not is_manual else "manual" + row["Muted"] = finding.muted if not is_manual else False + row["Framework"] = framework.framework + row["Name"] = framework.name + + return row + + def _transform( + self, + findings: list["Finding"], + framework: ComplianceFramework, + compliance_name: str, + ) -> None: + """Transform findings into universal compliance CSV rows.""" + # Build check -> requirements map (filtered by provider for dict checks) + check_req_map = {} + for req in framework.requirements: + checks = req.checks + if self._provider: + all_checks = checks.get(self._provider.lower(), []) + else: + all_checks = [] + for check_list in checks.values(): + all_checks.extend(check_list) + for check_id in all_checks: + if check_id not in check_req_map: + check_req_map[check_id] = [] + check_req_map[check_id].append(req) + + # Process findings using the provider-filtered check_req_map. 
+ # This ensures that for multi-provider dict checks, only the checks + # belonging to the current provider produce output rows. + for finding in findings: + check_id = finding.check_id + if check_id in check_req_map: + for req in check_req_map[check_id]: + row = self._build_row(finding, framework, req) + try: + self._data.append(self._row_model(**row)) + except Exception as e: + logger.debug(f"Skipping row for {req.id}: {e}") + + # Manual requirements (no checks or empty dict) + for req in framework.requirements: + checks = req.checks + if self._provider: + has_checks = bool(checks.get(self._provider.lower(), [])) + else: + has_checks = any(checks.values()) + + if not has_checks: + # Use a dummy finding-like namespace for manual rows + row = self._build_row( + _ManualFindingStub(), framework, req, is_manual=True + ) + try: + self._data.append(self._row_model(**row)) + except Exception as e: + logger.debug(f"Skipping manual row for {req.id}: {e}") + + def _create_file_descriptor(self, file_path: str) -> None: + try: + self._file_descriptor = open_file(file_path, "a") + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + def batch_write_data_to_file(self) -> None: + """Write findings data to CSV.""" + try: + if ( + getattr(self, "_file_descriptor", None) + and not self._file_descriptor.closed + and self._data + ): + csv_writer = DictWriter( + self._file_descriptor, + fieldnames=[field.upper() for field in self._data[0].dict().keys()], + delimiter=";", + ) + if self._file_descriptor.tell() == 0: + csv_writer.writeheader() + for row in self._data: + csv_writer.writerow({k.upper(): v for k, v in row.dict().items()}) + if self.close_file or self._from_cli: + self._file_descriptor.close() + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + +class _ManualFindingStub: + """Minimal stub to satisfy _build_row for manual 
requirements.""" + + provider = "" + account_uid = "" + account_name = "" + region = "" + status = "MANUAL" + status_extended = "Manual check" + resource_uid = "manual_check" + resource_name = "Manual check" + check_id = "manual" + muted = False diff --git a/prowler/lib/outputs/finding.py b/prowler/lib/outputs/finding.py index 3da2f0d7d3..95a634c85f 100644 --- a/prowler/lib/outputs/finding.py +++ b/prowler/lib/outputs/finding.py @@ -15,7 +15,7 @@ from prowler.lib.check.models import ( ) from prowler.lib.logger import logger from prowler.lib.outputs.common import Status, fill_common_finding_data -from prowler.lib.outputs.compliance.compliance import get_check_compliance +from prowler.lib.outputs.compliance.compliance_check import get_check_compliance from prowler.lib.outputs.utils import unroll_tags from prowler.lib.utils.utils import dict_to_lowercase, get_nested_attribute from prowler.providers.common.provider import Provider diff --git a/tests/config/config_test.py b/tests/config/config_test.py index 8bf7755267..f7349d2004 100644 --- a/tests/config/config_test.py +++ b/tests/config/config_test.py @@ -436,6 +436,33 @@ class Test_Config: assert "csa_ccm_4.0" in aws_frameworks assert "csa_ccm_4.0" not in kubernetes_frameworks + def test_get_available_compliance_frameworks_no_provider_includes_universals(self): + """Regression test for the variable shadowing bug. + + Previously, the inner ``for provider in providers`` loop shadowed + the outer ``provider`` parameter. When called without a provider, + the post-loop ``if provider:`` branch wrongly applied + ``framework.supports_provider()`` and + excluded universal frameworks from the result. + + Result: the parser-level ``available_compliance_frameworks`` + constant was missing universal frameworks like ``csa_ccm_4.0``, + which made ``--compliance csa_ccm_4.0`` reject the choice. 
+ """ + all_frameworks = get_available_compliance_frameworks() + assert "csa_ccm_4.0" in all_frameworks + + def test_get_available_compliance_frameworks_does_not_mutate_provider_param(self): + """Calling with a specific provider must not affect a subsequent + call without provider. Validates that the loop variable rename + prevents leaking state between calls.""" + # Force an iteration over multiple providers first + get_available_compliance_frameworks("kubernetes") + # Then a no-provider call must still include universals supported + # by ANY provider (not filtered by some leaked value) + all_frameworks = get_available_compliance_frameworks() + assert "csa_ccm_4.0" in all_frameworks + def test_load_and_validate_config_file_aws(self): path = pathlib.Path(os.path.dirname(os.path.realpath(__file__))) config_test_file = f"{path}/fixtures/config.yaml" diff --git a/tests/lib/check/check_loader_test.py b/tests/lib/check/check_loader_test.py index 5650ee17ab..24cc93bac0 100644 --- a/tests/lib/check/check_loader_test.py +++ b/tests/lib/check/check_loader_test.py @@ -675,3 +675,177 @@ class TestCheckLoader: ) assert CLOUDTRAIL_THREAT_DETECTION_ENUMERATION_NAME not in result assert S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME in result + + def test_load_checks_to_execute_universal_framework_takes_precedence(self): + """When ``--compliance `` matches a universal framework, the + loader must source checks from ``universal_frameworks[fw].requirements[*] + .checks[provider]`` and NOT fall through to ``bulk_compliance_frameworks``. + + This is the path added by PR #10301 in checks_loader.py. 
+ """ + from prowler.lib.check.compliance_models import ( + ComplianceFramework, + UniversalComplianceRequirement, + ) + + bulk_checks_metadata = { + S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata() + } + + universal_framework = ComplianceFramework( + framework="csa_ccm", + name="CSA CCM 4.0", + version="4.0", + description="Cloud Controls Matrix", + requirements=[ + UniversalComplianceRequirement( + id="A&A-01", + description="Audit & Assurance", + attributes={}, + checks={"aws": [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME]}, + ), + ], + ) + + with patch( + "prowler.lib.check.checks_loader.CheckMetadata.get_bulk", + return_value=bulk_checks_metadata, + ): + result = load_checks_to_execute( + bulk_checks_metadata=bulk_checks_metadata, + bulk_compliance_frameworks={}, # legacy empty + compliance_frameworks=["csa_ccm_4.0"], + provider=self.provider, + universal_frameworks={"csa_ccm_4.0": universal_framework}, + ) + + assert result == {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} + + def test_load_checks_to_execute_universal_filters_by_provider(self): + """A universal requirement may declare checks for several + providers; the loader must only return those for the active + provider key (lowercased).""" + from prowler.lib.check.compliance_models import ( + ComplianceFramework, + UniversalComplianceRequirement, + ) + + bulk_checks_metadata = { + S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata() + } + + # The same requirement maps a different check per provider. + # Only the AWS one must be returned for provider="aws". 
+ universal_framework = ComplianceFramework( + framework="csa_ccm", + name="CSA CCM 4.0", + version="4.0", + description="Cloud Controls Matrix", + requirements=[ + UniversalComplianceRequirement( + id="A&A-02", + description="Multi-provider req", + attributes={}, + checks={ + "aws": [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME], + "azure": ["azure_only_check"], + "gcp": ["gcp_only_check"], + }, + ), + ], + ) + + with patch( + "prowler.lib.check.checks_loader.CheckMetadata.get_bulk", + return_value=bulk_checks_metadata, + ): + result = load_checks_to_execute( + bulk_checks_metadata=bulk_checks_metadata, + bulk_compliance_frameworks={}, + compliance_frameworks=["csa_ccm_4.0"], + provider=self.provider, # "aws" + universal_frameworks={"csa_ccm_4.0": universal_framework}, + ) + + assert S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME in result + assert "azure_only_check" not in result + assert "gcp_only_check" not in result + + def test_load_checks_to_execute_universal_no_match_falls_back_to_legacy(self): + """If the requested compliance framework is not present in + ``universal_frameworks``, the loader must fall back to the + legacy ``bulk_compliance_frameworks`` lookup.""" + bulk_checks_metadata = { + S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata() + } + bulk_compliance_frameworks = { + "soc2_aws": Compliance( + Framework="SOC2", + Name="SOC2", + Provider="aws", + Version="2.0", + Description="x", + Requirements=[ + Compliance_Requirement( + Checks=[S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME], + Id="", + Description="", + Attributes=[], + ) + ], + ), + } + + with patch( + "prowler.lib.check.checks_loader.CheckMetadata.get_bulk", + return_value=bulk_checks_metadata, + ): + result = load_checks_to_execute( + bulk_checks_metadata=bulk_checks_metadata, + bulk_compliance_frameworks=bulk_compliance_frameworks, + compliance_frameworks=["soc2_aws"], + provider=self.provider, + universal_frameworks={"some_other_universal_fw": object()}, + ) + + assert 
result == {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} + + def test_load_checks_to_execute_universal_unknown_provider_returns_empty(self): + """If the universal requirement has no checks for the active + provider, no checks are picked up for that requirement.""" + from prowler.lib.check.compliance_models import ( + ComplianceFramework, + UniversalComplianceRequirement, + ) + + bulk_checks_metadata = { + S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_s3_metadata() + } + universal_framework = ComplianceFramework( + framework="csa_ccm", + name="CSA CCM 4.0", + version="4.0", + description="Cloud Controls Matrix", + requirements=[ + UniversalComplianceRequirement( + id="A&A-03", + description="Only Azure", + attributes={}, + checks={"azure": ["azure_only_check"]}, + ), + ], + ) + + with patch( + "prowler.lib.check.checks_loader.CheckMetadata.get_bulk", + return_value=bulk_checks_metadata, + ): + result = load_checks_to_execute( + bulk_checks_metadata=bulk_checks_metadata, + bulk_compliance_frameworks={}, + compliance_frameworks=["csa_ccm_4.0"], + provider=self.provider, # "aws" — no checks declared + universal_frameworks={"csa_ccm_4.0": universal_framework}, + ) + + assert result == set() diff --git a/tests/lib/outputs/compliance/compliance_test.py b/tests/lib/outputs/compliance/compliance_test.py index bb6a7e4089..f38c783d42 100644 --- a/tests/lib/outputs/compliance/compliance_test.py +++ b/tests/lib/outputs/compliance/compliance_test.py @@ -442,3 +442,123 @@ class TestComplianceOutput: ) assert compliance_output.file_extension == ".csv" + + +class TestComplianceCheckHelperModule: + """Tests for the new ``compliance_check`` leaf module that hosts + ``get_check_compliance``. + + This module exists to break the cyclic import chain + ``finding -> compliance.compliance -> universal.* -> finding`` that + CodeQL flagged. 
It must be: + - importable directly without pulling in the universal pipeline + - re-exported by ``compliance.compliance`` for backward compatibility + - the SAME function object, regardless of import path + """ + + def test_module_is_importable_directly(self): + """The helper module must be importable on its own — it is the + leaf used by ``finding.py`` to break the cyclic import chain.""" + from prowler.lib.outputs.compliance import compliance_check + + assert hasattr(compliance_check, "get_check_compliance") + assert callable(compliance_check.get_check_compliance) + + def test_helper_module_only_depends_on_check_models_and_logger(self): + """The helper must not pull in universal pipeline modules; that + was the whole point of extracting it. Inspecting the module's + own imports keeps it honest without polluting ``sys.modules``.""" + import inspect + + from prowler.lib.outputs.compliance import compliance_check + + source = inspect.getsource(compliance_check) + # Only these two prowler imports are allowed in the leaf module + assert "from prowler.lib.check.models import Check_Report" in source + assert "from prowler.lib.logger import logger" in source + # And NOT these (would re-introduce the cycle): + assert "from prowler.lib.outputs.compliance.universal" not in source + assert "from prowler.lib.outputs.finding" not in source + assert "from prowler.lib.outputs.ocsf" not in source + + def test_re_export_from_compliance_compliance(self): + """``compliance.compliance.get_check_compliance`` must point to + the same function as ``compliance.compliance_check.get_check_compliance``.""" + from prowler.lib.outputs.compliance.compliance import ( + get_check_compliance as via_compliance, + ) + from prowler.lib.outputs.compliance.compliance_check import ( + get_check_compliance as via_helper, + ) + + assert via_compliance is via_helper + + def test_re_export_from_finding_module(self): + """``finding.get_check_compliance`` must point to the same + function. 
Test mocks rely on this attribute existing on the + ``prowler.lib.outputs.finding`` module.""" + from prowler.lib.outputs.compliance.compliance_check import ( + get_check_compliance as via_helper, + ) + from prowler.lib.outputs.finding import get_check_compliance as via_finding + + assert via_finding is via_helper + + def test_returns_empty_dict_on_unknown_check(self): + """Sanity test of the function logic via the helper module.""" + from prowler.lib.outputs.compliance.compliance_check import ( + get_check_compliance, + ) + + finding = mock.MagicMock() + finding.check_metadata.CheckID = "unknown_check_id" + result = get_check_compliance(finding, "aws", {}) + assert result == {} + + def test_filters_by_provider(self): + """The function returns frameworks only for the matching provider.""" + from prowler.lib.outputs.compliance.compliance_check import ( + get_check_compliance, + ) + + compliance_aws = mock.MagicMock( + Framework="CIS", + Version="1.4", + Provider="AWS", + Requirements=[mock.MagicMock(Id="2.1.3")], + ) + compliance_azure = mock.MagicMock( + Framework="CIS", + Version="2.0", + Provider="Azure", + Requirements=[mock.MagicMock(Id="9.1")], + ) + finding = mock.MagicMock() + finding.check_metadata.CheckID = "shared_check" + bulk = { + "shared_check": mock.MagicMock( + Compliance=[compliance_aws, compliance_azure] + ) + } + + # Only AWS frameworks come back + result = get_check_compliance(finding, "aws", bulk) + assert "CIS-1.4" in result + assert "CIS-2.0" not in result + + def test_returns_empty_dict_on_exception(self): + """If iteration raises, the function logs the error and returns + an empty dict (defensive behaviour).""" + from prowler.lib.outputs.compliance.compliance_check import ( + get_check_compliance, + ) + + # bulk_checks_metadata that raises when accessed → defensive path + class Boom: + def __contains__(self, _key): + raise RuntimeError("boom") + + finding = mock.MagicMock() + finding.check_metadata.CheckID = "any" + result = 
get_check_compliance(finding, "aws", Boom()) + assert result == {} diff --git a/tests/lib/outputs/compliance/display_compliance_table_test.py b/tests/lib/outputs/compliance/display_compliance_table_test.py new file mode 100644 index 0000000000..0d2cd5313b --- /dev/null +++ b/tests/lib/outputs/compliance/display_compliance_table_test.py @@ -0,0 +1,244 @@ +"""Tests for display_compliance_table dispatch logic. + +Validates that each compliance framework name is routed to the correct +table renderer via startswith matching, and that the universal early-return +takes precedence when applicable. +""" + +from unittest.mock import patch + +import pytest + +from prowler.lib.check.compliance_models import ( + ComplianceFramework, + OutputsConfig, + TableConfig, + UniversalComplianceRequirement, +) +from prowler.lib.outputs.compliance.compliance import display_compliance_table + +MODULE = "prowler.lib.outputs.compliance.compliance" + +# Common args shared by every call — the actual values don't matter +# because we mock the downstream renderers. 
+_COMMON = dict( + findings=[], + bulk_checks_metadata={}, + output_filename="out", + output_directory="/tmp", + compliance_overview=False, +) + + +# ── Dispatch to legacy table renderers ─────────────────────────────── + + +class TestDispatchStartswith: + """Each framework prefix must route to exactly one renderer.""" + + @pytest.mark.parametrize( + "framework_name", + [ + "cis_1.4_aws", + "cis_2.0_azure", + "cis_3.0_gcp", + "cis_6.0_m365", + "cis_1.10_kubernetes", + ], + ) + @patch(f"{MODULE}.get_cis_table") + def test_cis_dispatch(self, mock_fn, framework_name): + display_compliance_table(compliance_framework=framework_name, **_COMMON) + mock_fn.assert_called_once() + + @pytest.mark.parametrize( + "framework_name", + ["ens_rd2022_aws", "ens_rd2022_azure", "ens_rd2022_gcp"], + ) + @patch(f"{MODULE}.get_ens_table") + def test_ens_dispatch(self, mock_fn, framework_name): + display_compliance_table(compliance_framework=framework_name, **_COMMON) + mock_fn.assert_called_once() + + @pytest.mark.parametrize( + "framework_name", + ["mitre_attack_aws", "mitre_attack_azure", "mitre_attack_gcp"], + ) + @patch(f"{MODULE}.get_mitre_attack_table") + def test_mitre_dispatch(self, mock_fn, framework_name): + display_compliance_table(compliance_framework=framework_name, **_COMMON) + mock_fn.assert_called_once() + + @pytest.mark.parametrize( + "framework_name", + ["kisa_isms_p_2023_aws", "kisa_isms_p_2023_korean_aws"], + ) + @patch(f"{MODULE}.get_kisa_ismsp_table") + def test_kisa_dispatch(self, mock_fn, framework_name): + display_compliance_table(compliance_framework=framework_name, **_COMMON) + mock_fn.assert_called_once() + + @pytest.mark.parametrize( + "framework_name", + [ + "prowler_threatscore_aws", + "prowler_threatscore_azure", + "prowler_threatscore_gcp", + "prowler_threatscore_kubernetes", + "prowler_threatscore_m365", + "prowler_threatscore_alibabacloud", + ], + ) + @patch(f"{MODULE}.get_prowler_threatscore_table") + def test_threatscore_dispatch(self, mock_fn, 
framework_name): + display_compliance_table(compliance_framework=framework_name, **_COMMON) + mock_fn.assert_called_once() + + @pytest.mark.parametrize( + "framework_name", + [ + "csa_ccm_4.0_aws", + "csa_ccm_4.0_azure", + "csa_ccm_4.0_gcp", + "csa_ccm_4.0_oraclecloud", + "csa_ccm_4.0_alibabacloud", + ], + ) + @patch(f"{MODULE}.get_csa_table") + def test_csa_dispatch(self, mock_fn, framework_name): + display_compliance_table(compliance_framework=framework_name, **_COMMON) + mock_fn.assert_called_once() + + @pytest.mark.parametrize( + "framework_name", + ["c5_aws", "c5_azure", "c5_gcp"], + ) + @patch(f"{MODULE}.get_c5_table") + def test_c5_dispatch(self, mock_fn, framework_name): + display_compliance_table(compliance_framework=framework_name, **_COMMON) + mock_fn.assert_called_once() + + @pytest.mark.parametrize( + "framework_name", + [ + "soc2_aws", + "hipaa_aws", + "gdpr_aws", + "nist_800_53_revision_4_aws", + "pci_3.2.1_aws", + "iso27001_2013_aws", + "aws_well_architected_framework_security_pillar_aws", + "fedramp_low_revision_4_aws", + "cisa_aws", + ], + ) + @patch(f"{MODULE}.get_generic_compliance_table") + def test_generic_dispatch(self, mock_fn, framework_name): + display_compliance_table(compliance_framework=framework_name, **_COMMON) + mock_fn.assert_called_once() + + +# ── No false matches (the old `in` bug) ───────────────────────────── + + +class TestNoFalseSubstringMatches: + """Frameworks that previously could false-match with `in` must NOT + be routed to the wrong renderer now that we use startswith.""" + + @patch(f"{MODULE}.get_ens_table") + @patch(f"{MODULE}.get_generic_compliance_table") + def test_cisa_does_not_match_cis(self, mock_generic, mock_cis): + """'cisa_aws' must NOT match startswith('cis_').""" + display_compliance_table(compliance_framework="cisa_aws", **_COMMON) + mock_generic.assert_called_once() + mock_cis.assert_not_called() + + @patch(f"{MODULE}.get_prowler_threatscore_table") + @patch(f"{MODULE}.get_generic_compliance_table") + 
def test_threatscore_prefix_not_partial(self, mock_generic, mock_ts): + """A hypothetical 'threatscore_custom_aws' must NOT match + startswith('prowler_threatscore_').""" + display_compliance_table( + compliance_framework="threatscore_custom_aws", **_COMMON + ) + mock_generic.assert_called_once() + mock_ts.assert_not_called() + + @patch(f"{MODULE}.get_ens_table") + @patch(f"{MODULE}.get_prowler_threatscore_table") + def test_prowler_threatscore_does_not_match_ens(self, mock_ts, mock_ens): + """'prowler_threatscore_aws' must hit threatscore, never ens.""" + display_compliance_table( + compliance_framework="prowler_threatscore_aws", **_COMMON + ) + mock_ts.assert_called_once() + mock_ens.assert_not_called() + + +# ── Universal early-return ─────────────────────────────────────────── + + +class TestUniversalEarlyReturn: + """The universal path must take precedence over the elif chain.""" + + @staticmethod + def _make_fw(): + return ComplianceFramework( + framework="CIS", + name="CIS", + provider="AWS", + version="5.0", + description="d", + requirements=[ + UniversalComplianceRequirement( + id="1.1", + description="d", + attributes={}, + checks={"aws": ["check_a"]}, + ), + ], + outputs=OutputsConfig(table_config=TableConfig(group_by="_default")), + ) + + @patch(f"{MODULE}.get_universal_table") + @patch(f"{MODULE}.get_cis_table") + def test_universal_takes_precedence_over_cis(self, mock_cis, mock_universal): + """A CIS framework in universal_frameworks with TableConfig must + use the universal renderer, not get_cis_table.""" + fw = self._make_fw() + display_compliance_table( + compliance_framework="cis_5.0_aws", + universal_frameworks={"cis_5.0_aws": fw}, + **_COMMON, + ) + mock_universal.assert_called_once() + mock_cis.assert_not_called() + + @patch(f"{MODULE}.get_universal_table") + @patch(f"{MODULE}.get_cis_table") + def test_falls_through_without_table_config(self, mock_cis, mock_universal): + """If the universal framework has no TableConfig, fall through + to the 
legacy elif chain.""" + fw = self._make_fw() + fw.outputs = None + display_compliance_table( + compliance_framework="cis_5.0_aws", + universal_frameworks={"cis_5.0_aws": fw}, + **_COMMON, + ) + mock_cis.assert_called_once() + mock_universal.assert_not_called() + + @patch(f"{MODULE}.get_universal_table") + @patch(f"{MODULE}.get_generic_compliance_table") + def test_falls_through_when_not_in_universal_dict( + self, mock_generic, mock_universal + ): + """If universal_frameworks is empty, fall through to legacy.""" + display_compliance_table( + compliance_framework="soc2_aws", + universal_frameworks={}, + **_COMMON, + ) + mock_generic.assert_called_once() + mock_universal.assert_not_called() diff --git a/tests/lib/outputs/compliance/process_universal_test.py b/tests/lib/outputs/compliance/process_universal_test.py new file mode 100644 index 0000000000..fa8b737ddd --- /dev/null +++ b/tests/lib/outputs/compliance/process_universal_test.py @@ -0,0 +1,730 @@ +"""Tests for process_universal_compliance_frameworks and --list-compliance fixes. + +Validates that the pre-processing step: + - generates both CSV and OCSF outputs for universal frameworks + - always generates OCSF (no output-format gate) + - skips frameworks without outputs or table_config + - skips frameworks not in universal_frameworks + - returns the set of processed names for removal from the legacy loop + - works across different providers + +Also validates that print_compliance_frameworks and print_compliance_requirements +work with universal ComplianceFramework objects (dict checks, None provider). 
+""" + +import json +import os +from datetime import datetime, timezone +from types import SimpleNamespace + +import pytest + +from prowler.lib.check.check import ( + print_compliance_frameworks, + print_compliance_requirements, +) +from prowler.lib.check.compliance_models import ( + AttributeMetadata, + ComplianceFramework, + OutputsConfig, + TableConfig, + UniversalComplianceRequirement, +) +from prowler.lib.outputs.compliance.compliance import ( + process_universal_compliance_frameworks, +) +from prowler.lib.outputs.compliance.universal.ocsf_compliance import ( + OCSFComplianceOutput, +) +from prowler.lib.outputs.compliance.universal.universal_output import ( + UniversalComplianceOutput, +) + + +@pytest.fixture(autouse=True) +def _create_compliance_dir(tmp_path): + """Ensure the compliance/ subdirectory exists before each test.""" + os.makedirs(tmp_path / "compliance", exist_ok=True) + + +# ── Helpers ────────────────────────────────────────────────────────── + + +def _make_finding(check_id, status="PASS", provider="aws"): + """Create a mock Finding with all fields needed by both output classes.""" + finding = SimpleNamespace() + finding.provider = provider + finding.account_uid = "123456789012" + finding.account_name = "test-account" + finding.account_email = "" + finding.account_organization_uid = "org-123" + finding.account_organization_name = "test-org" + finding.account_tags = {"env": "test"} + finding.region = "us-east-1" + finding.status = status + finding.status_extended = f"{check_id} is {status}" + finding.resource_uid = f"arn:aws:iam::123456789012:{check_id}" + finding.resource_name = check_id + finding.resource_details = "some details" + finding.resource_metadata = {} + finding.resource_tags = {"Name": "test"} + finding.partition = "aws" + finding.muted = False + finding.check_id = check_id + finding.uid = "test-finding-uid" + finding.timestamp = datetime(2025, 1, 15, 12, 0, 0, tzinfo=timezone.utc) + finding.prowler_version = "5.0.0" + 
finding.compliance = {"TestFW-1.0": ["1.1"]} + finding.metadata = SimpleNamespace( + Provider=provider, + CheckID=check_id, + CheckTitle=f"Title for {check_id}", + CheckType=["test-type"], + Description=f"Description for {check_id}", + Severity="medium", + ServiceName="iam", + ResourceType="aws-iam-role", + Risk="test-risk", + RelatedUrl="https://example.com", + Remediation=SimpleNamespace( + Recommendation=SimpleNamespace(Text="Fix it", Url="https://fix.com"), + ), + DependsOn=[], + RelatedTo=[], + Categories=["test"], + Notes="", + AdditionalURLs=[], + ) + return finding + + +def _make_universal_framework(name="TestFW", version="1.0", with_table_config=True): + """Build a ComplianceFramework with optional table_config.""" + reqs = [ + UniversalComplianceRequirement( + id="1.1", + description="Test requirement", + attributes={"Section": "IAM"}, + checks={"aws": ["check_a"]}, + ), + ] + metadata = [AttributeMetadata(key="Section", type="str")] + outputs = None + if with_table_config: + outputs = OutputsConfig(table_config=TableConfig(group_by="Section")) + return ComplianceFramework( + framework=name, + name=f"{name} Framework", + provider="AWS", + version=version, + description="Test framework", + requirements=reqs, + attributes_metadata=metadata, + outputs=outputs, + ) + + +# ── Tests ──────────────────────────────────────────────────────────── + + +class TestProcessUniversalComplianceFrameworks: + """Core tests for the extracted pre-processing function.""" + + def test_generates_csv_and_ocsf_outputs(self, tmp_path): + """Both CSV and OCSF outputs are appended to generated_outputs.""" + fw = _make_universal_framework() + generated = {"compliance": []} + + processed = process_universal_compliance_frameworks( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="prowler_output", + provider="aws", + generated_outputs=generated, + ) 
+ + assert processed == {"test_fw_1.0"} + assert len(generated["compliance"]) == 2 + assert isinstance(generated["compliance"][0], UniversalComplianceOutput) + assert isinstance(generated["compliance"][1], OCSFComplianceOutput) + + def test_ocsf_always_generated_no_format_gate(self, tmp_path): + """OCSF output is generated regardless of output_formats — no gate.""" + fw = _make_universal_framework() + generated = {"compliance": []} + process_universal_compliance_frameworks( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="prowler_output", + provider="aws", + generated_outputs=generated, + ) + + ocsf_outputs = [ + o for o in generated["compliance"] if isinstance(o, OCSFComplianceOutput) + ] + assert len(ocsf_outputs) == 1 + + def test_csv_file_written(self, tmp_path): + """CSV file is created with expected content.""" + fw = _make_universal_framework() + generated = {"compliance": []} + process_universal_compliance_frameworks( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="prowler_output", + provider="aws", + generated_outputs=generated, + ) + + csv_path = tmp_path / "compliance" / "prowler_output_test_fw_1.0.csv" + assert csv_path.exists() + content = csv_path.read_text() + assert "PROVIDER" in content + assert "REQUIREMENTS_ATTRIBUTES_SECTION" in content + + def test_ocsf_file_written(self, tmp_path): + """OCSF JSON file is created with valid content.""" + fw = _make_universal_framework() + generated = {"compliance": []} + process_universal_compliance_frameworks( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="prowler_output", + provider="aws", + 
generated_outputs=generated, + ) + + ocsf_path = tmp_path / "compliance" / "prowler_output_test_fw_1.0.ocsf.json" + assert ocsf_path.exists() + data = json.loads(ocsf_path.read_text()) + assert isinstance(data, list) + assert len(data) >= 1 + assert data[0]["class_uid"] == 2003 + + def test_returns_processed_names(self, tmp_path): + """Returns the set of framework names that were processed.""" + fw = _make_universal_framework() + generated = {"compliance": []} + + processed = process_universal_compliance_frameworks( + input_compliance_frameworks={"test_fw_1.0", "legacy_fw"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + + assert processed == {"test_fw_1.0"} + assert "legacy_fw" not in processed + + +class TestSkipConditions: + """Tests for frameworks that should NOT be processed.""" + + def test_skips_framework_not_in_universal(self, tmp_path): + """Frameworks not in universal_frameworks dict are skipped.""" + generated = {"compliance": []} + + processed = process_universal_compliance_frameworks( + input_compliance_frameworks={"cis_aws_1.4"}, + universal_frameworks={}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + + assert processed == set() + assert len(generated["compliance"]) == 0 + + def test_skips_framework_without_outputs(self, tmp_path): + """Frameworks with outputs=None are skipped.""" + fw = _make_universal_framework(with_table_config=False) + # outputs is None since with_table_config=False + assert fw.outputs is None + generated = {"compliance": []} + + processed = process_universal_compliance_frameworks( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + 
output_filename="out", + provider="aws", + generated_outputs=generated, + ) + + assert processed == set() + assert len(generated["compliance"]) == 0 + + def test_skips_framework_with_outputs_but_no_table_config(self, tmp_path): + """Frameworks with outputs but table_config=None are skipped.""" + fw = _make_universal_framework() + # Manually set table_config to None while keeping outputs + fw.outputs = OutputsConfig(table_config=None) + generated = {"compliance": []} + + processed = process_universal_compliance_frameworks( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + + assert processed == set() + assert len(generated["compliance"]) == 0 + + def test_empty_input_frameworks(self, tmp_path): + """No processing when input set is empty.""" + fw = _make_universal_framework() + generated = {"compliance": []} + + processed = process_universal_compliance_frameworks( + input_compliance_frameworks=set(), + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + + assert processed == set() + assert len(generated["compliance"]) == 0 + + +class TestMixedFrameworks: + """Tests with a mix of universal and legacy frameworks.""" + + def test_only_universal_processed_legacy_untouched(self, tmp_path): + """Only universal frameworks are processed; legacy names are not returned.""" + universal_fw = _make_universal_framework() + generated = {"compliance": []} + + all_frameworks = {"test_fw_1.0", "cis_aws_1.4", "nist_800_53_aws"} + processed = process_universal_compliance_frameworks( + input_compliance_frameworks=all_frameworks, + universal_frameworks={"test_fw_1.0": universal_fw}, + finding_outputs=[_make_finding("check_a")], + 
output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + + assert processed == {"test_fw_1.0"} + # 2 outputs for the one universal framework (CSV + OCSF) + assert len(generated["compliance"]) == 2 + + def test_removal_from_input_set(self, tmp_path): + """Caller can subtract processed set from input to get legacy-only frameworks.""" + universal_fw = _make_universal_framework() + generated = {"compliance": []} + + input_frameworks = {"test_fw_1.0", "cis_aws_1.4", "nist_800_53_aws"} + processed = process_universal_compliance_frameworks( + input_compliance_frameworks=input_frameworks, + universal_frameworks={"test_fw_1.0": universal_fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + + remaining = input_frameworks - processed + assert remaining == {"cis_aws_1.4", "nist_800_53_aws"} + + def test_multiple_universal_frameworks(self, tmp_path): + """Multiple universal frameworks each get CSV + OCSF.""" + fw1 = _make_universal_framework(name="FW1", version="1.0") + fw2 = _make_universal_framework(name="FW2", version="2.0") + generated = {"compliance": []} + + processed = process_universal_compliance_frameworks( + input_compliance_frameworks={"fw1_1.0", "fw2_2.0", "legacy"}, + universal_frameworks={"fw1_1.0": fw1, "fw2_2.0": fw2}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + + assert processed == {"fw1_1.0", "fw2_2.0"} + # 2 frameworks × 2 outputs each = 4 + assert len(generated["compliance"]) == 4 + csv_outputs = [ + o + for o in generated["compliance"] + if isinstance(o, UniversalComplianceOutput) + ] + ocsf_outputs = [ + o for o in generated["compliance"] if isinstance(o, OCSFComplianceOutput) + ] + assert len(csv_outputs) == 2 + assert len(ocsf_outputs) == 2 + + +class TestProviderVariants: + 
"""Verify the function works for different providers.""" + + @pytest.mark.parametrize( + "provider", + [ + "aws", + "azure", + "gcp", + "kubernetes", + "m365", + "github", + "oraclecloud", + "alibabacloud", + "nhn", + ], + ) + def test_all_providers_produce_outputs(self, tmp_path, provider): + """Each provider generates CSV + OCSF when given a universal framework.""" + fw = _make_universal_framework() + generated = {"compliance": []} + + processed = process_universal_compliance_frameworks( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a", provider=provider)], + output_directory=str(tmp_path), + output_filename="out", + provider=provider, + generated_outputs=generated, + ) + + assert processed == {"test_fw_1.0"} + assert len(generated["compliance"]) == 2 + assert isinstance(generated["compliance"][0], UniversalComplianceOutput) + assert isinstance(generated["compliance"][1], OCSFComplianceOutput) + + +class TestEmptyFindings: + """Test behavior when there are no findings.""" + + def test_still_processed_with_empty_findings(self, tmp_path): + """Framework is still marked as processed even with no findings.""" + fw = _make_universal_framework() + generated = {"compliance": []} + + processed = process_universal_compliance_frameworks( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[], + output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + + assert processed == {"test_fw_1.0"} + # Outputs are still appended (they'll just have empty data) + assert len(generated["compliance"]) == 2 + + +class TestFilePaths: + """Verify correct file path construction.""" + + def test_csv_path_format(self, tmp_path): + """CSV output has the correct file path.""" + fw = _make_universal_framework() + generated = {"compliance": []} + + process_universal_compliance_frameworks( + 
input_compliance_frameworks={"csa_ccm_4.0"}, + universal_frameworks={"csa_ccm_4.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="prowler_report", + provider="aws", + generated_outputs=generated, + ) + + csv_output = generated["compliance"][0] + assert csv_output.file_path == ( + f"{tmp_path}/compliance/prowler_report_csa_ccm_4.0.csv" + ) + + def test_ocsf_path_format(self, tmp_path): + """OCSF output has the correct file path.""" + fw = _make_universal_framework() + generated = {"compliance": []} + + process_universal_compliance_frameworks( + input_compliance_frameworks={"csa_ccm_4.0"}, + universal_frameworks={"csa_ccm_4.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="prowler_report", + provider="aws", + generated_outputs=generated, + ) + + ocsf_output = generated["compliance"][1] + assert ocsf_output.file_path == ( + f"{tmp_path}/compliance/prowler_report_csa_ccm_4.0.ocsf.json" + ) + + +# ── Tests for --list-compliance fix ────────────────────────────────── + + +def _make_legacy_compliance(): + """Create a mock legacy Compliance-like object with the expected attributes.""" + return SimpleNamespace( + Framework="CIS", + Provider="AWS", + Version="1.4", + Requirements=[ + SimpleNamespace( + Id="2.1.3", + Description="Ensure MFA Delete is enabled", + Checks=["s3_bucket_mfa_delete"], + ), + ], + ) + + +class TestPrintComplianceFrameworks: + """Tests for print_compliance_frameworks with universal frameworks.""" + + def test_includes_universal_frameworks(self, capsys): + """Universal frameworks appear in the listing.""" + legacy = {"cis_1.4_aws": _make_legacy_compliance()} + universal = {"csa_ccm_4.0": _make_universal_framework()} + merged = {**legacy, **universal} + + print_compliance_frameworks(merged) + captured = capsys.readouterr().out + + assert "cis_1.4_aws" in captured + assert "csa_ccm_4.0" in captured + + def test_count_includes_both(self, 
capsys):
+        """Framework count includes both legacy and universal."""
+        legacy = {"cis_1.4_aws": _make_legacy_compliance()}
+        universal = {"csa_ccm_4.0": _make_universal_framework()}
+        merged = {**legacy, **universal}
+
+        print_compliance_frameworks(merged)
+        captured = capsys.readouterr().out
+
+        assert "2" in captured
+
+    def test_universal_only(self, capsys):
+        """Works when only universal frameworks are present."""
+        universal = {"csa_ccm_4.0": _make_universal_framework()}
+
+        print_compliance_frameworks(universal)
+        captured = capsys.readouterr().out
+
+        assert "csa_ccm_4.0" in captured
+        assert "1" in captured
+
+
+class TestPrintComplianceRequirements:
+    """Tests for print_compliance_requirements with universal frameworks."""
+
+    def test_list_checks_universal_framework(self, capsys):
+        """Requirements with list checks are printed correctly."""
+        fw = _make_universal_framework()
+        all_fw = {"test_fw_1.0": fw}
+
+        print_compliance_requirements(all_fw, ["test_fw_1.0"])
+        captured = capsys.readouterr().out
+
+        assert "1.1" in captured
+        assert "check_a" in captured
+
+    def test_dict_checks_universal_framework(self, capsys):
+        """Requirements with dict checks show provider-prefixed checks."""
+        reqs = [
+            UniversalComplianceRequirement(
+                id="A&A-01",
+                description="Audit & Assurance",
+                attributes={"Section": "A&A"},
+                checks={"aws": ["check_a", "check_b"], "azure": ["check_c"]},
+            ),
+        ]
+        fw = ComplianceFramework(
+            framework="CSA_CCM",
+            name="CSA CCM 4.0",
+            version="4.0",
+            description="Cloud Controls Matrix",
+            requirements=reqs,
+        )
+        all_fw = {"csa_ccm_4.0": fw}
+
+        print_compliance_requirements(all_fw, ["csa_ccm_4.0"])
+        captured = capsys.readouterr().out
+
+        assert "A&A-01" in captured
+        assert "[aws] check_a" in captured
+        assert "[aws] check_b" in captured
+        assert "[azure] check_c" in captured
+
+    def test_none_provider_shows_multi_provider(self, capsys):
+        """Frameworks with provider=None show 'Multi-provider'."""
+        fw = ComplianceFramework(
framework="CSA_CCM", + name="CSA CCM 4.0", + version="4.0", + description="Cloud Controls Matrix", + requirements=[ + UniversalComplianceRequirement( + id="1.1", + description="test", + attributes={}, + checks={"aws": ["check_a"]}, + ), + ], + ) + all_fw = {"csa_ccm_4.0": fw} + + print_compliance_requirements(all_fw, ["csa_ccm_4.0"]) + captured = capsys.readouterr().out + + assert "Multi-provider" in captured + + +# ── Idempotency tests ──────────────────────────────────────────────── + + +class TestIdempotency: + """The function must be safe to invoke multiple times for the same + framework. Repeated calls must reuse writers tracked in + ``generated_outputs["compliance"]`` instead of recreating them. + + This guards against: + - duplicate writer entries in generated_outputs (regular pipeline + treats one writer per framework) + - the OCSF append-bug where a second writer would emit + ``[...]...]`` and break the JSON array. + """ + + def test_second_call_does_not_duplicate_writers(self, tmp_path): + fw = _make_universal_framework() + generated = {"compliance": []} + kwargs = dict( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="prowler_output", + provider="aws", + generated_outputs=generated, + ) + + first = process_universal_compliance_frameworks(**kwargs) + first_count = len(generated["compliance"]) + second = process_universal_compliance_frameworks(**kwargs) + second_count = len(generated["compliance"]) + + assert first == {"test_fw_1.0"} + assert second == {"test_fw_1.0"} # still reported as processed + assert first_count == 2 # CSV + OCSF + assert second_count == 2 # NO duplication + + def test_second_call_keeps_ocsf_json_valid(self, tmp_path): + """End-to-end: after two calls the OCSF JSON file must still be + a single, valid JSON array — not the broken ``[...]...]`` form.""" + fw = _make_universal_framework() + 
generated = {"compliance": []} + kwargs = dict( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="prowler_output", + provider="aws", + generated_outputs=generated, + ) + + process_universal_compliance_frameworks(**kwargs) + process_universal_compliance_frameworks(**kwargs) + + ocsf_path = tmp_path / "compliance" / "prowler_output_test_fw_1.0.ocsf.json" + data = json.loads(ocsf_path.read_text()) # Will raise on invalid JSON + assert isinstance(data, list) + assert len(data) >= 1 + + def test_reuses_existing_writer_object(self, tmp_path): + """The CSV/OCSF writer instances appended on first call must be + the SAME objects after a second call — not fresh ones.""" + fw = _make_universal_framework() + generated = {"compliance": []} + kwargs = dict( + input_compliance_frameworks={"test_fw_1.0"}, + universal_frameworks={"test_fw_1.0": fw}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="prowler_output", + provider="aws", + generated_outputs=generated, + ) + + process_universal_compliance_frameworks(**kwargs) + first_writers = list(generated["compliance"]) + process_universal_compliance_frameworks(**kwargs) + second_writers = list(generated["compliance"]) + + # Same identity, same length — reused, not recreated. 
+ assert len(first_writers) == len(second_writers) + for a, b in zip(first_writers, second_writers): + assert a is b + + def test_idempotency_across_mixed_frameworks(self, tmp_path): + """When the second call adds a new framework, the new one is + created while existing ones are NOT recreated.""" + fw1 = _make_universal_framework(name="FW1", version="1.0") + fw2 = _make_universal_framework(name="FW2", version="2.0") + generated = {"compliance": []} + + # First call: only FW1 + process_universal_compliance_frameworks( + input_compliance_frameworks={"fw1_1.0"}, + universal_frameworks={"fw1_1.0": fw1, "fw2_2.0": fw2}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + first_writers = list(generated["compliance"]) + assert len(first_writers) == 2 + + # Second call: includes both. FW1 must be reused, FW2 created fresh. + process_universal_compliance_frameworks( + input_compliance_frameworks={"fw1_1.0", "fw2_2.0"}, + universal_frameworks={"fw1_1.0": fw1, "fw2_2.0": fw2}, + finding_outputs=[_make_finding("check_a")], + output_directory=str(tmp_path), + output_filename="out", + provider="aws", + generated_outputs=generated, + ) + second_writers = list(generated["compliance"]) + assert len(second_writers) == 4 # 2 (FW1 reused) + 2 new (FW2) + # FW1 writer instances unchanged + assert second_writers[0] is first_writers[0] + assert second_writers[1] is first_writers[1] diff --git a/tests/lib/outputs/compliance/universal/ocsf_compliance_test.py b/tests/lib/outputs/compliance/universal/ocsf_compliance_test.py index 39bce5cb20..71429fc1ca 100644 --- a/tests/lib/outputs/compliance/universal/ocsf_compliance_test.py +++ b/tests/lib/outputs/compliance/universal/ocsf_compliance_test.py @@ -2,6 +2,7 @@ import json from datetime import datetime, timezone from types import SimpleNamespace +from py_ocsf_models.events.base_event import StatusID as EventStatusID from 
py_ocsf_models.events.findings.compliance_finding import ComplianceFinding from py_ocsf_models.events.findings.compliance_finding_type_id import ( ComplianceFindingTypeID, @@ -18,6 +19,7 @@ from prowler.lib.check.compliance_models import ( ) from prowler.lib.outputs.compliance.universal.ocsf_compliance import ( OCSFComplianceOutput, + _sanitize_resource_data, ) @@ -473,3 +475,159 @@ class TestOCSFComplianceOutput: cf = output.data[0] assert cf.unmapped["requirement_attributes"]["section"] == "Logging" assert "internal_note" not in cf.unmapped["requirement_attributes"] + + +class TestSanitizeResourceData: + """Unit tests for the _sanitize_resource_data helper. + + Service resources may carry non-JSON-serializable objects (e.g. raw + Pydantic models such as ``Trail`` or ``LifecyclePolicy``). The helper + must convert them so the resulting ComplianceFinding can be serialized. + """ + + def test_dict_passthrough(self): + result = _sanitize_resource_data("details", {"a": 1, "b": "two"}) + assert result == {"details": "details", "metadata": {"a": 1, "b": "two"}} + + def test_none_metadata(self): + result = _sanitize_resource_data("details", None) + assert result == {"details": "details", "metadata": None} + + def test_pydantic_v2_model_dump(self): + class FakeV2Model: + def model_dump(self): + return {"name": "trail-1", "region": "us-east-1"} + + result = _sanitize_resource_data("d", {"trail": FakeV2Model()}) + assert result["metadata"]["trail"] == { + "name": "trail-1", + "region": "us-east-1", + } + + def test_pydantic_v1_dict(self): + class FakeV1Model: + def dict(self): + return {"name": "policy-1", "schedule": "daily"} + + result = _sanitize_resource_data("d", {"policy": FakeV1Model()}) + assert result["metadata"]["policy"] == { + "name": "policy-1", + "schedule": "daily", + } + + def test_nested_pydantic_in_list(self): + class FakeModel: + def model_dump(self): + return {"id": "x"} + + result = _sanitize_resource_data("d", {"items": [FakeModel(), FakeModel()]}) + 
assert result["metadata"]["items"] == [{"id": "x"}, {"id": "x"}] + + def test_nested_dict_recursion(self): + class FakeInner: + def model_dump(self): + return {"k": "v"} + + result = _sanitize_resource_data( + "d", {"outer": {"inner": FakeInner(), "x": [1, 2]}} + ) + assert result["metadata"]["outer"]["inner"] == {"k": "v"} + assert result["metadata"]["outer"]["x"] == [1, 2] + + def test_tuple_to_list(self): + result = _sanitize_resource_data("d", {"t": (1, 2, "three")}) + assert result["metadata"]["t"] == [1, 2, "three"] + + def test_non_string_dict_keys_coerced(self): + result = _sanitize_resource_data("d", {1: "a", 2: "b"}) + assert result["metadata"] == {"1": "a", "2": "b"} + + def test_unknown_object_falls_back_to_str(self): + class Opaque: + def __str__(self): + return "opaque-repr" + + result = _sanitize_resource_data("d", {"thing": Opaque()}) + assert result["metadata"]["thing"] == "opaque-repr" + + def test_circular_reference_falls_back_to_empty(self): + a = {} + a["self"] = a + # json.dumps raises ValueError on recursion → fallback to empty metadata + result = _sanitize_resource_data("d", a) + assert result == {"details": "d", "metadata": {}} + + def test_serializes_via_full_finding_pipeline(self): + """End-to-end: a finding with a non-serializable resource_metadata + produces a JSON-serializable ComplianceFinding.""" + + class TrailLike: + def __init__(self): + self.name = "trail-A" + self.kms_key_id = "arn:aws:kms:..." 
+ + def model_dump(self): + return {"name": self.name, "kms_key_id": self.kms_key_id} + + finding = _make_finding("check_a") + finding.resource_metadata = {"trail": TrailLike()} + req = _simple_requirement() + fw = _make_framework([req]) + + output = OCSFComplianceOutput(findings=[finding], framework=fw, provider="aws") + + # Serialize the resulting ComplianceFinding — must NOT raise + cf = output.data[0] + if hasattr(cf, "model_dump_json"): + json_output = cf.model_dump_json(exclude_none=True) + else: + json_output = cf.json(exclude_none=True) + payload = json.loads(json_output) + + # Confirm the trail object made it through as a plain dict + assert payload["resources"][0]["data"]["metadata"]["trail"]["name"] == "trail-A" + + +class TestEventStatusInline: + """Tests for the inlined event_status logic that replaced + OCSF.get_finding_status_id() to break the cyclic import.""" + + def test_unmuted_finding_status_new(self): + finding = _make_finding("check_a") + finding.muted = False + req = _simple_requirement() + fw = _make_framework([req]) + + output = OCSFComplianceOutput(findings=[finding], framework=fw, provider="aws") + cf = output.data[0] + + assert cf.status_id == EventStatusID.New.value + assert cf.status == EventStatusID.New.name + + def test_muted_finding_status_suppressed(self): + finding = _make_finding("check_a") + finding.muted = True + req = _simple_requirement() + fw = _make_framework([req]) + + output = OCSFComplianceOutput(findings=[finding], framework=fw, provider="aws") + cf = output.data[0] + + assert cf.status_id == EventStatusID.Suppressed.value + assert cf.status == EventStatusID.Suppressed.name + + +class TestNoTopLevelOCSFImport: + """Regression test: the top-level OCSF/Finding imports were removed + to break the CodeQL cyclic-import warnings. 
Ensure they stay out of + the runtime namespace of the module (TYPE_CHECKING block only).""" + + def test_finding_not_in_runtime_namespace(self): + import prowler.lib.outputs.compliance.universal.ocsf_compliance as mod + + assert "Finding" not in dir(mod) + + def test_ocsf_class_not_imported(self): + import prowler.lib.outputs.compliance.universal.ocsf_compliance as mod + + assert "OCSF" not in dir(mod) diff --git a/tests/lib/outputs/compliance/universal/universal_output_test.py b/tests/lib/outputs/compliance/universal/universal_output_test.py new file mode 100644 index 0000000000..391a9015d7 --- /dev/null +++ b/tests/lib/outputs/compliance/universal/universal_output_test.py @@ -0,0 +1,568 @@ +from types import SimpleNamespace + +from prowler.lib.check.compliance_models import ( + AttributeMetadata, + ComplianceFramework, + OutputFormats, + OutputsConfig, + TableConfig, + UniversalComplianceRequirement, +) +from prowler.lib.outputs.compliance.universal.universal_output import ( + UniversalComplianceOutput, +) + + +def _make_finding(check_id, status="PASS", compliance_map=None): + """Create a mock Finding for output tests.""" + finding = SimpleNamespace() + finding.provider = "aws" + finding.account_uid = "123456789012" + finding.account_name = "test-account" + finding.region = "us-east-1" + finding.status = status + finding.status_extended = f"{check_id} is {status}" + finding.resource_uid = f"arn:aws:iam::123456789012:{check_id}" + finding.resource_name = check_id + finding.muted = False + finding.check_id = check_id + finding.metadata = SimpleNamespace( + Provider="aws", + CheckID=check_id, + Severity="medium", + ) + finding.compliance = compliance_map or {} + return finding + + +def _make_framework(requirements, attrs_metadata=None, table_config=None): + return ComplianceFramework( + framework="TestFW", + name="Test Framework", + provider="AWS", + version="1.0", + description="Test framework", + requirements=requirements, + attributes_metadata=attrs_metadata, + 
outputs=OutputsConfig(table_config=table_config) if table_config else None, + ) + + +class TestDynamicCSVColumns: + def test_columns_match_metadata(self, tmp_path): + reqs = [ + UniversalComplianceRequirement( + id="1.1", + description="test", + attributes={"Section": "IAM", "SubSection": "Auth"}, + checks={"aws": ["check_a"]}, + ), + ] + metadata = [ + AttributeMetadata(key="Section", type="str"), + AttributeMetadata(key="SubSection", type="str"), + ] + fw = _make_framework(reqs, metadata, TableConfig(group_by="Section")) + + findings = [ + _make_finding("check_a", "PASS", {"TestFW-1.0": ["1.1"]}), + ] + filepath = str(tmp_path / "test.csv") + + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=filepath, + ) + + assert len(output.data) == 1 + row_dict = output.data[0].dict() + assert "Requirements_Attributes_Section" in row_dict + assert "Requirements_Attributes_SubSection" in row_dict + assert row_dict["Requirements_Attributes_Section"] == "IAM" + assert row_dict["Requirements_Attributes_SubSection"] == "Auth" + + +class TestManualRequirements: + def test_manual_status(self, tmp_path): + reqs = [ + UniversalComplianceRequirement( + id="1.1", + description="test", + attributes={"Section": "IAM"}, + checks={"aws": ["check_a"]}, + ), + UniversalComplianceRequirement( + id="manual-1", + description="manual check", + attributes={"Section": "Governance"}, + checks={}, + ), + ] + metadata = [ + AttributeMetadata(key="Section", type="str"), + ] + fw = _make_framework(reqs, metadata, TableConfig(group_by="Section")) + + findings = [ + _make_finding("check_a", "PASS", {"TestFW-1.0": ["1.1"]}), + ] + filepath = str(tmp_path / "test.csv") + + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=filepath, + ) + + # Should have 1 real finding + 1 manual + assert len(output.data) == 2 + manual_rows = [r for r in output.data if r.dict()["Status"] == "MANUAL"] + assert len(manual_rows) == 1 + assert 
manual_rows[0].dict()["Requirements_Id"] == "manual-1" + assert manual_rows[0].dict()["ResourceId"] == "manual_check" + + +class TestMITREExtraColumns: + def test_mitre_columns_present(self, tmp_path): + reqs = [ + UniversalComplianceRequirement( + id="T1190", + description="Exploit", + attributes={}, + checks={"aws": ["check_a"]}, + tactics=["Initial Access"], + sub_techniques=[], + platforms=["IaaS"], + technique_url="https://attack.mitre.org/techniques/T1190/", + ), + ] + fw = _make_framework(reqs, None, TableConfig(group_by="_Tactics")) + + findings = [ + _make_finding("check_a", "PASS", {"TestFW-1.0": ["T1190"]}), + ] + filepath = str(tmp_path / "test.csv") + + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=filepath, + ) + + assert len(output.data) == 1 + row_dict = output.data[0].dict() + assert "Requirements_Tactics" in row_dict + assert row_dict["Requirements_Tactics"] == "Initial Access" + assert "Requirements_TechniqueURL" in row_dict + + +class TestCSVFileWrite: + def test_batch_write(self, tmp_path): + reqs = [ + UniversalComplianceRequirement( + id="1.1", + description="test", + attributes={"Section": "IAM"}, + checks={"aws": ["check_a"]}, + ), + ] + metadata = [ + AttributeMetadata(key="Section", type="str"), + ] + fw = _make_framework(reqs, metadata, TableConfig(group_by="Section")) + + findings = [ + _make_finding("check_a", "PASS", {"TestFW-1.0": ["1.1"]}), + ] + filepath = str(tmp_path / "test.csv") + + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=filepath, + ) + output.batch_write_data_to_file() + + # Verify file was created and has content + with open(filepath, "r") as f: + content = f.read() + assert "PROVIDER" in content # Headers are uppercase + assert "REQUIREMENTS_ATTRIBUTES_SECTION" in content + assert "IAM" in content + + +class TestNoFindings: + def test_empty_findings_no_data(self, tmp_path): + reqs = [ + UniversalComplianceRequirement( + id="1.1", + 
description="test", + attributes={"Section": "IAM"}, + checks={"aws": ["check_a"]}, + ), + ] + fw = _make_framework(reqs, None, TableConfig(group_by="Section")) + filepath = str(tmp_path / "test.csv") + + output = UniversalComplianceOutput( + findings=[], + framework=fw, + file_path=filepath, + ) + assert len(output.data) == 0 + + +class TestMultiProviderOutput: + def test_dict_checks_filtered_by_provider(self, tmp_path): + """Only checks for the given provider appear in CSV output.""" + reqs = [ + UniversalComplianceRequirement( + id="1.1", + description="test", + attributes={"Section": "IAM"}, + checks={"aws": ["check_a"], "azure": ["check_b"]}, + ), + ] + metadata = [ + AttributeMetadata(key="Section", type="str"), + ] + fw = ComplianceFramework( + framework="MultiCloud", + name="Multi", + version="1.0", + description="Test multi-provider", + requirements=reqs, + attributes_metadata=metadata, + outputs=OutputsConfig(table_config=TableConfig(group_by="Section")), + ) + + findings = [ + _make_finding("check_a", "PASS", {"MultiCloud-1.0": ["1.1"]}), + _make_finding("check_b", "FAIL", {"MultiCloud-1.0": ["1.1"]}), + ] + filepath = str(tmp_path / "test.csv") + + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=filepath, + provider="aws", + ) + + # Only check_a should match (it's the AWS check) + assert len(output.data) == 1 + row_dict = output.data[0].dict() + assert row_dict["Requirements_Attributes_Section"] == "IAM" + + def test_no_provider_includes_all(self, tmp_path): + """Without provider filter, all checks from all providers are included.""" + reqs = [ + UniversalComplianceRequirement( + id="1.1", + description="test", + attributes={"Section": "IAM"}, + checks={"aws": ["check_a"], "azure": ["check_b"]}, + ), + ] + metadata = [ + AttributeMetadata(key="Section", type="str"), + ] + fw = ComplianceFramework( + framework="MultiCloud", + name="Multi", + version="1.0", + description="Test multi-provider", + requirements=reqs, + 
attributes_metadata=metadata, + outputs=OutputsConfig(table_config=TableConfig(group_by="Section")), + ) + + findings = [ + _make_finding("check_a", "PASS", {"MultiCloud-1.0": ["1.1"]}), + _make_finding("check_b", "FAIL", {"MultiCloud-1.0": ["1.1"]}), + ] + filepath = str(tmp_path / "test.csv") + + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=filepath, + ) + + # Both checks should be included without provider filter + assert len(output.data) == 2 + + def test_empty_dict_checks_is_manual(self, tmp_path): + """Requirement with empty dict checks is treated as manual.""" + reqs = [ + UniversalComplianceRequirement( + id="manual-1", + description="manual check", + attributes={"Section": "Governance"}, + checks={}, + ), + ] + metadata = [ + AttributeMetadata(key="Section", type="str"), + ] + fw = ComplianceFramework( + framework="MultiCloud", + name="Multi", + version="1.0", + description="Test", + requirements=reqs, + attributes_metadata=metadata, + outputs=OutputsConfig(table_config=TableConfig(group_by="Section")), + ) + + filepath = str(tmp_path / "test.csv") + + output = UniversalComplianceOutput( + findings=[_make_finding("other_check", "PASS", {})], + framework=fw, + file_path=filepath, + provider="aws", + ) + + manual_rows = [r for r in output.data if r.dict()["Status"] == "MANUAL"] + assert len(manual_rows) == 1 + assert manual_rows[0].dict()["Requirements_Id"] == "manual-1" + + +class TestCSVExclude: + def test_csv_false_excludes_column(self, tmp_path): + reqs = [ + UniversalComplianceRequirement( + id="1.1", + description="test", + attributes={"Section": "IAM", "Internal": "hidden"}, + checks={"aws": ["check_a"]}, + ), + ] + metadata = [ + AttributeMetadata( + key="Section", type="str", output_formats=OutputFormats(csv=True) + ), + AttributeMetadata( + key="Internal", type="str", output_formats=OutputFormats(csv=False) + ), + ] + fw = _make_framework(reqs, metadata, TableConfig(group_by="Section")) + + findings = [ + 
_make_finding("check_a", "PASS", {"TestFW-1.0": ["1.1"]}), + ] + filepath = str(tmp_path / "test.csv") + + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=filepath, + ) + + row_dict = output.data[0].dict() + assert "Requirements_Attributes_Section" in row_dict + assert "Requirements_Attributes_Internal" not in row_dict + + +def _make_provider_finding(provider, check_id="check_a", status="PASS"): + """Create a mock Finding with a specific provider.""" + finding = _make_finding(check_id, status, {"TestFW-1.0": ["1.1"]}) + finding.provider = provider + return finding + + +def _simple_framework(): + all_providers = [ + "aws", + "azure", + "gcp", + "kubernetes", + "m365", + "github", + "oraclecloud", + "alibabacloud", + "nhn", + "unknown", + ] + reqs = [ + UniversalComplianceRequirement( + id="1.1", + description="test", + attributes={"Section": "IAM"}, + checks={p: ["check_a"] for p in all_providers}, + ), + ] + metadata = [ + AttributeMetadata(key="Section", type="str"), + ] + return _make_framework(reqs, metadata, TableConfig(group_by="Section")) + + +class TestProviderHeaders: + def test_aws_headers(self, tmp_path): + fw = _simple_framework() + findings = [_make_provider_finding("aws")] + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=str(tmp_path / "test.csv"), + provider="aws", + ) + row_dict = output.data[0].dict() + assert "AccountId" in row_dict + assert "Region" in row_dict + assert row_dict["AccountId"] == "123456789012" + assert row_dict["Region"] == "us-east-1" + + def test_azure_headers(self, tmp_path): + fw = _simple_framework() + findings = [_make_provider_finding("azure")] + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=str(tmp_path / "test.csv"), + provider="azure", + ) + row_dict = output.data[0].dict() + assert "SubscriptionId" in row_dict + assert "Location" in row_dict + assert row_dict["SubscriptionId"] == "123456789012" + assert 
row_dict["Location"] == "us-east-1" + + def test_gcp_headers(self, tmp_path): + fw = _simple_framework() + findings = [_make_provider_finding("gcp")] + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=str(tmp_path / "test.csv"), + provider="gcp", + ) + row_dict = output.data[0].dict() + assert "ProjectId" in row_dict + assert "Location" in row_dict + assert row_dict["ProjectId"] == "123456789012" + + def test_kubernetes_headers(self, tmp_path): + fw = _simple_framework() + findings = [_make_provider_finding("kubernetes")] + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=str(tmp_path / "test.csv"), + provider="kubernetes", + ) + row_dict = output.data[0].dict() + assert "Context" in row_dict + assert "Namespace" in row_dict + # Kubernetes Context maps to account_name + assert row_dict["Context"] == "test-account" + assert row_dict["Namespace"] == "us-east-1" + + def test_github_headers(self, tmp_path): + fw = _simple_framework() + findings = [_make_provider_finding("github")] + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=str(tmp_path / "test.csv"), + provider="github", + ) + row_dict = output.data[0].dict() + assert "Account_Name" in row_dict + assert "Account_Id" in row_dict + # GitHub: Account_Name (pos 3) from account_name, Account_Id (pos 4) from account_uid + assert row_dict["Account_Name"] == "test-account" + assert row_dict["Account_Id"] == "123456789012" + # Verify column order matches legacy (Account_Name before Account_Id) + keys = list(row_dict.keys()) + assert keys.index("Account_Name") < keys.index("Account_Id") + + def test_unknown_provider_defaults(self, tmp_path): + fw = _simple_framework() + findings = [_make_provider_finding("unknown")] + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=str(tmp_path / "test.csv"), + provider="unknown", + ) + row_dict = output.data[0].dict() + assert "AccountId" in 
row_dict + assert "Region" in row_dict + + def test_none_provider_defaults(self, tmp_path): + fw = _simple_framework() + findings = [_make_provider_finding("aws")] + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=str(tmp_path / "test.csv"), + ) + row_dict = output.data[0].dict() + assert "AccountId" in row_dict + assert "Region" in row_dict + + def test_csv_write_azure_headers(self, tmp_path): + fw = _simple_framework() + findings = [_make_provider_finding("azure")] + filepath = str(tmp_path / "test.csv") + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=filepath, + provider="azure", + ) + output.batch_write_data_to_file() + + with open(filepath, "r") as f: + content = f.read() + assert "SUBSCRIPTIONID" in content + assert "LOCATION" in content + # Should NOT have the default AccountId/Region headers + assert "ACCOUNTID" not in content + + def test_column_order_matches_legacy(self, tmp_path): + """Verify that the base column order matches the legacy per-provider models. + + Legacy models all define: Provider, Description, , , AssessmentDate, ... + The universal output must preserve this exact order for backward compatibility. 
+ """ + # Expected column order per provider (positions 3 and 4 after Provider, Description) + legacy_order = { + "aws": ("AccountId", "Region"), + "azure": ("SubscriptionId", "Location"), + "gcp": ("ProjectId", "Location"), + "kubernetes": ("Context", "Namespace"), + "m365": ("TenantId", "Location"), + "github": ("Account_Name", "Account_Id"), + "oraclecloud": ("TenancyId", "Region"), + "alibabacloud": ("AccountId", "Region"), + "nhn": ("AccountId", "Region"), + } + + for provider_name, (expected_col3, expected_col4) in legacy_order.items(): + fw = _simple_framework() + findings = [_make_provider_finding(provider_name)] + output = UniversalComplianceOutput( + findings=findings, + framework=fw, + file_path=str(tmp_path / f"test_{provider_name}.csv"), + provider=provider_name, + ) + keys = list(output.data[0].dict().keys()) + assert keys[0] == "Provider", f"{provider_name}: col 1 should be Provider" + assert ( + keys[1] == "Description" + ), f"{provider_name}: col 2 should be Description" + assert ( + keys[2] == expected_col3 + ), f"{provider_name}: col 3 should be {expected_col3}, got {keys[2]}" + assert ( + keys[3] == expected_col4 + ), f"{provider_name}: col 4 should be {expected_col4}, got {keys[3]}" + assert ( + keys[4] == "AssessmentDate" + ), f"{provider_name}: col 5 should be AssessmentDate" From 228fe6d57995c5b98671f0f5657b05c4e69e3825 Mon Sep 17 00:00:00 2001 From: Boon Date: Thu, 30 Apr 2026 19:49:08 +0800 Subject: [PATCH 02/29] feat: add ASD Essential Eight compliance framework for AWS (#10808) Co-authored-by: Boon Co-authored-by: pedrooot --- prowler/CHANGELOG.md | 3 + prowler/__main__.py | 15 + .../compliance/aws/essential_eight_aws.json | 1282 +++++++++++++++++ prowler/lib/check/compliance_models.py | 43 + prowler/lib/outputs/compliance/compliance.py | 12 + .../compliance/essential_eight/__init__.py | 0 .../essential_eight/essential_eight.py | 98 ++ .../essential_eight/essential_eight_aws.py | 111 ++ .../compliance/essential_eight/models.py | 35 + 
.../compliance/essential_eight/__init__.py | 0 .../essential_eight_aws_test.py | 128 ++ tests/lib/outputs/compliance/fixtures.py | 56 + 12 files changed, 1783 insertions(+) create mode 100644 prowler/compliance/aws/essential_eight_aws.json create mode 100644 prowler/lib/outputs/compliance/essential_eight/__init__.py create mode 100644 prowler/lib/outputs/compliance/essential_eight/essential_eight.py create mode 100644 prowler/lib/outputs/compliance/essential_eight/essential_eight_aws.py create mode 100644 prowler/lib/outputs/compliance/essential_eight/models.py create mode 100644 tests/lib/outputs/compliance/essential_eight/__init__.py create mode 100644 tests/lib/outputs/compliance/essential_eight/essential_eight_aws_test.py diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index 283b9508a9..649ae60c5a 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -9,6 +9,9 @@ All notable changes to the **Prowler SDK** are documented in this file. - `bedrock_guardrails_configured` check for AWS provider [(#10844)](https://github.com/prowler-cloud/prowler/pull/10844) - Universal compliance pipeline integrated into the CLI: `--list-compliance` and `--list-compliance-requirements` show universal frameworks, and CSV plus OCSF outputs are generated for any framework declaring a `TableConfig` [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301) +### 🚀 Added +- ASD Essential Eight Maturity Model compliance framework for AWS provider, mapping 64 checks across all 8 controls [(#10808)](https://github.com/prowler-cloud/prowler/pull/10808) + ### 🔄 Changed - `route53_dangling_ip_subdomain_takeover` now also flags `CNAME` records pointing to S3 website endpoints whose buckets are missing from the account [(#10920)](https://github.com/prowler-cloud/prowler/pull/10920) diff --git a/prowler/__main__.py b/prowler/__main__.py index da2b339b27..e10c9c745b 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -90,6 +90,9 @@ from 
prowler.lib.outputs.compliance.csa.csa_oraclecloud import OracleCloudCSA from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS from prowler.lib.outputs.compliance.ens.ens_azure import AzureENS from prowler.lib.outputs.compliance.ens.ens_gcp import GCPENS +from prowler.lib.outputs.compliance.essential_eight.essential_eight_aws import ( + EssentialEightAWS, +) from prowler.lib.outputs.compliance.generic.generic import GenericCompliance from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001 from prowler.lib.outputs.compliance.iso27001.iso27001_azure import AzureISO27001 @@ -673,6 +676,18 @@ def prowler(): ) generated_outputs["compliance"].append(cis) cis.batch_write_data_to_file() + elif compliance_name.startswith("essential_eight"): + filename = ( + f"{output_options.output_directory}/compliance/" + f"{output_options.output_filename}_{compliance_name}.csv" + ) + essential_eight = EssentialEightAWS( + findings=finding_outputs, + compliance=bulk_compliance_frameworks[compliance_name], + file_path=filename, + ) + generated_outputs["compliance"].append(essential_eight) + essential_eight.batch_write_data_to_file() elif compliance_name == "mitre_attack_aws": # Generate MITRE ATT&CK Finding Object filename = ( diff --git a/prowler/compliance/aws/essential_eight_aws.json b/prowler/compliance/aws/essential_eight_aws.json new file mode 100644 index 0000000000..46164ebba5 --- /dev/null +++ b/prowler/compliance/aws/essential_eight_aws.json @@ -0,0 +1,1282 @@ +{ + "Framework": "Essential-Eight", + "Name": "ASD Essential Eight Maturity Model - Maturity Level One (AWS)", + "Version": "Nov 2023", + "Provider": "AWS", + "Description": "Literal mapping of the Australian Signals Directorate (ASD) Essential Eight Maturity Model (Last updated November 2023, Appendix A: Maturity Level One) to AWS infrastructure checks. 
Each Requirement is one literal clause from the ASD document, in canonical document order: (1) Patch applications, (2) Patch operating systems, (3) Multi-factor authentication, (4) Restrict administrative privileges, (5) Application control, (6) Restrict Microsoft Office macros, (7) User application hardening, (8) Regular backups. The Essential Eight is designed to protect internet-connected information technology networks (workstations, internet-facing servers, online services). Several controls are inherently endpoint or procedural and have no AWS infrastructure equivalent - those clauses are flagged with CloudApplicability `non-applicable` and AssessmentStatus `Manual`. ML2 and ML3 are out of scope of this framework.", + "Requirements": [ + { + "Id": "E8-1.1", + "Description": "An automated method of asset discovery is used at least fortnightly to support the detection of assets for subsequent vulnerability scanning activities.", + "Checks": [ + "ec2_instance_managed_by_ssm", + "config_recorder_all_regions_enabled", + "inspector2_is_enabled" + ], + "Attributes": [ + { + "Section": "1 Patch applications", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Shadow IT", + "Unmanaged vulnerable assets" + ], + "Description": "Asset discovery in AWS is delivered by AWS Config (resource recorder), SSM Inventory and Inspector v2's continuous coverage. The fortnightly cadence itself is not directly observable from AWS APIs - what is observable is whether the discovery mechanisms are enabled.", + "RationaleStatement": "Vulnerability scanning is only as good as the asset coverage it has. Continuous AWS-native discovery is the cloud equivalent of fortnightly asset discovery.", + "ImpactStatement": "Cadence verification is procedural - Prowler verifies the discovery mechanism is enabled, not that it ran in the last fortnight.", + "RemediationProcedure": "Enable AWS Config recorders in every region. 
Ensure all EC2 instances are managed by SSM. Enable Inspector v2.", + "AuditProcedure": "Verify the listed checks pass.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch applications - clause 1.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-1.2", + "Description": "A vulnerability scanner with an up-to-date vulnerability database is used for vulnerability scanning activities.", + "Checks": [ + "inspector2_is_enabled" + ], + "Attributes": [ + { + "Section": "1 Patch applications", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Stale vulnerability data", + "Missed CVE coverage" + ], + "Description": "Inspector v2 is AWS's managed vulnerability scanner; AWS keeps its vulnerability database up to date.", + "RationaleStatement": "An out-of-date scanner cannot detect newly disclosed vulnerabilities.", + "ImpactStatement": "Coverage is limited to AWS-resident workloads (EC2, ECR, Lambda). 
Endpoints (workstations, browsers, Office) are out of scope.", + "RemediationProcedure": "Enable Inspector v2 across all regions and ensure all eligible resources are enrolled.", + "AuditProcedure": "Verify Inspector v2 is enabled.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch applications - clause 2.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-1.3", + "Description": "A vulnerability scanner is used at least daily to identify missing patches or updates for vulnerabilities in online services.", + "Checks": [ + "inspector2_is_enabled", + "inspector2_active_findings_exist" + ], + "Attributes": [ + { + "Section": "1 Patch applications", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Late detection of internet-facing vulnerabilities" + ], + "Description": "Inspector v2 performs continuous scanning of AWS-hosted online services (Lambda, ECR-backed containers, EC2 with internet exposure). 
The 'at least daily' cadence is implicit in continuous scanning.", + "RationaleStatement": "Internet-facing services are weaponised fastest after a vulnerability is disclosed.", + "ImpactStatement": "Cadence cannot be evidenced by a single scan; requires Inspector audit history.", + "RemediationProcedure": "Enable Inspector v2 with all scan types relevant to the service surface (EC2, ECR, Lambda).", + "AuditProcedure": "Verify Inspector v2 enabled and review finding age distribution.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch applications - clause 3.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-1.4", + "Description": "A vulnerability scanner is used at least weekly to identify missing patches or updates for vulnerabilities in office productivity suites, web browsers and their extensions, email clients, PDF software, and security products.", + "Checks": [], + "Attributes": [ + { + "Section": "1 Patch applications", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Endpoint application exploitation" + ], + "Description": "This clause targets endpoint-resident applications (Office, browsers, email, PDF, security products). AWS infrastructure scans cannot evidence endpoint vulnerability scanning.", + "RationaleStatement": "Endpoint applications are major attack vectors but live outside AWS infrastructure scope.", + "ImpactStatement": "Evidence must come from endpoint-management vulnerability scanners (e.g. 
Defender, Qualys, Tenable).", + "RemediationProcedure": "Run an endpoint vulnerability scanner with weekly cadence covering Office, browsers, email, PDF and security products.", + "AuditProcedure": "Manual review of endpoint vulnerability scanning evidence.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch applications - clause 4. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-1.5", + "Description": "Patches, updates or other vendor mitigations for vulnerabilities in online services are applied within 48 hours of release when vulnerabilities are assessed as critical by vendors or when working exploits exist.", + "Checks": [ + "inspector2_active_findings_exist", + "awslambda_function_using_supported_runtimes", + "rds_instance_deprecated_engine_version", + "rds_instance_extended_support" + ], + "Attributes": [ + { + "Section": "1 Patch applications", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Active exploitation of disclosed CVEs", + "N-day exploit kits" + ], + "Description": "AWS managed services (RDS, ElastiCache, Lambda, ECS Fargate, etc.) consume vendor patches transparently when auto-upgrade is enabled. The 48-hour SLA itself cannot be evidenced from AWS APIs - it requires correlating Inspector v2 first-seen timestamps with remediation timestamps.", + "RationaleStatement": "Critical vulnerabilities with working exploits are weaponised within hours.", + "ImpactStatement": "Prowler verifies the absence of active critical findings (a proxy outcome) and the absence of unsupported runtimes (a precondition); SLA timing requires manual correlation.", + "RemediationProcedure": "Maintain auto-upgrade flags on managed services. 
Configure Inspector v2 alerting for critical findings on internet-facing resources with 48-hour escalation.", + "AuditProcedure": "Manual review of Inspector finding age + remediation timestamps for internet-facing critical findings.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch applications - clause 5. SLA timing out of Prowler scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-1.6", + "Description": "Patches, updates or other vendor mitigations for vulnerabilities in online services are applied within two weeks of release when vulnerabilities are assessed as non-critical by vendors and no working exploits exist.", + "Checks": [ + "ssm_managed_compliant_patching", + "awslambda_function_using_supported_runtimes", + "rds_instance_minor_version_upgrade_enabled", + "rds_cluster_minor_version_upgrade_enabled", + "elasticache_redis_cluster_auto_minor_version_upgrades", + "memorydb_cluster_auto_minor_version_upgrades", + "dms_instance_minor_version_upgrade_enabled", + "mq_broker_auto_minor_version_upgrades", + "redshift_cluster_automatic_upgrades", + "kafka_cluster_uses_latest_version", + "opensearch_service_domains_updated_to_the_latest_service_software_version", + "ecs_service_fargate_latest_platform_version" + ], + "Attributes": [ + { + "Section": "1 Patch applications", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Accumulated technical debt", + "Eventual exploitation of stale CVEs" + ], + "Description": "Auto-upgrade flags on AWS managed services (RDS, Aurora, ElastiCache, MemoryDB, DMS, MQ, Redshift, OpenSearch, MSK, Fargate) and SSM Patch Manager for self-managed compute deliver the underlying capability. 
SLA timing itself is not surfaced.", + "RationaleStatement": "Two weeks is the standard ASD cadence for non-critical patching of internet-facing services.", + "ImpactStatement": "SLA window evidence requires patch logs / change-management correlation, not in Prowler scope.", + "RemediationProcedure": "Enable auto minor version upgrade on all managed data services. Enable SSM Patch Manager on all EC2.", + "AuditProcedure": "Run all listed checks. Manually review patch deployment timeline.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch applications - clause 6.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-1.7", + "Description": "Patches, updates or other vendor mitigations for vulnerabilities in office productivity suites, web browsers and their extensions, email clients, PDF software, and security products are applied within two weeks of release.", + "Checks": [], + "Attributes": [ + { + "Section": "1 Patch applications", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Endpoint application exploitation" + ], + "Description": "Endpoint-resident applications (Office, browsers, email clients, PDF, security products) are out of AWS infrastructure scope.", + "RationaleStatement": "Same as 1.4 - this is endpoint-management territory.", + "ImpactStatement": "Evidence must come from endpoint patch-management tooling.", + "RemediationProcedure": "Use endpoint patch-management (Intune, SCCM, third-party MDM) to enforce two-week SLA on these application classes.", + "AuditProcedure": "Manual review of endpoint patch reports.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch applications - clause 7. 
Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-1.8", + "Description": "Online services that are no longer supported by vendors are removed.", + "Checks": [ + "awslambda_function_using_supported_runtimes", + "rds_instance_deprecated_engine_version", + "rds_instance_extended_support", + "eks_cluster_uses_a_supported_version", + "ecs_service_fargate_latest_platform_version", + "kafka_cluster_uses_latest_version", + "opensearch_service_domains_updated_to_the_latest_service_software_version" + ], + "Attributes": [ + { + "Section": "1 Patch applications", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "full", + "MitigatedThreats": [ + "Use of unsupported software", + "Long-tail vulnerability accumulation" + ], + "Description": "Online services in AWS scope: Lambda runtimes, RDS engines, EKS Kubernetes versions, ECS/Fargate platform versions, Kafka, OpenSearch. 
Prowler can detect deprecated/unsupported versions across all of these.", + "RationaleStatement": "Unsupported services no longer receive security patches.", + "ImpactStatement": "", + "RemediationProcedure": "Migrate Lambda functions off deprecated runtimes; remove RDS instances on Extended Support or deprecated engines; upgrade EKS clusters to supported Kubernetes versions; use latest Fargate platform version; keep Kafka and OpenSearch at supported versions.", + "AuditProcedure": "Run all listed checks.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch applications - clause 8.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-1.9", + "Description": "Office productivity suites, web browsers and their extensions, email clients, PDF software, Adobe Flash Player, and security products that are no longer supported by vendors are removed.", + "Checks": [], + "Attributes": [ + { + "Section": "1 Patch applications", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Endpoint compromise via unsupported applications" + ], + "Description": "Endpoint-resident applications. Out of AWS infrastructure scope.", + "RationaleStatement": "Adobe Flash Player end-of-life is the canonical example - removal must be enforced at the endpoint.", + "ImpactStatement": "Evidence must come from endpoint software inventory.", + "RemediationProcedure": "Inventory endpoint applications; enforce removal of EOL software via endpoint management.", + "AuditProcedure": "Manual review of endpoint inventory.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch applications - clause 9. 
Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-2.1", + "Description": "An automated method of asset discovery is used at least fortnightly to support the detection of assets for subsequent vulnerability scanning activities.", + "Checks": [ + "ec2_instance_managed_by_ssm", + "config_recorder_all_regions_enabled", + "inspector2_is_enabled" + ], + "Attributes": [ + { + "Section": "2 Patch operating systems", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "full", + "MitigatedThreats": [ + "Unmanaged hosts", + "OS-level shadow IT" + ], + "Description": "Same asset-discovery clause as 1.1, applied here to operating systems. AWS Config + SSM Inventory continuously discovers EC2 instances and their OS metadata; Inspector v2 enrols them automatically.", + "RationaleStatement": "OS vulnerability scanning depends on asset coverage.", + "ImpactStatement": "", + "RemediationProcedure": "Enable AWS Config in every region; ensure all EC2 are SSM-managed; enable Inspector v2.", + "AuditProcedure": "Run all listed checks.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch operating systems - clause 1.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-2.2", + "Description": "A vulnerability scanner with an up-to-date vulnerability database is used for vulnerability scanning activities.", + "Checks": [ + "inspector2_is_enabled" + ], + "Attributes": [ + { + "Section": "2 Patch operating systems", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "full", + "MitigatedThreats": [ + "Stale OS vulnerability data" + ], + "Description": "Inspector v2 is AWS's managed OS vulnerability scanner with continuously updated 
database.", + "RationaleStatement": "Same rationale as 1.2 applied to OS scanning.", + "ImpactStatement": "", + "RemediationProcedure": "Enable Inspector v2 in all regions.", + "AuditProcedure": "Verify Inspector v2 is enabled.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch operating systems - clause 2.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-2.3", + "Description": "A vulnerability scanner is used at least daily to identify missing patches or updates for vulnerabilities in operating systems of internet-facing servers and internet-facing network devices.", + "Checks": [ + "inspector2_is_enabled", + "inspector2_active_findings_exist" + ], + "Attributes": [ + { + "Section": "2 Patch operating systems", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Late detection of OS vulnerabilities on exposed hosts" + ], + "Description": "Internet-facing servers in AWS map to EC2 with public IP / behind public ALB / NLB. Inspector v2 performs continuous OS scanning. Network devices (firewalls, routers) inside customer VPC are typically appliance AMIs - scanned by Inspector when SSM-managed.", + "RationaleStatement": "Internet-facing OS surface is the primary entry point for opportunistic attacks.", + "ImpactStatement": "Daily cadence implicit in continuous scanning; not directly verifiable.", + "RemediationProcedure": "Enable Inspector v2 EC2 scanning. Ensure network appliances are SSM-managed where possible.", + "AuditProcedure": "Verify Inspector v2 enabled with EC2 scan type. 
Manually verify cadence via finding history.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch operating systems - clause 3.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-2.4", + "Description": "A vulnerability scanner is used at least fortnightly to identify missing patches or updates for vulnerabilities in operating systems of workstations, non-internet-facing servers and non-internet-facing network devices.", + "Checks": [ + "inspector2_is_enabled" + ], + "Attributes": [ + { + "Section": "2 Patch operating systems", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Lateral movement via unpatched internal hosts" + ], + "Description": "Workstations are out of AWS infrastructure scope. Non-internet-facing EC2 servers and appliance AMIs are scanned by Inspector v2 when SSM-managed.", + "RationaleStatement": "Internal hosts still carry lateral-movement risk.", + "ImpactStatement": "Workstation scanning is endpoint-management territory.", + "RemediationProcedure": "Ensure internal EC2 are SSM-managed and Inspector v2 covers them. 
For workstations, use endpoint vulnerability tooling.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch operating systems - clause 4.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-2.5", + "Description": "Patches, updates or other vendor mitigations for vulnerabilities in operating systems of internet-facing servers and internet-facing network devices are applied within 48 hours of release when vulnerabilities are assessed as critical by vendors or when working exploits exist.", + "Checks": [ + "inspector2_active_findings_exist" + ], + "Attributes": [ + { + "Section": "2 Patch operating systems", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Active exploitation of OS-level CVEs", + "Public-facing intrusion" + ], + "Description": "SLA for critical OS patches on internet-facing hosts. AWS APIs do not surface time-to-patch.", + "RationaleStatement": "Critical OS CVEs on internet-facing surface are weaponised fastest.", + "ImpactStatement": "Requires correlation between Inspector finding age and SSM patch logs.", + "RemediationProcedure": "Configure Inspector v2 critical-finding alerting; route to a 48-hour remediation queue. Configure SSM Patch Manager with appropriate baselines.", + "AuditProcedure": "Manual: review patch deployment timestamps for internet-facing critical OS findings.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch operating systems - clause 5. 
SLA timing out of Prowler scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-2.6", + "Description": "Patches, updates or other vendor mitigations for vulnerabilities in operating systems of internet-facing servers and internet-facing network devices are applied within two weeks of release when vulnerabilities are assessed as non-critical by vendors and no working exploits exist.", + "Checks": [ + "ssm_managed_compliant_patching", + "ec2_instance_managed_by_ssm" + ], + "Attributes": [ + { + "Section": "2 Patch operating systems", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Stale OS exposure", + "Background CVE accumulation" + ], + "Description": "Two-week SLA for non-critical OS patches on internet-facing hosts. SSM Patch Manager delivers the capability; SLA evidence is procedural.", + "RationaleStatement": "Two-week cadence is the ASD baseline for non-critical OS patching of internet-facing hosts.", + "ImpactStatement": "SLA window evidence requires patch logs.", + "RemediationProcedure": "Configure SSM Patch Manager with a fortnightly maintenance window for internet-facing EC2.", + "AuditProcedure": "Verify SSM patch compliance + manually verify deployment cadence.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch operating systems - clause 6.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-2.7", + "Description": "Patches, updates or other vendor mitigations for vulnerabilities in operating systems of workstations, non-internet-facing servers and non-internet-facing network devices are applied within one month of release.", + "Checks": [ + "ssm_managed_compliant_patching", + "ec2_instance_managed_by_ssm" + ], + 
"Attributes": [ + { + "Section": "2 Patch operating systems", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Lateral movement", + "Internal compromise" + ], + "Description": "One-month SLA for OS patches on workstations and non-internet-facing hosts. AWS infrastructure covers non-internet-facing EC2 via SSM Patch Manager. Workstations are endpoint-management territory.", + "RationaleStatement": "Internal hosts have a longer SLA but still need regular patching.", + "ImpactStatement": "Workstation patch evidence is endpoint-side.", + "RemediationProcedure": "Configure SSM Patch Manager with a monthly maintenance window for non-internet-facing EC2.", + "AuditProcedure": "Verify SSM patch compliance for non-internet-facing instances.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch operating systems - clause 7.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-2.8", + "Description": "Operating systems that are no longer supported by vendors are replaced.", + "Checks": [ + "ec2_instance_with_outdated_ami", + "ec2_instance_older_than_specific_days", + "ec2_ami_public", + "eks_cluster_uses_a_supported_version" + ], + "Attributes": [ + { + "Section": "2 Patch operating systems", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "full", + "MitigatedThreats": [ + "Use of unsupported operating systems", + "Loss of security updates" + ], + "Description": "Detect EC2 instances on outdated AMIs, EKS clusters on unsupported Kubernetes versions, and AMIs that may be unsupported.", + "RationaleStatement": "Unsupported OS receives no further security updates.", + "ImpactStatement": "Prowler verifies AMI freshness and EKS version; the operator must define the policy threshold for `ec2_instance_older_than_specific_days`.", + 
"RemediationProcedure": "Replace EC2 instances on unsupported AMIs. Upgrade EKS clusters to supported Kubernetes versions.", + "AuditProcedure": "Run all listed checks.", + "AdditionalInformation": "ASD Essential Eight ML1 - Patch operating systems - clause 8.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-3.1", + "Description": "Multi-factor authentication is used to authenticate users to their organisation's online services that process, store or communicate their organisation's sensitive data.", + "Checks": [ + "iam_root_mfa_enabled", + "iam_root_hardware_mfa_enabled", + "iam_user_mfa_enabled_console_access", + "iam_user_hardware_mfa_enabled", + "iam_administrator_access_with_mfa", + "directoryservice_supported_mfa_radius_enabled", + "cloudwatch_log_metric_filter_sign_in_without_mfa" + ], + "Attributes": [ + { + "Section": "3 Multi-factor authentication", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Credential stuffing", + "Password reuse compromise", + "Unauthorised access to internal services" + ], + "Description": "MFA on the organisation's own AWS-hosted online services that handle sensitive data. Prowler verifies MFA on root, IAM users with console access, IAM admins and Directory Service. Whether a given service handles 'sensitive data' is a classification call the operator must make.", + "RationaleStatement": "MFA prevents credential-only compromise.", + "ImpactStatement": "Sensitive-data classification is procedural; Prowler verifies MFA presence on the underlying identity surface.", + "RemediationProcedure": "Enable hardware MFA on root and admin IAM users. Require MFA for all IAM users with console access. Enable MFA on Directory Service. Configure CloudWatch metric filter for non-MFA sign-ins.", + "AuditProcedure": "Run the listed checks. 
Manually classify which services hold sensitive data and confirm MFA coverage.", + "AdditionalInformation": "ASD Essential Eight ML1 - Multi-factor authentication - clause 1.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-3.2", + "Description": "Multi-factor authentication is used to authenticate users to third-party online services that process, store or communicate their organisation's sensitive data.", + "Checks": [], + "Attributes": [ + { + "Section": "3 Multi-factor authentication", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Third-party SaaS account compromise", + "Sensitive data leakage via SaaS" + ], + "Description": "Third-party online services (SaaS providers, external platforms) are outside the AWS account. AWS infrastructure scans cannot evidence MFA enforcement on those services.", + "RationaleStatement": "Third-party services are a major data-exfiltration vector.", + "ImpactStatement": "Evidence must come from each third-party provider's IAM / SSO configuration.", + "RemediationProcedure": "Enforce MFA in the third-party service's identity provider or SSO upstream (e.g. via AWS IAM Identity Center or external IdP).", + "AuditProcedure": "Manual review of third-party service IdP / SSO configuration.", + "AdditionalInformation": "ASD Essential Eight ML1 - Multi-factor authentication - clause 2. 
Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-3.3", + "Description": "Multi-factor authentication (where available) is used to authenticate users to third-party online services that process, store or communicate their organisation's non-sensitive data.", + "Checks": [], + "Attributes": [ + { + "Section": "3 Multi-factor authentication", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Account compromise on lower-sensitivity SaaS" + ], + "Description": "Same as 3.2, applied to non-sensitive third-party services. Out of AWS infrastructure scope.", + "RationaleStatement": "Defence in depth - even non-sensitive services can be staging ground for further attacks.", + "ImpactStatement": "Evidence must come from each third-party provider.", + "RemediationProcedure": "Where the third-party service supports it, enforce MFA upstream via SSO.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - Multi-factor authentication - clause 3. 
Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-3.4", + "Description": "Multi-factor authentication is used to authenticate users to their organisation's online customer services that process, store or communicate their organisation's sensitive customer data.", + "Checks": [ + "cognito_user_pool_mfa_enabled" + ], + "Attributes": [ + { + "Section": "3 Multi-factor authentication", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Customer-data exposure via account takeover" + ], + "Description": "Online customer services hosted by the organisation in AWS typically use Cognito. Whether a Cognito user pool serves users handling sensitive customer data is an operator classification.", + "RationaleStatement": "Insider users of customer-facing services need MFA when they handle sensitive customer data.", + "ImpactStatement": "Sensitive-data classification is procedural.", + "RemediationProcedure": "Enable MFA on every Cognito user pool that serves users handling sensitive customer data.", + "AuditProcedure": "Verify Cognito MFA per pool.", + "AdditionalInformation": "ASD Essential Eight ML1 - Multi-factor authentication - clause 4.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-3.5", + "Description": "Multi-factor authentication is used to authenticate users to third-party online customer services that process, store or communicate their organisation's sensitive customer data.", + "Checks": [], + "Attributes": [ + { + "Section": "3 Multi-factor authentication", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Customer-data exposure 
via third-party platform" + ], + "Description": "Third-party customer-facing services. Out of AWS infrastructure scope.", + "RationaleStatement": "Sensitive customer data on third-party platforms still needs MFA.", + "ImpactStatement": "Evidence must come from the third-party provider.", + "RemediationProcedure": "Enforce MFA in the third-party customer service's IdP.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - Multi-factor authentication - clause 5. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-3.6", + "Description": "Multi-factor authentication is used to authenticate customers to online customer services that process, store or communicate sensitive customer data.", + "Checks": [ + "cognito_user_pool_mfa_enabled" + ], + "Attributes": [ + { + "Section": "3 Multi-factor authentication", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Customer account takeover", + "Customer-data theft" + ], + "Description": "Customer-facing AWS-hosted services typically use Cognito user pools. 
Prowler can verify MFA is enabled on the pool.", + "RationaleStatement": "Customers handling sensitive data benefit from MFA the same as employees.", + "ImpactStatement": "Cognito MFA can be enforced or optional - the check verifies it is at least configured.", + "RemediationProcedure": "Enable MFA on customer-facing Cognito user pools.", + "AuditProcedure": "Verify Cognito MFA configuration.", + "AdditionalInformation": "ASD Essential Eight ML1 - Multi-factor authentication - clause 6.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-3.7", + "Description": "Multi-factor authentication uses either: something users have and something users know, or something users have that is unlocked by something users know or are.", + "Checks": [ + "iam_user_hardware_mfa_enabled", + "iam_root_hardware_mfa_enabled" + ], + "Attributes": [ + { + "Section": "3 Multi-factor authentication", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "limited", + "MitigatedThreats": [ + "Single-factor compromise", + "Knowledge-only authentication" + ], + "Description": "MFA factor type. AWS IAM API does not surface the underlying authenticator type granularly, only whether MFA is virtual or hardware. Hardware MFA on root + admin is the strongest signal Prowler can give.", + "RationaleStatement": "MFA must combine factor categories - knowledge alone doesn't qualify.", + "ImpactStatement": "Prowler cannot universally verify factor type for every identity. Hardware MFA detection is the closest proxy.", + "RemediationProcedure": "Prefer hardware MFA on privileged identities.", + "AuditProcedure": "Run the listed checks. 
Manually review IdP audit feed for factor distribution.", + "AdditionalInformation": "ASD Essential Eight ML1 - Multi-factor authentication - clause 7.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-4.1", + "Description": "Requests for privileged access to systems, applications and data repositories are validated when first requested.", + "Checks": [], + "Attributes": [ + { + "Section": "4 Restrict administrative privileges", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Unauthorised privilege grant", + "Insider misuse" + ], + "Description": "Procedural control: privileged-access requests must go through a validation workflow. Prowler does not evaluate change-management workflows.", + "RationaleStatement": "Privileged access without validation is a primary insider-risk vector.", + "ImpactStatement": "Evidence comes from access-review tickets, change management or AWS IAM Identity Center request workflows.", + "RemediationProcedure": "Implement a request/approve workflow for AWS privileged role assumption (e.g. IAM Identity Center permission sets with approval) or rely on an external ITSM workflow.", + "AuditProcedure": "Manual review of change-management evidence.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict administrative privileges - clause 1. 
Procedural.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-4.2", + "Description": "Privileged users are assigned a dedicated privileged user account to be used solely for duties requiring privileged access.", + "Checks": [ + "iam_user_administrator_access_policy", + "iam_user_two_active_access_key", + "iam_user_no_setup_initial_access_key" + ], + "Attributes": [ + { + "Section": "4 Restrict administrative privileges", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Compromise of dual-purpose accounts", + "Privilege accumulation" + ], + "Description": "Privileged AWS access should be exercised through dedicated identities (separate IAM users / dedicated permission sets), not through a daily-driver identity. Prowler can detect IAM users carrying admin policies but cannot infer whether they are dedicated.", + "RationaleStatement": "Dual-purpose accounts (admin + daily) increase blast radius of credential theft.", + "ImpactStatement": "Prowler flags admin-bearing identities; the operator must verify each is dedicated to privileged duties.", + "RemediationProcedure": "Use IAM Identity Center permission sets or dedicated IAM users with admin policies, separated from daily-driver identities.", + "AuditProcedure": "Run listed checks; manually verify each admin identity is dedicated.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict administrative privileges - clause 2.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-4.3", + "Description": "Privileged user accounts (excluding those explicitly authorised to access online services) are prevented from accessing the internet, email and web services.", + "Checks": [], + "Attributes": 
[ + { + "Section": "4 Restrict administrative privileges", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "limited", + "MitigatedThreats": [ + "Drive-by compromise of privileged accounts", + "Phishing-driven privileged compromise" + ], + "Description": "Network-level control: privileged accounts must not browse the internet or access email. In AWS this would map to SCPs / VPC egress controls / dedicated jump-host architecture - none directly verifiable by a posture scan.", + "RationaleStatement": "If a privileged account never touches the internet, it cannot be phished or drive-by compromised.", + "ImpactStatement": "Evidence comes from network architecture and SCP review.", + "RemediationProcedure": "Restrict privileged AWS sessions to a hardened bastion / Cloud Workspace with no general internet egress. Apply SCPs that deny privileged roles from launching workloads with unrestricted internet egress.", + "AuditProcedure": "Manual review of SCPs and network architecture.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict administrative privileges - clause 3.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-4.4", + "Description": "Privileged user accounts explicitly authorised to access online services are strictly limited to only what is required for users and services to undertake their duties.", + "Checks": [ + "iam_aws_attached_policy_no_administrative_privileges", + "iam_customer_attached_policy_no_administrative_privileges", + "iam_customer_unattached_policy_no_administrative_privileges", + "iam_inline_policy_no_administrative_privileges", + "iam_inline_policy_allows_privilege_escalation", + "iam_policy_allows_privilege_escalation", + "iam_policy_attached_only_to_group_or_roles", + "iam_policy_no_full_access_to_kms", + "iam_policy_no_full_access_to_cloudtrail", + 
"iam_inline_policy_no_full_access_to_cloudtrail", + "iam_inline_policy_no_full_access_to_kms", + "iam_role_cross_account_readonlyaccess_policy", + "iam_role_cross_service_confused_deputy_prevention", + "accessanalyzer_enabled", + "accessanalyzer_enabled_without_findings" + ], + "Attributes": [ + { + "Section": "4 Restrict administrative privileges", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "full", + "MitigatedThreats": [ + "Excessive standing privilege", + "Privilege escalation", + "Confused deputy" + ], + "Description": "Least-privilege enforcement on AWS IAM. This is the cleanest cloud-native mapping in the entire framework: Prowler can verify policy scoping, privilege escalation paths and Access Analyzer findings.", + "RationaleStatement": "Strictly-scoped privileged access reduces blast radius of compromise.", + "ImpactStatement": "", + "RemediationProcedure": "Audit and tighten managed/inline policies; remove privilege-escalation paths; attach policies only to groups/roles; enable Access Analyzer and resolve findings.", + "AuditProcedure": "Run all listed checks.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict administrative privileges - clause 4.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-4.5", + "Description": "Privileged users use separate privileged and unprivileged operating environments.", + "Checks": [], + "Attributes": [ + { + "Section": "4 Restrict administrative privileges", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Cross-environment compromise", + "Privileged session hijack" + ], + "Description": "Operating-environment separation is an endpoint / virtualisation control (privileged jump host vs daily workstation). 
Out of AWS infrastructure scope.", + "RationaleStatement": "Operating-environment isolation prevents malware on the unprivileged side from attacking privileged sessions.", + "ImpactStatement": "Evidence comes from endpoint architecture (PAW / privileged jump hosts / WorkSpaces partitioning).", + "RemediationProcedure": "Provide privileged users with a dedicated PAW or privileged WorkSpaces image.", + "AuditProcedure": "Manual review of endpoint architecture.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict administrative privileges - clause 5. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-4.6", + "Description": "Unprivileged user accounts cannot logon to privileged operating environments.", + "Checks": [], + "Attributes": [ + { + "Section": "4 Restrict administrative privileges", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Privileged-environment lateral access" + ], + "Description": "Endpoint logon-policy control. Out of AWS infrastructure scope.", + "RationaleStatement": "Even read-only access to a privileged environment is a foothold.", + "ImpactStatement": "Evidence comes from endpoint logon policy / Active Directory.", + "RemediationProcedure": "Enforce logon restrictions on privileged endpoints.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict administrative privileges - clause 6. 
Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-4.7", + "Description": "Privileged user accounts (excluding local administrator accounts) cannot logon to unprivileged operating environments.", + "Checks": [], + "Attributes": [ + { + "Section": "4 Restrict administrative privileges", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Privileged-credential exposure on unprivileged hosts" + ], + "Description": "Endpoint logon-policy control. Out of AWS infrastructure scope.", + "RationaleStatement": "Privileged credentials must never be entered on potentially-compromised unprivileged endpoints.", + "ImpactStatement": "Evidence comes from endpoint logon policy / Active Directory.", + "RemediationProcedure": "Enforce that privileged accounts can only logon to PAWs / jump hosts.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict administrative privileges - clause 7. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-5.1", + "Description": "Application control is implemented on workstations.", + "Checks": [], + "Attributes": [ + { + "Section": "5 Application control", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Malware execution on workstations" + ], + "Description": "Application control is fundamentally an endpoint allowlist. 
AWS infrastructure has no direct equivalent for workstation app control.", + "RationaleStatement": "Application control on workstations is the highest-impact mitigation against malware.", + "ImpactStatement": "Evidence comes from endpoint allowlist tooling (AppLocker, WDAC, third-party EDR).", + "RemediationProcedure": "Deploy AppLocker / Windows Defender Application Control / equivalent on every workstation.", + "AuditProcedure": "Manual review of endpoint app-control coverage.", + "AdditionalInformation": "ASD Essential Eight ML1 - Application control - clause 1. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-5.2", + "Description": "Application control is applied to user profiles and temporary folders used by operating systems, web browsers and email clients.", + "Checks": [], + "Attributes": [ + { + "Section": "5 Application control", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Drop-and-execute from user profile", + "Browser-cache execution", + "Email-attachment execution" + ], + "Description": "Endpoint app-control scope (user profiles, temp folders). Out of AWS infrastructure scope.", + "RationaleStatement": "User-writable folders are the most common malware drop locations.", + "ImpactStatement": "Evidence comes from endpoint app-control configuration.", + "RemediationProcedure": "Configure AppLocker / WDAC rules to cover %APPDATA%, %TEMP%, browser caches, email-client temp folders.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - Application control - clause 2. 
Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-5.3", + "Description": "Application control restricts the execution of executables, software libraries, scripts, installers, compiled HTML, HTML applications and control panel applets to an organisation-approved set.", + "Checks": [], + "Attributes": [ + { + "Section": "5 Application control", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Unauthorised executable execution", + "Library hijacking", + "Script-based malware", + "HTA / CHM payload execution" + ], + "Description": "Endpoint allowlisting of executable file types. Out of AWS infrastructure scope.", + "RationaleStatement": "An effective allowlist must cover all execution surfaces, not only .exe.", + "ImpactStatement": "Evidence comes from endpoint app-control rule set.", + "RemediationProcedure": "Define an organisation-approved allowlist covering all listed file types and enforce it in AppLocker / WDAC / equivalent.", + "AuditProcedure": "Manual review of allowlist contents.", + "AdditionalInformation": "ASD Essential Eight ML1 - Application control - clause 3. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-6.1", + "Description": "Microsoft Office macros are disabled for users that do not have a demonstrated business requirement.", + "Checks": [], + "Attributes": [ + { + "Section": "6 Restrict Microsoft Office macros", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Macro-based malware delivery" + ], + "Description": "Endpoint / Microsoft 365 control. 
Out of AWS infrastructure scope.", + "RationaleStatement": "Most users never need Office macros. Disabling by default removes a major delivery vector.", + "ImpactStatement": "Evidence comes from Microsoft 365 / Group Policy.", + "RemediationProcedure": "Disable macros via Group Policy / Intune / M365 admin policies, with explicit allowlist for business-justified users.", + "AuditProcedure": "Manual review of M365 macro policy.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict Microsoft Office macros - clause 1. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-6.2", + "Description": "Microsoft Office macros in files originating from the internet are blocked.", + "Checks": [], + "Attributes": [ + { + "Section": "6 Restrict Microsoft Office macros", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Internet-sourced macro malware" + ], + "Description": "Endpoint / Microsoft 365 control (Mark-of-the-Web macro blocking). Out of AWS infrastructure scope.", + "RationaleStatement": "Internet-sourced documents are the primary macro-delivery vector.", + "ImpactStatement": "Evidence comes from Microsoft 365 / Group Policy.", + "RemediationProcedure": "Enable 'Block macros from running in Office files from the internet' via Group Policy / Intune.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict Microsoft Office macros - clause 2. 
Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-6.3", + "Description": "Microsoft Office macro antivirus scanning is enabled.", + "Checks": [], + "Attributes": [ + { + "Section": "6 Restrict Microsoft Office macros", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Macro-borne malware not detected at file-open time" + ], + "Description": "Endpoint / Microsoft 365 control (AMSI integration with macros). Out of AWS infrastructure scope.", + "RationaleStatement": "Antivirus scanning of macros at run-time catches polymorphic payloads that static analysis misses.", + "ImpactStatement": "Evidence comes from endpoint AV / AMSI configuration.", + "RemediationProcedure": "Enable AMSI macro scanning via Group Policy.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict Microsoft Office macros - clause 3. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-6.4", + "Description": "Microsoft Office macro security settings cannot be changed by users.", + "Checks": [], + "Attributes": [ + { + "Section": "6 Restrict Microsoft Office macros", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "User-driven macro policy bypass" + ], + "Description": "Endpoint / Microsoft 365 control. 
Out of AWS infrastructure scope.", + "RationaleStatement": "If users can change macro security settings, the previous controls become advisory.", + "ImpactStatement": "Evidence comes from Group Policy lockdown.", + "RemediationProcedure": "Apply Group Policy that prevents users from changing Office macro security settings.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - Restrict Microsoft Office macros - clause 4. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-7.1", + "Description": "Internet Explorer 11 is disabled or removed.", + "Checks": [], + "Attributes": [ + { + "Section": "7 User application hardening", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Legacy-browser exploitation" + ], + "Description": "Endpoint browser control. Out of AWS infrastructure scope.", + "RationaleStatement": "Internet Explorer 11 is end-of-life and a recurring entry vector.", + "ImpactStatement": "Evidence comes from endpoint software inventory.", + "RemediationProcedure": "Remove or disable IE 11 via endpoint management.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - User application hardening - clause 1. 
Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-7.2", + "Description": "Web browsers do not process Java from the internet.", + "Checks": [], + "Attributes": [ + { + "Section": "7 User application hardening", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Java-applet exploitation" + ], + "Description": "Endpoint browser control. Out of AWS infrastructure scope.", + "RationaleStatement": "Java in browsers is a long-standing exploitation surface.", + "ImpactStatement": "Evidence comes from browser policy enforced via endpoint management.", + "RemediationProcedure": "Block Java in browsers via Group Policy / browser policy templates.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - User application hardening - clause 2. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-7.3", + "Description": "Web browsers do not process web advertisements from the internet.", + "Checks": [], + "Attributes": [ + { + "Section": "7 User application hardening", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Malvertising delivery" + ], + "Description": "Endpoint browser control. 
Out of AWS infrastructure scope.", + "RationaleStatement": "Malicious advertising is a passive infection vector.", + "ImpactStatement": "Evidence comes from browser ad-blocking enforcement.", + "RemediationProcedure": "Deploy enterprise ad-blocking via browser policy or upstream DNS filtering.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - User application hardening - clause 3. Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-7.4", + "Description": "Web browser security settings cannot be changed by users.", + "Checks": [], + "Attributes": [ + { + "Section": "7 User application hardening", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "User-driven browser hardening bypass" + ], + "Description": "Endpoint browser control. Out of AWS infrastructure scope.", + "RationaleStatement": "If users can change browser security settings, the previous controls become advisory.", + "ImpactStatement": "Evidence comes from browser policy enforcement.", + "RemediationProcedure": "Lock browser security settings via Group Policy / browser policy templates.", + "AuditProcedure": "Manual review.", + "AdditionalInformation": "ASD Essential Eight ML1 - User application hardening - clause 4. 
Out of AWS infrastructure scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-8.1", + "Description": "Backups of data, applications and settings are performed and retained in accordance with business criticality and business continuity requirements.", + "Checks": [ + "backup_plans_exist", + "backup_vaults_exist", + "backup_reportplans_exist", + "rds_cluster_protected_by_backup_plan", + "rds_instance_protected_by_backup_plan", + "rds_instance_backup_enabled", + "dynamodb_tables_pitr_enabled", + "dynamodb_table_protected_by_backup_plan", + "efs_have_backup_enabled", + "ec2_ebs_volume_protected_by_backup_plan", + "ec2_ebs_volume_snapshots_exists", + "elasticache_redis_cluster_backup_enabled", + "documentdb_cluster_backup_enabled", + "neptune_cluster_backup_enabled", + "redshift_cluster_automated_snapshot", + "dlm_ebs_snapshot_lifecycle_policy_exists" + ], + "Attributes": [ + { + "Section": "8 Regular backups", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Ransomware", + "Data destruction", + "Recovery failure" + ], + "Description": "Backup coverage across AWS data services. Whether the backup frequency and retention align with business criticality is an operator classification.", + "RationaleStatement": "Backups must exist and align with the criticality of the data they protect.", + "ImpactStatement": "Prowler verifies backup mechanisms exist; criticality alignment is procedural.", + "RemediationProcedure": "Define AWS Backup plans + vaults covering critical services; enable engine-native backups on RDS / ElastiCache / DocumentDB / Neptune / Redshift; enable EFS Backup, EBS Backup, DynamoDB PITR; configure DLM lifecycle.", + "AuditProcedure": "Run all listed checks. 
Manually verify cadence and retention match criticality.", + "AdditionalInformation": "ASD Essential Eight ML1 - Regular backups - clause 1.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-8.2", + "Description": "Backups of data, applications and settings are synchronised to enable restoration to a common point in time.", + "Checks": [ + "dynamodb_tables_pitr_enabled", + "rds_cluster_backtrack_enabled", + "s3_bucket_object_versioning", + "rds_instance_backup_enabled" + ], + "Attributes": [ + { + "Section": "8 Regular backups", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Inconsistent recovery state", + "Application-level corruption from partial restore" + ], + "Description": "Common-point-in-time recovery is delivered by DynamoDB PITR, Aurora backtrack, RDS automated backups and S3 versioning.", + "RationaleStatement": "Without coordinated PIT, restoration produces an inconsistent state across services.", + "ImpactStatement": "Coordinated multi-service PIT remains an architectural decision.", + "RemediationProcedure": "Enable DynamoDB PITR, Aurora backtrack, RDS automated backups and S3 versioning on all critical data stores.", + "AuditProcedure": "Run listed checks.", + "AdditionalInformation": "ASD Essential Eight ML1 - Regular backups - clause 2.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-8.3", + "Description": "Backups of data, applications and settings are retained in a secure and resilient manner.", + "Checks": [ + "backup_vaults_encrypted", + "backup_recovery_point_encrypted", + "rds_snapshots_encrypted", + "rds_snapshots_public_access", + "ec2_ebs_public_snapshot", + "ec2_ebs_snapshot_account_block_public_access", + 
"documentdb_cluster_public_snapshot", + "neptune_cluster_public_snapshot", + "neptune_cluster_copy_tags_to_snapshots", + "rds_cluster_copy_tags_to_snapshots", + "rds_instance_copy_tags_to_snapshots", + "s3_bucket_cross_region_replication" + ], + "Attributes": [ + { + "Section": "8 Regular backups", + "MaturityLevel": "ML1", + "AssessmentStatus": "Automated", + "CloudApplicability": "full", + "MitigatedThreats": [ + "Backup tampering", + "Backup exfiltration", + "Regional outage destroying backups" + ], + "Description": "Secure: encryption + no public exposure. Resilient: cross-region replication / multi-AZ vault placement / lifecycle. Prowler covers both.", + "RationaleStatement": "A public or unencrypted backup is a data breach in waiting.", + "ImpactStatement": "", + "RemediationProcedure": "Enforce encryption on Backup vaults and recovery points, RDS snapshots. Block public access on EBS / RDS / DocumentDB / Neptune snapshots account-wide. Enable S3 cross-region replication for critical buckets.", + "AuditProcedure": "Run all listed checks.", + "AdditionalInformation": "ASD Essential Eight ML1 - Regular backups - clause 3.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-8.4", + "Description": "Restoration of data, applications and settings from backups to a common point in time is tested as part of disaster recovery exercises.", + "Checks": [], + "Attributes": [ + { + "Section": "8 Regular backups", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "non-applicable", + "MitigatedThreats": [ + "Untested recovery path", + "Disaster recovery failure" + ], + "Description": "Restoration testing is a procedural / operational control. 
AWS APIs do not surface evidence that a restoration test occurred.", + "RationaleStatement": "Untested backups are unreliable backups.", + "ImpactStatement": "Evidence comes from DR exercise reports.", + "RemediationProcedure": "Schedule and execute DR exercises that include backup restoration to a common point in time. Record results.", + "AuditProcedure": "Manual review of DR exercise evidence (last 12 months).", + "AdditionalInformation": "ASD Essential Eight ML1 - Regular backups - clause 4. Procedural; out of Prowler scope.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-8.5", + "Description": "Unprivileged user accounts cannot access backups belonging to other user accounts.", + "Checks": [ + "rds_snapshots_public_access", + "ec2_ebs_public_snapshot", + "ec2_ebs_snapshot_account_block_public_access", + "documentdb_cluster_public_snapshot", + "neptune_cluster_public_snapshot" + ], + "Attributes": [ + { + "Section": "8 Regular backups", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Cross-tenant backup access", + "Backup exfiltration via permissive IAM" + ], + "Description": "Cross-account / cross-user backup isolation. Public-snapshot blocking is the strongest cloud-side signal Prowler surfaces. Fine-grained per-user backup access scoping requires manual IAM/Vault policy review.", + "RationaleStatement": "Backups containing other users' data must never be readable by general users.", + "ImpactStatement": "Public-access blocks are partial - true 'cannot access other users' backups' requires IAM scoping review.", + "RemediationProcedure": "Block public access on EBS / RDS / DocumentDB / Neptune snapshots. Apply IAM policies that restrict backup-read APIs to dedicated backup-operator roles.", + "AuditProcedure": "Run listed checks. 
Manually review backup IAM scoping.", + "AdditionalInformation": "ASD Essential Eight ML1 - Regular backups - clause 5.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + }, + { + "Id": "E8-8.6", + "Description": "Unprivileged user accounts are prevented from modifying and deleting backups.", + "Checks": [], + "Attributes": [ + { + "Section": "8 Regular backups", + "MaturityLevel": "ML1", + "AssessmentStatus": "Manual", + "CloudApplicability": "partial", + "MitigatedThreats": [ + "Backup deletion by compromised user", + "Ransomware-driven backup destruction" + ], + "Description": "Backup immutability against unprivileged accounts. Cloud-native delivery: AWS Backup Vault Lock + IAM/SCP scoping. Prowler does not currently surface Vault Lock state, so this clause is largely operator-evidenced.", + "RationaleStatement": "Without write protection, ransomware that compromises any user can erase backups.", + "ImpactStatement": "Vault Lock is the strongest cloud control here but is not surfaced by Prowler today.", + "RemediationProcedure": "Apply AWS Backup Vault Lock with retention enforcement. Use SCPs to deny `backup:Delete*`, `rds:DeleteDB*Snapshot`, `s3:DeleteObjectVersion`, etc. 
for non-backup-operator principals.", + "AuditProcedure": "Manual review of Vault Lock + SCP coverage.", + "AdditionalInformation": "ASD Essential Eight ML1 - Regular backups - clause 6.", + "References": "https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model" + } + ] + } + ] +} diff --git a/prowler/lib/check/compliance_models.py b/prowler/lib/check/compliance_models.py index 136b49f83b..226af6ec36 100644 --- a/prowler/lib/check/compliance_models.py +++ b/prowler/lib/check/compliance_models.py @@ -102,6 +102,48 @@ class CIS_Requirement_Attribute(BaseModel): References: str +class EssentialEight_Requirement_Attribute_MaturityLevel(str, Enum): + """ASD Essential Eight Maturity Level""" + + ML1 = "ML1" + ML2 = "ML2" + ML3 = "ML3" + + +class EssentialEight_Requirement_Attribute_AssessmentStatus(str, Enum): + """Essential Eight Requirement Attribute Assessment Status""" + + Manual = "Manual" + Automated = "Automated" + + +class EssentialEight_Requirement_Attribute_CloudApplicability(str, Enum): + """How well the ASD control maps to AWS cloud infrastructure.""" + + Full = "full" + Partial = "partial" + Limited = "limited" + NonApplicable = "non-applicable" + + +# Essential Eight Requirement Attribute +class EssentialEight_Requirement_Attribute(BaseModel): + """ASD Essential Eight Requirement Attribute""" + + Section: str + MaturityLevel: EssentialEight_Requirement_Attribute_MaturityLevel + AssessmentStatus: EssentialEight_Requirement_Attribute_AssessmentStatus + CloudApplicability: EssentialEight_Requirement_Attribute_CloudApplicability + MitigatedThreats: list[str] + Description: str + RationaleStatement: str + ImpactStatement: str + RemediationProcedure: str + AuditProcedure: str + AdditionalInformation: str + References: str + + # Well Architected Requirement Attribute class AWS_Well_Architected_Requirement_Attribute(BaseModel): """AWS Well Architected Requirement Attribute""" @@ -250,6 
+292,7 @@ class Compliance_Requirement(BaseModel): Name: Optional[str] = None Attributes: list[ Union[ + EssentialEight_Requirement_Attribute, CIS_Requirement_Attribute, ENS_Requirement_Attribute, ISO27001_2013_Requirement_Attribute, diff --git a/prowler/lib/outputs/compliance/compliance.py b/prowler/lib/outputs/compliance/compliance.py index 0d743a4d25..7fe2330550 100644 --- a/prowler/lib/outputs/compliance/compliance.py +++ b/prowler/lib/outputs/compliance/compliance.py @@ -9,6 +9,9 @@ from prowler.lib.outputs.compliance.compliance_check import ( # noqa: F401 - re ) from prowler.lib.outputs.compliance.csa.csa import get_csa_table from prowler.lib.outputs.compliance.ens.ens import get_ens_table +from prowler.lib.outputs.compliance.essential_eight.essential_eight import ( + get_essential_eight_table, +) from prowler.lib.outputs.compliance.generic.generic_table import ( get_generic_compliance_table, ) @@ -230,6 +233,15 @@ def display_compliance_table( output_directory, compliance_overview, ) + elif "essential_eight" in compliance_framework: + get_essential_eight_table( + findings, + bulk_checks_metadata, + compliance_framework, + output_filename, + output_directory, + compliance_overview, + ) else: get_generic_compliance_table( findings, diff --git a/prowler/lib/outputs/compliance/essential_eight/__init__.py b/prowler/lib/outputs/compliance/essential_eight/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/lib/outputs/compliance/essential_eight/essential_eight.py b/prowler/lib/outputs/compliance/essential_eight/essential_eight.py new file mode 100644 index 0000000000..9438a613f8 --- /dev/null +++ b/prowler/lib/outputs/compliance/essential_eight/essential_eight.py @@ -0,0 +1,98 @@ +from colorama import Fore, Style +from tabulate import tabulate + +from prowler.config.config import orange_color + + +def get_essential_eight_table( + findings: list, + bulk_checks_metadata: dict, + compliance_framework: str, + output_filename: str, + 
output_directory: str, + compliance_overview: bool, +): + sections = {} + essential_eight_compliance_table = { + "Provider": [], + "Section": [], + "Status": [], + "Muted": [], + } + pass_count = [] + fail_count = [] + muted_count = [] + for index, finding in enumerate(findings): + check = bulk_checks_metadata[finding.check_metadata.CheckID] + check_compliances = check.Compliance + for compliance in check_compliances: + if compliance.Framework == "Essential-Eight": + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + section = attribute.Section + if section not in sections: + sections[section] = { + "FAIL": 0, + "PASS": 0, + "Muted": 0, + } + if finding.muted: + if index not in muted_count: + muted_count.append(index) + sections[section]["Muted"] += 1 + else: + if finding.status == "FAIL" and index not in fail_count: + fail_count.append(index) + sections[section]["FAIL"] += 1 + elif finding.status == "PASS" and index not in pass_count: + pass_count.append(index) + sections[section]["PASS"] += 1 + + sections = dict(sorted(sections.items())) + for section in sections: + essential_eight_compliance_table["Provider"].append(compliance.Provider) + essential_eight_compliance_table["Section"].append(section) + if sections[section]["FAIL"] > 0: + essential_eight_compliance_table["Status"].append( + f"{Fore.RED}FAIL({sections[section]['FAIL']}){Style.RESET_ALL}" + ) + elif sections[section]["PASS"] > 0: + essential_eight_compliance_table["Status"].append( + f"{Fore.GREEN}PASS({sections[section]['PASS']}){Style.RESET_ALL}" + ) + else: + essential_eight_compliance_table["Status"].append("-") + essential_eight_compliance_table["Muted"].append( + f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}" + ) + if len(fail_count) + len(pass_count) + len(muted_count) > 1: + print( + f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:" + ) + total_findings_count = len(fail_count) + 
len(pass_count) + len(muted_count) + overview_table = [ + [ + f"{Fore.RED}{round(len(fail_count) / total_findings_count * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}", + f"{Fore.GREEN}{round(len(pass_count) / total_findings_count * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}", + f"{orange_color}{round(len(muted_count) / total_findings_count * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}", + ] + ] + print(tabulate(overview_table, tablefmt="rounded_grid")) + if not compliance_overview: + print( + f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:" + ) + print( + tabulate( + essential_eight_compliance_table, + headers="keys", + tablefmt="rounded_grid", + ) + ) + print( + f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" + ) + print(f"\nDetailed results of {compliance_framework.upper()} are in:") + print( + f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" + ) diff --git a/prowler/lib/outputs/compliance/essential_eight/essential_eight_aws.py b/prowler/lib/outputs/compliance/essential_eight/essential_eight_aws.py new file mode 100644 index 0000000000..198e3ab331 --- /dev/null +++ b/prowler/lib/outputs/compliance/essential_eight/essential_eight_aws.py @@ -0,0 +1,111 @@ +from prowler.config.config import timestamp +from prowler.lib.check.compliance_models import Compliance +from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput +from prowler.lib.outputs.compliance.essential_eight.models import ( + EssentialEightAWSModel, +) +from prowler.lib.outputs.finding import Finding + + +class EssentialEightAWS(ComplianceOutput): + """ + This class represents the AWS ASD Essential Eight compliance output. + + Attributes: + - _data (list): A list to store transformed data from findings. + - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file. 
+ + Methods: + - transform: Transforms findings into AWS Essential Eight compliance format. + """ + + def transform( + self, + findings: list[Finding], + compliance: Compliance, + compliance_name: str, + ) -> None: + """ + Transforms a list of findings into AWS Essential Eight compliance format. + + Parameters: + - findings (list): A list of findings. + - compliance (Compliance): A compliance model. + - compliance_name (str): The name of the compliance model. + + Returns: + - None + """ + for finding in findings: + finding_requirements = finding.compliance.get(compliance_name, []) + for requirement in compliance.Requirements: + if requirement.Id in finding_requirements: + for attribute in requirement.Attributes: + compliance_row = EssentialEightAWSModel( + Provider=finding.provider, + Description=compliance.Description, + AccountId=finding.account_uid, + Region=finding.region, + AssessmentDate=str(timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_MaturityLevel=attribute.MaturityLevel, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_CloudApplicability=attribute.CloudApplicability, + Requirements_Attributes_MitigatedThreats=", ".join( + attribute.MitigatedThreats + ), + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_uid, + ResourceName=finding.resource_name, + 
CheckId=finding.check_id, + Muted=finding.muted, + Framework=compliance.Framework, + Name=compliance.Name, + ) + self._data.append(compliance_row) + # Add manual requirements to the compliance output + for requirement in compliance.Requirements: + if not requirement.Checks: + for attribute in requirement.Attributes: + compliance_row = EssentialEightAWSModel( + Provider=compliance.Provider.lower(), + Description=compliance.Description, + AccountId="", + Region="", + AssessmentDate=str(timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_MaturityLevel=attribute.MaturityLevel, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_CloudApplicability=attribute.CloudApplicability, + Requirements_Attributes_MitigatedThreats=", ".join( + attribute.MitigatedThreats + ), + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Status="MANUAL", + StatusExtended="Manual check", + ResourceId="manual_check", + ResourceName="Manual check", + CheckId="manual", + Muted=False, + Framework=compliance.Framework, + Name=compliance.Name, + ) + self._data.append(compliance_row) diff --git a/prowler/lib/outputs/compliance/essential_eight/models.py b/prowler/lib/outputs/compliance/essential_eight/models.py new file mode 100644 index 0000000000..89e0191dac --- /dev/null +++ b/prowler/lib/outputs/compliance/essential_eight/models.py @@ -0,0 +1,35 @@ +from pydantic.v1 import BaseModel + + +class 
EssentialEightAWSModel(BaseModel): + """ + EssentialEightAWSModel generates a finding's output in AWS ASD Essential Eight Compliance format. + """ + + Provider: str + Description: str + AccountId: str + Region: str + AssessmentDate: str + Requirements_Id: str + Requirements_Description: str + Requirements_Attributes_Section: str + Requirements_Attributes_MaturityLevel: str + Requirements_Attributes_AssessmentStatus: str + Requirements_Attributes_CloudApplicability: str + Requirements_Attributes_MitigatedThreats: str + Requirements_Attributes_Description: str + Requirements_Attributes_RationaleStatement: str + Requirements_Attributes_ImpactStatement: str + Requirements_Attributes_RemediationProcedure: str + Requirements_Attributes_AuditProcedure: str + Requirements_Attributes_AdditionalInformation: str + Requirements_Attributes_References: str + Status: str + StatusExtended: str + ResourceId: str + ResourceName: str + CheckId: str + Muted: bool + Framework: str + Name: str diff --git a/tests/lib/outputs/compliance/essential_eight/__init__.py b/tests/lib/outputs/compliance/essential_eight/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/lib/outputs/compliance/essential_eight/essential_eight_aws_test.py b/tests/lib/outputs/compliance/essential_eight/essential_eight_aws_test.py new file mode 100644 index 0000000000..2abbdadb9c --- /dev/null +++ b/tests/lib/outputs/compliance/essential_eight/essential_eight_aws_test.py @@ -0,0 +1,128 @@ +from io import StringIO +from unittest import mock + +from freezegun import freeze_time +from mock import patch + +from prowler.lib.outputs.compliance.essential_eight.essential_eight_aws import ( + EssentialEightAWS, +) +from prowler.lib.outputs.compliance.essential_eight.models import ( + EssentialEightAWSModel, +) +from tests.lib.outputs.compliance.fixtures import ESSENTIAL_EIGHT_AWS +from tests.lib.outputs.fixtures.fixtures import generate_finding_output +from tests.providers.aws.utils import 
AWS_ACCOUNT_NUMBER, AWS_REGION_EU_WEST_1 + +# The fixture's first Requirement maps clause "E8-1.8" (Patch applications, +# clause 8: removal of unsupported online services). The second Requirement is +# E8-6.1 (Restrict Office macros, clause 1) which has no Checks and is therefore +# emitted as a manual row. +COMPLIANCE_NAME = "Essential-Eight-Nov 2023" + + +class TestEssentialEightAWS: + def test_output_transform(self): + findings = [generate_finding_output(compliance={COMPLIANCE_NAME: "E8-1.8"})] + + output = EssentialEightAWS(findings, ESSENTIAL_EIGHT_AWS) + output_data = output.data[0] + assert isinstance(output_data, EssentialEightAWSModel) + assert output_data.Provider == "aws" + assert output_data.Framework == ESSENTIAL_EIGHT_AWS.Framework + assert output_data.Name == ESSENTIAL_EIGHT_AWS.Name + assert output_data.Description == ESSENTIAL_EIGHT_AWS.Description + assert output_data.AccountId == AWS_ACCOUNT_NUMBER + assert output_data.Region == AWS_REGION_EU_WEST_1 + assert output_data.Requirements_Id == "E8-1.8" + assert ( + output_data.Requirements_Description + == ESSENTIAL_EIGHT_AWS.Requirements[0].Description + ) + assert output_data.Requirements_Attributes_Section == "1 Patch applications" + assert output_data.Requirements_Attributes_MaturityLevel == "ML1" + assert output_data.Requirements_Attributes_AssessmentStatus == "Automated" + assert output_data.Requirements_Attributes_CloudApplicability == "full" + assert ( + output_data.Requirements_Attributes_MitigatedThreats + == "Use of unsupported software, Long-tail vulnerability accumulation" + ) + assert ( + output_data.Requirements_Attributes_Description + == ESSENTIAL_EIGHT_AWS.Requirements[0].Attributes[0].Description + ) + assert output_data.Status == "PASS" + assert output_data.StatusExtended == "" + assert output_data.ResourceId == "" + assert output_data.ResourceName == "" + assert output_data.CheckId == "service_test_check_id" + assert not output_data.Muted + + def test_manual_requirement(self): + 
findings = [generate_finding_output(compliance={COMPLIANCE_NAME: "E8-1.8"})] + output = EssentialEightAWS(findings, ESSENTIAL_EIGHT_AWS) + + # E8-6.1 (macros) has no Checks -> emitted as a manual row, non-applicable + manual_rows = [row for row in output.data if row.Status == "MANUAL"] + assert len(manual_rows) == 1 + + manual = manual_rows[0] + assert manual.Provider == "aws" + assert manual.AccountId == "" + assert manual.Region == "" + assert manual.Requirements_Id == "E8-6.1" + assert ( + manual.Requirements_Attributes_Section + == "6 Restrict Microsoft Office macros" + ) + assert manual.Requirements_Attributes_MaturityLevel == "ML1" + assert manual.Requirements_Attributes_AssessmentStatus == "Manual" + assert manual.Requirements_Attributes_CloudApplicability == "non-applicable" + assert ( + manual.Requirements_Attributes_MitigatedThreats + == "Macro-based malware delivery" + ) + assert manual.StatusExtended == "Manual check" + assert manual.ResourceId == "manual_check" + assert manual.ResourceName == "Manual check" + assert manual.CheckId == "manual" + assert not manual.Muted + + @freeze_time("2025-01-01 00:00:00") + @mock.patch( + "prowler.lib.outputs.compliance.essential_eight.essential_eight_aws.timestamp", + "2025-01-01 00:00:00", + ) + def test_batch_write_data_to_file(self): + mock_file = StringIO() + findings = [generate_finding_output(compliance={COMPLIANCE_NAME: "E8-1.8"})] + output = EssentialEightAWS(findings, ESSENTIAL_EIGHT_AWS) + output._file_descriptor = mock_file + + with patch.object(mock_file, "close", return_value=None): + output.batch_write_data_to_file() + + mock_file.seek(0) + content = mock_file.read() + + # Validate header carries the E8-specific column names + first_line = content.split("\r\n", 1)[0] + for column in ( + "REQUIREMENTS_ATTRIBUTES_MATURITYLEVEL", + "REQUIREMENTS_ATTRIBUTES_ASSESSMENTSTATUS", + "REQUIREMENTS_ATTRIBUTES_CLOUDAPPLICABILITY", + "REQUIREMENTS_ATTRIBUTES_MITIGATEDTHREATS", + 
"REQUIREMENTS_ATTRIBUTES_RATIONALESTATEMENT", + "REQUIREMENTS_ATTRIBUTES_REMEDIATIONPROCEDURE", + "REQUIREMENTS_ATTRIBUTES_AUDITPROCEDURE", + ): + assert column in first_line, f"missing column {column} in CSV header" + + # rows: header + matched + manual + rows = [r for r in content.split("\r\n") if r] + assert len(rows) == 3 + assert rows[1].split(";")[0] == "aws" + assert "ML1" in rows[1] + assert ";PASS;" in rows[1] + assert ";MANUAL;" in rows[2] + assert ";manual_check;" in rows[2] diff --git a/tests/lib/outputs/compliance/fixtures.py b/tests/lib/outputs/compliance/fixtures.py index 41c68d148f..9aac01392f 100644 --- a/tests/lib/outputs/compliance/fixtures.py +++ b/tests/lib/outputs/compliance/fixtures.py @@ -7,6 +7,7 @@ from prowler.lib.check.compliance_models import ( ENS_Requirement_Attribute, ENS_Requirement_Attribute_Nivel, ENS_Requirement_Attribute_Tipos, + EssentialEight_Requirement_Attribute, Generic_Compliance_Requirement_Attribute, ISO27001_2013_Requirement_Attribute, KISA_ISMSP_Requirement_Attribute, @@ -1189,3 +1190,58 @@ CCC_GCP_FIXTURE = Compliance( ), ], ) + +ESSENTIAL_EIGHT_AWS = Compliance( + Framework="Essential-Eight", + Name="ASD Essential Eight Maturity Model - Maturity Level One (AWS)", + Version="Nov 2023", + Provider="AWS", + Description="Literal mapping of the Australian Signals Directorate (ASD) Essential Eight Maturity Model ML1 to AWS infrastructure checks.", + Requirements=[ + Compliance_Requirement( + Id="E8-1.8", + Description="Online services that are no longer supported by vendors are removed.", + Attributes=[ + EssentialEight_Requirement_Attribute( + Section="1 Patch applications", + MaturityLevel="ML1", + AssessmentStatus="Automated", + CloudApplicability="full", + MitigatedThreats=[ + "Use of unsupported software", + "Long-tail vulnerability accumulation", + ], + Description="Detect and remove unsupported AWS-hosted online services (Lambda runtimes, RDS engines, EKS, Fargate, Kafka, OpenSearch).", + 
RationaleStatement="Unsupported services no longer receive security patches.", + ImpactStatement="", + RemediationProcedure="Migrate Lambda off deprecated runtimes; remove RDS Extended Support; upgrade EKS.", + AuditProcedure="Run all listed checks.", + AdditionalInformation="ASD Essential Eight ML1 - Patch applications - clause 8.", + References="https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model", + ) + ], + Checks=["service_test_check_id"], + ), + Compliance_Requirement( + Id="E8-6.1", + Description="Microsoft Office macros are disabled for users that do not have a demonstrated business requirement.", + Attributes=[ + EssentialEight_Requirement_Attribute( + Section="6 Restrict Microsoft Office macros", + MaturityLevel="ML1", + AssessmentStatus="Manual", + CloudApplicability="non-applicable", + MitigatedThreats=["Macro-based malware delivery"], + Description="Endpoint / Microsoft 365 control. Out of AWS infrastructure scope.", + RationaleStatement="Most users never need Office macros.", + ImpactStatement="", + RemediationProcedure="Disable macros via Group Policy / Intune / M365 admin policies.", + AuditProcedure="Manual review of M365 macro policy.", + AdditionalInformation="ASD Essential Eight ML1 - Restrict Microsoft Office macros - clause 1. 
Out of AWS infrastructure scope.", + References="https://www.cyber.gov.au/resources-business-and-government/essential-cyber-security/essential-eight/essential-eight-maturity-model", + ) + ], + Checks=[], + ), + ], +) From e821e07d7d2d79e544c5bd850e8c85e58c0e0cbd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Mart=C3=ADn?= Date: Thu, 30 Apr 2026 13:58:17 +0200 Subject: [PATCH 03/29] docs(rbac): add Manage Alerts permission (#10947) --- docs/user-guide/tutorials/prowler-app-rbac.mdx | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/user-guide/tutorials/prowler-app-rbac.mdx b/docs/user-guide/tutorials/prowler-app-rbac.mdx index 8ac3a9f07b..31fd4a730e 100644 --- a/docs/user-guide/tutorials/prowler-app-rbac.mdx +++ b/docs/user-guide/tutorials/prowler-app-rbac.mdx @@ -227,6 +227,7 @@ Assign administrative permissions by selecting from the following options: | Manage Integrations | All | Add or modify the Prowler Integrations. | | Manage Ingestions | Prowler Cloud | Allow or deny the ability to submit findings ingestion batches via the API. | | Manage Billing | Prowler Cloud | Access and manage billing settings and subscription information. | +| Manage Alerts | Prowler Cloud | Create, edit, and delete alert rules and recipients. | The **Scope** column indicates where each permission applies. **All** means the permission is available in both Prowler Cloud and Self-Managed deployments. **Prowler Cloud** indicates permissions that are specific to [Prowler Cloud](https://cloud.prowler.com/sign-in). @@ -241,3 +242,5 @@ The following permissions are available exclusively in **Prowler Cloud**: **Manage Ingestions:** Submit and manage findings ingestion jobs via the API. Required to upload OCSF scan results using the `--push-to-cloud` CLI flag or the ingestion endpoints. See [Import Findings](/user-guide/tutorials/prowler-app-import-findings) for details. **Manage Billing:** Access and manage billing settings, subscription plans, and payment methods. 
+ +**Manage Alerts:** Create, edit, and delete alert rules and recipients used to deliver scan-result digests via email. From 36b8aa1b79d65162347e51adc160dede335805fa Mon Sep 17 00:00:00 2001 From: Pepe Fagoaga Date: Thu, 30 Apr 2026 14:11:29 +0200 Subject: [PATCH 04/29] fix(boto3): pass config to clients (#10944) --- prowler/CHANGELOG.md | 1 + prowler/providers/aws/aws_provider.py | 55 +++++++++++++------ prowler/providers/aws/config.py | 9 +++ .../lib/quick_inventory/quick_inventory.py | 12 ++-- prowler/providers/aws/lib/s3/s3.py | 13 ++++- .../aws/lib/security_hub/security_hub.py | 12 +++- prowler/providers/aws/lib/service/service.py | 10 +++- .../aws/lib/session/aws_set_up_session.py | 4 +- .../globalaccelerator_service.py | 12 ++-- .../aws/services/route53/route53_service.py | 10 ++-- .../trustedadvisor/trustedadvisor_service.py | 36 ++++++------ .../providers/aws/services/waf/waf_service.py | 8 +-- .../aws/services/wafv2/wafv2_service.py | 8 +-- tests/providers/aws/aws_provider_test.py | 7 +++ .../lib/organizations/organizations_test.py | 16 ++++++ .../providers/aws/lib/service/service_test.py | 10 ++++ tests/providers/aws/utils.py | 6 +- 17 files changed, 153 insertions(+), 76 deletions(-) diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index 649ae60c5a..cc5e6d47c8 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -22,6 +22,7 @@ All notable changes to the **Prowler SDK** are documented in this file. 
### 🐞 Fixed - AWS SDK test isolation: autouse `mock_aws` fixture and leak detector in `conftest.py` to prevent tests from hitting real AWS endpoints, with idempotent organization setup for tests calling `set_mocked_aws_provider` multiple times [(#10605)](https://github.com/prowler-cloud/prowler/pull/10605) +- AWS `boto3` user agent extra is now applied to every client [(#10944)](https://github.com/prowler-cloud/prowler/pull/10944) ### 🔐 Security diff --git a/prowler/providers/aws/aws_provider.py b/prowler/providers/aws/aws_provider.py index c0f1a1ef01..cf6cbdd73a 100644 --- a/prowler/providers/aws/aws_provider.py +++ b/prowler/providers/aws/aws_provider.py @@ -25,8 +25,8 @@ from prowler.lib.utils.utils import open_file, parse_json_file, print_boxes from prowler.providers.aws.config import ( AWS_REGION_US_EAST_1, AWS_STS_GLOBAL_ENDPOINT_REGION, - BOTO3_USER_AGENT_EXTRA, ROLE_SESSION_NAME, + get_default_session_config, ) from prowler.providers.aws.exceptions.exceptions import ( AWSAccessKeyIDInvalidError, @@ -227,14 +227,15 @@ class AwsProvider(Provider): # TODO: Use AwsSetUpSession ????? # Configure the initial AWS Session using the local credentials: profile or environment variables + session_config = self.set_session_config(retries_max_attempts) aws_session = self.setup_session( mfa=mfa, profile=profile, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, + session_config=session_config, ) - session_config = self.set_session_config(retries_max_attempts) # Current session and the original session points to the same session object until we get a new one, if needed self._session = AWSSession( current_session=aws_session, @@ -630,6 +631,7 @@ class AwsProvider(Provider): aws_access_key_id: str = None, aws_secret_access_key: str = None, aws_session_token: Optional[str] = None, + session_config: Optional[Config] = None, ) -> Session: """ setup_session sets up an AWS session using the provided credentials.
@@ -640,6 +642,9 @@ class AwsProvider(Provider): - aws_access_key_id: The AWS access key ID. - aws_secret_access_key: The AWS secret access key. - aws_session_token: The AWS session token, optional. + - session_config: Botocore Config applied as the session's default + client config so every client created from the session inherits + the Prowler user agent and retry settings. Returns: - Session: The AWS session. @@ -650,6 +655,9 @@ class AwsProvider(Provider): try: logger.debug("Creating original session ...") + if session_config is None: + session_config = AwsProvider.set_session_config(None) + session_arguments = {} if profile: session_arguments["profile_name"] = profile @@ -661,6 +669,7 @@ class AwsProvider(Provider): if mfa: session = Session(**session_arguments) + session._session.set_default_client_config(session_config) sts_client = session.client("sts") # TODO: pass values from the input @@ -673,7 +682,7 @@ class AwsProvider(Provider): session_credentials = sts_client.get_session_token( **get_session_token_arguments ) - return Session( + mfa_session = Session( aws_access_key_id=session_credentials["Credentials"]["AccessKeyId"], aws_secret_access_key=session_credentials["Credentials"][ "SecretAccessKey" @@ -682,8 +691,12 @@ class AwsProvider(Provider): "SessionToken" ], ) + mfa_session._session.set_default_client_config(session_config) + return mfa_session else: - return Session(**session_arguments) + session = Session(**session_arguments) + session._session.set_default_client_config(session_config) + return session except Exception as error: logger.critical( f"AWSSetUpSessionError[{error.__traceback__.tb_lineno}]: {error}" @@ -698,6 +711,7 @@ class AwsProvider(Provider): identity: AWSIdentityInfo, assumed_role_configuration: AWSAssumeRoleConfiguration, session: AWSSession, + session_config: Optional[Config] = None, ) -> Session: """ Sets up an assumed session using the provided assumed role credentials. 
@@ -742,6 +756,13 @@ class AwsProvider(Provider): assumed_session = BotocoreSession() assumed_session._credentials = assumed_refreshable_credentials assumed_session.set_config_variable("region", identity.profile_region) + if session_config is None: + session_config = ( + session.session_config + if session is not None + else AwsProvider.set_session_config(None) + ) + assumed_session.set_default_client_config(session_config) return Session( profile_name=identity.profile, botocore_session=assumed_session, @@ -870,7 +891,7 @@ class AwsProvider(Provider): for region in enabled_regions: regional_client = self._session.current_session.client( - service, region_name=region, config=self._session.session_config + service, region_name=region ) regional_client.region = region regional_clients[region] = regional_client @@ -1140,21 +1161,16 @@ class AwsProvider(Provider): Returns: - Config: The botocore Config object """ - # Set the maximum retries for the standard retrier config - default_session_config = Config( - retries={"max_attempts": 3, "mode": "standard"}, - user_agent_extra=BOTO3_USER_AGENT_EXTRA, - ) + default_session_config = get_default_session_config() if retries_max_attempts: - # Create the new config - config = Config( - retries={ - "max_attempts": retries_max_attempts, - "mode": "standard", - }, + default_session_config = default_session_config.merge( + Config( + retries={ + "max_attempts": retries_max_attempts, + "mode": "standard", + }, + ) ) - # Merge the new configuration - default_session_config = default_session_config.merge(config) return default_session_config @@ -1425,6 +1441,9 @@ class AwsProvider(Provider): region_name=aws_region, profile_name=profile, ) + session._session.set_default_client_config( + AwsProvider.set_session_config(None) + ) caller_identity = AwsProvider.validate_credentials(session, aws_region) # Do an extra validation if the AWS account ID is provided diff --git a/prowler/providers/aws/config.py b/prowler/providers/aws/config.py 
index 216c16e70b..ea55d1a314 100644 --- a/prowler/providers/aws/config.py +++ b/prowler/providers/aws/config.py @@ -1,6 +1,15 @@ import os +from botocore.config import Config + AWS_STS_GLOBAL_ENDPOINT_REGION = "us-east-1" AWS_REGION_US_EAST_1 = "us-east-1" BOTO3_USER_AGENT_EXTRA = os.getenv("PROWLER_AWS_BOTO3_USER_AGENT_EXTRA", "APN_1826889") ROLE_SESSION_NAME = "ProwlerAssessmentSession" + + +def get_default_session_config() -> Config: + return Config( + user_agent_extra=BOTO3_USER_AGENT_EXTRA, + retries={"max_attempts": 3, "mode": "standard"}, + ) diff --git a/prowler/providers/aws/lib/quick_inventory/quick_inventory.py b/prowler/providers/aws/lib/quick_inventory/quick_inventory.py index 95e9aea7bb..8ffdd80e7b 100644 --- a/prowler/providers/aws/lib/quick_inventory/quick_inventory.py +++ b/prowler/providers/aws/lib/quick_inventory/quick_inventory.py @@ -56,9 +56,7 @@ def quick_inventory(provider: AwsProvider, args): try: # Scan IAM only once if not iam_was_scanned: - global_resources.extend( - get_iam_resources(provider.session.current_session) - ) + global_resources.extend(get_iam_resources(provider)) iam_was_scanned = True # Get regional S3 buckets since none-tagged buckets are not supported by the resourcegroupstaggingapi @@ -312,8 +310,8 @@ def create_output(resources: list, provider: AwsProvider, args): if args.output_bucket: output_bucket = args.output_bucket bucket_session = provider.session.current_session - # Check if -D was input - elif args.output_bucket_no_assume: + # The outer condition guarantees -D was input when -B was not + else: output_bucket = args.output_bucket_no_assume bucket_session = provider.session.original_session @@ -375,9 +373,9 @@ def get_regional_buckets(provider: AwsProvider, region: str) -> list: return regional_buckets -def get_iam_resources(session) -> list: +def get_iam_resources(provider: AwsProvider) -> list: iam_resources = [] - iam_client = session.client("iam") + iam_client = provider.session.current_session.client("iam") 
try: get_roles_paginator = iam_client.get_paginator("list_roles") for page in get_roles_paginator.paginate(): diff --git a/prowler/providers/aws/lib/s3/s3.py b/prowler/providers/aws/lib/s3/s3.py index ae609e67c6..a4bbb42cc7 100644 --- a/prowler/providers/aws/lib/s3/s3.py +++ b/prowler/providers/aws/lib/s3/s3.py @@ -111,6 +111,13 @@ class S3: - None """ if session: + # Preserve the caller's existing default config (and the + # retries_max_attempts already baked into it) instead of clobbering + # it with a freshly built one. + if session._session.get_default_client_config() is None: + session._session.set_default_client_config( + AwsProvider.set_session_config(retries_max_attempts) + ) self._session = session.client(__class__.__name__.lower()) else: aws_setup_session = AwsSetUpSession( @@ -127,8 +134,7 @@ class S3: regions=regions, ) self._session = aws_setup_session._session.current_session.client( - __class__.__name__.lower(), - config=aws_setup_session._session.session_config, + __class__.__name__.lower() ) self._bucket_name = bucket_name @@ -313,6 +319,9 @@ class S3: region_name=aws_region, profile_name=profile, ) + session._session.set_default_client_config( + AwsProvider.set_session_config(None) + ) s3_client = session.client(__class__.__name__.lower()) if "s3://" in bucket_name: bucket_name = bucket_name.removeprefix("s3://") diff --git a/prowler/providers/aws/lib/security_hub/security_hub.py b/prowler/providers/aws/lib/security_hub/security_hub.py index a45d36c2fd..bc372d1ddd 100644 --- a/prowler/providers/aws/lib/security_hub/security_hub.py +++ b/prowler/providers/aws/lib/security_hub/security_hub.py @@ -148,6 +148,13 @@ class SecurityHub: regions=regions, ) self._session = aws_setup_session._session.current_session + # Only install the Prowler default config when the caller-supplied + # session does not already carry one — overwriting would drop the + # provider's retries_max_attempts value. 
+ if aws_session and self._session._session.get_default_client_config() is None: + self._session._session.set_default_client_config( + AwsProvider.set_session_config(retries_max_attempts) + ) self._aws_account_id = aws_account_id if not aws_partition: aws_partition = AwsProvider.validate_credentials( @@ -235,7 +242,7 @@ class SecurityHub: Args: region (str): AWS region to check. - session (Session): AWS session object. + session (Session): AWS session object. Expected to carry the Prowler default client config. aws_account_id (str): AWS account ID. aws_partition (str): AWS partition. @@ -540,6 +547,9 @@ class SecurityHub: region_name=aws_region, profile_name=profile, ) + session._session.set_default_client_config( + AwsProvider.set_session_config(None) + ) all_regions = AwsProvider.get_available_aws_service_regions( service="securityhub", partition=aws_partition diff --git a/prowler/providers/aws/lib/service/service.py b/prowler/providers/aws/lib/service/service.py index 1044a21881..ac241c64a2 100644 --- a/prowler/providers/aws/lib/service/service.py +++ b/prowler/providers/aws/lib/service/service.py @@ -32,7 +32,13 @@ class AWSService: def is_failed_check(cls, check_id, arn): return (check_id.split(".")[-1], arn) in cls.failed_checks - def __init__(self, service: str, provider: AwsProvider, global_service=False): + def __init__( + self, + service: str, + provider: AwsProvider, + global_service=False, + region: str = None, + ): # Audit Information # Do we need to store the whole provider? self.provider = provider @@ -61,7 +67,7 @@ class AWSService: # Get a single region and client if the service needs it (e.g. 
AWS Global Service) # We cannot include this within an else because some services needs both the regional_clients # and a single client like S3 - self.region = provider.get_default_region( + self.region = region or provider.get_default_region( self.service, global_service=global_service ) self.client = self.session.client(self.service, self.region) diff --git a/prowler/providers/aws/lib/session/aws_set_up_session.py b/prowler/providers/aws/lib/session/aws_set_up_session.py index 9246c0a9eb..3189400040 100644 --- a/prowler/providers/aws/lib/session/aws_set_up_session.py +++ b/prowler/providers/aws/lib/session/aws_set_up_session.py @@ -73,15 +73,15 @@ class AwsSetUpSession: aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, ) - # Setup the AWS session + session_config = AwsProvider.set_session_config(retries_max_attempts) aws_session = AwsProvider.setup_session( mfa=mfa, profile=profile, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, + session_config=session_config, ) - session_config = AwsProvider.set_session_config(retries_max_attempts) self._session = AWSSession( current_session=aws_session, session_config=session_config, diff --git a/prowler/providers/aws/services/globalaccelerator/globalaccelerator_service.py b/prowler/providers/aws/services/globalaccelerator/globalaccelerator_service.py index 0a767cafed..f49ac1ceba 100644 --- a/prowler/providers/aws/services/globalaccelerator/globalaccelerator_service.py +++ b/prowler/providers/aws/services/globalaccelerator/globalaccelerator_service.py @@ -9,15 +9,13 @@ from prowler.providers.aws.lib.service.service import AWSService class GlobalAccelerator(AWSService): def __init__(self, provider): - # Call AWSService's __init__ - super().__init__(__class__.__name__, provider) + # Global Accelerator is a global service that supports endpoints in multiple AWS Regions + # but you must specify the US West (Oregon) Region to 
create, update, or otherwise work with accelerators. + # That is, for example, specify --region us-west-2 on AWS CLI commands. + region = "us-west-2" if provider.identity.partition == "aws" else None + super().__init__(__class__.__name__, provider, region=region) self.accelerators = {} if self.audited_partition == "aws": - # Global Accelerator is a global service that supports endpoints in multiple AWS Regions - # but you must specify the US West (Oregon) Region to create, update, or otherwise work with accelerators. - # That is, for example, specify --region us-west-2 on AWS CLI commands. - self.region = "us-west-2" - self.client = self.session.client(self.service, self.region) self._list_accelerators() self.__threading_call__(self._list_tags, self.accelerators.values()) diff --git a/prowler/providers/aws/services/route53/route53_service.py b/prowler/providers/aws/services/route53/route53_service.py index c3ec37d9a3..54de22440d 100644 --- a/prowler/providers/aws/services/route53/route53_service.py +++ b/prowler/providers/aws/services/route53/route53_service.py @@ -176,14 +176,12 @@ class RecordSet(BaseModel): class Route53Domains(AWSService): def __init__(self, provider): - # Call AWSService's __init__ - super().__init__(__class__.__name__, provider) + # Route53Domains is a global service that supports endpoints in multiple AWS Regions + # but you must specify the US East (N. Virginia) Region to create, update, or otherwise work with domains. + region = "us-east-1" if provider.identity.partition == "aws" else None + super().__init__(__class__.__name__, provider, region=region) self.domains = {} if self.audited_partition == "aws": - # Route53Domains is a global service that supports endpoints in multiple AWS Regions - # but you must specify the US East (N. Virginia) Region to create, update, or otherwise work with domains. 
- self.region = "us-east-1" - self.client = self.session.client(self.service, self.region) self._list_domains() self._get_domain_detail() self._list_tags_for_domain() diff --git a/prowler/providers/aws/services/trustedadvisor/trustedadvisor_service.py b/prowler/providers/aws/services/trustedadvisor/trustedadvisor_service.py index bad30341b2..5569569290 100644 --- a/prowler/providers/aws/services/trustedadvisor/trustedadvisor_service.py +++ b/prowler/providers/aws/services/trustedadvisor/trustedadvisor_service.py @@ -9,20 +9,20 @@ from prowler.providers.aws.lib.service.service import AWSService class TrustedAdvisor(AWSService): def __init__(self, provider): - # Call AWSService's __init__ - super().__init__("support", provider) + # Support API is not available in China Partition + # But only in us-east-1 or us-gov-west-1 https://docs.aws.amazon.com/general/latest/gr/awssupport.html + partition = provider.identity.partition + if partition == "aws": + support_region = "us-east-1" + elif partition == "aws-cn": + support_region = None + else: + support_region = "us-gov-west-1" + super().__init__("support", provider, region=support_region) self.account_arn_template = f"arn:{self.audited_partition}:trusted-advisor:{self.region}:{self.audited_account}:account" self.checks = [] self.premium_support = PremiumSupport(enabled=False) - # Support API is not available in China Partition - # But only in us-east-1 or us-gov-west-1 https://docs.aws.amazon.com/general/latest/gr/awssupport.html if self.audited_partition != "aws-cn": - if self.audited_partition == "aws": - support_region = "us-east-1" - else: - support_region = "us-gov-west-1" - self.client = self.session.client(self.service, region_name=support_region) - self.client.region = support_region self._describe_services() if getattr(self.premium_support, "enabled", False): self._describe_trusted_advisor_checks() @@ -34,13 +34,13 @@ class TrustedAdvisor(AWSService): for check in 
self.client.describe_trusted_advisor_checks(language="en").get( "checks", [] ): - check_arn = f"arn:{self.audited_partition}:trusted-advisor:{self.client.region}:{self.audited_account}:check/{check['id']}" + check_arn = f"arn:{self.audited_partition}:trusted-advisor:{self.region}:{self.audited_account}:check/{check['id']}" self.checks.append( Check( id=check["id"], name=check["name"], arn=check_arn, - region=self.client.region, + region=self.region, ) ) except ClientError as error: @@ -50,22 +50,22 @@ class TrustedAdvisor(AWSService): == "Amazon Web Services Premium Support Subscription is required to use this service." ): logger.warning( - f"{self.client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) else: logger.error( - f"{self.client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) except Exception as error: logger.error( - f"{self.client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) def _describe_trusted_advisor_check_result(self): logger.info("TrustedAdvisor - Describing Check Result...") try: for check in self.checks: - if check.region == self.client.region: + if check.region == self.region: try: response = self.client.describe_trusted_advisor_check_result( checkId=check.id @@ -78,11 +78,11 @@ class TrustedAdvisor(AWSService): == "InvalidParameterValueException" ): logger.warning( - f"{self.client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) except Exception as error: logger.error( - f"{self.client.region} -- 
{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) def _describe_services(self): diff --git a/prowler/providers/aws/services/waf/waf_service.py b/prowler/providers/aws/services/waf/waf_service.py index b1fda19c50..25476e6858 100644 --- a/prowler/providers/aws/services/waf/waf_service.py +++ b/prowler/providers/aws/services/waf/waf_service.py @@ -9,15 +9,13 @@ from prowler.providers.aws.lib.service.service import AWSService class WAF(AWSService): def __init__(self, provider): - # Call AWSService's __init__ - super().__init__("waf", provider) + # AWS WAF is available globally for CloudFront distributions, but you must use the Region US East (N. Virginia) to create your web ACL and any resources used in the web ACL, such as rule groups, IP sets, and regex pattern sets. + region = "us-east-1" if provider.identity.partition == "aws" else None + super().__init__("waf", provider, region=region) self.rules = {} self.rule_groups = {} self.web_acls = {} if self.audited_partition == "aws": - # AWS WAF is available globally for CloudFront distributions, but you must use the Region US East (N. Virginia) to create your web ACL and any resources used in the web ACL, such as rule groups, IP sets, and regex pattern sets. 
- self.region = "us-east-1" - self.client = self.session.client(self.service, self.region) self._list_rules() self.__threading_call__(self._get_rule, self.rules.values()) self._list_rule_groups() diff --git a/prowler/providers/aws/services/wafv2/wafv2_service.py b/prowler/providers/aws/services/wafv2/wafv2_service.py index 6a9d3ca5b8..5682502ae3 100644 --- a/prowler/providers/aws/services/wafv2/wafv2_service.py +++ b/prowler/providers/aws/services/wafv2/wafv2_service.py @@ -11,13 +11,11 @@ from prowler.providers.aws.lib.service.service import AWSService class WAFv2(AWSService): def __init__(self, provider): - # Call AWSService's __init__ - super().__init__(__class__.__name__, provider) + # AWS WAFv2 is available globally for CloudFront distributions, but you must use the Region US East (N. Virginia) to create your web ACL. + region = "us-east-1" if provider.identity.partition == "aws" else None + super().__init__(__class__.__name__, provider, region=region) self.web_acls = {} if self.audited_partition == "aws": - # AWS WAFv2 is available globally for CloudFront distributions, but you must use the Region US East (N. Virginia) to create your web ACL. 
- self.region = "us-east-1" - self.client = self.session.client(self.service, self.region) self._list_web_acls_global() self.__threading_call__(self._list_web_acls_regional) self.__threading_call__(self._get_web_acl, self.web_acls.values()) diff --git a/tests/providers/aws/aws_provider_test.py b/tests/providers/aws/aws_provider_test.py index ea37e58e82..f874ca8812 100644 --- a/tests/providers/aws/aws_provider_test.py +++ b/tests/providers/aws/aws_provider_test.py @@ -21,6 +21,7 @@ from prowler.providers.aws.config import ( AWS_STS_GLOBAL_ENDPOINT_REGION, BOTO3_USER_AGENT_EXTRA, ROLE_SESSION_NAME, + get_default_session_config, ) from prowler.providers.aws.exceptions.exceptions import ( AWSArgumentTypeValidationError, @@ -2242,6 +2243,12 @@ aws: assert session_config.user_agent_extra == BOTO3_USER_AGENT_EXTRA assert session_config.retries == {"max_attempts": 10, "mode": "standard"} + def test_get_default_session_config(self): + config = get_default_session_config() + + assert config.user_agent_extra == BOTO3_USER_AGENT_EXTRA + assert config.retries == {"max_attempts": 3, "mode": "standard"} + @mock_aws @patch( "prowler.lib.check.utils.recover_checks_from_provider", diff --git a/tests/providers/aws/lib/organizations/organizations_test.py b/tests/providers/aws/lib/organizations/organizations_test.py index 8c0def64ac..a2d8ad0441 100644 --- a/tests/providers/aws/lib/organizations/organizations_test.py +++ b/tests/providers/aws/lib/organizations/organizations_test.py @@ -4,6 +4,8 @@ import boto3 from botocore.exceptions import ClientError from moto import mock_aws +from prowler.providers.aws.aws_provider import AwsProvider +from prowler.providers.aws.config import BOTO3_USER_AGENT_EXTRA from prowler.providers.aws.lib.organizations.organizations import ( _get_ou_metadata, get_organizations_metadata, @@ -222,6 +224,20 @@ class Test_AWS_Organizations: assert tags == {} assert ou_metadata == {} + def test_get_organizations_metadata_uses_user_agent_extra(self): + real_session 
= boto3.Session() + real_session._session.set_default_client_config( + AwsProvider.set_session_config(None) + ) + wrapper = MagicMock(wraps=real_session) + + get_organizations_metadata("123456789012", wrapper) + + wrapper.client.assert_called_once() + default_config = real_session._session.get_default_client_config() + assert default_config is not None + assert BOTO3_USER_AGENT_EXTRA in default_config.user_agent_extra + def test_parse_organizations_metadata_with_empty_ou_metadata(self): tags = {"Tags": []} metadata = { diff --git a/tests/providers/aws/lib/service/service_test.py b/tests/providers/aws/lib/service/service_test.py index 452a2a93ff..c1cd4b05bc 100644 --- a/tests/providers/aws/lib/service/service_test.py +++ b/tests/providers/aws/lib/service/service_test.py @@ -1,5 +1,6 @@ from mock import patch +from prowler.providers.aws.config import BOTO3_USER_AGENT_EXTRA from prowler.providers.aws.lib.service.service import AWSService from tests.providers.aws.utils import ( AWS_ACCOUNT_ARN, @@ -189,6 +190,15 @@ class TestAWSService: == f"arn:{service.audited_partition}:{service_name}::{AWS_ACCOUNT_NUMBER}:bucket/unknown" ) + def test_AWSService_clients_carry_user_agent_extra(self): + provider = set_mocked_aws_provider() + + service = AWSService("s3", provider) + ad_hoc_client = service.session.client("ec2", AWS_REGION_US_EAST_1) + + assert BOTO3_USER_AGENT_EXTRA in service.client._client_config.user_agent_extra + assert BOTO3_USER_AGENT_EXTRA in ad_hoc_client._client_config.user_agent_extra + def test_AWSService_get_unknown_arn_resource_type_set_region(self): service_name = "s3" provider = set_mocked_aws_provider() diff --git a/tests/providers/aws/utils.py b/tests/providers/aws/utils.py index 2341b6b5fd..0b11798e1f 100644 --- a/tests/providers/aws/utils.py +++ b/tests/providers/aws/utils.py @@ -2,7 +2,6 @@ from argparse import Namespace from json import dumps from boto3 import client, session -from botocore.config import Config from moto import mock_aws from 
prowler.config.config import ( @@ -133,10 +132,11 @@ def set_mocked_aws_provider( provider = AwsProvider() # Mock Session - provider._session.session_config = None + session_config = AwsProvider.set_session_config(None) + provider._session.session_config = session_config provider._session.original_session = original_session provider._session.current_session = audit_session - provider._session.session_config = Config() + audit_session._session.set_default_client_config(session_config) # Mock Identity provider._identity.account = audited_account provider._identity.account_arn = audited_account_arn From 3ab9a4efa535008bf2f4fc96e17947a12b93e408 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pedro=20Mart=C3=ADn?= Date: Thu, 30 Apr 2026 14:13:40 +0200 Subject: [PATCH 05/29] chore(changelog): update with latest changes (#10948) --- prowler/CHANGELOG.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index cc5e6d47c8..4a017a47e2 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -8,9 +8,7 @@ All notable changes to the **Prowler SDK** are documented in this file. 
- `bedrock_guardrails_configured` check for AWS provider [(#10844)](https://github.com/prowler-cloud/prowler/pull/10844) - Universal compliance pipeline integrated into the CLI: `--list-compliance` and `--list-compliance-requirements` show universal frameworks, and CSV plus OCSF outputs are generated for any framework declaring a `TableConfig` [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301) - -### 🚀 Added -- ASD Essential Eight Maturity Model compliance framework for AWS provider, mapping 64 checks across all 8 controls [(#10808)](https://github.com/prowler-cloud/prowler/pull/10808) +- ASD Essential Eight Maturity Model compliance framework for AWS (Maturity Level One, Nov 2023) [(#10808)](https://github.com/prowler-cloud/prowler/pull/10808) ### 🔄 Changed From c802dc8a362d6b72ae2c56ed9cab847120f78272 Mon Sep 17 00:00:00 2001 From: Danny Lyubenov <23269399+DannyLyubenov@users.noreply.github.com> Date: Thu, 30 Apr 2026 16:19:21 +0100 Subject: [PATCH 06/29] feat(codebuild): use batched API calls to prevent throttling and false positives (#10639) Co-authored-by: Daniel Barranquero --- prowler/CHANGELOG.md | 1 + .../services/codebuild/codebuild_service.py | 112 ++++++++++++++---- .../codebuild/codebuild_service_test.py | 97 ++++++++++++++- 3 files changed, 188 insertions(+), 22 deletions(-) diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index 4a017a47e2..cdf166674b 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -15,6 +15,7 @@ All notable changes to the **Prowler SDK** are documented in this file. 
- `route53_dangling_ip_subdomain_takeover` now also flags `CNAME` records pointing to S3 website endpoints whose buckets are missing from the account [(#10920)](https://github.com/prowler-cloud/prowler/pull/10920) - Azure Network Watcher flow log checks now require workspace-backed Traffic Analytics for `network_flow_log_captured_sent` and align metadata with VNet-compatible flow log guidance [(#10645)](https://github.com/prowler-cloud/prowler/pull/10645) - Azure compliance entries for legacy Network Watcher flow log controls now use retirement-aware guidance and point new deployments to VNet flow logs +- AWS CodeBuild service now batches `BatchGetProjects` and `BatchGetBuilds` calls per region (up to 100 items per call) to reduce API call volume and prevent throttling-induced false positives in `codebuild_project_not_publicly_accessible` [(#10639)](https://github.com/prowler-cloud/prowler/pull/10639) - `display_compliance_table` dispatch switched from substring `in` checks to `startswith` to prevent false matches between similarly named frameworks (e.g. 
`cisa` vs `cis`) [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301) ### 🐞 Fixed diff --git a/prowler/providers/aws/services/codebuild/codebuild_service.py b/prowler/providers/aws/services/codebuild/codebuild_service.py index 361002aa65..d4d085065d 100644 --- a/prowler/providers/aws/services/codebuild/codebuild_service.py +++ b/prowler/providers/aws/services/codebuild/codebuild_service.py @@ -1,4 +1,5 @@ import datetime +from concurrent.futures import as_completed from typing import List, Optional from pydantic.v1 import BaseModel @@ -14,9 +15,9 @@ class Codebuild(AWSService): super().__init__(__class__.__name__, provider) self.projects = {} self.__threading_call__(self._list_projects) - self.__threading_call__(self._list_builds_for_project, self.projects.values()) - self.__threading_call__(self._batch_get_builds, self.projects.values()) - self.__threading_call__(self._batch_get_projects, self.projects.values()) + self.__threading_call__(self._list_builds_for_project) + self.__threading_call__(self._batch_get_builds) + self.__threading_call__(self._batch_get_projects) self.report_groups = {} self.__threading_call__(self._list_report_groups) self.__threading_call__( @@ -44,10 +45,8 @@ class Codebuild(AWSService): f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) - def _list_builds_for_project(self, project): - logger.info("Codebuild - Listing builds...") + def _fetch_project_last_build(self, regional_client, project): try: - regional_client = self.regional_clients[project.region] build_ids = regional_client.list_builds_for_project( projectName=project.name ).get("ids", []) @@ -58,28 +57,99 @@ class Codebuild(AWSService): f"{project.region}: {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) - def _batch_get_builds(self, project): - logger.info("Codebuild - Getting builds...") + def _list_builds_for_project(self, regional_client): + logger.info("Codebuild - Listing builds...") try: - 
if project.last_build and project.last_build.id: - regional_client = self.regional_clients[project.region] - builds_by_id = regional_client.batch_get_builds( - ids=[project.last_build.id] - ).get("builds", []) - if len(builds_by_id) > 0: - project.last_invoked_time = builds_by_id[0].get("endTime") + regional_projects = [ + project + for project in self.projects.values() + if project.region == regional_client.region + ] + + # list_builds_for_project has no batch API equivalent, so reuse the + # shared thread pool to issue per-project calls in parallel within + # this region — preserving the wall-clock performance of the + # previous implementation. + futures = [ + self.thread_pool.submit( + self._fetch_project_last_build, regional_client, project + ) + for project in regional_projects + ] + for future in as_completed(futures): + try: + future.result() + except Exception: + pass except Exception as error: logger.error( - f"{regional_client.region}: {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) - def _batch_get_projects(self, project): + def _batch_get_builds(self, regional_client): + logger.info("Codebuild - Getting builds...") + try: + # Collect all build IDs for this region + build_id_to_project = {} + for project in self.projects.values(): + if ( + project.region == regional_client.region + and project.last_build + and project.last_build.id + ): + build_id_to_project[project.last_build.id] = project + + if not build_id_to_project: + return + + build_ids = list(build_id_to_project.keys()) + + # batch_get_builds supports up to 100 IDs per call + for i in range(0, len(build_ids), 100): + batch = build_ids[i : i + 100] + response = regional_client.batch_get_builds(ids=batch) + for build_info in response.get("builds", []): + build_id = build_info.get("id") + if build_id in build_id_to_project: + end_time = build_info.get("endTime") + if 
end_time: + build_id_to_project[build_id].last_invoked_time = end_time + except Exception as error: + logger.error( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + def _batch_get_projects(self, regional_client): logger.info("Codebuild - Getting projects...") try: - regional_client = self.regional_clients[project.region] - project_info = regional_client.batch_get_projects(names=[project.name])[ - "projects" - ][0] + # Collect all project names for this region + regional_projects = { + arn: project + for arn, project in self.projects.items() + if project.region == regional_client.region + } + if not regional_projects: + return + + project_names = [project.name for project in regional_projects.values()] + + # batch_get_projects supports up to 100 names per call + for i in range(0, len(project_names), 100): + batch = project_names[i : i + 100] + response = regional_client.batch_get_projects(names=batch) + for project_info in response.get("projects", []): + project_arn = project_info.get("arn") + if project_arn in regional_projects: + self._parse_project_info( + regional_projects[project_arn], project_info + ) + except Exception as error: + logger.error( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + def _parse_project_info(self, project, project_info): + try: project.buildspec = project_info["source"].get("buildspec") if project_info["source"]["type"] != "NO_SOURCE": project.source = Source( diff --git a/tests/providers/aws/services/codebuild/codebuild_service_test.py b/tests/providers/aws/services/codebuild/codebuild_service_test.py index 13bf7b31fb..5b9820cc47 100644 --- a/tests/providers/aws/services/codebuild/codebuild_service_test.py +++ b/tests/providers/aws/services/codebuild/codebuild_service_test.py @@ -45,11 +45,12 @@ def mock_make_api_call(self, operation_name, kwarg): elif operation_name == "ListBuildsForProject": return {"ids": 
[build_id]} elif operation_name == "BatchGetBuilds": - return {"builds": [{"endTime": last_invoked_time}]} + return {"builds": [{"id": build_id, "endTime": last_invoked_time}]} elif operation_name == "BatchGetProjects": return { "projects": [ { + "arn": project_arn, "source": { "type": source_type, "location": bitbucket_url, @@ -230,3 +231,97 @@ class Test_Codebuild_Service: assert ( codebuild.report_groups[report_group_arn].tags[0]["value"] == project_name ) + + +# Module-level state and helpers used by the chunking/out-of-order test below. +# Kept at module level so the API-call mock is a plain function rather than a +# closure defined inside the test method. +TOTAL_PROJECTS = 150 +many_project_names = [f"project-{i}" for i in range(TOTAL_PROJECTS)] +many_project_arns = [ + f"arn:{AWS_COMMERCIAL_PARTITION}:codebuild:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:project/{name}" + for name in many_project_names +] +many_build_ids_for = {name: f"{name}:build-id" for name in many_project_names} +many_end_times_for = { + name: datetime.now() - timedelta(days=i) + for i, name in enumerate(many_project_names) +} +many_name_by_build_id = {v: k for k, v in many_build_ids_for.items()} +many_batch_call_sizes = {"BatchGetProjects": [], "BatchGetBuilds": []} + + +def mock_make_api_call_many_projects(self, operation_name, kwarg): + if operation_name == "ListProjects": + return {"projects": many_project_names} + if operation_name == "ListBuildsForProject": + return {"ids": [many_build_ids_for[kwarg["projectName"]]]} + if operation_name == "BatchGetBuilds": + ids = kwarg["ids"] + many_batch_call_sizes["BatchGetBuilds"].append(len(ids)) + # Reverse the response order to verify id->project mapping does not + # depend on response ordering. 
+ builds = [ + {"id": bid, "endTime": many_end_times_for[many_name_by_build_id[bid]]} + for bid in reversed(ids) + ] + return {"builds": builds} + if operation_name == "BatchGetProjects": + names = kwarg["names"] + many_batch_call_sizes["BatchGetProjects"].append(len(names)) + # Reverse the response order to verify arn->project mapping does not + # depend on response ordering. + projects = [ + { + "arn": f"arn:{AWS_COMMERCIAL_PARTITION}:codebuild:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:project/{name}", + "source": {"type": "NO_SOURCE"}, + "logsConfig": {}, + "tags": [], + "projectVisibility": "PRIVATE", + } + for name in reversed(names) + ] + return {"projects": projects} + if operation_name == "ListReportGroups": + return {"reportGroups": []} + return make_api_call(self, operation_name, kwarg) + + +class Test_Codebuild_Service_Batching: + @patch( + "botocore.client.BaseClient._make_api_call", + new=mock_make_api_call_many_projects, + ) + @patch( + "prowler.providers.aws.aws_provider.AwsProvider.generate_regional_clients", + new=mock_generate_regional_clients, + ) + @mock_aws + def test_codebuild_batches_chunks_over_100_projects_and_maps_out_of_order_responses( + self, + ): + """Verify _batch_get_projects/_batch_get_builds chunk in groups of 100 + and correctly map out-of-order batch responses back to the right + project using `arn`/`id`. + """ + # Reset the per-test recorder (module-level state survives across runs). + many_batch_call_sizes["BatchGetProjects"].clear() + many_batch_call_sizes["BatchGetBuilds"].clear() + + codebuild = Codebuild(set_mocked_aws_provider([AWS_REGION_EU_WEST_1])) + + # Verify chunking: 150 items -> two batches of 100 and 50. + assert sorted(many_batch_call_sizes["BatchGetProjects"]) == [50, 100] + assert sorted(many_batch_call_sizes["BatchGetBuilds"]) == [50, 100] + + # Verify all projects were tracked. 
+ assert len(codebuild.projects) == TOTAL_PROJECTS + + # Verify out-of-order responses were correctly mapped back to the + # right project by `arn` (projects) and `id` (builds). + for name, arn in zip(many_project_names, many_project_arns): + project = codebuild.projects[arn] + assert project.name == name + assert project.project_visibility == "PRIVATE" + assert project.last_build == Build(id=many_build_ids_for[name]) + assert project.last_invoked_time == many_end_times_for[name] From 8db3a896697e3e5d79b8f5cef1c885a3f5f5b954 Mon Sep 17 00:00:00 2001 From: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com> Date: Thu, 30 Apr 2026 17:07:25 +0100 Subject: [PATCH 07/29] ci: remove andoniaf from prowler-cloud (#10926) --- .github/workflows/labeler.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index 621a763da5..d55e60ae88 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -62,7 +62,7 @@ jobs: "Alan-TheGentleman" "alejandrobailo" "amitsharm" - "andoniaf" + # "andoniaf" "cesararroba" "danibarranqueroo" "HugoPBrito" From 40dd0e640b72ee2b9990c86e62103dc745c6bf7a Mon Sep 17 00:00:00 2001 From: Andoni Alonso <14891798+andoniaf@users.noreply.github.com> Date: Mon, 4 May 2026 08:37:46 +0200 Subject: [PATCH 08/29] fix(sdk): strip http(s):// scheme from image registry URLs (#10950) --- prowler/CHANGELOG.md | 1 + prowler/providers/image/image_provider.py | 20 ++++-- tests/providers/image/image_provider_test.py | 69 +++++++++++++++++++- 3 files changed, 84 insertions(+), 6 deletions(-) diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index cdf166674b..d49c7eb43c 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -22,6 +22,7 @@ All notable changes to the **Prowler SDK** are documented in this file. 
- AWS SDK test isolation: autouse `mock_aws` fixture and leak detector in `conftest.py` to prevent tests from hitting real AWS endpoints, with idempotent organization setup for tests calling `set_mocked_aws_provider` multiple times [(#10605)](https://github.com/prowler-cloud/prowler/pull/10605) - AWS `boto` user agent extra is now applied to every client [(#10944)](https://github.com/prowler-cloud/prowler/pull/10944) +- Image provider connection check no longer fails with a misleading `host='https'` resolution error when the registry URL includes an `http://` or `https://` scheme prefix [(#10950)](https://github.com/prowler-cloud/prowler/pull/10950) ### 🔐 Security diff --git a/prowler/providers/image/image_provider.py b/prowler/providers/image/image_provider.py index 7c9a9cbb08..07d02c04c9 100644 --- a/prowler/providers/image/image_provider.py +++ b/prowler/providers/image/image_provider.py @@ -329,12 +329,21 @@ class ImageProvider(Provider): """Image provider doesn't need a session since it uses Trivy directly""" return None + @staticmethod + def _strip_scheme(value: str) -> str: + """Remove a leading http:// or https:// scheme from a registry input.""" + for prefix in ("https://", "http://"): + if value.lower().startswith(prefix): + return value[len(prefix) :] + return value + @staticmethod def _extract_registry(image: str) -> str | None: """Extract registry hostname from an image reference. Returns None for Docker Hub images (no registry prefix). """ + image = ImageProvider._strip_scheme(image) parts = image.split("/") if len(parts) >= 2 and ("." in parts[0] or ":" in parts[0]): return parts[0] @@ -348,6 +357,7 @@ class ImageProvider(Provider): or "myregistry.com:5000" are registry URLs (dots in host, no slash). Image references like "alpine:3.18" or "nginx" are not. """ + image_uid = ImageProvider._strip_scheme(image_uid) if "/" not in image_uid: host_part = image_uid.split(":")[0] if "." 
in host_part: @@ -835,11 +845,9 @@ class ImageProvider(Provider): image_ref = f"{repo}:{tag}" else: # OCI registries need the full host/repo:tag reference - registry_host = self.registry.rstrip("/") - for prefix in ("https://", "http://"): - if registry_host.startswith(prefix): - registry_host = registry_host[len(prefix) :] - break + registry_host = ImageProvider._strip_scheme( + self.registry.rstrip("/") + ) image_ref = f"{registry_host}/{repo}:{tag}" discovered_images.append(image_ref) @@ -977,6 +985,8 @@ class ImageProvider(Provider): if not image: return Connection(is_connected=False, error="Image name is required") + image = ImageProvider._strip_scheme(image) + # Registry URL (bare hostname) → test via OCI catalog if ImageProvider._is_registry_url(image): return ImageProvider._test_registry_connection( diff --git a/tests/providers/image/image_provider_test.py b/tests/providers/image/image_provider_test.py index 68b0c5d7a2..4462df6beb 100644 --- a/tests/providers/image/image_provider_test.py +++ b/tests/providers/image/image_provider_test.py @@ -7,6 +7,7 @@ from unittest.mock import MagicMock, patch import pytest from prowler.lib.check.models import CheckReportImage +from prowler.providers.common.provider import Provider from prowler.providers.image.exceptions.exceptions import ( ImageInvalidConfigScannerError, ImageInvalidNameError, @@ -20,7 +21,6 @@ from prowler.providers.image.exceptions.exceptions import ( ImageScanError, ImageTrivyBinaryNotFoundError, ) -from prowler.providers.common.provider import Provider from prowler.providers.image.image_provider import ImageProvider from tests.providers.image.image_fixtures import ( SAMPLE_IMAGE_SHA, @@ -345,6 +345,24 @@ class TestImageProvider: ) mock_adapter.list_repositories.assert_called_once() + @patch("prowler.providers.image.image_provider.create_registry_adapter") + def test_test_connection_registry_url_with_https_scheme(self, mock_factory): + """Registry URL with https:// scheme is normalised before adapter 
creation.""" + mock_adapter = MagicMock() + mock_adapter.list_repositories.return_value = ["repo1"] + mock_factory.return_value = mock_adapter + + result = ImageProvider.test_connection(image="https://my-registry.example.com") + + assert result.is_connected is True + mock_factory.assert_called_once_with( + registry_url="my-registry.example.com", + username=None, + password=None, + token=None, + ) + mock_adapter.list_repositories.assert_called_once() + def test_build_status_extended(self): """Test status message content for different finding types.""" provider = _make_provider() @@ -659,6 +677,27 @@ class TestImageProviderRegistryAuth: assert "Docker login" in output +class TestStripScheme: + @pytest.mark.parametrize( + "raw,expected", + [ + ("https://my-registry.example.com", "my-registry.example.com"), + ("http://my-registry.example.com", "my-registry.example.com"), + ("HTTPS://My-Registry.Example.Com", "My-Registry.Example.Com"), + ("Http://localhost:5000", "localhost:5000"), + ("my-registry.example.com", "my-registry.example.com"), + ("https://", ""), + ("https://https://nested.example.com", "https://nested.example.com"), + ( + "ftp://not-a-supported-scheme.example.com", + "ftp://not-a-supported-scheme.example.com", + ), + ], + ) + def test_strip_scheme(self, raw, expected): + assert ImageProvider._strip_scheme(raw) == expected + + class TestExtractRegistry: def test_docker_hub_simple(self): assert ImageProvider._extract_registry("alpine:3.18") is None @@ -698,6 +737,24 @@ class TestExtractRegistry: def test_bare_image_name(self): assert ImageProvider._extract_registry("nginx") is None + def test_https_scheme_bare_hostname_returns_none(self): + """Bare scheme-prefixed hostname has no image path, so no registry is extracted.""" + assert ( + ImageProvider._extract_registry("https://my-registry.example.com") is None + ) + + def test_http_scheme_with_port_stripped(self): + assert ( + ImageProvider._extract_registry("http://localhost:5000/myimage:latest") + == 
"localhost:5000" + ) + + def test_https_scheme_with_path_stripped(self): + assert ( + ImageProvider._extract_registry("https://ghcr.io/org/image:tag") + == "ghcr.io" + ) + class TestIsRegistryUrl: def test_bare_ecr_hostname(self): @@ -728,6 +785,16 @@ class TestIsRegistryUrl: def test_dockerhub_namespace(self): assert not ImageProvider._is_registry_url("library/alpine") + def test_https_scheme_bare_hostname(self): + assert ImageProvider._is_registry_url("https://my-registry.example.com") + + def test_http_scheme_bare_hostname_with_port(self): + assert ImageProvider._is_registry_url("http://my-registry.example.com:5000") + + def test_https_scheme_image_reference_not_registry(self): + """A scheme-prefixed full image reference is still an image, not a registry URL.""" + assert not ImageProvider._is_registry_url("https://ghcr.io/myorg/repo:tag") + class TestTestRegistryConnection: @patch("prowler.providers.image.image_provider.create_registry_adapter") From 86449fb99d0b4ffdbce827e69965422e4e7d21d5 Mon Sep 17 00:00:00 2001 From: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com> Date: Mon, 4 May 2026 08:56:50 +0200 Subject: [PATCH 09/29] chore(vercel): add disclaimer for checks depending on billing plan (#10663) --- .../check-metadata-guidelines.mdx | 3 + docs/developer-guide/checks.mdx | 2 +- .../vercel/getting-started-vercel.mdx | 22 +++++ prowler/CHANGELOG.md | 1 + prowler/lib/check/models.py | 28 +++--- prowler/providers/vercel/lib/billing.py | 27 ++++++ .../providers/vercel/lib/service/service.py | 6 +- prowler/providers/vercel/models.py | 19 ++++ ...thentication_no_stale_tokens.metadata.json | 3 +- ...entication_token_not_expired.metadata.json | 3 +- ...roduction_uses_stable_target.metadata.json | 3 +- ...main_dns_properly_configured.metadata.json | 3 +- ...domain_ssl_certificate_valid.metadata.json | 3 +- .../domain_verified.metadata.json | 3 +- ...o_expose_system_env_disabled.metadata.json | 3 +- ...eployment_protection_enabled.metadata.json 
| 3 +- ...t_directory_listing_disabled.metadata.json | 3 +- ...nment_no_overly_broad_target.metadata.json | 3 +- ...ent_no_secrets_in_plain_type.metadata.json | 3 +- ...oduction_vars_not_in_preview.metadata.json | 3 +- ..._git_fork_protection_enabled.metadata.json | 3 +- ..._password_protection_enabled.metadata.json | 5 +- .../project_password_protection_enabled.py | 2 + ...eployment_protection_enabled.metadata.json | 5 +- ...roduction_deployment_protection_enabled.py | 2 + .../services/project/project_service.py | 11 ++- ...ject_skew_protection_enabled.metadata.json | 5 +- .../project_skew_protection_enabled.py | 2 + ...rity_custom_rules_configured.metadata.json | 5 +- .../security_custom_rules_configured.py | 12 ++- ...ip_blocking_rules_configured.metadata.json | 5 +- .../security_ip_blocking_rules_configured.py | 12 ++- ...ity_managed_rulesets_enabled.metadata.json | 11 ++- .../security_managed_rulesets_enabled.py | 13 ++- ...ity_rate_limiting_configured.metadata.json | 5 +- .../security_rate_limiting_configured.py | 12 ++- .../services/security/security_service.py | 12 ++- .../security_waf_enabled.metadata.json | 5 +- .../security_waf_enabled.py | 9 +- .../team_directory_sync_enabled.metadata.json | 5 +- .../team_directory_sync_enabled.py | 2 + ..._member_role_least_privilege.metadata.json | 3 +- .../team_no_stale_invitations.metadata.json | 3 +- .../team_saml_sso_enabled.metadata.json | 5 +- .../team_saml_sso_enabled.py | 2 + .../team_saml_sso_enforced.metadata.json | 5 +- .../team_saml_sso_enforced.py | 2 + .../vercel/services/team/team_service.py | 3 + prowler/providers/vercel/vercel_provider.py | 5 + tests/lib/check/models_test.py | 46 ++++++++- .../vercel/lib/service/vercel_service_test.py | 29 ++++++ ...roject_password_protection_enabled_test.py | 38 ++++++++ ...tion_deployment_protection_enabled_test.py | 38 ++++++++ .../services/project/project_service_test.py | 67 +++++++++++++ .../project_skew_protection_enabled_test.py | 38 ++++++++ 
.../security_custom_rules_configured_test.py | 38 ++++++++ ...urity_ip_blocking_rules_configured_test.py | 38 ++++++++ .../security_managed_rulesets_enabled_test.py | 42 +++++++- .../security_rate_limiting_configured_test.py | 38 ++++++++ .../security/security_service_test.py | 8 +- .../security_waf_enabled_test.py | 80 +++++++++++++++ .../team_directory_sync_enabled_test.py | 38 ++++++++ .../team_saml_sso_enabled_test.py | 39 ++++++++ .../team_saml_sso_enforced_test.py | 38 ++++++++ tests/providers/vercel/vercel_fixtures.py | 19 +++- .../providers/vercel/vercel_metadata_test.py | 97 +++++++++++++++++++ 66 files changed, 968 insertions(+), 78 deletions(-) create mode 100644 prowler/providers/vercel/lib/billing.py create mode 100644 tests/providers/vercel/lib/service/vercel_service_test.py create mode 100644 tests/providers/vercel/vercel_metadata_test.py diff --git a/docs/developer-guide/check-metadata-guidelines.mdx b/docs/developer-guide/check-metadata-guidelines.mdx index bac16f6fc0..85b84620d5 100644 --- a/docs/developer-guide/check-metadata-guidelines.mdx +++ b/docs/developer-guide/check-metadata-guidelines.mdx @@ -215,3 +215,6 @@ Also is important to keep all code examples as short as possible, including the | e5 | M365 and Azure Entra checks enabled by or dependent on an E5 license (e.g., advanced threat protection, audit, DLP, and eDiscovery) | | privilege-escalation | Detects IAM policies or permissions that allow identities to elevate their privileges beyond their intended scope, potentially gaining administrator or higher-level access through specific action combinations | | ec2-imdsv1 | Identifies EC2 instances using Instance Metadata Service version 1 (IMDSv1), which is vulnerable to SSRF attacks and should be replaced with IMDSv2 for enhanced security | +| vercel-hobby-plan | Vercel checks whose audited feature is available on the Hobby plan (and therefore also on Pro and Enterprise plans) | +| vercel-pro-plan | Vercel checks whose audited feature 
requires a Pro plan or higher, including features also available on Enterprise or via supported paid add-ons for Pro plans | +| vercel-enterprise-plan | Vercel checks whose audited feature requires the Enterprise plan | diff --git a/docs/developer-guide/checks.mdx b/docs/developer-guide/checks.mdx index da469678fa..956dc293d1 100644 --- a/docs/developer-guide/checks.mdx +++ b/docs/developer-guide/checks.mdx @@ -387,7 +387,7 @@ Provides both code examples and best practice recommendations for addressing the #### Categories -One or more functional groupings used for execution filtering (e.g., `internet-exposed`). You can define new categories just by adding to this field. +One or more functional groupings used for execution filtering (e.g., `internet-exposed`). Categories must match the predefined values enforced by `CheckMetadata`; adding a new category requires updating the validator and the metadata documentation. For the complete list of available categories, see [Categories Guidelines](/developer-guide/check-metadata-guidelines#categories-guidelines). diff --git a/docs/user-guide/providers/vercel/getting-started-vercel.mdx b/docs/user-guide/providers/vercel/getting-started-vercel.mdx index ddec26e6dc..8a6fdecc22 100644 --- a/docs/user-guide/providers/vercel/getting-started-vercel.mdx +++ b/docs/user-guide/providers/vercel/getting-started-vercel.mdx @@ -160,3 +160,25 @@ Prowler for Vercel includes security checks across the following services: | **Project** | Deployment protection, environment variable security, fork protection, and skew protection | | **Security** | Web Application Firewall (WAF), rate limiting, IP blocking, and managed rulesets | | **Team** | SSO enforcement, directory sync, member access, and invitation hygiene | + +## Checks With Explicit Plan-Based Behavior + +Prowler currently includes 26 Vercel checks. The 11 checks below have explicit billing-plan handling in the provider metadata or check logic. 
When the scanned scope reports a billing plan, Prowler adds plan-aware context to findings for these checks. If the API does not expose the required configuration, Prowler may return `MANUAL` and require verification in the Vercel dashboard. + +| Check ID | Hobby | Pro | Enterprise | Notes | +|----------|-------|-----|------------|-------| +| `project_password_protection_enabled` | Not available | Available as a paid add-on | Available | Checks password protection for deployments | +| `project_production_deployment_protection_enabled` | Not available | Available with supported paid deployment protection options | Available | Checks protection for production deployments | +| `project_skew_protection_enabled` | Not available | Available | Available | Checks skew protection during rollouts | +| `security_custom_rules_configured` | Not available | Available | Available | Returns `MANUAL` when the firewall configuration cannot be assessed from the API | +| `security_ip_blocking_rules_configured` | Not available | Available | Available | Returns `MANUAL` when the firewall configuration cannot be assessed from the API | +| `team_saml_sso_enabled` | Not available | Available | Available | Checks team SAML SSO configuration | +| `team_saml_sso_enforced` | Not available | Available | Available | Checks SAML SSO enforcement for all team members | +| `team_directory_sync_enabled` | Not available | Not available | Available | Checks SCIM directory sync | +| `security_managed_rulesets_enabled` | Bot Protection and AI Bots managed rulesets | Bot Protection and AI Bots managed rulesets | All managed rulesets, including OWASP Core Ruleset | Returns `MANUAL` when the firewall configuration cannot be assessed from the API | +| `security_rate_limiting_configured` | Not available | Available | Available | Returns `MANUAL` when the firewall configuration cannot be assessed from the API | +| `security_waf_enabled` | Not available | Available | Available | Returns `MANUAL` when the 
firewall configuration cannot be assessed from the API | + + +The five firewall-related checks (`security_waf_enabled`, `security_custom_rules_configured`, `security_ip_blocking_rules_configured`, `security_rate_limiting_configured`, and `security_managed_rulesets_enabled`) return `MANUAL` when the firewall configuration endpoint is not accessible from the API. The other 15 current Vercel checks do not currently include plan-specific handling in provider logic, but every Vercel check includes exactly one billing-plan metadata category (`vercel-hobby-plan`, `vercel-pro-plan`, or `vercel-enterprise-plan`) alongside its functional security category. + diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index d49c7eb43c..643aeb518e 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -9,6 +9,7 @@ All notable changes to the **Prowler SDK** are documented in this file. - `bedrock_guardrails_configured` check for AWS provider [(#10844)](https://github.com/prowler-cloud/prowler/pull/10844) - Universal compliance pipeline integrated into the CLI: `--list-compliance` and `--list-compliance-requirements` show universal frameworks, and CSV plus OCSF outputs are generated for any framework declaring a `TableConfig` [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301) - ASD Essential Eight Maturity Model compliance framework for AWS (Maturity Level One, Nov 2023) [(#10808)](https://github.com/prowler-cloud/prowler/pull/10808) +- Update Vercel checks to return personalized finding status extended depending on billing plan and classify them with billing-plan categories [(#10663)](https://github.com/prowler-cloud/prowler/pull/10663) ### 🔄 Changed diff --git a/prowler/lib/check/models.py b/prowler/lib/check/models.py index dba664d298..f5f0ff87e8 100644 --- a/prowler/lib/check/models.py +++ b/prowler/lib/check/models.py @@ -62,6 +62,9 @@ VALID_CATEGORIES = frozenset( "e5", "privilege-escalation", "ec2-imdsv1", + "vercel-hobby-plan", + "vercel-pro-plan", + 
"vercel-enterprise-plan", } ) @@ -244,14 +247,15 @@ class CheckMetadata(BaseModel): # store the compliance later if supplied Compliance: Optional[list[Any]] = Field(default_factory=list) + # TODO: Remove noqa and fix cls vulture errors @validator("Categories", each_item=True, pre=True, always=True) - def valid_category(cls, value, values): + def valid_category(cls, value, values): # noqa: F841 if not isinstance(value, str): raise ValueError("Categories must be a list of strings") value_lower = value.lower() if not re.match("^[a-z0-9-]+$", value_lower): raise ValueError( - f"Invalid category: {value}. Categories can only contain lowercase letters, numbers and hyphen '-'" + f"Invalid category: {value}. Categories can only contain lowercase letters, numbers, and hyphen '-'" ) if ( value_lower not in VALID_CATEGORIES @@ -279,7 +283,7 @@ class CheckMetadata(BaseModel): return resource_type @validator("ServiceName", pre=True, always=True) - def validate_service_name(cls, service_name, values): + def validate_service_name(cls, service_name, values): # noqa: F841 if not service_name: raise ValueError("ServiceName must be a non-empty string") @@ -296,7 +300,7 @@ class CheckMetadata(BaseModel): return service_name @validator("CheckID", pre=True, always=True) - def valid_check_id(cls, check_id, values): + def valid_check_id(cls, check_id, values): # noqa: F841 if not check_id: raise ValueError("CheckID must be a non-empty string") @@ -309,7 +313,7 @@ class CheckMetadata(BaseModel): return check_id @validator("CheckTitle", pre=True, always=True) - def validate_check_title(cls, check_title, values): + def validate_check_title(cls, check_title, values): # noqa: F841 if values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS: if len(check_title) > 150: raise ValueError( @@ -322,13 +326,13 @@ class CheckMetadata(BaseModel): return check_title @validator("RelatedUrl", pre=True, always=True) - def validate_related_url(cls, related_url, values): + def validate_related_url(cls, 
related_url, values): # noqa: F841 if related_url and values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS: raise ValueError("RelatedUrl must be empty. This field is deprecated.") return related_url @validator("Remediation") - def validate_recommendation_url(cls, remediation, values): + def validate_recommendation_url(cls, remediation, values): # noqa: F841 if values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS: url = remediation.Recommendation.Url if url and not url.startswith("https://hub.prowler.com/"): @@ -338,7 +342,7 @@ class CheckMetadata(BaseModel): return remediation @validator("CheckType", pre=True, always=True) - def validate_check_type(cls, check_type, values): + def validate_check_type(cls, check_type, values): # noqa: F841 provider = values.get("Provider", "").lower() # Non-AWS providers must have an empty CheckType list @@ -367,7 +371,7 @@ class CheckMetadata(BaseModel): return check_type @validator("Description", pre=True, always=True) - def validate_description(cls, description, values): + def validate_description(cls, description, values): # noqa: F841 if values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS: if len(description) > 400: raise ValueError( @@ -376,7 +380,7 @@ class CheckMetadata(BaseModel): return description @validator("Risk", pre=True, always=True) - def validate_risk(cls, risk, values): + def validate_risk(cls, risk, values): # noqa: F841 if values.get("Provider") not in EXTERNAL_TOOL_PROVIDERS: if len(risk) > 400: raise ValueError( @@ -385,7 +389,7 @@ class CheckMetadata(BaseModel): return risk @validator("ResourceGroup", pre=True, always=True) - def validate_resource_group(cls, resource_group): + def validate_resource_group(cls, resource_group): # noqa: F841 if resource_group and resource_group not in VALID_RESOURCE_GROUPS: raise ValueError( f"Invalid ResourceGroup: '{resource_group}'. Must be one of: {', '.join(sorted(VALID_RESOURCE_GROUPS))} or empty string." 
@@ -393,7 +397,7 @@ class CheckMetadata(BaseModel): return resource_group @validator("AdditionalURLs", pre=True, always=True) - def validate_additional_urls(cls, additional_urls): + def validate_additional_urls(cls, additional_urls): # noqa: F841 if not isinstance(additional_urls, list): raise ValueError("AdditionalURLs must be a list") diff --git a/prowler/providers/vercel/lib/billing.py b/prowler/providers/vercel/lib/billing.py new file mode 100644 index 0000000000..4e7170dbe6 --- /dev/null +++ b/prowler/providers/vercel/lib/billing.py @@ -0,0 +1,27 @@ +from typing import Optional + + +def extract_billing_plan(data: Optional[dict]) -> Optional[str]: + """Return the Vercel billing plan from a user or team payload. + + Vercel's REST API consistently returns the plan identifier at + ``data["billing"]["plan"]`` (e.g. ``"hobby"``, ``"pro"``, ``"enterprise"``) + on both ``GET /v2/user`` and ``GET /v2/teams`` responses, even though the + field is not part of the public OpenAPI schema. + """ + if not isinstance(data, dict): + return None + billing = data.get("billing") + if not isinstance(billing, dict): + return None + plan = billing.get("plan") + return plan.lower() if isinstance(plan, str) else None + + +def plan_reason_suffix( + billing_plan: Optional[str], unsupported_plans: set[str], explanation: str +) -> str: + """Return a plan-based explanation suffix only when the plan proves it.""" + if billing_plan in unsupported_plans: + return f" This may be expected because {explanation}" + return "" diff --git a/prowler/providers/vercel/lib/service/service.py b/prowler/providers/vercel/lib/service/service.py index aaf4d2625c..7fd9ced264 100644 --- a/prowler/providers/vercel/lib/service/service.py +++ b/prowler/providers/vercel/lib/service/service.py @@ -84,10 +84,10 @@ class VercelService: ) if response.status_code == 403: - # Plan limitation or permission error — return None for graceful handling - logger.warning( + # Endpoint unavailable for this token/scope; let checks 
handle it gracefully + logger.info( f"{self.service} - Access denied for {path} (403). " - "This may be a plan limitation." + "This may be caused by plan or permission restrictions." ) return None diff --git a/prowler/providers/vercel/models.py b/prowler/providers/vercel/models.py index 5f0e207f19..5f730bde76 100644 --- a/prowler/providers/vercel/models.py +++ b/prowler/providers/vercel/models.py @@ -21,6 +21,7 @@ class VercelTeamInfo(BaseModel): id: str name: str slug: str + billing_plan: Optional[str] = None class VercelIdentityInfo(BaseModel): @@ -29,9 +30,27 @@ class VercelIdentityInfo(BaseModel): user_id: Optional[str] = None username: Optional[str] = None email: Optional[str] = None + billing_plan: Optional[str] = None team: Optional[VercelTeamInfo] = None teams: list[VercelTeamInfo] = Field(default_factory=list) + def get_billing_plan_for(self, scope_id: Optional[str]) -> Optional[str]: + """Return the billing plan for an explicit user or team scope.""" + if not scope_id: + return None + + if self.team and self.team.id == scope_id and self.team.billing_plan: + return self.team.billing_plan + + for team in self.teams: + if team.id == scope_id: + return team.billing_plan + + if self.user_id == scope_id: + return self.billing_plan + + return None + class VercelOutputOptions(ProviderOutputOptions): """Customize output filenames for Vercel scans.""" diff --git a/prowler/providers/vercel/services/authentication/authentication_no_stale_tokens/authentication_no_stale_tokens.metadata.json b/prowler/providers/vercel/services/authentication/authentication_no_stale_tokens/authentication_no_stale_tokens.metadata.json index f4863ada5f..d2292216f3 100644 --- a/prowler/providers/vercel/services/authentication/authentication_no_stale_tokens/authentication_no_stale_tokens.metadata.json +++ b/prowler/providers/vercel/services/authentication/authentication_no_stale_tokens/authentication_no_stale_tokens.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "trust-boundaries" + 
"trust-boundaries", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ diff --git a/prowler/providers/vercel/services/authentication/authentication_token_not_expired/authentication_token_not_expired.metadata.json b/prowler/providers/vercel/services/authentication/authentication_token_not_expired/authentication_token_not_expired.metadata.json index 47e5087cf5..c3288d968d 100644 --- a/prowler/providers/vercel/services/authentication/authentication_token_not_expired/authentication_token_not_expired.metadata.json +++ b/prowler/providers/vercel/services/authentication/authentication_token_not_expired/authentication_token_not_expired.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ diff --git a/prowler/providers/vercel/services/deployment/deployment_production_uses_stable_target/deployment_production_uses_stable_target.metadata.json b/prowler/providers/vercel/services/deployment/deployment_production_uses_stable_target/deployment_production_uses_stable_target.metadata.json index 25416e6882..bb9b1a3b60 100644 --- a/prowler/providers/vercel/services/deployment/deployment_production_uses_stable_target/deployment_production_uses_stable_target.metadata.json +++ b/prowler/providers/vercel/services/deployment/deployment_production_uses_stable_target/deployment_production_uses_stable_target.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [], diff --git a/prowler/providers/vercel/services/domain/domain_dns_properly_configured/domain_dns_properly_configured.metadata.json b/prowler/providers/vercel/services/domain/domain_dns_properly_configured/domain_dns_properly_configured.metadata.json index e2450da8f1..28f151b2e6 100644 --- a/prowler/providers/vercel/services/domain/domain_dns_properly_configured/domain_dns_properly_configured.metadata.json +++ 
b/prowler/providers/vercel/services/domain/domain_dns_properly_configured/domain_dns_properly_configured.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ diff --git a/prowler/providers/vercel/services/domain/domain_ssl_certificate_valid/domain_ssl_certificate_valid.metadata.json b/prowler/providers/vercel/services/domain/domain_ssl_certificate_valid/domain_ssl_certificate_valid.metadata.json index ac683cd71f..ae8d2750a8 100644 --- a/prowler/providers/vercel/services/domain/domain_ssl_certificate_valid/domain_ssl_certificate_valid.metadata.json +++ b/prowler/providers/vercel/services/domain/domain_ssl_certificate_valid/domain_ssl_certificate_valid.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "encryption" + "encryption", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ diff --git a/prowler/providers/vercel/services/domain/domain_verified/domain_verified.metadata.json b/prowler/providers/vercel/services/domain/domain_verified/domain_verified.metadata.json index 1e79a433ba..f5f1aace08 100644 --- a/prowler/providers/vercel/services/domain/domain_verified/domain_verified.metadata.json +++ b/prowler/providers/vercel/services/domain/domain_verified/domain_verified.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ diff --git a/prowler/providers/vercel/services/project/project_auto_expose_system_env_disabled/project_auto_expose_system_env_disabled.metadata.json b/prowler/providers/vercel/services/project/project_auto_expose_system_env_disabled/project_auto_expose_system_env_disabled.metadata.json index 21c3f118e1..36e128caae 100644 --- a/prowler/providers/vercel/services/project/project_auto_expose_system_env_disabled/project_auto_expose_system_env_disabled.metadata.json +++ 
b/prowler/providers/vercel/services/project/project_auto_expose_system_env_disabled/project_auto_expose_system_env_disabled.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [], diff --git a/prowler/providers/vercel/services/project/project_deployment_protection_enabled/project_deployment_protection_enabled.metadata.json b/prowler/providers/vercel/services/project/project_deployment_protection_enabled/project_deployment_protection_enabled.metadata.json index 521610c617..55b5fc917c 100644 --- a/prowler/providers/vercel/services/project/project_deployment_protection_enabled/project_deployment_protection_enabled.metadata.json +++ b/prowler/providers/vercel/services/project/project_deployment_protection_enabled/project_deployment_protection_enabled.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ diff --git a/prowler/providers/vercel/services/project/project_directory_listing_disabled/project_directory_listing_disabled.metadata.json b/prowler/providers/vercel/services/project/project_directory_listing_disabled/project_directory_listing_disabled.metadata.json index b477a5984f..a2558ed667 100644 --- a/prowler/providers/vercel/services/project/project_directory_listing_disabled/project_directory_listing_disabled.metadata.json +++ b/prowler/providers/vercel/services/project/project_directory_listing_disabled/project_directory_listing_disabled.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [], diff --git a/prowler/providers/vercel/services/project/project_environment_no_overly_broad_target/project_environment_no_overly_broad_target.metadata.json 
b/prowler/providers/vercel/services/project/project_environment_no_overly_broad_target/project_environment_no_overly_broad_target.metadata.json index c9a418a503..5dc15b12aa 100644 --- a/prowler/providers/vercel/services/project/project_environment_no_overly_broad_target/project_environment_no_overly_broad_target.metadata.json +++ b/prowler/providers/vercel/services/project/project_environment_no_overly_broad_target/project_environment_no_overly_broad_target.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "secrets" + "secrets", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ diff --git a/prowler/providers/vercel/services/project/project_environment_no_secrets_in_plain_type/project_environment_no_secrets_in_plain_type.metadata.json b/prowler/providers/vercel/services/project/project_environment_no_secrets_in_plain_type/project_environment_no_secrets_in_plain_type.metadata.json index 90fea53eea..0e3e654f93 100644 --- a/prowler/providers/vercel/services/project/project_environment_no_secrets_in_plain_type/project_environment_no_secrets_in_plain_type.metadata.json +++ b/prowler/providers/vercel/services/project/project_environment_no_secrets_in_plain_type/project_environment_no_secrets_in_plain_type.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "secrets" + "secrets", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ diff --git a/prowler/providers/vercel/services/project/project_environment_production_vars_not_in_preview/project_environment_production_vars_not_in_preview.metadata.json b/prowler/providers/vercel/services/project/project_environment_production_vars_not_in_preview/project_environment_production_vars_not_in_preview.metadata.json index 6fc3e3af79..5b99fe00d6 100644 --- a/prowler/providers/vercel/services/project/project_environment_production_vars_not_in_preview/project_environment_production_vars_not_in_preview.metadata.json +++ 
b/prowler/providers/vercel/services/project/project_environment_production_vars_not_in_preview/project_environment_production_vars_not_in_preview.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "secrets" + "secrets", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ diff --git a/prowler/providers/vercel/services/project/project_git_fork_protection_enabled/project_git_fork_protection_enabled.metadata.json b/prowler/providers/vercel/services/project/project_git_fork_protection_enabled/project_git_fork_protection_enabled.metadata.json index 8e3db04fcd..37bbc7b8c6 100644 --- a/prowler/providers/vercel/services/project/project_git_fork_protection_enabled/project_git_fork_protection_enabled.metadata.json +++ b/prowler/providers/vercel/services/project/project_git_fork_protection_enabled/project_git_fork_protection_enabled.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [], diff --git a/prowler/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled.metadata.json b/prowler/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled.metadata.json index 2db77410d9..edd12c67f0 100644 --- a/prowler/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled.metadata.json +++ b/prowler/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled.metadata.json @@ -28,11 +28,12 @@ } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-pro-plan" ], "DependsOn": [], "RelatedTo": [ "project_deployment_protection_enabled" ], - "Notes": "" + "Notes": "Required billing plan: Enterprise, or as a paid add-on for Pro plans." 
} diff --git a/prowler/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled.py b/prowler/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled.py index 86c0778408..e5313c6182 100644 --- a/prowler/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled.py +++ b/prowler/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.project.project_client import project_client @@ -38,6 +39,7 @@ class project_password_protection_enabled(Check): report.status_extended = ( f"Project {project.name} does not have password protection " f"configured for deployments." + f"{plan_reason_suffix(project.billing_plan, {'hobby'}, 'password protection is not available on the Vercel Hobby plan.')}" ) findings.append(report) diff --git a/prowler/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled.metadata.json b/prowler/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled.metadata.json index 3760eefb57..20c1fac713 100644 --- a/prowler/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled.metadata.json +++ b/prowler/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled.metadata.json @@ -28,11 +28,12 @@ } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-pro-plan" ], "DependsOn": [], "RelatedTo": [ "project_deployment_protection_enabled" ], - "Notes": 
"" + "Notes": "Protecting production deployments requires Enterprise, or Pro plans with supported paid deployment protection options." } diff --git a/prowler/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled.py b/prowler/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled.py index ccbfd15325..bb0924266e 100644 --- a/prowler/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled.py +++ b/prowler/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.project.project_client import project_client @@ -38,6 +39,7 @@ class project_production_deployment_protection_enabled(Check): report.status_extended = ( f"Project {project.name} does not have deployment protection " f"enabled on production deployments." 
+ f"{plan_reason_suffix(project.billing_plan, {'hobby'}, 'protecting production deployments is not available on the Vercel Hobby plan.')}" ) findings.append(report) diff --git a/prowler/providers/vercel/services/project/project_service.py b/prowler/providers/vercel/services/project/project_service.py index e139a2a939..6cc3a36418 100644 --- a/prowler/providers/vercel/services/project/project_service.py +++ b/prowler/providers/vercel/services/project/project_service.py @@ -20,6 +20,7 @@ class Project(VercelService): """List all projects, optionally filtered by --project argument.""" try: raw_projects = self._paginate("/v9/projects", "projects") + identity = getattr(self.provider, "identity", None) filter_projects = self.provider.filter_projects seen_ids: set[str] = set() @@ -57,10 +58,17 @@ class Project(VercelService): pwd_protection = proj.get("passwordProtection") security = proj.get("security", {}) or {} + project_team_id = proj.get("accountId") or self.provider.session.team_id + self.projects[project_id] = VercelProject( id=project_id, name=project_name, - team_id=proj.get("accountId") or self.provider.session.team_id, + team_id=project_team_id, + billing_plan=( + identity.get_billing_plan_for(project_team_id) + if identity + else None + ), framework=proj.get("framework"), node_version=proj.get("nodeVersion"), auto_expose_system_envs=proj.get("autoExposeSystemEnvs", False), @@ -160,6 +168,7 @@ class VercelProject(BaseModel): id: str name: str team_id: Optional[str] = None + billing_plan: Optional[str] = None framework: Optional[str] = None node_version: Optional[str] = None auto_expose_system_envs: bool = False diff --git a/prowler/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled.metadata.json b/prowler/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled.metadata.json index a0a4f70cee..01e38c03c4 100644 --- 
a/prowler/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled.metadata.json +++ b/prowler/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled.metadata.json @@ -28,9 +28,10 @@ } }, "Categories": [ - "resilience" + "resilience", + "vercel-pro-plan" ], "DependsOn": [], "RelatedTo": [], - "Notes": "" + "Notes": "Required billing plan: Pro or Enterprise." } diff --git a/prowler/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled.py b/prowler/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled.py index f4e878ba5c..3f7a2d0ddb 100644 --- a/prowler/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled.py +++ b/prowler/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.project.project_client import project_client @@ -34,6 +35,7 @@ class project_skew_protection_enabled(Check): report.status_extended = ( f"Project {project.name} does not have skew protection enabled, " f"which may cause version mismatches during deployments." 
+ f"{plan_reason_suffix(project.billing_plan, {'hobby'}, 'skew protection is not available on the Vercel Hobby plan.')}" ) findings.append(report) diff --git a/prowler/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured.metadata.json b/prowler/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured.metadata.json index a4b53baf4f..615f40843a 100644 --- a/prowler/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured.metadata.json +++ b/prowler/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured.metadata.json @@ -28,11 +28,12 @@ } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-pro-plan" ], "DependsOn": [], "RelatedTo": [ "security_waf_enabled" ], - "Notes": "" + "Notes": "Required billing plan: Pro or Enterprise." } diff --git a/prowler/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured.py b/prowler/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured.py index 6ec7f8834f..a525c93f0a 100644 --- a/prowler/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured.py +++ b/prowler/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.security.security_client import security_client @@ -24,7 +25,16 @@ class security_custom_rules_configured(Check): for config in security_client.firewall_configs.values(): report = CheckReportVercel(metadata=self.metadata(), resource=config) - if config.custom_rules: + if not config.firewall_config_accessible: + report.status = "MANUAL" + 
report.status_extended = ( + f"Project {config.project_name} ({config.project_id}) " + f"could not be assessed for custom firewall rules because the " + f"firewall configuration endpoint was not accessible. " + f"Manual verification is required." + f"{plan_reason_suffix(config.billing_plan, {'hobby'}, 'custom firewall rules are not available on the Vercel Hobby plan.')}" + ) + elif config.custom_rules: report.status = "PASS" report.status_extended = ( f"Project {config.project_name} ({config.project_id}) " diff --git a/prowler/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured.metadata.json b/prowler/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured.metadata.json index a02cd35324..88c609e742 100644 --- a/prowler/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured.metadata.json +++ b/prowler/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured.metadata.json @@ -28,11 +28,12 @@ } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-pro-plan" ], "DependsOn": [], "RelatedTo": [ "security_waf_enabled" ], - "Notes": "" + "Notes": "Required billing plan: Pro or Enterprise." 
} diff --git a/prowler/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured.py b/prowler/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured.py index 0b89c8d111..443c052354 100644 --- a/prowler/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured.py +++ b/prowler/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.security.security_client import security_client @@ -25,7 +26,16 @@ class security_ip_blocking_rules_configured(Check): for config in security_client.firewall_configs.values(): report = CheckReportVercel(metadata=self.metadata(), resource=config) - if config.ip_blocking_rules: + if not config.firewall_config_accessible: + report.status = "MANUAL" + report.status_extended = ( + f"Project {config.project_name} ({config.project_id}) " + f"could not be assessed for IP blocking rules because the " + f"firewall configuration endpoint was not accessible. " + f"Manual verification is required." 
+ f"{plan_reason_suffix(config.billing_plan, {'hobby'}, 'IP blocking rules are not available on the Vercel Hobby plan.')}" + ) + elif config.ip_blocking_rules: report.status = "PASS" report.status_extended = ( f"Project {config.project_name} ({config.project_id}) " diff --git a/prowler/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled.metadata.json b/prowler/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled.metadata.json index ee66a3be21..cbd956cd52 100644 --- a/prowler/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled.metadata.json +++ b/prowler/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled.metadata.json @@ -9,7 +9,7 @@ "Severity": "high", "ResourceType": "NotDefined", "ResourceGroup": "security", - "Description": "**Vercel projects** are assessed for **managed WAF ruleset** enablement. Managed rulesets are curated by Vercel and provide protection against known attack patterns including **OWASP Top 10** threats. This feature requires an Enterprise plan and reports MANUAL status when unavailable.", + "Description": "**Vercel projects** are assessed for **managed WAF ruleset** enablement. Managed rulesets are curated by Vercel and provide protection against known attack patterns including **OWASP Top 10** threats. Availability varies by ruleset, and the check reports MANUAL when the firewall configuration cannot be assessed from the API.", "Risk": "Without **managed rulesets** enabled, the firewall lacks curated protection rules against well-known attack patterns. The application relies solely on custom rules, which may miss **new or evolving threats** that managed rulesets are designed to detect and block automatically.", "RelatedUrl": "", "AdditionalURLs": [ @@ -19,20 +19,21 @@ "Code": { "CLI": "", "NativeIaC": "", - "Other": "1. 
Sign in to the Vercel dashboard\n2. Navigate to the project Settings > Security > Firewall\n3. Enable managed rulesets from the available options\n4. Review and configure ruleset sensitivity levels\n5. Note: This feature requires an Enterprise plan", + "Other": "1. Sign in to the Vercel dashboard\n2. Navigate to the project Settings > Security > Firewall\n3. Enable the managed rulesets that are available for your plan\n4. Review and configure ruleset sensitivity levels\n5. If the API does not expose firewall configuration for the project, verify the rulesets manually in the dashboard", "Terraform": "" }, "Recommendation": { - "Text": "Enable managed WAF rulesets to benefit from Vercel-curated protection against common attack patterns. If you are on a plan that does not support managed rulesets, consider upgrading to the Enterprise plan for enhanced security features.", + "Text": "Enable the managed WAF rulesets that are available for your Vercel plan to benefit from curated protection against common attack patterns. If the API does not expose firewall configuration for the project, verify the rulesets manually in the dashboard.", "Url": "https://hub.prowler.com/check/security_managed_rulesets_enabled" } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [ "security_waf_enabled" ], - "Notes": "This check is plan-gated. If the Vercel API returns a 403 for managed rulesets, the check reports MANUAL status indicating that an Enterprise plan is required." + "Notes": "Managed ruleset availability varies by ruleset. OWASP Core Ruleset requires Enterprise, while Bot Protection and AI Bots managed rulesets are available on all plans." 
} diff --git a/prowler/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled.py b/prowler/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled.py index ead4623f4c..f7f476ccad 100644 --- a/prowler/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled.py +++ b/prowler/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.security.security_client import security_client @@ -17,8 +18,8 @@ class security_managed_rulesets_enabled(Check): """Execute the Vercel Managed Rulesets Enabled check. Iterates over all firewall configurations and checks if managed - rulesets are enabled. Reports MANUAL status when the feature is - not available due to plan limitations. + rulesets are enabled. Reports MANUAL status when the firewall + configuration cannot be assessed from the API. Returns: List[CheckReportVercel]: A list of reports for each project. @@ -27,12 +28,14 @@ class security_managed_rulesets_enabled(Check): for config in security_client.firewall_configs.values(): report = CheckReportVercel(metadata=self.metadata(), resource=config) - if config.managed_rulesets is None: + if not config.firewall_config_accessible: report.status = "MANUAL" report.status_extended = ( f"Project {config.project_name} ({config.project_id}) " - f"could not be assessed for managed rulesets. " - f"Enterprise plan required to access this feature." + f"could not be assessed for managed rulesets because the " + f"firewall configuration endpoint was not accessible. " + f"Manual verification is required." 
+ f"{plan_reason_suffix(config.billing_plan, {'hobby', 'pro'}, 'some managed WAF rulesets, including the OWASP Core Ruleset, are only available on Vercel Enterprise plans.')}" ) elif config.managed_rulesets: report.status = "PASS" diff --git a/prowler/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured.metadata.json b/prowler/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured.metadata.json index 8a804233a5..637502ab6f 100644 --- a/prowler/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured.metadata.json +++ b/prowler/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured.metadata.json @@ -28,11 +28,12 @@ } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-pro-plan" ], "DependsOn": [], "RelatedTo": [ "security_waf_enabled" ], - "Notes": "" + "Notes": "Required billing plan: Pro or Enterprise." 
} diff --git a/prowler/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured.py b/prowler/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured.py index 3e6419666c..6e37fd2779 100644 --- a/prowler/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured.py +++ b/prowler/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.security.security_client import security_client @@ -24,7 +25,16 @@ class security_rate_limiting_configured(Check): for config in security_client.firewall_configs.values(): report = CheckReportVercel(metadata=self.metadata(), resource=config) - if config.rate_limiting_rules: + if not config.firewall_config_accessible: + report.status = "MANUAL" + report.status_extended = ( + f"Project {config.project_name} ({config.project_id}) " + f"could not be assessed for rate limiting rules because the " + f"firewall configuration endpoint was not accessible. " + f"Manual verification is required." 
+ f"{plan_reason_suffix(config.billing_plan, {'hobby'}, 'rate limiting rules are not available on the Vercel Hobby plan.')}" + ) + elif config.rate_limiting_rules: report.status = "PASS" report.status_extended = ( f"Project {config.project_name} ({config.project_id}) " diff --git a/prowler/providers/vercel/services/security/security_service.py b/prowler/providers/vercel/services/security/security_service.py index 3c79f5c2f5..b472dcc811 100644 --- a/prowler/providers/vercel/services/security/security_service.py +++ b/prowler/providers/vercel/services/security/security_service.py @@ -29,11 +29,13 @@ class Security(VercelService): data = self._read_firewall_config(project) if data is None: - # 403 — plan limitation, store with managed_rulesets=None + # Firewall config endpoint unavailable for this project/token self.firewall_configs[project.id] = VercelFirewallConfig( project_id=project.id, project_name=project.name, team_id=project.team_id, + billing_plan=project.billing_plan, + firewall_config_accessible=False, firewall_enabled=False, managed_rulesets=None, name=project.name, @@ -49,6 +51,8 @@ class Security(VercelService): project_id=project.id, project_name=project.name, team_id=project.team_id, + billing_plan=project.billing_plan, + firewall_config_accessible=True, firewall_enabled=( fallback_firewall_enabled if fallback_firewall_enabled is not None @@ -93,6 +97,8 @@ class Security(VercelService): project_id=project.id, project_name=project.name, team_id=project.team_id, + billing_plan=project.billing_plan, + firewall_config_accessible=True, firewall_enabled=firewall_enabled, managed_rulesets=managed, custom_rules=custom_rules, @@ -246,8 +252,10 @@ class VercelFirewallConfig(BaseModel): project_id: str project_name: Optional[str] = None team_id: Optional[str] = None + billing_plan: Optional[str] = None + firewall_config_accessible: bool = True firewall_enabled: bool = False - managed_rulesets: Optional[dict] = None # None means plan-gated (403) + 
managed_rulesets: Optional[dict] = None # None means config endpoint unavailable custom_rules: list[dict] = Field(default_factory=list) ip_blocking_rules: list[dict] = Field(default_factory=list) rate_limiting_rules: list[dict] = Field(default_factory=list) diff --git a/prowler/providers/vercel/services/security/security_waf_enabled/security_waf_enabled.metadata.json b/prowler/providers/vercel/services/security/security_waf_enabled/security_waf_enabled.metadata.json index 7598e7cccd..467edbc66c 100644 --- a/prowler/providers/vercel/services/security/security_waf_enabled/security_waf_enabled.metadata.json +++ b/prowler/providers/vercel/services/security/security_waf_enabled/security_waf_enabled.metadata.json @@ -28,12 +28,13 @@ } }, "Categories": [ - "internet-exposed" + "internet-exposed", + "vercel-pro-plan" ], "DependsOn": [], "RelatedTo": [ "security_managed_rulesets_enabled", "security_custom_rules_configured" ], - "Notes": "" + "Notes": "Required billing plan: Pro or Enterprise." } diff --git a/prowler/providers/vercel/services/security/security_waf_enabled/security_waf_enabled.py b/prowler/providers/vercel/services/security/security_waf_enabled/security_waf_enabled.py index 82859a4f5c..9ab93b87ca 100644 --- a/prowler/providers/vercel/services/security/security_waf_enabled/security_waf_enabled.py +++ b/prowler/providers/vercel/services/security/security_waf_enabled/security_waf_enabled.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.security.security_client import security_client @@ -24,13 +25,15 @@ class security_waf_enabled(Check): for config in security_client.firewall_configs.values(): report = CheckReportVercel(metadata=self.metadata(), resource=config) - if config.managed_rulesets is None: - # 403 — plan limitation, cannot determine WAF status + if not config.firewall_config_accessible: + # 
Firewall config could not be retrieved for this project report.status = "MANUAL" report.status_extended = ( f"Project {config.project_name} ({config.project_id}) " - f"could not be checked for WAF status due to plan limitations. " + f"could not be checked for WAF status because the firewall " + f"configuration endpoint was not accessible. " f"Manual verification is required." + f"{plan_reason_suffix(config.billing_plan, {'hobby'}, 'the Web Application Firewall is not available on the Vercel Hobby plan.')}" ) elif config.firewall_enabled: report.status = "PASS" diff --git a/prowler/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled.metadata.json b/prowler/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled.metadata.json index fda5c7b94d..ed0ce1470d 100644 --- a/prowler/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled.metadata.json +++ b/prowler/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled.metadata.json @@ -29,11 +29,12 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-enterprise-plan" ], "DependsOn": [], "RelatedTo": [ "team_saml_sso_enabled" ], - "Notes": "" + "Notes": "Required billing plan: Enterprise." 
} diff --git a/prowler/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled.py b/prowler/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled.py index 101922b6ed..d185de4971 100644 --- a/prowler/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled.py +++ b/prowler/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.team.team_client import team_client @@ -40,6 +41,7 @@ class team_directory_sync_enabled(Check): report.status_extended = ( f"Team {team.name} does not have directory sync (SCIM) enabled. " f"User provisioning and deprovisioning must be managed manually." + f"{plan_reason_suffix(team.billing_plan, {'hobby', 'pro'}, 'directory sync (SCIM) is only available on Vercel Enterprise plans.')}" ) findings.append(report) diff --git a/prowler/providers/vercel/services/team/team_member_role_least_privilege/team_member_role_least_privilege.metadata.json b/prowler/providers/vercel/services/team/team_member_role_least_privilege/team_member_role_least_privilege.metadata.json index 37abf769ef..d648769c1f 100644 --- a/prowler/providers/vercel/services/team/team_member_role_least_privilege/team_member_role_least_privilege.metadata.json +++ b/prowler/providers/vercel/services/team/team_member_role_least_privilege/team_member_role_least_privilege.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [], diff --git a/prowler/providers/vercel/services/team/team_no_stale_invitations/team_no_stale_invitations.metadata.json b/prowler/providers/vercel/services/team/team_no_stale_invitations/team_no_stale_invitations.metadata.json index 
16a1d942e1..606f9d863b 100644 --- a/prowler/providers/vercel/services/team/team_no_stale_invitations/team_no_stale_invitations.metadata.json +++ b/prowler/providers/vercel/services/team/team_no_stale_invitations/team_no_stale_invitations.metadata.json @@ -28,7 +28,8 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-hobby-plan" ], "DependsOn": [], "RelatedTo": [], diff --git a/prowler/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled.metadata.json b/prowler/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled.metadata.json index 21785bf482..fe66ed48bc 100644 --- a/prowler/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled.metadata.json +++ b/prowler/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled.metadata.json @@ -29,11 +29,12 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-pro-plan" ], "DependsOn": [], "RelatedTo": [ "team_saml_sso_enforced" ], - "Notes": "" + "Notes": "Required billing plan: Pro or Enterprise." } diff --git a/prowler/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled.py b/prowler/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled.py index 8a979ec66d..38960efa8c 100644 --- a/prowler/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled.py +++ b/prowler/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.team.team_client import team_client @@ -38,6 +39,7 @@ class team_saml_sso_enabled(Check): report.status = "FAIL" report.status_extended = ( f"Team {team.name} does not have SAML SSO enabled." 
+ f"{plan_reason_suffix(team.billing_plan, {'hobby'}, 'SAML SSO is not available on the Vercel Hobby plan.')}" ) findings.append(report) diff --git a/prowler/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced.metadata.json b/prowler/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced.metadata.json index feb43bc179..3e69d9da49 100644 --- a/prowler/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced.metadata.json +++ b/prowler/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced.metadata.json @@ -29,11 +29,12 @@ } }, "Categories": [ - "trust-boundaries" + "trust-boundaries", + "vercel-pro-plan" ], "DependsOn": [], "RelatedTo": [ "team_saml_sso_enabled" ], - "Notes": "" + "Notes": "Required billing plan: Pro or Enterprise." } diff --git a/prowler/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced.py b/prowler/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced.py index 746ebba387..564f0b5d46 100644 --- a/prowler/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced.py +++ b/prowler/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced.py @@ -1,6 +1,7 @@ from typing import List from prowler.lib.check.models import Check, CheckReportVercel +from prowler.providers.vercel.lib.billing import plan_reason_suffix from prowler.providers.vercel.services.team.team_client import team_client @@ -43,6 +44,7 @@ class team_saml_sso_enforced(Check): else: report.status_extended = ( f"Team {team.name} does not have SAML SSO enforced." 
+ f"{plan_reason_suffix(team.billing_plan, {'hobby'}, 'SAML SSO is not available on the Vercel Hobby plan.')}" ) findings.append(report) diff --git a/prowler/providers/vercel/services/team/team_service.py b/prowler/providers/vercel/services/team/team_service.py index 916374ea87..7d8b119def 100644 --- a/prowler/providers/vercel/services/team/team_service.py +++ b/prowler/providers/vercel/services/team/team_service.py @@ -4,6 +4,7 @@ from typing import Optional from pydantic import BaseModel, Field from prowler.lib.logger import logger +from prowler.providers.vercel.lib.billing import extract_billing_plan from prowler.providers.vercel.lib.service.service import VercelService @@ -67,6 +68,7 @@ class Team(VercelService): id=team_data.get("id", team_id), name=team_data.get("name", ""), slug=team_data.get("slug", ""), + billing_plan=extract_billing_plan(team_data), saml=saml_config, directory_sync_enabled=dir_sync, created_at=created_at, @@ -151,6 +153,7 @@ class VercelTeam(BaseModel): id: str name: str slug: str + billing_plan: Optional[str] = None saml: Optional[SAMLConfig] = None directory_sync_enabled: bool = False members: list[VercelTeamMember] = Field(default_factory=list) diff --git a/prowler/providers/vercel/vercel_provider.py b/prowler/providers/vercel/vercel_provider.py index 731adef6ed..54ab4627a9 100644 --- a/prowler/providers/vercel/vercel_provider.py +++ b/prowler/providers/vercel/vercel_provider.py @@ -20,6 +20,7 @@ from prowler.providers.vercel.exceptions.exceptions import ( VercelRateLimitError, VercelSessionError, ) +from prowler.providers.vercel.lib.billing import extract_billing_plan from prowler.providers.vercel.lib.mutelist.mutelist import VercelMutelist from prowler.providers.vercel.models import ( VercelIdentityInfo, @@ -195,6 +196,7 @@ class VercelProvider(Provider): user_id = user_data.get("id") username = user_data.get("username") email = user_data.get("email") + billing_plan = extract_billing_plan(user_data) # Get team info team_info = None 
@@ -214,6 +216,7 @@ class VercelProvider(Provider): id=team_data.get("id", session.team_id), name=team_data.get("name", ""), slug=team_data.get("slug", ""), + billing_plan=extract_billing_plan(team_data), ) all_teams = [team_info] elif team_response.status_code in (404, 403): @@ -239,6 +242,7 @@ class VercelProvider(Provider): id=t.get("id", ""), name=t.get("name", ""), slug=t.get("slug", ""), + billing_plan=extract_billing_plan(t), ) ) if all_teams: @@ -253,6 +257,7 @@ class VercelProvider(Provider): user_id=user_id, username=username, email=email, + billing_plan=billing_plan, team=team_info, teams=all_teams, ) diff --git a/tests/lib/check/models_test.py b/tests/lib/check/models_test.py index 815479cdfa..71fd1f718b 100644 --- a/tests/lib/check/models_test.py +++ b/tests/lib/check/models_test.py @@ -377,6 +377,50 @@ class TestCheckMetadataValidators: check_metadata = CheckMetadata(**valid_metadata) assert check_metadata.Categories == ["encryption", "logging", "secrets"] + def test_valid_vercel_plan_categories_success(self): + """Test Vercel plan categories are accepted using hyphen-separated names.""" + valid_metadata = { + "Provider": "vercel", + "CheckID": "test_check", + "CheckTitle": "Test Check", + "CheckType": [], + "ServiceName": "test", + "SubServiceName": "subtest", + "ResourceIdTemplate": "template", + "Severity": "high", + "ResourceType": "TestResource", + "Description": "Test description", + "Risk": "Test risk", + "RelatedUrl": "", + "Remediation": { + "Code": { + "CLI": "test command", + "NativeIaC": "test native", + "Other": "test other", + "Terraform": "test terraform", + }, + "Recommendation": { + "Text": "test recommendation", + "Url": "https://hub.prowler.com/check/test_check", + }, + }, + "Categories": [ + "vercel-hobby-plan", + "vercel-pro-plan", + "vercel-enterprise-plan", + ], + "DependsOn": [], + "RelatedTo": [], + "Notes": "Test notes", + } + + check_metadata = CheckMetadata(**valid_metadata) + assert check_metadata.Categories == [ + 
"vercel-hobby-plan", + "vercel-pro-plan", + "vercel-enterprise-plan", + ] + def test_valid_category_failure_non_string(self): """Test valid category validation fails with non-string category""" invalid_metadata = { @@ -454,7 +498,7 @@ class TestCheckMetadataValidators: with pytest.raises(ValidationError) as exc_info: CheckMetadata(**invalid_metadata) assert ( - "Categories can only contain lowercase letters, numbers and hyphen" + "Categories can only contain lowercase letters, numbers, and hyphen '-'" in str(exc_info.value) ) diff --git a/tests/providers/vercel/lib/service/vercel_service_test.py b/tests/providers/vercel/lib/service/vercel_service_test.py new file mode 100644 index 0000000000..e267b84841 --- /dev/null +++ b/tests/providers/vercel/lib/service/vercel_service_test.py @@ -0,0 +1,29 @@ +from unittest import mock + +from prowler.providers.vercel.lib.service.service import VercelService + + +class TestVercelService: + def test_get_returns_none_and_logs_info_on_expected_403(self): + service = VercelService.__new__(VercelService) + service.audit_config = {"max_retries": 0} + service.service = "security" + service._team_id = None + service._base_url = "https://api.vercel.com" + + response = mock.MagicMock() + response.status_code = 403 + + service._http_session = mock.MagicMock() + service._http_session.get.return_value = response + + with mock.patch( + "prowler.providers.vercel.lib.service.service.logger" + ) as logger_mock: + result = service._get("/v1/security/firewall/config/active") + + assert result is None + logger_mock.info.assert_called_once_with( + "security - Access denied for /v1/security/firewall/config/active (403). " + "This may be caused by plan or permission restrictions." 
+ ) diff --git a/tests/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled_test.py b/tests/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled_test.py index 9de3eb874a..cd19a682c9 100644 --- a/tests/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled_test.py +++ b/tests/providers/vercel/services/project/project_password_protection_enabled/project_password_protection_enabled_test.py @@ -142,3 +142,41 @@ class Test_project_password_protection_enabled: == f"Project {PROJECT_NAME} does not have password protection configured for deployments." ) assert result[0].team_id == TEAM_ID + + def test_no_password_protection_hobby_plan(self): + project_client = mock.MagicMock + project_client.projects = { + PROJECT_ID: VercelProject( + id=PROJECT_ID, + name=PROJECT_NAME, + team_id=TEAM_ID, + billing_plan="hobby", + password_protection=None, + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(billing_plan="hobby"), + ), + mock.patch( + "prowler.providers.vercel.services.project.project_password_protection_enabled.project_password_protection_enabled.project_client", + new=project_client, + ), + ): + from prowler.providers.vercel.services.project.project_password_protection_enabled.project_password_protection_enabled import ( + project_password_protection_enabled, + ) + + check = project_password_protection_enabled() + result = check.execute() + assert len(result) == 1 + assert result[0].resource_id == PROJECT_ID + assert result[0].resource_name == PROJECT_NAME + assert result[0].status == "FAIL" + assert ( + result[0].status_extended + == f"Project {PROJECT_NAME} does not have password protection configured for deployments. This may be expected because password protection is not available on the Vercel Hobby plan." 
+ ) + assert result[0].team_id == TEAM_ID diff --git a/tests/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled_test.py b/tests/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled_test.py index ff2517fbd3..eb8e15ddff 100644 --- a/tests/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled_test.py +++ b/tests/providers/vercel/services/project/project_production_deployment_protection_enabled/project_production_deployment_protection_enabled_test.py @@ -149,3 +149,41 @@ class Test_project_production_deployment_protection_enabled: == f"Project {PROJECT_NAME} does not have deployment protection enabled on production deployments." ) assert result[0].team_id == TEAM_ID + + def test_protection_null_hobby_plan(self): + project_client = mock.MagicMock + project_client.projects = { + PROJECT_ID: VercelProject( + id=PROJECT_ID, + name=PROJECT_NAME, + team_id=TEAM_ID, + billing_plan="hobby", + production_deployment_protection=None, + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(billing_plan="hobby"), + ), + mock.patch( + "prowler.providers.vercel.services.project.project_production_deployment_protection_enabled.project_production_deployment_protection_enabled.project_client", + new=project_client, + ), + ): + from prowler.providers.vercel.services.project.project_production_deployment_protection_enabled.project_production_deployment_protection_enabled import ( + project_production_deployment_protection_enabled, + ) + + check = project_production_deployment_protection_enabled() + result = check.execute() + assert len(result) == 1 + assert result[0].resource_id == PROJECT_ID + assert result[0].resource_name == PROJECT_NAME + assert result[0].status == 
"FAIL" + assert ( + result[0].status_extended + == f"Project {PROJECT_NAME} does not have deployment protection enabled on production deployments. This may be expected because protecting production deployments is not available on the Vercel Hobby plan." + ) + assert result[0].team_id == TEAM_ID diff --git a/tests/providers/vercel/services/project/project_service_test.py b/tests/providers/vercel/services/project/project_service_test.py index b82588ddd9..cad889905e 100644 --- a/tests/providers/vercel/services/project/project_service_test.py +++ b/tests/providers/vercel/services/project/project_service_test.py @@ -5,6 +5,7 @@ from tests.providers.vercel.vercel_fixtures import ( PROJECT_ID, PROJECT_NAME, TEAM_ID, + USER_ID, set_mocked_vercel_provider, ) @@ -43,3 +44,69 @@ class TestProjectService: "ai_bots": {"active": False, "action": "deny"}, } assert project.bot_id_enabled is True + + def test_list_projects_uses_scoped_team_billing_plan(self): + service = Project.__new__(Project) + service.provider = set_mocked_vercel_provider( + billing_plan="enterprise", + team_billing_plan="hobby", + ) + service.projects = {} + service._paginate = mock.MagicMock( + return_value=[ + { + "id": PROJECT_ID, + "name": PROJECT_NAME, + "accountId": TEAM_ID, + } + ] + ) + + service._list_projects() + + project = service.projects[PROJECT_ID] + assert project.billing_plan == "hobby" + + def test_list_projects_uses_user_billing_plan_for_user_scoped_project(self): + service = Project.__new__(Project) + service.provider = set_mocked_vercel_provider( + billing_plan="enterprise", + team_billing_plan="hobby", + ) + service.projects = {} + service._paginate = mock.MagicMock( + return_value=[ + { + "id": PROJECT_ID, + "name": PROJECT_NAME, + "accountId": USER_ID, + } + ] + ) + + service._list_projects() + + project = service.projects[PROJECT_ID] + assert project.billing_plan == "enterprise" + + def test_list_projects_does_not_guess_billing_plan_without_scope(self): + service = 
Project.__new__(Project) + service.provider = set_mocked_vercel_provider( + billing_plan="enterprise", + team_billing_plan="hobby", + ) + service.provider.session.team_id = None + service.projects = {} + service._paginate = mock.MagicMock( + return_value=[ + { + "id": PROJECT_ID, + "name": PROJECT_NAME, + } + ] + ) + + service._list_projects() + + project = service.projects[PROJECT_ID] + assert project.billing_plan is None diff --git a/tests/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled_test.py b/tests/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled_test.py index 19d1ce8885..38c02040d1 100644 --- a/tests/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled_test.py +++ b/tests/providers/vercel/services/project/project_skew_protection_enabled/project_skew_protection_enabled_test.py @@ -105,3 +105,41 @@ class Test_project_skew_protection_enabled: == f"Project {PROJECT_NAME} does not have skew protection enabled, which may cause version mismatches during deployments." 
) assert result[0].team_id == TEAM_ID + + def test_skew_protection_disabled_hobby_plan(self): + project_client = mock.MagicMock + project_client.projects = { + PROJECT_ID: VercelProject( + id=PROJECT_ID, + name=PROJECT_NAME, + team_id=TEAM_ID, + billing_plan="hobby", + skew_protection=False, + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(billing_plan="hobby"), + ), + mock.patch( + "prowler.providers.vercel.services.project.project_skew_protection_enabled.project_skew_protection_enabled.project_client", + new=project_client, + ), + ): + from prowler.providers.vercel.services.project.project_skew_protection_enabled.project_skew_protection_enabled import ( + project_skew_protection_enabled, + ) + + check = project_skew_protection_enabled() + result = check.execute() + assert len(result) == 1 + assert result[0].resource_id == PROJECT_ID + assert result[0].resource_name == PROJECT_NAME + assert result[0].status == "FAIL" + assert ( + result[0].status_extended + == f"Project {PROJECT_NAME} does not have skew protection enabled, which may cause version mismatches during deployments. This may be expected because skew protection is not available on the Vercel Hobby plan." 
+ ) + assert result[0].team_id == TEAM_ID diff --git a/tests/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured_test.py b/tests/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured_test.py index 72fc44f61e..3eb92c4a8b 100644 --- a/tests/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured_test.py +++ b/tests/providers/vercel/services/security/security_custom_rules_configured/security_custom_rules_configured_test.py @@ -111,3 +111,41 @@ class Test_security_custom_rules_configured: == f"Project {PROJECT_NAME} ({PROJECT_ID}) does not have any custom firewall rules configured." ) assert result[0].team_id == TEAM_ID + + def test_custom_rules_status_unavailable_hobby_plan(self): + security_client = mock.MagicMock + security_client.firewall_configs = { + PROJECT_ID: VercelFirewallConfig( + project_id=PROJECT_ID, + project_name=PROJECT_NAME, + team_id=TEAM_ID, + billing_plan="hobby", + firewall_config_accessible=False, + managed_rulesets=None, + id=PROJECT_ID, + name=PROJECT_NAME, + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(), + ), + mock.patch( + "prowler.providers.vercel.services.security.security_custom_rules_configured.security_custom_rules_configured.security_client", + new=security_client, + ), + ): + from prowler.providers.vercel.services.security.security_custom_rules_configured.security_custom_rules_configured import ( + security_custom_rules_configured, + ) + + check = security_custom_rules_configured() + result = check.execute() + assert len(result) == 1 + assert result[0].status == "MANUAL" + assert ( + result[0].status_extended + == f"Project {PROJECT_NAME} ({PROJECT_ID}) could not be assessed for custom firewall rules because the firewall configuration endpoint was not accessible. Manual verification is required. 
This may be expected because custom firewall rules are not available on the Vercel Hobby plan." + ) diff --git a/tests/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured_test.py b/tests/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured_test.py index 20e6224a24..9d8f537229 100644 --- a/tests/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured_test.py +++ b/tests/providers/vercel/services/security/security_ip_blocking_rules_configured/security_ip_blocking_rules_configured_test.py @@ -111,3 +111,41 @@ class Test_security_ip_blocking_rules_configured: == f"Project {PROJECT_NAME} ({PROJECT_ID}) does not have any IP blocking rules configured." ) assert result[0].team_id == TEAM_ID + + def test_ip_rules_status_unavailable_hobby_plan(self): + security_client = mock.MagicMock + security_client.firewall_configs = { + PROJECT_ID: VercelFirewallConfig( + project_id=PROJECT_ID, + project_name=PROJECT_NAME, + team_id=TEAM_ID, + billing_plan="hobby", + firewall_config_accessible=False, + managed_rulesets=None, + id=PROJECT_ID, + name=PROJECT_NAME, + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(), + ), + mock.patch( + "prowler.providers.vercel.services.security.security_ip_blocking_rules_configured.security_ip_blocking_rules_configured.security_client", + new=security_client, + ), + ): + from prowler.providers.vercel.services.security.security_ip_blocking_rules_configured.security_ip_blocking_rules_configured import ( + security_ip_blocking_rules_configured, + ) + + check = security_ip_blocking_rules_configured() + result = check.execute() + assert len(result) == 1 + assert result[0].status == "MANUAL" + assert ( + result[0].status_extended + == f"Project {PROJECT_NAME} ({PROJECT_ID}) could not be assessed for 
IP blocking rules because the firewall configuration endpoint was not accessible. Manual verification is required. This may be expected because IP blocking rules are not available on the Vercel Hobby plan." + ) diff --git a/tests/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled_test.py b/tests/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled_test.py index 2cc32c4e91..03cd387d45 100644 --- a/tests/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled_test.py +++ b/tests/providers/vercel/services/security/security_managed_rulesets_enabled/security_managed_rulesets_enabled_test.py @@ -121,6 +121,7 @@ class Test_security_managed_rulesets_enabled: project_id=PROJECT_ID, project_name=PROJECT_NAME, team_id=TEAM_ID, + firewall_config_accessible=False, firewall_enabled=False, managed_rulesets=None, id=PROJECT_ID, @@ -150,6 +151,45 @@ class Test_security_managed_rulesets_enabled: assert result[0].status == "MANUAL" assert ( result[0].status_extended - == f"Project {PROJECT_NAME} ({PROJECT_ID}) could not be assessed for managed rulesets. Enterprise plan required to access this feature." + == f"Project {PROJECT_NAME} ({PROJECT_ID}) could not be assessed for managed rulesets because the firewall configuration endpoint was not accessible. Manual verification is required." 
) assert result[0].team_id == TEAM_ID + + def test_managed_rulesets_plan_gated_non_enterprise_scope(self): + security_client = mock.MagicMock + security_client.firewall_configs = { + PROJECT_ID: VercelFirewallConfig( + project_id=PROJECT_ID, + project_name=PROJECT_NAME, + team_id=TEAM_ID, + billing_plan="pro", + firewall_config_accessible=False, + firewall_enabled=False, + managed_rulesets=None, + id=PROJECT_ID, + name=PROJECT_NAME, + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(), + ), + mock.patch( + "prowler.providers.vercel.services.security.security_managed_rulesets_enabled.security_managed_rulesets_enabled.security_client", + new=security_client, + ), + ): + from prowler.providers.vercel.services.security.security_managed_rulesets_enabled.security_managed_rulesets_enabled import ( + security_managed_rulesets_enabled, + ) + + check = security_managed_rulesets_enabled() + result = check.execute() + assert len(result) == 1 + assert result[0].status == "MANUAL" + assert ( + result[0].status_extended + == f"Project {PROJECT_NAME} ({PROJECT_ID}) could not be assessed for managed rulesets because the firewall configuration endpoint was not accessible. Manual verification is required. This may be expected because some managed WAF rulesets, including the OWASP Core Ruleset, are only available on Vercel Enterprise plans." 
+ ) diff --git a/tests/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured_test.py b/tests/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured_test.py index 1c00a3c358..aab3e84d71 100644 --- a/tests/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured_test.py +++ b/tests/providers/vercel/services/security/security_rate_limiting_configured/security_rate_limiting_configured_test.py @@ -111,3 +111,41 @@ class Test_security_rate_limiting_configured: == f"Project {PROJECT_NAME} ({PROJECT_ID}) does not have any rate limiting rules configured." ) assert result[0].team_id == TEAM_ID + + def test_rate_limiting_status_unavailable_hobby_plan(self): + security_client = mock.MagicMock + security_client.firewall_configs = { + PROJECT_ID: VercelFirewallConfig( + project_id=PROJECT_ID, + project_name=PROJECT_NAME, + team_id=TEAM_ID, + billing_plan="hobby", + firewall_config_accessible=False, + managed_rulesets=None, + id=PROJECT_ID, + name=PROJECT_NAME, + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(), + ), + mock.patch( + "prowler.providers.vercel.services.security.security_rate_limiting_configured.security_rate_limiting_configured.security_client", + new=security_client, + ), + ): + from prowler.providers.vercel.services.security.security_rate_limiting_configured.security_rate_limiting_configured import ( + security_rate_limiting_configured, + ) + + check = security_rate_limiting_configured() + result = check.execute() + assert len(result) == 1 + assert result[0].status == "MANUAL" + assert ( + result[0].status_extended + == f"Project {PROJECT_NAME} ({PROJECT_ID}) could not be assessed for rate limiting rules because the firewall configuration endpoint was not accessible. Manual verification is required. 
This may be expected because rate limiting rules are not available on the Vercel Hobby plan." + ) diff --git a/tests/providers/vercel/services/security/security_service_test.py b/tests/providers/vercel/services/security/security_service_test.py index d0a690b1db..17c546db90 100644 --- a/tests/providers/vercel/services/security/security_service_test.py +++ b/tests/providers/vercel/services/security/security_service_test.py @@ -7,7 +7,12 @@ from tests.providers.vercel.vercel_fixtures import PROJECT_ID, PROJECT_NAME, TEA class TestSecurityService: def test_fetch_firewall_config_reads_active_version_and_normalizes_response(self): - project = VercelProject(id=PROJECT_ID, name=PROJECT_NAME, team_id=TEAM_ID) + project = VercelProject( + id=PROJECT_ID, + name=PROJECT_NAME, + team_id=TEAM_ID, + billing_plan="pro", + ) service = Security.__new__(Security) service.firewall_configs = {} @@ -89,6 +94,7 @@ class TestSecurityService: ) config = service.firewall_configs[PROJECT_ID] + assert config.billing_plan == "pro" assert config.firewall_enabled is True assert config.managed_rulesets == {"owasp": {"active": True, "action": "deny"}} assert [rule["id"] for rule in config.custom_rules] == ["rule-custom"] diff --git a/tests/providers/vercel/services/security/security_waf_enabled/security_waf_enabled_test.py b/tests/providers/vercel/services/security/security_waf_enabled/security_waf_enabled_test.py index 1641868175..8df46dfec6 100644 --- a/tests/providers/vercel/services/security/security_waf_enabled/security_waf_enabled_test.py +++ b/tests/providers/vercel/services/security/security_waf_enabled/security_waf_enabled_test.py @@ -113,3 +113,83 @@ class Test_security_waf_enabled: == f"Project {PROJECT_NAME} ({PROJECT_ID}) does not have the Web Application Firewall enabled." 
) assert result[0].team_id == TEAM_ID + + def test_waf_status_unavailable(self): + security_client = mock.MagicMock + security_client.firewall_configs = { + PROJECT_ID: VercelFirewallConfig( + project_id=PROJECT_ID, + project_name=PROJECT_NAME, + team_id=TEAM_ID, + firewall_config_accessible=False, + firewall_enabled=False, + managed_rulesets=None, + id=PROJECT_ID, + name=PROJECT_NAME, + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(), + ), + mock.patch( + "prowler.providers.vercel.services.security.security_waf_enabled.security_waf_enabled.security_client", + new=security_client, + ), + ): + from prowler.providers.vercel.services.security.security_waf_enabled.security_waf_enabled import ( + security_waf_enabled, + ) + + check = security_waf_enabled() + result = check.execute() + assert len(result) == 1 + assert result[0].resource_id == PROJECT_ID + assert result[0].resource_name == PROJECT_NAME + assert result[0].status == "MANUAL" + assert ( + result[0].status_extended + == f"Project {PROJECT_NAME} ({PROJECT_ID}) could not be checked for WAF status because the firewall configuration endpoint was not accessible. Manual verification is required." 
+ ) + assert result[0].team_id == TEAM_ID + + def test_waf_status_unavailable_hobby_plan(self): + security_client = mock.MagicMock + security_client.firewall_configs = { + PROJECT_ID: VercelFirewallConfig( + project_id=PROJECT_ID, + project_name=PROJECT_NAME, + team_id=TEAM_ID, + billing_plan="hobby", + firewall_config_accessible=False, + firewall_enabled=False, + managed_rulesets=None, + id=PROJECT_ID, + name=PROJECT_NAME, + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(), + ), + mock.patch( + "prowler.providers.vercel.services.security.security_waf_enabled.security_waf_enabled.security_client", + new=security_client, + ), + ): + from prowler.providers.vercel.services.security.security_waf_enabled.security_waf_enabled import ( + security_waf_enabled, + ) + + check = security_waf_enabled() + result = check.execute() + assert len(result) == 1 + assert result[0].status == "MANUAL" + assert ( + result[0].status_extended + == f"Project {PROJECT_NAME} ({PROJECT_ID}) could not be checked for WAF status because the firewall configuration endpoint was not accessible. Manual verification is required. This may be expected because the Web Application Firewall is not available on the Vercel Hobby plan." + ) diff --git a/tests/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled_test.py b/tests/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled_test.py index 7f08db7fa1..b85e12ba3c 100644 --- a/tests/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled_test.py +++ b/tests/providers/vercel/services/team/team_directory_sync_enabled/team_directory_sync_enabled_test.py @@ -105,3 +105,41 @@ class Test_team_directory_sync_enabled: == f"Team {TEAM_NAME} does not have directory sync (SCIM) enabled. User provisioning and deprovisioning must be managed manually." 
) assert result[0].team_id == "" + + def test_directory_sync_disabled_pro_plan(self): + team_client = mock.MagicMock + team_client.teams = { + TEAM_ID: VercelTeam( + id=TEAM_ID, + name=TEAM_NAME, + slug=TEAM_SLUG, + directory_sync_enabled=False, + billing_plan="pro", + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(), + ), + mock.patch( + "prowler.providers.vercel.services.team.team_directory_sync_enabled.team_directory_sync_enabled.team_client", + new=team_client, + ), + ): + from prowler.providers.vercel.services.team.team_directory_sync_enabled.team_directory_sync_enabled import ( + team_directory_sync_enabled, + ) + + check = team_directory_sync_enabled() + result = check.execute() + assert len(result) == 1 + assert result[0].resource_id == TEAM_ID + assert result[0].resource_name == TEAM_NAME + assert result[0].status == "FAIL" + assert ( + result[0].status_extended + == f"Team {TEAM_NAME} does not have directory sync (SCIM) enabled. User provisioning and deprovisioning must be managed manually. This may be expected because directory sync (SCIM) is only available on Vercel Enterprise plans." + ) + assert result[0].team_id == "" diff --git a/tests/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled_test.py b/tests/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled_test.py index b7f40dd653..b99b862c1f 100644 --- a/tests/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled_test.py +++ b/tests/providers/vercel/services/team/team_saml_sso_enabled/team_saml_sso_enabled_test.py @@ -106,3 +106,42 @@ class Test_team_saml_sso_enabled: == f"Team {TEAM_NAME} does not have SAML SSO enabled." 
) assert result[0].team_id == "" + + def test_saml_disabled_hobby_plan(self): + team_client = mock.MagicMock + team_client.teams = { + TEAM_ID: VercelTeam( + id=TEAM_ID, + name=TEAM_NAME, + slug=TEAM_SLUG, + saml=SAMLConfig(status="disabled", enforced=False), + billing_plan="hobby", + members=[], + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(), + ), + mock.patch( + "prowler.providers.vercel.services.team.team_saml_sso_enabled.team_saml_sso_enabled.team_client", + new=team_client, + ), + ): + from prowler.providers.vercel.services.team.team_saml_sso_enabled.team_saml_sso_enabled import ( + team_saml_sso_enabled, + ) + + check = team_saml_sso_enabled() + result = check.execute() + assert len(result) == 1 + assert result[0].resource_id == TEAM_ID + assert result[0].resource_name == TEAM_NAME + assert result[0].status == "FAIL" + assert ( + result[0].status_extended + == f"Team {TEAM_NAME} does not have SAML SSO enabled. This may be expected because SAML SSO is not available on the Vercel Hobby plan." + ) + assert result[0].team_id == "" diff --git a/tests/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced_test.py b/tests/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced_test.py index 360d6cdbd8..839c42f3ad 100644 --- a/tests/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced_test.py +++ b/tests/providers/vercel/services/team/team_saml_sso_enforced/team_saml_sso_enforced_test.py @@ -142,3 +142,41 @@ class Test_team_saml_sso_enforced: == f"Team {TEAM_NAME} does not have SAML SSO enforced." 
) assert result[0].team_id == "" + + def test_saml_disabled_hobby_plan(self): + team_client = mock.MagicMock + team_client.teams = { + TEAM_ID: VercelTeam( + id=TEAM_ID, + name=TEAM_NAME, + slug=TEAM_SLUG, + saml=SAMLConfig(status="disabled", enforced=False), + billing_plan="hobby", + ) + } + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=set_mocked_vercel_provider(), + ), + mock.patch( + "prowler.providers.vercel.services.team.team_saml_sso_enforced.team_saml_sso_enforced.team_client", + new=team_client, + ), + ): + from prowler.providers.vercel.services.team.team_saml_sso_enforced.team_saml_sso_enforced import ( + team_saml_sso_enforced, + ) + + check = team_saml_sso_enforced() + result = check.execute() + assert len(result) == 1 + assert result[0].resource_id == TEAM_ID + assert result[0].resource_name == TEAM_NAME + assert result[0].status == "FAIL" + assert ( + result[0].status_extended + == f"Team {TEAM_NAME} does not have SAML SSO enforced. This may be expected because SAML SSO is not available on the Vercel Hobby plan." 
+ ) + assert result[0].team_id == "" diff --git a/tests/providers/vercel/vercel_fixtures.py b/tests/providers/vercel/vercel_fixtures.py index 70775bb377..3e5a21f4ef 100644 --- a/tests/providers/vercel/vercel_fixtures.py +++ b/tests/providers/vercel/vercel_fixtures.py @@ -33,6 +33,8 @@ def set_mocked_vercel_provider( team_id: str = TEAM_ID, identity: VercelIdentityInfo = None, audit_config: dict = None, + billing_plan: str = None, + team_billing_plan: str = None, ): """Create a mocked VercelProvider for testing.""" provider = MagicMock() @@ -42,15 +44,22 @@ def set_mocked_vercel_provider( team_id=team_id, http_session=MagicMock(), ) + resolved_team_billing_plan = ( + team_billing_plan if team_billing_plan is not None else billing_plan + ) + team_info = VercelTeamInfo( + id=TEAM_ID, + name=TEAM_NAME, + slug=TEAM_SLUG, + billing_plan=resolved_team_billing_plan, + ) provider.identity = identity or VercelIdentityInfo( user_id=USER_ID, username=USERNAME, email=USER_EMAIL, - team=VercelTeamInfo( - id=TEAM_ID, - name=TEAM_NAME, - slug=TEAM_SLUG, - ), + billing_plan=billing_plan, + team=team_info, + teams=[team_info], ) provider.audit_config = audit_config or {"max_retries": 3} provider.fixer_config = {} diff --git a/tests/providers/vercel/vercel_metadata_test.py b/tests/providers/vercel/vercel_metadata_test.py new file mode 100644 index 0000000000..0587c1a992 --- /dev/null +++ b/tests/providers/vercel/vercel_metadata_test.py @@ -0,0 +1,97 @@ +from prowler.lib.check.models import CheckMetadata + + +class TestVercelMetadata: + EXPECTED_CATEGORIES = { + "authentication_no_stale_tokens": [ + "trust-boundaries", + "vercel-hobby-plan", + ], + "authentication_token_not_expired": [ + "trust-boundaries", + "vercel-hobby-plan", + ], + "deployment_production_uses_stable_target": [ + "trust-boundaries", + "vercel-hobby-plan", + ], + "domain_dns_properly_configured": [ + "trust-boundaries", + "vercel-hobby-plan", + ], + "domain_ssl_certificate_valid": ["encryption", 
"vercel-hobby-plan"], + "domain_verified": ["trust-boundaries", "vercel-hobby-plan"], + "project_auto_expose_system_env_disabled": [ + "trust-boundaries", + "vercel-hobby-plan", + ], + "project_deployment_protection_enabled": [ + "internet-exposed", + "vercel-hobby-plan", + ], + "project_directory_listing_disabled": [ + "internet-exposed", + "vercel-hobby-plan", + ], + "project_environment_no_overly_broad_target": [ + "secrets", + "vercel-hobby-plan", + ], + "project_environment_no_secrets_in_plain_type": [ + "secrets", + "vercel-hobby-plan", + ], + "project_environment_production_vars_not_in_preview": [ + "secrets", + "vercel-hobby-plan", + ], + "project_git_fork_protection_enabled": [ + "internet-exposed", + "vercel-hobby-plan", + ], + "project_password_protection_enabled": [ + "internet-exposed", + "vercel-pro-plan", + ], + "project_production_deployment_protection_enabled": [ + "internet-exposed", + "vercel-pro-plan", + ], + "project_skew_protection_enabled": ["resilience", "vercel-pro-plan"], + "security_custom_rules_configured": [ + "internet-exposed", + "vercel-pro-plan", + ], + "security_ip_blocking_rules_configured": [ + "internet-exposed", + "vercel-pro-plan", + ], + "security_managed_rulesets_enabled": [ + "internet-exposed", + "vercel-hobby-plan", + ], + "security_rate_limiting_configured": [ + "internet-exposed", + "vercel-pro-plan", + ], + "security_waf_enabled": ["internet-exposed", "vercel-pro-plan"], + "team_directory_sync_enabled": [ + "trust-boundaries", + "vercel-enterprise-plan", + ], + "team_member_role_least_privilege": [ + "trust-boundaries", + "vercel-hobby-plan", + ], + "team_no_stale_invitations": ["trust-boundaries", "vercel-hobby-plan"], + "team_saml_sso_enabled": ["trust-boundaries", "vercel-pro-plan"], + "team_saml_sso_enforced": ["trust-boundaries", "vercel-pro-plan"], + } + + def test_vercel_checks_use_legacy_and_plan_categories(self): + vercel_metadata = CheckMetadata.get_bulk(provider="vercel") + + assert set(vercel_metadata) == 
set(self.EXPECTED_CATEGORIES) + + for check_id, expected_categories in self.EXPECTED_CATEGORIES.items(): + assert vercel_metadata[check_id].Categories == expected_categories From 6cb770fcc8d7579dff689e07276752a0db3316c0 Mon Sep 17 00:00:00 2001 From: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com> Date: Mon, 4 May 2026 11:17:54 +0100 Subject: [PATCH 10/29] fix(ui): clean up findings expanded resource row layout (#10949) --- ui/CHANGELOG.md | 8 ++ .../table/column-finding-resources.tsx | 78 +++++++++---------- .../table/inline-resource-container.tsx | 38 ++++----- 3 files changed, 62 insertions(+), 62 deletions(-) diff --git a/ui/CHANGELOG.md b/ui/CHANGELOG.md index 8a18dae634..49ec9a26bd 100644 --- a/ui/CHANGELOG.md +++ b/ui/CHANGELOG.md @@ -2,6 +2,14 @@ All notable changes to the **Prowler UI** are documented in this file. +## [1.26.0] (Prowler UNRELEASED) + +### 🔄 Changed + +- Findings expanded resource rows now drop the redundant cube icons, render Service and Region with the same compact label style as Last seen and Failing for, and reorder columns to Status, Resource, Provider, Severity, then field labels [(#10949)](https://github.com/prowler-cloud/prowler/pull/10949) + +--- + ## [1.25.1] (Prowler v5.25.1) ### 🐞 Fixed diff --git a/ui/components/findings/table/column-finding-resources.tsx b/ui/components/findings/table/column-finding-resources.tsx index 4b184459ef..5789b8a96b 100644 --- a/ui/components/findings/table/column-finding-resources.tsx +++ b/ui/components/findings/table/column-finding-resources.tsx @@ -1,7 +1,7 @@ "use client"; import { ColumnDef, Row, RowSelectionState } from "@tanstack/react-table"; -import { Container, CornerDownRight, VolumeOff, VolumeX } from "lucide-react"; +import { CornerDownRight, VolumeOff, VolumeX } from "lucide-react"; import { useContext, useState } from "react"; import { MuteFindingsModal } from "@/components/findings/mute-findings-modal"; @@ -203,23 +203,6 @@ export function 
getColumnFindingResources({ enableSorting: false, enableHiding: false, }, - // Resource — name + uid (EntityInfo with resource icon) - { - id: "resource", - header: ({ column }) => ( - - ), - cell: ({ row }) => ( -
- } - entityAlias={row.original.resourceName} - entityId={row.original.resourceUid} - /> -
- ), - enableSorting: false, - }, // Status { id: "status", @@ -233,29 +216,35 @@ export function getColumnFindingResources({ }, enableSorting: false, }, - // Service + // Resource — name + uid { - id: "service", + id: "resource", header: ({ column }) => ( - + ), cell: ({ row }) => ( -

- {row.original.service} -

+
+ +
), enableSorting: false, }, - // Region + // Provider — alias + uid (same style as Resource) { - id: "region", + id: "provider", header: ({ column }) => ( - + ), cell: ({ row }) => ( -

- {row.original.region} -

+
+ +
), enableSorting: false, }, @@ -268,20 +257,29 @@ export function getColumnFindingResources({ cell: ({ row }) => , enableSorting: false, }, - // Account — alias + uid (EntityInfo with provider logo) + // Service { - id: "account", + id: "service", header: ({ column }) => ( - + ), cell: ({ row }) => ( -
- -
+ + {row.original.service || "-"} + + ), + enableSorting: false, + }, + // Region + { + id: "region", + header: ({ column }) => ( + + ), + cell: ({ row }) => ( + + {row.original.region || "-"} + ), enableSorting: false, }, diff --git a/ui/components/findings/table/inline-resource-container.tsx b/ui/components/findings/table/inline-resource-container.tsx index 2aa13056d9..22ccb027e1 100644 --- a/ui/components/findings/table/inline-resource-container.tsx +++ b/ui/components/findings/table/inline-resource-container.tsx @@ -70,27 +70,23 @@ function ResourceSkeletonRow({
- {/* Resource: icon + name + uid */} - -
- -
- - -
-
-
{/* Status */} - {/* Service */} + {/* Resource: name + uid */} - +
+ + +
- {/* Region */} + {/* Provider: alias + uid */} - +
+ + +
{/* Severity */} @@ -99,15 +95,13 @@ function ResourceSkeletonRow({ - {/* Account: provider icon + alias + uid */} + {/* Service */} -
- -
- - -
-
+ +
+ {/* Region */} + + {/* Last seen */} From 921f49a0de407b06afc2d311564f56052526a724 Mon Sep 17 00:00:00 2001 From: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com> Date: Mon, 4 May 2026 12:38:15 +0200 Subject: [PATCH 11/29] feat(aws): add bedrock_prompt_management_exists security check (#10878) --- prowler/CHANGELOG.md | 1 + .../compliance/aws/kisa_isms_p_2023_aws.json | 1 + .../aws/kisa_isms_p_2023_korean_aws.json | 1 + .../__init__.py | 0 ...ock_prompt_management_exists.metadata.json | 39 +++ .../bedrock_prompt_management_exists.py | 54 ++++ .../aws/services/bedrock/bedrock_service.py | 37 +++ .../bedrock_prompt_management_exists_test.py | 280 ++++++++++++++++++ .../services/bedrock/bedrock_service_test.py | 122 ++++++++ 9 files changed, 535 insertions(+) create mode 100644 prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/__init__.py create mode 100644 prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists.metadata.json create mode 100644 prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists.py create mode 100644 tests/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists_test.py diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index 643aeb518e..589f76323c 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -10,6 +10,7 @@ All notable changes to the **Prowler SDK** are documented in this file. 
- Universal compliance pipeline integrated into the CLI: `--list-compliance` and `--list-compliance-requirements` show universal frameworks, and CSV plus OCSF outputs are generated for any framework declaring a `TableConfig` [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301) - ASD Essential Eight Maturity Model compliance framework for AWS (Maturity Level One, Nov 2023) [(#10808)](https://github.com/prowler-cloud/prowler/pull/10808) - Update Vercel checks to return personalized finding status extended depending on billing plan and classify them with billing-plan categories [(#10663)](https://github.com/prowler-cloud/prowler/pull/10663) +- `bedrock_prompt_management_exists` check for AWS provider [(#10878)](https://github.com/prowler-cloud/prowler/pull/10878) ### 🔄 Changed diff --git a/prowler/compliance/aws/kisa_isms_p_2023_aws.json b/prowler/compliance/aws/kisa_isms_p_2023_aws.json index f7c2a0bc69..b2b71fa905 100644 --- a/prowler/compliance/aws/kisa_isms_p_2023_aws.json +++ b/prowler/compliance/aws/kisa_isms_p_2023_aws.json @@ -2897,6 +2897,7 @@ "bedrock_guardrails_configured", "bedrock_model_invocation_logging_enabled", "bedrock_model_invocation_logs_encryption_enabled", + "bedrock_prompt_management_exists", "cloudformation_stack_outputs_find_secrets", "cloudfront_distributions_custom_ssl_certificate", "cloudfront_distributions_default_root_object", diff --git a/prowler/compliance/aws/kisa_isms_p_2023_korean_aws.json b/prowler/compliance/aws/kisa_isms_p_2023_korean_aws.json index 877b46250c..a933fc8d27 100644 --- a/prowler/compliance/aws/kisa_isms_p_2023_korean_aws.json +++ b/prowler/compliance/aws/kisa_isms_p_2023_korean_aws.json @@ -2901,6 +2901,7 @@ "bedrock_guardrails_configured", "bedrock_model_invocation_logging_enabled", "bedrock_model_invocation_logs_encryption_enabled", + "bedrock_prompt_management_exists", "cloudformation_stack_outputs_find_secrets", "cloudfront_distributions_custom_ssl_certificate", 
"cloudfront_distributions_default_root_object", diff --git a/prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/__init__.py b/prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists.metadata.json b/prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists.metadata.json new file mode 100644 index 0000000000..195cefccd1 --- /dev/null +++ b/prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists.metadata.json @@ -0,0 +1,39 @@ +{ + "Provider": "aws", + "CheckID": "bedrock_prompt_management_exists", + "CheckTitle": "Amazon Bedrock Prompt Management prompts exist in the region", + "CheckType": [ + "Software and Configuration Checks/AWS Security Best Practices" + ], + "ServiceName": "bedrock", + "SubServiceName": "", + "ResourceIdTemplate": "", + "Severity": "low", + "ResourceType": "Other", + "ResourceGroup": "ai_ml", + "Description": "**Bedrock Prompt Management** enables centralized creation, versioning, and governance of prompts used with foundation models.\n\nThis region-level check verifies whether at least one managed prompt exists in each scanned region, used as an adoption signal for Prompt Management. 
The presence of a prompt does not by itself guarantee that every application prompt is managed.", + "Risk": "Without **Prompt Management**, prompts are scattered across applications with no central oversight, versioning, or auditability over instructions sent to foundation models, weakening governance and compliance posture.\n\nManaged prompts are a governance enabler; **prompt injection** defenses are provided by Bedrock **guardrails**, covered by separate checks.", + "RelatedUrl": "", + "AdditionalURLs": [ + "https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management.html", + "https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-management-create.html" + ], + "Remediation": { + "Code": { + "CLI": "aws bedrock-agent create-prompt --name example_prompt --default-variant default --variants '[{\"name\":\"default\",\"templateType\":\"TEXT\",\"templateConfiguration\":{\"text\":{\"text\":\"Your prompt template here.\"}}}]'", + "NativeIaC": "", + "Other": "1. Open the Amazon Bedrock console\n2. Navigate to Prompt Management\n3. Click Create prompt\n4. Provide a name and configure the prompt template (a prompt can contain at most one variant; additional variants are created via CreatePromptVersion)\n5. Save the prompt", + "Terraform": "" + }, + "Recommendation": { + "Text": "Adopt **Bedrock Prompt Management** to centralize prompt definitions, enforce versioning, and maintain governance over model interactions.\n\nUse managed prompts with **guardrails** and apply **least privilege** access controls to restrict who can create or modify prompts.", + "Url": "https://hub.prowler.com/check/bedrock_prompt_management_exists" + } + }, + "Categories": [ + "gen-ai" + ], + "DependsOn": [], + "RelatedTo": [], + "Notes": "Results are generated per scanned region. Regions where `ListPrompts` cannot be queried are omitted from the findings." 
+} diff --git a/prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists.py b/prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists.py new file mode 100644 index 0000000000..b8ec65dbb4 --- /dev/null +++ b/prowler/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists.py @@ -0,0 +1,54 @@ +"""Check for region-level Bedrock Prompt Management adoption.""" + +from prowler.lib.check.models import Check, Check_Report_AWS +from prowler.providers.aws.services.bedrock.bedrock_agent_client import ( + bedrock_agent_client, +) + + +class bedrock_prompt_management_exists(Check): + """Check whether Amazon Bedrock Prompt Management prompts exist in the region. + + A region is reported only when ListPrompts succeeded for it; regions where + the API call failed (e.g. AccessDenied, unsupported region) are skipped at + the service layer and produce no finding. + + - PASS: At least one managed prompt exists in the region (one finding per prompt). + - FAIL: No managed prompts exist in the region (one finding per region). + """ + + def execute(self) -> list[Check_Report_AWS]: + """Execute the Bedrock Prompt Management exists check. + + Returns: + A list of reports containing the result of the check. + """ + findings = [] + for region in sorted(bedrock_agent_client.prompt_scanned_regions): + regional_prompts = sorted( + ( + prompt + for prompt in bedrock_agent_client.prompts.values() + if prompt.region == region + ), + key=lambda prompt: prompt.name, + ) + + if regional_prompts: + for prompt in regional_prompts: + report = Check_Report_AWS(metadata=self.metadata(), resource=prompt) + report.status = "PASS" + report.status_extended = f"Bedrock Prompt Management prompt {prompt.name} exists in region {region}." 
+ findings.append(report) + else: + report = Check_Report_AWS(metadata=self.metadata(), resource={}) + report.region = region + report.resource_id = "prompt-management" + report.resource_arn = f"arn:{bedrock_agent_client.audited_partition}:bedrock:{region}:{bedrock_agent_client.audited_account}:prompt-management" + report.status = "FAIL" + report.status_extended = ( + f"No Bedrock Prompt Management prompts exist in region {region}." + ) + findings.append(report) + + return findings diff --git a/prowler/providers/aws/services/bedrock/bedrock_service.py b/prowler/providers/aws/services/bedrock/bedrock_service.py index c19bcb9166..e04319fc57 100644 --- a/prowler/providers/aws/services/bedrock/bedrock_service.py +++ b/prowler/providers/aws/services/bedrock/bedrock_service.py @@ -140,7 +140,10 @@ class BedrockAgent(AWSService): # Call AWSService's __init__ super().__init__("bedrock-agent", provider) self.agents = {} + self.prompts = {} + self.prompt_scanned_regions: set = set() self.__threading_call__(self._list_agents) + self.__threading_call__(self._list_prompts) self.__threading_call__(self._list_tags_for_resource, self.agents.values()) def _list_agents(self, regional_client): @@ -167,7 +170,32 @@ class BedrockAgent(AWSService): f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) + def _list_prompts(self, regional_client): + """List all prompts in a region. + + Prompt Management is evaluated as a region-level adoption signal, so + prompt collection is intentionally not filtered by audit_resources. 
+ """ + logger.info("Bedrock Agent - Listing Prompts...") + try: + paginator = regional_client.get_paginator("list_prompts") + for page in paginator.paginate(): + for prompt in page.get("promptSummaries", []): + prompt_arn = prompt.get("arn", "") + self.prompts[prompt_arn] = Prompt( + id=prompt.get("id", ""), + name=prompt.get("name", ""), + arn=prompt_arn, + region=regional_client.region, + ) + self.prompt_scanned_regions.add(regional_client.region) + except Exception as error: + logger.error( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + def _list_tags_for_resource(self, resource): + """List tags for a Bedrock Agent resource.""" logger.info("Bedrock Agent - Listing Tags for Resource...") try: agent_tags = ( @@ -190,3 +218,12 @@ class Agent(BaseModel): guardrail_id: Optional[str] = None region: str tags: Optional[list] = [] + + +class Prompt(BaseModel): + """Model representing a Bedrock Prompt Management prompt.""" + + id: str + name: str + arn: str + region: str diff --git a/tests/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists_test.py b/tests/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists_test.py new file mode 100644 index 0000000000..c29b67c064 --- /dev/null +++ b/tests/providers/aws/services/bedrock/bedrock_prompt_management_exists/bedrock_prompt_management_exists_test.py @@ -0,0 +1,280 @@ +from unittest import mock + +import botocore +from botocore.exceptions import ClientError +from moto import mock_aws + +from tests.providers.aws.utils import ( + AWS_ACCOUNT_NUMBER, + AWS_REGION_EU_WEST_1, + AWS_REGION_US_EAST_1, + set_mocked_aws_provider, +) + +make_api_call = botocore.client.BaseClient._make_api_call + +PROMPT_ARN = ( + f"arn:aws:bedrock:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:prompt/test-prompt-id" +) + + +def mock_make_api_call_list_prompts_access_denied(self, operation_name, kwarg): + 
"""Mock API call where ListPrompts fails with AccessDeniedException.""" + if operation_name == "ListPrompts": + raise ClientError( + { + "Error": { + "Code": "AccessDeniedException", + "Message": "User is not authorized to perform: bedrock:ListPrompts", + } + }, + operation_name, + ) + return make_api_call(self, operation_name, kwarg) + + +def mock_make_api_call_with_prompts(self, operation_name, kwarg): + """Mock API call that returns prompts.""" + if operation_name == "ListPrompts": + return { + "promptSummaries": [ + { + "id": "test-prompt-id", + "name": "test-prompt", + "arn": PROMPT_ARN, + } + ] + } + return make_api_call(self, operation_name, kwarg) + + +def mock_make_api_call_with_multiple_prompts(self, operation_name, kwarg): + """Mock API call that returns multiple prompts.""" + if operation_name == "ListPrompts": + return { + "promptSummaries": [ + { + "id": "test-prompt-id-1", + "name": "test-prompt-1", + "arn": f"arn:aws:bedrock:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:prompt/test-prompt-id-1", + }, + { + "id": "test-prompt-id-2", + "name": "test-prompt-2", + "arn": f"arn:aws:bedrock:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:prompt/test-prompt-id-2", + }, + { + "id": "test-prompt-id-3", + "name": "test-prompt-3", + "arn": f"arn:aws:bedrock:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:prompt/test-prompt-id-3", + }, + ] + } + return make_api_call(self, operation_name, kwarg) + + +def mock_make_api_call_no_prompts(self, operation_name, kwarg): + """Mock API call that returns no prompts.""" + if operation_name == "ListPrompts": + return {"promptSummaries": []} + return make_api_call(self, operation_name, kwarg) + + +class Test_bedrock_prompt_management_exists: + @mock.patch( + "botocore.client.BaseClient._make_api_call", + new=mock_make_api_call_no_prompts, + ) + @mock_aws + def test_no_prompts(self): + """Test FAIL when no prompts exist in the region.""" + from prowler.providers.aws.services.bedrock.bedrock_service import BedrockAgent + + aws_provider 
= set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), + mock.patch( + "prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists.bedrock_agent_client", + new=BedrockAgent(aws_provider), + ), + ): + from prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists import ( + bedrock_prompt_management_exists, + ) + + check = bedrock_prompt_management_exists() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "FAIL" + assert ( + result[0].status_extended + == f"No Bedrock Prompt Management prompts exist in region {AWS_REGION_US_EAST_1}." + ) + assert result[0].resource_id == "prompt-management" + assert result[0].region == AWS_REGION_US_EAST_1 + assert ( + result[0].resource_arn + == f"arn:aws:bedrock:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:prompt-management" + ) + + @mock.patch( + "botocore.client.BaseClient._make_api_call", + new=mock_make_api_call_with_prompts, + ) + @mock_aws + def test_prompts_exist(self): + """Test PASS when prompts exist in the region.""" + from prowler.providers.aws.services.bedrock.bedrock_service import BedrockAgent + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), + mock.patch( + "prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists.bedrock_agent_client", + new=BedrockAgent(aws_provider), + ), + ): + from prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists import ( + bedrock_prompt_management_exists, + ) + + check = bedrock_prompt_management_exists() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "PASS" + assert 
( + result[0].status_extended + == f"Bedrock Prompt Management prompt test-prompt exists in region {AWS_REGION_US_EAST_1}." + ) + assert result[0].resource_id == "test-prompt-id" + assert result[0].region == AWS_REGION_US_EAST_1 + assert result[0].resource_arn == PROMPT_ARN + + @mock.patch( + "botocore.client.BaseClient._make_api_call", + new=mock_make_api_call_with_multiple_prompts, + ) + @mock_aws + def test_multiple_prompts_exist(self): + """Test PASS with one finding per prompt when multiple prompts exist.""" + from prowler.providers.aws.services.bedrock.bedrock_service import BedrockAgent + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), + mock.patch( + "prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists.bedrock_agent_client", + new=BedrockAgent(aws_provider), + ), + ): + from prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists import ( + bedrock_prompt_management_exists, + ) + + check = bedrock_prompt_management_exists() + result = check.execute() + + assert len(result) == 3 + for index, finding in enumerate(result, start=1): + expected_name = f"test-prompt-{index}" + expected_id = f"test-prompt-id-{index}" + assert finding.status == "PASS" + assert ( + finding.status_extended + == f"Bedrock Prompt Management prompt {expected_name} exists in region {AWS_REGION_US_EAST_1}." 
+ ) + assert finding.resource_id == expected_id + assert finding.region == AWS_REGION_US_EAST_1 + assert ( + finding.resource_arn + == f"arn:aws:bedrock:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:prompt/{expected_id}" + ) + + @mock.patch( + "botocore.client.BaseClient._make_api_call", + new=mock_make_api_call_no_prompts, + ) + @mock_aws + def test_no_prompts_multiple_regions(self): + """Test FAIL in multiple regions when no prompts exist.""" + from prowler.providers.aws.services.bedrock.bedrock_service import BedrockAgent + + aws_provider = set_mocked_aws_provider( + [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1] + ) + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), + mock.patch( + "prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists.bedrock_agent_client", + new=BedrockAgent(aws_provider), + ), + ): + from prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists import ( + bedrock_prompt_management_exists, + ) + + check = bedrock_prompt_management_exists() + result = check.execute() + + assert len(result) == 2 + for finding in result: + assert finding.status == "FAIL" + assert ( + finding.status_extended + == f"No Bedrock Prompt Management prompts exist in region {finding.region}." 
+ ) + assert finding.resource_id == "prompt-management" + assert ( + finding.resource_arn + == f"arn:aws:bedrock:{finding.region}:{AWS_ACCOUNT_NUMBER}:prompt-management" + ) + regions = {finding.region for finding in result} + assert regions == {AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1} + + @mock.patch( + "botocore.client.BaseClient._make_api_call", + new=mock_make_api_call_list_prompts_access_denied, + ) + @mock_aws + def test_list_prompts_client_error_skips_region(self): + """Test that regions where ListPrompts fails produce no findings.""" + from prowler.providers.aws.services.bedrock.bedrock_service import BedrockAgent + + aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1]) + + with ( + mock.patch( + "prowler.providers.common.provider.Provider.get_global_provider", + return_value=aws_provider, + ), + mock.patch( + "prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists.bedrock_agent_client", + new=BedrockAgent(aws_provider), + ), + ): + from prowler.providers.aws.services.bedrock.bedrock_prompt_management_exists.bedrock_prompt_management_exists import ( + bedrock_prompt_management_exists, + ) + + check = bedrock_prompt_management_exists() + result = check.execute() + + assert result == [] diff --git a/tests/providers/aws/services/bedrock/bedrock_service_test.py b/tests/providers/aws/services/bedrock/bedrock_service_test.py index ed39ac865b..5f7ddf0cf2 100644 --- a/tests/providers/aws/services/bedrock/bedrock_service_test.py +++ b/tests/providers/aws/services/bedrock/bedrock_service_test.py @@ -341,3 +341,125 @@ class TestBedrockAgentPagination: # Verify paginator was used regional_client.get_paginator.assert_called_once_with("list_agents") paginator.paginate.assert_called_once() + + +class TestBedrockPromptPagination: + """Test suite for Bedrock Prompt pagination logic.""" + + def test_list_prompts_pagination(self): + """Test that list_prompts iterates through all pages.""" + # Mock the audit_info + 
audit_info = MagicMock() + audit_info.audited_partition = "aws" + audit_info.audited_account = "123456789012" + audit_info.audit_resources = None + + # Mock the regional client + regional_client = MagicMock() + regional_client.region = "us-east-1" + + # Mock paginator + paginator = MagicMock() + page1 = { + "promptSummaries": [ + { + "id": "prompt-1", + "name": "prompt-name-1", + "arn": "arn:aws:bedrock:us-east-1:123456789012:prompt/prompt-1", + } + ] + } + page2 = { + "promptSummaries": [ + { + "id": "prompt-2", + "name": "prompt-name-2", + "arn": "arn:aws:bedrock:us-east-1:123456789012:prompt/prompt-2", + } + ] + } + paginator.paginate.return_value = [page1, page2] + regional_client.get_paginator.return_value = paginator + + # Initialize service and inject mock client + bedrock_agent_service = BedrockAgent(audit_info) + bedrock_agent_service.regional_clients = {"us-east-1": regional_client} + bedrock_agent_service.prompts = {} # Clear init side effects + bedrock_agent_service.prompt_scanned_regions = set() + + # Run method + bedrock_agent_service._list_prompts(regional_client) + + # Assertions + assert len(bedrock_agent_service.prompts) == 2 + assert ( + "arn:aws:bedrock:us-east-1:123456789012:prompt/prompt-1" + in bedrock_agent_service.prompts + ) + assert ( + "arn:aws:bedrock:us-east-1:123456789012:prompt/prompt-2" + in bedrock_agent_service.prompts + ) + assert "us-east-1" in bedrock_agent_service.prompt_scanned_regions + + # Verify paginator was used + regional_client.get_paginator.assert_called_once_with("list_prompts") + paginator.paginate.assert_called_once() + + def test_list_prompts_ignores_audit_resources_filter(self): + """Prompt collection is region-scoped and must ignore audit_resources.""" + audit_info = MagicMock() + audit_info.audited_partition = "aws" + audit_info.audited_account = "123456789012" + audit_info.audit_resources = ["arn:aws:s3:::unrelated-resource"] + + regional_client = MagicMock() + regional_client.region = "us-east-1" + + 
paginator = MagicMock() + paginator.paginate.return_value = [ + { + "promptSummaries": [ + { + "id": "prompt-1", + "name": "prompt-name-1", + "arn": "arn:aws:bedrock:us-east-1:123456789012:prompt/prompt-1", + } + ] + } + ] + regional_client.get_paginator.return_value = paginator + + bedrock_agent_service = BedrockAgent(audit_info) + bedrock_agent_service.regional_clients = {"us-east-1": regional_client} + bedrock_agent_service.prompts = {} + bedrock_agent_service.prompt_scanned_regions = set() + + bedrock_agent_service._list_prompts(regional_client) + + assert len(bedrock_agent_service.prompts) == 1 + assert "us-east-1" in bedrock_agent_service.prompt_scanned_regions + + def test_list_prompts_error_does_not_mark_region_scanned(self): + """If ListPrompts raises, the region must not be added to prompt_scanned_regions.""" + audit_info = MagicMock() + audit_info.audited_partition = "aws" + audit_info.audited_account = "123456789012" + audit_info.audit_resources = None + + regional_client = MagicMock() + regional_client.region = "us-east-1" + + paginator = MagicMock() + paginator.paginate.side_effect = Exception("ListPrompts failed") + regional_client.get_paginator.return_value = paginator + + bedrock_agent_service = BedrockAgent(audit_info) + bedrock_agent_service.regional_clients = {"us-east-1": regional_client} + bedrock_agent_service.prompts = {} + bedrock_agent_service.prompt_scanned_regions = set() + + bedrock_agent_service._list_prompts(regional_client) + + assert bedrock_agent_service.prompts == {} + assert bedrock_agent_service.prompt_scanned_regions == set() From 652cb69216438e4357df88edd2a29a830535ae20 Mon Sep 17 00:00:00 2001 From: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com> Date: Mon, 4 May 2026 12:59:06 +0100 Subject: [PATCH 12/29] fix(ui): compliance card layout polish (#10939) --- ui/CHANGELOG.md | 3 +- ui/components/compliance/compliance-card.tsx | 80 ++++++++++---------- ui/components/icons/compliance/iso-27001.svg | 6 +- 3 files 
changed, 46 insertions(+), 43 deletions(-) diff --git a/ui/CHANGELOG.md b/ui/CHANGELOG.md index 49ec9a26bd..4b63c9cd84 100644 --- a/ui/CHANGELOG.md +++ b/ui/CHANGELOG.md @@ -2,10 +2,11 @@ All notable changes to the **Prowler UI** are documented in this file. -## [1.26.0] (Prowler UNRELEASED) +## [1.25.2] (Prowler UNRELEASED) ### 🔄 Changed +- Compliance cards: progress bar now spans the full card width, the passing-requirements caption sits beside the framework logo under the title, and the ISO 27001 logo asset is recentered within its tile [(#10939)](https://github.com/prowler-cloud/prowler/pull/10939) - Findings expanded resource rows now drop the redundant cube icons, render Service and Region with the same compact label style as Last seen and Failing for, and reorder columns to Status, Resource, Provider, Severity, then field labels [(#10949)](https://github.com/prowler-cloud/prowler/pull/10949) --- diff --git a/ui/components/compliance/compliance-card.tsx b/ui/components/compliance/compliance-card.tsx index c94b7ef63e..98d3949016 100644 --- a/ui/components/compliance/compliance-card.tsx +++ b/ui/components/compliance/compliance-card.tsx @@ -119,49 +119,33 @@ export const ComplianceCard: React.FC = ({ /> -
-
+
+
{getComplianceIcon(title) && ( - {`${title} +
+ {`${title} +
)} -
-
- - -

+
+ + +

+ {formatTitle(title)} + {version ? ` - ${version}` : ""} +

+
+ {formatTitle(title)} {version ? ` - ${version}` : ""} -

-
- - {formatTitle(title)} - {version ? ` - ${version}` : ""} - -
-
-
- - Score: - - - {ratingPercentage}% - -
- -
-
- + + + {passingRequirements} / {totalRequirements} @@ -169,6 +153,24 @@ export const ComplianceCard: React.FC = ({
+
+
+ + Score: + + + {ratingPercentage}% + +
+ +
diff --git a/ui/components/icons/compliance/iso-27001.svg b/ui/components/icons/compliance/iso-27001.svg index 109c19521d..b79f970b42 100644 --- a/ui/components/icons/compliance/iso-27001.svg +++ b/ui/components/icons/compliance/iso-27001.svg @@ -1,8 +1,8 @@ - - + + - + From f4afdf0541eb348d8812e84134bf1af136e44510 Mon Sep 17 00:00:00 2001 From: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com> Date: Mon, 4 May 2026 14:59:27 +0100 Subject: [PATCH 13/29] chore(ui): decrement changelog entry version to 1.25.2 (#10974) From 269e51259dc1dadf9e975859fbd7d03b020d44b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Mon, 4 May 2026 17:24:09 +0200 Subject: [PATCH 14/29] docs: add troubleshooting guide for stuck scans after worker crash (#10938) --- docs/troubleshooting.mdx | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/docs/troubleshooting.mdx b/docs/troubleshooting.mdx index a6a28c0ad6..9d60c57321 100644 --- a/docs/troubleshooting.mdx +++ b/docs/troubleshooting.mdx @@ -159,6 +159,40 @@ When these environment variables are set, the API will use them directly instead A fix addressing this permission issue is being evaluated in [PR #9953](https://github.com/prowler-cloud/prowler/pull/9953). +### Scan Stuck in Executing State After Worker Crash + +When running Prowler App via Docker Compose, a scan may remain indefinitely in the `executing` state if the worker process crashes (for example, due to an Out of Memory condition) before it can update the scan status. Since it is not currently possible to cancel a scan in `executing` state through the UI, the workaround is to manually update the scan record in the database. + +**Root Cause:** + +The Celery worker process terminates unexpectedly (OOM, node failure, etc.) before transitioning the scan state to `completed` or `failed`. The scan record remains in `executing` with no active process to advance it. 
+ +**Solution:** + +Connect to the database using the `prowler_admin` user. Due to Row-Level Security (RLS), the default database user cannot see scan records — you must use `prowler_admin`: + +```bash +psql -U prowler_admin -d prowler_db +``` + +Identify the stuck scan by filtering for scans in `executing` state: + +```sql +SELECT id, name, state, started_at FROM scans WHERE state = 'executing'; +``` + +Update the scan state to `failed` using the scan ID: + +```sql +UPDATE scans SET state = 'failed' WHERE id = ''; +``` + +After this change, the scan will appear as failed in the UI and you can launch a new scan. + + +A feature to cancel executing scans directly from the UI is being tracked in [GitHub Issue #6893](https://github.com/prowler-cloud/prowler/issues/6893). + + ### SAML/OAuth ACS URL Incorrect When Running Behind a Proxy or Load Balancer See [GitHub Issue #9724](https://github.com/prowler-cloud/prowler/issues/9724) for more details. From 0dd8981ee450fcfee3a33313faec33e69d6e4b57 Mon Sep 17 00:00:00 2001 From: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com> Date: Mon, 4 May 2026 17:47:39 +0200 Subject: [PATCH 15/29] feat: add issue template for creating new checks (#10976) --- .github/ISSUE_TEMPLATE/new-check-request.yml | 143 +++++++++++++++++++ docs/developer-guide/checks.mdx | 24 +++- 2 files changed, 162 insertions(+), 5 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/new-check-request.yml diff --git a/.github/ISSUE_TEMPLATE/new-check-request.yml b/.github/ISSUE_TEMPLATE/new-check-request.yml new file mode 100644 index 0000000000..6c6687e14f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/new-check-request.yml @@ -0,0 +1,143 @@ +name: "🔎 New Check Request" +description: Request a new Prowler security check +title: "[New Check]: " +labels: ["feature-request", "status/needs-triage"] + +body: + - type: checkboxes + id: search + attributes: + label: Existing check search + description: Confirm this check does not already exist 
before opening a new request. + options: + - label: I have searched existing issues, Prowler Hub, and the public roadmap, and this check does not already exist. + required: true + + - type: markdown + attributes: + value: | + Use this form to describe the security condition that Prowler should evaluate. + + The most useful inputs for [Prowler Studio](https://github.com/prowler-cloud/prowler-studio) are: + - What should be detected + - What PASS and FAIL mean + - Vendor docs, API references, SDK methods, CLI commands, or reference code + + - type: dropdown + id: provider + attributes: + label: Provider + description: Cloud or platform this check targets. + options: + - AWS + - Azure + - GCP + - Kubernetes + - GitHub + - Microsoft 365 + - OCI + - Alibaba Cloud + - Cloudflare + - MongoDB Atlas + - Google Workspace + - OpenStack + - Vercel + - NHN + - Other / New provider + validations: + required: true + + - type: input + id: other_provider_name + attributes: + label: New provider name + description: Only fill this if you selected "Other / New provider" above. + placeholder: "NewProviderName" + validations: + required: false + + - type: input + id: service_name + attributes: + label: Service or product area + description: Optional. Main service, product, or feature to audit. + placeholder: "s3, bedrock, entra, repository, apiserver" + validations: + required: false + + - type: input + id: suggested_check_name + attributes: + label: Suggested check name + description: Optional. Use `snake_case` following `__`, with lowercase letters and underscores only. + placeholder: "bedrock_guardrail_sensitive_information_filter_enabled" + validations: + required: false + + - type: textarea + id: context + attributes: + label: Context and goal + description: Describe the security problem, why it matters, and what this new check should help detect. 
+ placeholder: |- + - Security condition to validate: + - Why it matters: + - Resource, feature, or configuration involved: + validations: + required: true + + - type: textarea + id: expected_behavior + attributes: + label: Expected behavior + description: Explain what the check should evaluate and what PASS, FAIL, or MANUAL should mean. + placeholder: |- + - Resource or scope to evaluate: + - PASS when: + - FAIL when: + - MANUAL when (if applicable): + - Exclusions, thresholds, or edge cases: + validations: + required: true + + - type: textarea + id: references + attributes: + label: References + description: Add vendor docs, API references, SDK methods, CLI commands, endpoint docs, sample payloads, or similar reference material. + placeholder: |- + - Product or service documentation: + - API or SDK reference: + - CLI command or endpoint documentation: + - Sample payload or response: + - Security advisory or benchmark: + validations: + required: true + + - type: dropdown + id: severity + attributes: + label: Suggested severity + description: Your best estimate. Reviewers will confirm during triage. + options: + - Critical + - High + - Medium + - Low + - Informational + - Not sure + validations: + required: true + + - type: textarea + id: implementation_notes + attributes: + label: Additional implementation notes + description: Optional. Add permissions, unsupported regions, config knobs, product limitations, or anything else that may affect implementation. 
+ placeholder: |- + - Required permissions or scopes: + - Region, tenant, or subscription limitations: + - Configurable behavior or thresholds: + - Other constraints: + validations: + required: false diff --git a/docs/developer-guide/checks.mdx b/docs/developer-guide/checks.mdx index 956dc293d1..c5dacc72d9 100644 --- a/docs/developer-guide/checks.mdx +++ b/docs/developer-guide/checks.mdx @@ -27,14 +27,28 @@ The most common high level steps to create a new check are: ### Naming Format for Checks -Checks must be named following the format: `service_subservice_resource_action`. +If you already know the check name when creating a request or implementing a check, use a descriptive identifier with lowercase letters and underscores only. + +Recommended patterns: + +- `__` The name components are: -- `service` – The main service being audited (e.g., ec2, entra, iam, etc.) -- `subservice` – An individual component or subset of functionality within the service that is being audited. This may correspond to a shortened version of the class attribute accessed within the check. If there is no subservice, just omit. -- `resource` – The specific resource type being evaluated (e.g., instance, policy, role, etc.) -- `action` – The security aspect or configuration being checked (e.g., public, encrypted, enabled, etc.) +- `service` – The main service or product area being audited (e.g., ec2, entra, iam, bedrock). +- `resource` – The resource, feature, or configuration being evaluated. It can be a single word or a compound phrase joined with underscores (e.g., instance, policy, guardrail, sensitive_information_filter). +- `best_practice` – The expected secure state or best practice being checked (e.g., enabled, encrypted, restricted, configured, not_publicly_accessible). + +Additional guidance: + +- Use underscores only. Do not use hyphens. +- Keep the name specific enough to describe the behavior of the check. 
+- The first segment should match the service or product area whenever possible. + +Examples: + +- `s3_bucket_versioning_enabled` +- `bedrock_guardrail_sensitive_information_filter_enabled` ### File Creation From 02f43a7ad62a2482fe069506cfd90863430bc5e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rub=C3=A9n=20De=20la=20Torre=20Vico?= Date: Mon, 4 May 2026 17:51:02 +0200 Subject: [PATCH 16/29] docs: add Prowler Studio page and remove check-kreator pages (#10981) --- docs/developer-guide/prowler-studio.mdx | 131 ++++++++++++++++++ docs/docs.json | 3 +- .../cli/tutorials/prowler-check-kreator.mdx | 47 ------- .../tutorials/prowler-check-kreator.mdx | 51 ------- 4 files changed, 133 insertions(+), 99 deletions(-) create mode 100644 docs/developer-guide/prowler-studio.mdx delete mode 100644 docs/user-guide/cli/tutorials/prowler-check-kreator.mdx delete mode 100644 docs/user-guide/tutorials/prowler-check-kreator.mdx diff --git a/docs/developer-guide/prowler-studio.mdx b/docs/developer-guide/prowler-studio.mdx new file mode 100644 index 0000000000..a4a2a609b5 --- /dev/null +++ b/docs/developer-guide/prowler-studio.mdx @@ -0,0 +1,131 @@ +--- +title: 'Prowler Studio' +--- + +**Prowler Studio is an AI workflow that ensures Claude Code follows Prowler's skills, guardrails, and best practices when creating new security checks.** What lands in the resulting pull request is consistent, tested, and ready for human review — not half-correct boilerplate that needs to be rewritten. + + +**Contributor Tool**: Prowler Studio is a workflow for advanced contributors adding new Prowler security checks. It is not part of Prowler Cloud, Prowler App, or Prowler CLI. + + + +**Preview Feature**: Prowler Studio is under active development and breaking changes are expected. Please report issues or share feedback on [GitHub](https://github.com/prowler-cloud/prowler-studio/issues) or in the [Slack community](https://goto.prowler.com/slack). 
+ + + + Clone the source code, install Prowler Studio, and explore the agent workflow in detail. + + +## The Problem + +Adding a new check to [Prowler](https://github.com/prowler-cloud/prowler) is more than writing detection logic. A correct check has to: + +- Match Prowler's exact service and check folder structure and naming conventions +- Wire up metadata, severity, remediation, tests, and compliance mappings +- Mirror the patterns used by the hundreds of existing checks in the same provider +- Actually load when Prowler scans for available checks — silent structural mistakes are easy to make + +Asking a general-purpose AI assistant to do this usually means guessing. It misses conventions, skips tests, or invents structure that looks right but does not load. The result is a half-correct PR that needs to be reviewed line by line or rewritten. + +## The Solution + +Prowler Studio enforces the workflow end-to-end. Describe the check once — a markdown ticket, a Jira issue, or a GitHub issue — and the workflow: + +1. **Loads Prowler-specific skills into every agent.** Every step starts with the same context an experienced Prowler engineer would have in mind. See [AI Skills System](/developer-guide/ai-skills) for how skills are structured. +2. **Runs specialized agents in sequence.** Implementation → testing → compliance mapping → review → PR creation. Each agent has one job and a tight scope. +3. **Verifies as it goes.** The check must load in Prowler. Tests must pass. If something fails, the agent fixes it and re-runs (up to a bounded number of attempts) before moving on. +4. **Produces a complete pull request.** Branch, passing check, tests, compliance mappings, and a pull request waiting for human review. + +The result is a consistent starting point, every time, on every supported provider. 
+ +## Quick Start + +### Install + +Prowler Studio requires [`uv`](https://docs.astral.sh/uv/getting-started/installation/) — see the official [installation guide](https://docs.astral.sh/uv/getting-started/installation/). + +```bash +git clone https://github.com/prowler-cloud/prowler-studio +cd prowler-studio +uv sync +source .venv/bin/activate +``` + +### Describe the Check + +A ticket is a structured markdown description of the check to create. It is the only input the workflow needs; every agent (implementation, testing, compliance mapping, review, PR creation) uses it as the source of truth, so the more concrete it is, the closer the first PR will land to the desired outcome. + +The ticket can be supplied in three ways: + +- **Local markdown file** → `--ticket path/to/ticket.md` +- **Jira issue** → `--jira-url https://...` (uses the issue body) +- **GitHub issue** → `--github-url https://...` (uses the issue body) + +The content should follow the **New Check Request** template: + +- The local copy at [`check_ticket_template.md`](https://github.com/prowler-cloud/prowler-studio/blob/main/check_ticket_template.md) covers `--ticket` and Jira tickets. +- A prefilled GitHub form is also available: [Create a New Check Request issue](https://github.com/prowler-cloud/prowler/issues/new?template=new-check-request.yml). + +Sections marked *Optional* can be skipped; everything else helps the agents make the right decisions. + +### Run the Workflow + +From a local markdown ticket: + +```bash +prowler-studio --ticket check_ticket.md +``` + +From a Jira ticket: + +```bash +prowler-studio --jira-url https://mycompany.atlassian.net/browse/PROJ-123 +``` + +From a GitHub issue: + +```bash +prowler-studio --github-url https://github.com/owner/repo/issues/123 +``` + + +Provide exactly one of `--ticket`, `--jira-url`, or `--github-url`. 
+ + +Keep changes local (no push, no pull request): + +```bash +prowler-studio -b feat/my-check --ticket check_ticket.md --local +``` + +### What You Get + +After a successful run the working environment contains: + +- A new branch on a clean Prowler worktree containing the check, metadata, tests, and compliance mappings +- A pull request opened against Prowler (skipped with `--local`) +- A timestamped log file under `logs/` capturing every step the agents took + +## CLI Options + +| Option | Short | Description | +|--------|-------|-------------| +| `--branch` | `-b` | Branch name (default: `feat/-` or `feat/`) | +| `--ticket` | `-t` | Path to a markdown check ticket file | +| `--jira-url` | `-j` | Jira ticket URL (e.g., `https://mycompany.atlassian.net/browse/PROJ-123`) | +| `--github-url` | `-g` | GitHub issue URL (e.g., `https://github.com/owner/repo/issues/123`) | +| `--working-dir` | `-w` | Working directory for the Prowler clone (default: `./working`) | +| `--no-worktree` | | Legacy mode — work directly on the main clone instead of using worktrees | +| `--cleanup-worktree` | | Remove the worktree after a successful pull request is created | +| `--local` | | Keep changes local — skip push and pull request creation | + +## Configuration + +Set these environment variables depending on the input source: + +| Variable | When Needed | Purpose | +|----------|-------------|---------| +| `GITHUB_TOKEN` | `--github-url` (recommended) | Higher GitHub API rate limits and access to private issues | +| `JIRA_SITE_URL` | `--jira-url` | Jira site, e.g. 
`https://mycompany.atlassian.net` | +| `JIRA_EMAIL` | `--jira-url` | Email of the Jira account used to fetch the ticket | +| `JIRA_API_TOKEN` | `--jira-url` | API token for the Jira account | diff --git a/docs/docs.json b/docs/docs.json index aee4e3259a..e9c8cc9347 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -365,7 +365,8 @@ "developer-guide/security-compliance-framework", "developer-guide/lighthouse-architecture", "developer-guide/mcp-server", - "developer-guide/ai-skills" + "developer-guide/ai-skills", + "developer-guide/prowler-studio" ] }, { diff --git a/docs/user-guide/cli/tutorials/prowler-check-kreator.mdx b/docs/user-guide/cli/tutorials/prowler-check-kreator.mdx deleted file mode 100644 index e6c708a212..0000000000 --- a/docs/user-guide/cli/tutorials/prowler-check-kreator.mdx +++ /dev/null @@ -1,47 +0,0 @@ ---- -title: 'Prowler Check Kreator' ---- - - -Currently, this tool is only available for creating checks for the AWS provider. - - - -If you are looking for a way to create new checks for all the supported providers, you can use [Prowler Studio](https://github.com/prowler-cloud/prowler-studio), it is an AI-powered toolkit for generating and managing security checks for Prowler (better version of the Check Kreator). - - -## Introduction - -**Prowler Check Kreator** is a utility designed to streamline the creation of new checks for Prowler. This tool generates all necessary files required to add a new check to the Prowler repository. Specifically, it creates: - -- A dedicated folder for the check. -- The main check script. -- A metadata file with essential details. -- A folder and file structure for testing the check. - -## Usage - -To use the tool, execute the main script with the following command: - -```bash -python util/prowler_check_kreator/prowler_check_kreator.py -``` - -Parameters: - -- ``: Currently only AWS is supported. -- ``: The name you wish to assign to the new check. 
- -## AI integration - -This tool optionally integrates AI to assist in generating the check code and metadata file content. When AI assistance is chosen, the tool uses [Gemini](https://gemini.google.com/) to produce preliminary code and metadata. - - -For this feature to work, you must have the library `google-generativeai` installed in your Python environment. - - - -AI-generated code and metadata might contain errors or require adjustments to align with specific Prowler requirements. Carefully review all AI-generated content before committing. - - -To enable AI assistance, simply confirm when prompted by the tool. Additionally, ensure that the `GEMINI_API_KEY` environment variable is set with a valid Gemini API key. For instructions on obtaining your API key, refer to the [Gemini documentation](https://ai.google.dev/gemini-api/docs/api-key). diff --git a/docs/user-guide/tutorials/prowler-check-kreator.mdx b/docs/user-guide/tutorials/prowler-check-kreator.mdx deleted file mode 100644 index 253f659814..0000000000 --- a/docs/user-guide/tutorials/prowler-check-kreator.mdx +++ /dev/null @@ -1,51 +0,0 @@ ---- -title: 'Prowler Check Kreator' ---- - - -Currently, this tool is only available for creating checks for the AWS provider. - - - - -If you are looking for a way to create new checks for all the supported providers, you can use [Prowler Studio](https://github.com/prowler-cloud/prowler-studio), it is an AI-powered toolkit for generating and managing security checks for Prowler (better version of the Check Kreator). - - - -## Introduction - -**Prowler Check Kreator** is a utility designed to streamline the creation of new checks for Prowler. This tool generates all necessary files required to add a new check to the Prowler repository. Specifically, it creates: - -- A dedicated folder for the check. -- The main check script. -- A metadata file with essential details. -- A folder and file structure for testing the check. 
- -## Usage - -To use the tool, execute the main script with the following command: - -```bash -python util/prowler_check_kreator/prowler_check_kreator.py -``` - -Parameters: - -- ``: Currently only AWS is supported. -- ``: The name you wish to assign to the new check. - -## AI integration - -This tool optionally integrates AI to assist in generating the check code and metadata file content. When AI assistance is chosen, the tool uses [Gemini](https://gemini.google.com/) to produce preliminary code and metadata. - - -For this feature to work, you must have the library `google-generativeai` installed in your Python environment. - - - - -AI-generated code and metadata might contain errors or require adjustments to align with specific Prowler requirements. Carefully review all AI-generated content before committing. - - - -To enable AI assistance, simply confirm when prompted by the tool. Additionally, ensure that the `GEMINI_API_KEY` environment variable is set with a valid Gemini API key. For instructions on obtaining your API key, refer to the [Gemini documentation](https://ai.google.dev/gemini-api/docs/api-key). From f314725f4d83d5e9c5c3cf63609698465f833df9 Mon Sep 17 00:00:00 2001 From: Pepe Fagoaga Date: Mon, 4 May 2026 18:11:38 +0200 Subject: [PATCH 17/29] fix(k8s): deduplicate RBAC findings by unique subject (#10242) Co-authored-by: Andoni A. 
<14891798+andoniaf@users.noreply.github.com> --- prowler/CHANGELOG.md | 8 ++++ .../rbac_minimize_csr_approval_access.py | 40 +++++++++++-------- ..._minimize_node_proxy_subresource_access.py | 36 +++++++++++------ .../rbac_minimize_pv_creation_access.py | 37 +++++++++++------ ...minimize_service_account_token_creation.py | 36 +++++++++++------ .../rbac_minimize_webhook_config_access.py | 40 +++++++++++-------- 6 files changed, 128 insertions(+), 69 deletions(-) diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index 589f76323c..5bb7da487f 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -32,6 +32,14 @@ All notable changes to the **Prowler SDK** are documented in this file. --- +## [5.25.2] (Prowler v5.25.2) + +### 🐞 Fixed + +- Duplicate Kubernetes RBAC findings when the same User or Group subject appeared in multiple ClusterRoleBindings [(#10242)](https://github.com/prowler-cloud/prowler/pull/10242) + +--- + ## [5.25.1] (Prowler v5.25.1) ### 🐞 Fixed diff --git a/prowler/providers/kubernetes/services/rbac/rbac_minimize_csr_approval_access/rbac_minimize_csr_approval_access.py b/prowler/providers/kubernetes/services/rbac/rbac_minimize_csr_approval_access/rbac_minimize_csr_approval_access.py index f2527b4606..17e56add55 100644 --- a/prowler/providers/kubernetes/services/rbac/rbac_minimize_csr_approval_access/rbac_minimize_csr_approval_access.py +++ b/prowler/providers/kubernetes/services/rbac/rbac_minimize_csr_approval_access/rbac_minimize_csr_approval_access.py @@ -11,24 +11,32 @@ resources = ["certificatesigningrequests/approval"] class rbac_minimize_csr_approval_access(Check): def execute(self) -> Check_Report_Kubernetes: findings = [] + # Collect unique subjects and the ClusterRole names bound to them + subjects_bound_roles = {} for crb in rbac_client.cluster_role_bindings.values(): for subject in crb.subjects: + # CIS benchmarks scope these checks to human identities only if subject.kind in ["User", "Group"]: - report = 
Check_Report_Kubernetes( - metadata=self.metadata(), resource=subject - ) - report.status = "PASS" - report.status_extended = f"User or group '{subject.name}' does not have access to update the CSR approval sub-resource." - for cr in rbac_client.cluster_roles.values(): - if cr.metadata.name == crb.roleRef.name: - if is_rule_allowing_permissions( - cr.rules, - resources, - verbs, - ): - report.status = "FAIL" - report.status_extended = f"User or group '{subject.name}' has access to update the CSR approval sub-resource." - break - findings.append(report) + key = (subject.kind, subject.name, subject.namespace) + if key not in subjects_bound_roles: + subjects_bound_roles[key] = (subject, set()) + subjects_bound_roles[key][1].add(crb.roleRef.name) + + cluster_roles_by_name = { + cr.metadata.name: cr for cr in rbac_client.cluster_roles.values() + } + for _, (subject, role_names) in subjects_bound_roles.items(): + report = Check_Report_Kubernetes(metadata=self.metadata(), resource=subject) + report.resource_name = f"{subject.kind}:{subject.name}" + report.resource_id = f"{subject.kind}/{subject.name}" + report.status = "PASS" + report.status_extended = f"User or group '{subject.name}' does not have access to update the CSR approval sub-resource." + for role_name in role_names: + cr = cluster_roles_by_name.get(role_name) + if cr and is_rule_allowing_permissions(cr.rules, resources, verbs): + report.status = "FAIL" + report.status_extended = f"User or group '{subject.name}' has access to update the CSR approval sub-resource." 
+ break + findings.append(report) return findings diff --git a/prowler/providers/kubernetes/services/rbac/rbac_minimize_node_proxy_subresource_access/rbac_minimize_node_proxy_subresource_access.py b/prowler/providers/kubernetes/services/rbac/rbac_minimize_node_proxy_subresource_access/rbac_minimize_node_proxy_subresource_access.py index 913d968f31..377e5345da 100644 --- a/prowler/providers/kubernetes/services/rbac/rbac_minimize_node_proxy_subresource_access/rbac_minimize_node_proxy_subresource_access.py +++ b/prowler/providers/kubernetes/services/rbac/rbac_minimize_node_proxy_subresource_access/rbac_minimize_node_proxy_subresource_access.py @@ -11,20 +11,32 @@ resources = ["nodes/proxy"] class rbac_minimize_node_proxy_subresource_access(Check): def execute(self) -> Check_Report_Kubernetes: findings = [] + # Collect unique subjects and the ClusterRole names bound to them + subjects_bound_roles = {} for crb in rbac_client.cluster_role_bindings.values(): for subject in crb.subjects: + # CIS benchmarks scope these checks to human identities only if subject.kind in ["User", "Group"]: - report = Check_Report_Kubernetes( - metadata=self.metadata(), resource=subject - ) - report.status = "PASS" - report.status_extended = f"User or group '{subject.name}' does not have access to the node proxy sub-resource." - for cr in rbac_client.cluster_roles.values(): - if cr.metadata.name == crb.roleRef.name: - if is_rule_allowing_permissions(cr.rules, resources, verbs): - report.status = "FAIL" - report.status_extended = f"User or group '{subject.name}' has access to the node proxy sub-resource." 
- break - findings.append(report) + key = (subject.kind, subject.name, subject.namespace) + if key not in subjects_bound_roles: + subjects_bound_roles[key] = (subject, set()) + subjects_bound_roles[key][1].add(crb.roleRef.name) + + cluster_roles_by_name = { + cr.metadata.name: cr for cr in rbac_client.cluster_roles.values() + } + for _, (subject, role_names) in subjects_bound_roles.items(): + report = Check_Report_Kubernetes(metadata=self.metadata(), resource=subject) + report.resource_name = f"{subject.kind}:{subject.name}" + report.resource_id = f"{subject.kind}/{subject.name}" + report.status = "PASS" + report.status_extended = f"User or group '{subject.name}' does not have access to the node proxy sub-resource." + for role_name in role_names: + cr = cluster_roles_by_name.get(role_name) + if cr and is_rule_allowing_permissions(cr.rules, resources, verbs): + report.status = "FAIL" + report.status_extended = f"User or group '{subject.name}' has access to the node proxy sub-resource." + break + findings.append(report) return findings diff --git a/prowler/providers/kubernetes/services/rbac/rbac_minimize_pv_creation_access/rbac_minimize_pv_creation_access.py b/prowler/providers/kubernetes/services/rbac/rbac_minimize_pv_creation_access/rbac_minimize_pv_creation_access.py index 204942c57e..2fb76bbed7 100644 --- a/prowler/providers/kubernetes/services/rbac/rbac_minimize_pv_creation_access/rbac_minimize_pv_creation_access.py +++ b/prowler/providers/kubernetes/services/rbac/rbac_minimize_pv_creation_access/rbac_minimize_pv_creation_access.py @@ -11,21 +11,32 @@ resources = ["persistentvolumes"] class rbac_minimize_pv_creation_access(Check): def execute(self) -> Check_Report_Kubernetes: findings = [] - # Check each ClusterRoleBinding for access to create PersistentVolumes + # Collect unique subjects and the ClusterRole names bound to them + subjects_bound_roles = {} for crb in rbac_client.cluster_role_bindings.values(): for subject in crb.subjects: + # CIS benchmarks scope 
these checks to human identities only if subject.kind in ["User", "Group"]: - report = Check_Report_Kubernetes( - metadata=self.metadata(), resource=subject - ) - report.status = "PASS" - report.status_extended = f"User or group '{subject.name}' does not have access to create PersistentVolumes." - for cr in rbac_client.cluster_roles.values(): - if cr.metadata.name == crb.roleRef.name: - if is_rule_allowing_permissions(cr.rules, resources, verbs): - report.status = "FAIL" - report.status_extended = f"User or group '{subject.name}' has access to create PersistentVolumes." - break - findings.append(report) + key = (subject.kind, subject.name, subject.namespace) + if key not in subjects_bound_roles: + subjects_bound_roles[key] = (subject, set()) + subjects_bound_roles[key][1].add(crb.roleRef.name) + + cluster_roles_by_name = { + cr.metadata.name: cr for cr in rbac_client.cluster_roles.values() + } + for _, (subject, role_names) in subjects_bound_roles.items(): + report = Check_Report_Kubernetes(metadata=self.metadata(), resource=subject) + report.resource_name = f"{subject.kind}:{subject.name}" + report.resource_id = f"{subject.kind}/{subject.name}" + report.status = "PASS" + report.status_extended = f"User or group '{subject.name}' does not have access to create PersistentVolumes." + for role_name in role_names: + cr = cluster_roles_by_name.get(role_name) + if cr and is_rule_allowing_permissions(cr.rules, resources, verbs): + report.status = "FAIL" + report.status_extended = f"User or group '{subject.name}' has access to create PersistentVolumes." 
+ break + findings.append(report) return findings diff --git a/prowler/providers/kubernetes/services/rbac/rbac_minimize_service_account_token_creation/rbac_minimize_service_account_token_creation.py b/prowler/providers/kubernetes/services/rbac/rbac_minimize_service_account_token_creation/rbac_minimize_service_account_token_creation.py index 9b1318c92f..8e492309db 100644 --- a/prowler/providers/kubernetes/services/rbac/rbac_minimize_service_account_token_creation/rbac_minimize_service_account_token_creation.py +++ b/prowler/providers/kubernetes/services/rbac/rbac_minimize_service_account_token_creation/rbac_minimize_service_account_token_creation.py @@ -11,20 +11,32 @@ resources = ["serviceaccounts/token"] class rbac_minimize_service_account_token_creation(Check): def execute(self) -> Check_Report_Kubernetes: findings = [] + # Collect unique subjects and the ClusterRole names bound to them + subjects_bound_roles = {} for crb in rbac_client.cluster_role_bindings.values(): for subject in crb.subjects: + # CIS benchmarks scope these checks to human identities only if subject.kind in ["User", "Group"]: - report = Check_Report_Kubernetes( - metadata=self.metadata(), resource=subject - ) - report.status = "PASS" - report.status_extended = f"User or group '{subject.name}' does not have access to create service account tokens." - for cr in rbac_client.cluster_roles.values(): - if cr.metadata.name == crb.roleRef.name: - if is_rule_allowing_permissions(cr.rules, resources, verbs): - report.status = "FAIL" - report.status_extended = f"User or group '{subject.name}' has access to create service account tokens." 
- break - findings.append(report) + key = (subject.kind, subject.name, subject.namespace) + if key not in subjects_bound_roles: + subjects_bound_roles[key] = (subject, set()) + subjects_bound_roles[key][1].add(crb.roleRef.name) + + cluster_roles_by_name = { + cr.metadata.name: cr for cr in rbac_client.cluster_roles.values() + } + for _, (subject, role_names) in subjects_bound_roles.items(): + report = Check_Report_Kubernetes(metadata=self.metadata(), resource=subject) + report.resource_name = f"{subject.kind}:{subject.name}" + report.resource_id = f"{subject.kind}/{subject.name}" + report.status = "PASS" + report.status_extended = f"User or group '{subject.name}' does not have access to create service account tokens." + for role_name in role_names: + cr = cluster_roles_by_name.get(role_name) + if cr and is_rule_allowing_permissions(cr.rules, resources, verbs): + report.status = "FAIL" + report.status_extended = f"User or group '{subject.name}' has access to create service account tokens." 
+ break + findings.append(report) return findings diff --git a/prowler/providers/kubernetes/services/rbac/rbac_minimize_webhook_config_access/rbac_minimize_webhook_config_access.py b/prowler/providers/kubernetes/services/rbac/rbac_minimize_webhook_config_access/rbac_minimize_webhook_config_access.py index 2da9893dab..ffdad52a19 100644 --- a/prowler/providers/kubernetes/services/rbac/rbac_minimize_webhook_config_access/rbac_minimize_webhook_config_access.py +++ b/prowler/providers/kubernetes/services/rbac/rbac_minimize_webhook_config_access/rbac_minimize_webhook_config_access.py @@ -14,24 +14,32 @@ verbs = ["create", "update", "delete"] class rbac_minimize_webhook_config_access(Check): def execute(self) -> Check_Report_Kubernetes: findings = [] + # Collect unique subjects and the ClusterRole names bound to them + subjects_bound_roles = {} for crb in rbac_client.cluster_role_bindings.values(): for subject in crb.subjects: + # CIS benchmarks scope these checks to human identities only if subject.kind in ["User", "Group"]: - report = Check_Report_Kubernetes( - metadata=self.metadata(), resource=subject - ) - report.status = "PASS" - report.status_extended = f"User or group '{subject.name}' does not have access to create, update, or delete webhook configurations." - for cr in rbac_client.cluster_roles.values(): - if cr.metadata.name == crb.roleRef.name: - if is_rule_allowing_permissions( - cr.rules, - resources, - verbs, - ): - report.status = "FAIL" - report.status_extended = f"User or group '{subject.name}' has access to create, update, or delete webhook configurations." 
- break - findings.append(report) + key = (subject.kind, subject.name, subject.namespace) + if key not in subjects_bound_roles: + subjects_bound_roles[key] = (subject, set()) + subjects_bound_roles[key][1].add(crb.roleRef.name) + + cluster_roles_by_name = { + cr.metadata.name: cr for cr in rbac_client.cluster_roles.values() + } + for _, (subject, role_names) in subjects_bound_roles.items(): + report = Check_Report_Kubernetes(metadata=self.metadata(), resource=subject) + report.resource_name = f"{subject.kind}:{subject.name}" + report.resource_id = f"{subject.kind}/{subject.name}" + report.status = "PASS" + report.status_extended = f"User or group '{subject.name}' does not have access to create, update, or delete webhook configurations." + for role_name in role_names: + cr = cluster_roles_by_name.get(role_name) + if cr and is_rule_allowing_permissions(cr.rules, resources, verbs): + report.status = "FAIL" + report.status_extended = f"User or group '{subject.name}' has access to create, update, or delete webhook configurations." + break + findings.append(report) return findings From 21d7d08b4b8b43eeedc5b4a75c4ccee6b0a112f2 Mon Sep 17 00:00:00 2001 From: Pepe Fagoaga Date: Mon, 4 May 2026 19:39:17 +0200 Subject: [PATCH 18/29] fix(timeline): Return a compact actor name from CloudTrail events (#10986) --- prowler/CHANGELOG.md | 1 + .../cloudtrail_timeline.py | 31 +++------- .../cloudtrail_timeline_test.py | 57 ++++++++++++++----- 3 files changed, 52 insertions(+), 37 deletions(-) diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index 5bb7da487f..8f2b480a36 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -37,6 +37,7 @@ All notable changes to the **Prowler SDK** are documented in this file. 
### 🐞 Fixed - Duplicate Kubernetes RBAC findings when the same User or Group subject appeared in multiple ClusterRoleBindings [(#10242)](https://github.com/prowler-cloud/prowler/pull/10242) +- Return a compact actor name from CloudTrail `userIdentity` events [(#10986)](https://github.com/prowler-cloud/prowler/pull/10986) --- diff --git a/prowler/providers/aws/lib/cloudtrail_timeline/cloudtrail_timeline.py b/prowler/providers/aws/lib/cloudtrail_timeline/cloudtrail_timeline.py index b73d070078..2f03dd8c84 100644 --- a/prowler/providers/aws/lib/cloudtrail_timeline/cloudtrail_timeline.py +++ b/prowler/providers/aws/lib/cloudtrail_timeline/cloudtrail_timeline.py @@ -221,27 +221,12 @@ class CloudTrailTimeline(TimelineService): @staticmethod def _extract_actor(user_identity: Dict[str, Any]) -> str: - """Extract a human-readable actor name from CloudTrail userIdentity.""" - # Try ARN first - most reliable + """Return a compact actor name from CloudTrail userIdentity. + + For ARNs, returns the resource portion (everything after the last + `:`) — e.g. `user/alice`, `assumed-role/MyRole/session-name`, + `root`. The full ARN is preserved separately in `actor_uid`. 
+ """ if arn := user_identity.get("arn"): - if "/" in arn: - parts = arn.split("/") - # For assumed-role, return the role name (second-to-last part) - if "assumed-role" in arn and len(parts) >= 2: - return parts[-2] - return parts[-1] - return arn.split(":")[-1] - - # Fall back to userName - if username := user_identity.get("userName"): - return username - - # Fall back to principalId - if principal_id := user_identity.get("principalId"): - return principal_id - - # For service-invoked actions - if invoking_service := user_identity.get("invokedBy"): - return invoking_service - - return "Unknown" + return arn.rsplit(":", 1)[-1] + return user_identity.get("invokedBy") or "Unknown" diff --git a/tests/providers/aws/lib/cloudtrail_timeline/cloudtrail_timeline_test.py b/tests/providers/aws/lib/cloudtrail_timeline/cloudtrail_timeline_test.py index aeca0f7c1d..5c2c99cbfc 100644 --- a/tests/providers/aws/lib/cloudtrail_timeline/cloudtrail_timeline_test.py +++ b/tests/providers/aws/lib/cloudtrail_timeline/cloudtrail_timeline_test.py @@ -100,7 +100,7 @@ class TestCloudTrailTimeline: assert len(result) == 1 assert result[0]["event_name"] == "RunInstances" - assert result[0]["actor"] == "admin" + assert result[0]["actor"] == "user/admin" assert result[0]["source_ip_address"] == "203.0.113.1" def test_get_resource_timeline_with_resource_uid( @@ -304,14 +304,28 @@ class TestExtractActor: "arn": "arn:aws:iam::123456789012:user/alice", "userName": "alice", } - assert CloudTrailTimeline._extract_actor(user_identity) == "alice" + assert CloudTrailTimeline._extract_actor(user_identity) == "user/alice" def test_extract_actor_assumed_role(self): user_identity = { "type": "AssumedRole", "arn": "arn:aws:sts::123456789012:assumed-role/MyRole/session-name", } - assert CloudTrailTimeline._extract_actor(user_identity) == "MyRole" + assert ( + CloudTrailTimeline._extract_actor(user_identity) + == "assumed-role/MyRole/session-name" + ) + + def test_extract_actor_assumed_role_sso(self): + """SSO 
sessions store the user identity in the session name.""" + user_identity = { + "type": "AssumedRole", + "arn": "arn:aws:sts::123456789012:assumed-role/AWSReservedSSO_AdministratorAccess_abcdef1234567890/user@example.com", + } + assert ( + CloudTrailTimeline._extract_actor(user_identity) + == "assumed-role/AWSReservedSSO_AdministratorAccess_abcdef1234567890/user@example.com" + ) def test_extract_actor_root(self): user_identity = {"type": "Root", "arn": "arn:aws:iam::123456789012:root"} @@ -327,21 +341,33 @@ class TestExtractActor: == "elasticloadbalancing.amazonaws.com" ) - def test_extract_actor_fallback_to_principal_id(self): - user_identity = {"type": "Unknown", "principalId": "AROAEXAMPLEID:session"} - assert ( - CloudTrailTimeline._extract_actor(user_identity) == "AROAEXAMPLEID:session" - ) - def test_extract_actor_unknown(self): assert CloudTrailTimeline._extract_actor({}) == "Unknown" + def test_extract_actor_username_only_returns_unknown(self): + """When userIdentity carries only userName/principalId (no arn or + invokedBy), we deliberately return "Unknown" — we rely on the ARN + from the upstream service for the actor.""" + assert ( + CloudTrailTimeline._extract_actor({"type": "IAMUser", "userName": "alice"}) + == "Unknown" + ) + assert ( + CloudTrailTimeline._extract_actor( + {"type": "Unknown", "principalId": "AROAEXAMPLEID:session"} + ) + == "Unknown" + ) + def test_extract_actor_federated_user(self): user_identity = { "type": "FederatedUser", "arn": "arn:aws:sts::123456789012:federated-user/developer", } - assert CloudTrailTimeline._extract_actor(user_identity) == "developer" + assert ( + CloudTrailTimeline._extract_actor(user_identity) + == "federated-user/developer" + ) class TestParseEvent: @@ -380,7 +406,7 @@ class TestParseEvent: assert result is not None assert result["event_name"] == "RunInstances" assert result["event_source"] == "ec2.amazonaws.com" - assert result["actor"] == "admin" + assert result["actor"] == "user/admin" assert 
result["actor_uid"] == "arn:aws:iam::123456789012:user/admin" assert result["actor_type"] == "IAMUser" @@ -424,7 +450,10 @@ class TestParseEvent: "EventName": "RunInstances", "EventSource": "ec2.amazonaws.com", "CloudTrailEvent": { - "userIdentity": {"type": "IAMUser", "userName": "admin"}, + "userIdentity": { + "type": "IAMUser", + "arn": "arn:aws:iam::123456789012:user/admin", + }, }, } timeline = CloudTrailTimeline(session=mock_session) @@ -432,7 +461,7 @@ class TestParseEvent: assert result is not None assert result["event_name"] == "RunInstances" - assert result["actor"] == "admin" + assert result["actor"] == "user/admin" def test_parse_event_missing_event_id(self, mock_session): """Test parsing event without EventId returns None (event_id is required).""" @@ -506,7 +535,7 @@ class TestParseEvent: assert result is not None assert result["event_name"] == "RunInstances" - assert result["actor"] == "admin" + assert result["actor"] == "user/admin" # actor_type should be None when not present in userIdentity assert result["actor_type"] is None From 7c6d658154982e376e30aec216a8af11b7b36cb2 Mon Sep 17 00:00:00 2001 From: Pepe Fagoaga Date: Mon, 4 May 2026 19:54:03 +0200 Subject: [PATCH 19/29] fix(k8s): match RBAC rules by apiGroup, not just core (#10969) Co-authored-by: Andoni A. <14891798+andoniaf@users.noreply.github.com> --- prowler/CHANGELOG.md | 1 + .../services/rbac/lib/role_permissions.py | 57 ++++---- .../rbac_minimize_csr_approval_access.py | 5 +- .../rbac_minimize_webhook_config_access.py | 5 +- .../rbac/lib/role_permissions_test.py | 124 +++++++++--------- 5 files changed, 101 insertions(+), 91 deletions(-) diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index 8f2b480a36..6e4f231ec2 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -37,6 +37,7 @@ All notable changes to the **Prowler SDK** are documented in this file. 
### 🐞 Fixed - Duplicate Kubernetes RBAC findings when the same User or Group subject appeared in multiple ClusterRoleBindings [(#10242)](https://github.com/prowler-cloud/prowler/pull/10242) +- Match K8s RBAC rules by `apiGroup` [(#10969)](https://github.com/prowler-cloud/prowler/pull/10969) - Return a compact actor name from CloudTrail `userIdentity` events [(#10986)](https://github.com/prowler-cloud/prowler/pull/10986) --- diff --git a/prowler/providers/kubernetes/services/rbac/lib/role_permissions.py b/prowler/providers/kubernetes/services/rbac/lib/role_permissions.py index c3db38b2f7..04bf9b4c9c 100644 --- a/prowler/providers/kubernetes/services/rbac/lib/role_permissions.py +++ b/prowler/providers/kubernetes/services/rbac/lib/role_permissions.py @@ -1,36 +1,37 @@ -def is_rule_allowing_permissions(rules, resources, verbs): +def is_rule_allowing_permissions(rules, resources, verbs, api_groups=("",)): """ - Check Kubernetes role permissions. + Check whether any RBAC rule grants the specified verbs on the specified + resources within the specified API groups. - This function takes in Kubernetes role rules, resources, and verbs, - and checks if any of the rules grant permissions on the specified - resources with the specified verbs. + A rule matches when its `apiGroups` includes any of `api_groups` (or "*"), + its `resources` includes any of `resources` (or "*"), and its `verbs` + includes any of `verbs` (or "*"). Args: - rules (List[Rule]): The list of Kubernetes role rules. - resources (List[str]): The list of resources to check permissions for. - verbs (List[str]): The list of verbs to check permissions for. + rules (List[Rule]): RBAC rules from a Role or ClusterRole. + resources (List[str]): Resources (or sub-resources) to check. + verbs (List[str]): Verbs to check. + api_groups (Iterable[str]): API groups the resources live in. Defaults + to ("",), the core API group, which matches the most common case. 
+ Pass an explicit value for resources outside the core group, e.g. + ("admissionregistration.k8s.io",) for webhook configurations. Returns: - bool: True if any of the rules grant permissions, False otherwise. + bool: True if any rule grants the permission, False otherwise. """ - if rules: - # Iterate through each rule in the list of rules - for rule in rules: - # Ensure apiGroups are relevant ("" or "v1" for secrets) - if rule.apiGroups and all(api not in ["", "v1"] for api in rule.apiGroups): - continue # Skip rules with unrelated apiGroups - # Check if the rule has resources, verbs, and matches any of the specified resources and verbs - if ( - rule.resources - and ( - any(resource in rule.resources for resource in resources) - or "*" in rule.resources - ) - and rule.verbs - and (any(verb in rule.verbs for verb in verbs) or "*" in rule.verbs) - ): - # If the rule matches, return True - return True - # If no rule matches, return False + if not rules: + return False + for rule in rules: + rule_api_groups = rule.apiGroups or [""] + if not ( + any(g in rule_api_groups for g in api_groups) or "*" in rule_api_groups + ): + continue + if ( + rule.resources + and (any(r in rule.resources for r in resources) or "*" in rule.resources) + and rule.verbs + and (any(v in rule.verbs for v in verbs) or "*" in rule.verbs) + ): + return True return False diff --git a/prowler/providers/kubernetes/services/rbac/rbac_minimize_csr_approval_access/rbac_minimize_csr_approval_access.py b/prowler/providers/kubernetes/services/rbac/rbac_minimize_csr_approval_access/rbac_minimize_csr_approval_access.py index 17e56add55..2b86f0cafe 100644 --- a/prowler/providers/kubernetes/services/rbac/rbac_minimize_csr_approval_access/rbac_minimize_csr_approval_access.py +++ b/prowler/providers/kubernetes/services/rbac/rbac_minimize_csr_approval_access/rbac_minimize_csr_approval_access.py @@ -6,6 +6,7 @@ from prowler.providers.kubernetes.services.rbac.rbac_client import rbac_client verbs = ["update", 
"patch"] resources = ["certificatesigningrequests/approval"] +api_groups = ["certificates.k8s.io"] class rbac_minimize_csr_approval_access(Check): @@ -33,7 +34,9 @@ class rbac_minimize_csr_approval_access(Check): report.status_extended = f"User or group '{subject.name}' does not have access to update the CSR approval sub-resource." for role_name in role_names: cr = cluster_roles_by_name.get(role_name) - if cr and is_rule_allowing_permissions(cr.rules, resources, verbs): + if cr and is_rule_allowing_permissions( + cr.rules, resources, verbs, api_groups + ): report.status = "FAIL" report.status_extended = f"User or group '{subject.name}' has access to update the CSR approval sub-resource." break diff --git a/prowler/providers/kubernetes/services/rbac/rbac_minimize_webhook_config_access/rbac_minimize_webhook_config_access.py b/prowler/providers/kubernetes/services/rbac/rbac_minimize_webhook_config_access/rbac_minimize_webhook_config_access.py index ffdad52a19..e646efeeef 100644 --- a/prowler/providers/kubernetes/services/rbac/rbac_minimize_webhook_config_access/rbac_minimize_webhook_config_access.py +++ b/prowler/providers/kubernetes/services/rbac/rbac_minimize_webhook_config_access/rbac_minimize_webhook_config_access.py @@ -9,6 +9,7 @@ resources = [ "mutatingwebhookconfigurations", ] verbs = ["create", "update", "delete"] +api_groups = ["admissionregistration.k8s.io"] class rbac_minimize_webhook_config_access(Check): @@ -36,7 +37,9 @@ class rbac_minimize_webhook_config_access(Check): report.status_extended = f"User or group '{subject.name}' does not have access to create, update, or delete webhook configurations." 
for role_name in role_names: cr = cluster_roles_by_name.get(role_name) - if cr and is_rule_allowing_permissions(cr.rules, resources, verbs): + if cr and is_rule_allowing_permissions( + cr.rules, resources, verbs, api_groups + ): report.status = "FAIL" report.status_extended = f"User or group '{subject.name}' has access to create, update, or delete webhook configurations." break diff --git a/tests/providers/kubernetes/services/rbac/lib/role_permissions_test.py b/tests/providers/kubernetes/services/rbac/lib/role_permissions_test.py index 696550c06c..94028547e9 100644 --- a/tests/providers/kubernetes/services/rbac/lib/role_permissions_test.py +++ b/tests/providers/kubernetes/services/rbac/lib/role_permissions_test.py @@ -6,90 +6,92 @@ from prowler.providers.kubernetes.services.rbac.rbac_service import Rule class TestCheckRolePermissions: def test_is_rule_allowing_permissions(self): - # Define some sample rules, resources, and verbs for testing rules = [ - # Rule 1: Allows 'get' and 'list' on 'pods' and 'services' Rule(resources=["pods", "services"], verbs=["get", "list"]), - # Rule 2: Allows 'create' and 'delete' on 'deployments' Rule(resources=["deployments"], verbs=["create", "delete"]), ] - resources = ["pods", "deployments"] - verbs = ["get", "create"] - - assert is_rule_allowing_permissions(rules, resources, verbs) + assert is_rule_allowing_permissions( + rules, ["pods", "deployments"], ["get", "create"] + ) def test_no_permissions(self): - # Test when there are no rules - rules = [] - resources = ["pods", "deployments"] - verbs = ["get", "create"] - - assert not is_rule_allowing_permissions(rules, resources, verbs) + assert not is_rule_allowing_permissions([], ["pods"], ["get"]) def test_no_matching_rules(self): - # Test when there are rules, but none match the specified resources and verbs rules = [ Rule(resources=["services"], verbs=["get", "list"]), Rule(resources=["pods"], verbs=["create", "delete"]), ] - resources = ["deployments", "configmaps"] - verbs = 
["get", "create"] - - assert not is_rule_allowing_permissions(rules, resources, verbs) + assert not is_rule_allowing_permissions( + rules, ["deployments", "configmaps"], ["get", "create"] + ) def test_empty_rules(self): - # Test when the rules list is empty - rules = [] - resources = ["pods", "deployments"] - verbs = ["get", "create"] - - assert not is_rule_allowing_permissions(rules, resources, verbs) + assert not is_rule_allowing_permissions([], ["pods"], ["get"]) def test_empty_resources_and_verbs(self): - # Test when resources and verbs are empty lists - rules = [ - Rule(resources=["pods"], verbs=["get"]), - Rule(resources=["services"], verbs=["list"]), - ] - resources = [] - verbs = [] - - assert not is_rule_allowing_permissions(rules, resources, verbs) + rules = [Rule(resources=["pods"], verbs=["get"])] + assert not is_rule_allowing_permissions(rules, [], []) def test_matching_rule_with_empty_resources_or_verbs(self): - # Test when a rule matches, but either resources or verbs are empty + rules = [Rule(resources=["pods"], verbs=["get"])] + assert not is_rule_allowing_permissions(rules, [], ["get"]) + assert not is_rule_allowing_permissions(rules, ["pods"], []) + + def test_rule_with_non_matching_api_group(self): + rules = [Rule(resources=["pods"], verbs=["get"], apiGroups=["apps"])] + assert not is_rule_allowing_permissions(rules, ["pods"], ["get"]) + + def test_rule_with_matching_api_group(self): + rules = [Rule(resources=["pods"], verbs=["get"], apiGroups=[""])] + assert is_rule_allowing_permissions(rules, ["pods"], ["get"]) + + def test_default_api_group_is_core(self): + rules = [Rule(resources=["pods"], verbs=["get"], apiGroups=None)] + assert is_rule_allowing_permissions(rules, ["pods"], ["get"]) + + def test_rule_with_empty_api_groups_does_not_match_non_core_request(self): + rules = [Rule(resources=["pods"], verbs=["get"], apiGroups=None)] + assert not is_rule_allowing_permissions( + rules, ["pods"], ["get"], ["admissionregistration.k8s.io"] + ) + + def 
test_non_core_rule_does_not_match_without_api_groups_argument(self): rules = [ - Rule(resources=["pods"], verbs=["get"]), - Rule(resources=["services"], verbs=["list"]), + Rule( + resources=["validatingwebhookconfigurations"], + verbs=["create"], + apiGroups=["admissionregistration.k8s.io"], + ) ] - resources = [] - verbs = ["get"] + assert not is_rule_allowing_permissions( + rules, ["validatingwebhookconfigurations"], ["create"] + ) - assert not is_rule_allowing_permissions(rules, resources, verbs) - - resources = ["pods"] - verbs = [] - - assert not is_rule_allowing_permissions(rules, resources, verbs) - - def test_rule_with_ignored_api_groups(self): - # Test when a rule has apiGroups that are not relevant + def test_explicit_non_core_api_group(self): rules = [ - Rule(resources=["pods"], verbs=["get"], apiGroups=["test"]), - Rule(resources=["services"], verbs=["list"], apiGroups=["test2"]), + Rule( + resources=["validatingwebhookconfigurations"], + verbs=["create"], + apiGroups=["admissionregistration.k8s.io"], + ) ] - resources = ["pods"] - verbs = ["get"] + assert is_rule_allowing_permissions( + rules, + ["validatingwebhookconfigurations"], + ["create"], + ["admissionregistration.k8s.io"], + ) - assert not is_rule_allowing_permissions(rules, resources, verbs) + def test_rule_with_wildcard_api_group(self): + rules = [Rule(resources=["pods"], verbs=["get"], apiGroups=["*"])] + assert is_rule_allowing_permissions(rules, ["pods"], ["get"]) + assert is_rule_allowing_permissions(rules, ["pods"], ["get"], ["apps"]) - def test_rule_with_relevant_api_groups(self): - # Test when a rule has apiGroups that are relevant - rules = [ - Rule(resources=["pods"], verbs=["get"], apiGroups=["", "v1"]), - Rule(resources=["services"], verbs=["list"], apiGroups=["test2"]), - ] - resources = ["pods"] - verbs = ["get"] + def test_rule_with_wildcard_resources(self): + rules = [Rule(resources=["*"], verbs=["get"], apiGroups=[""])] + assert is_rule_allowing_permissions(rules, ["pods"], 
["get"]) - assert is_rule_allowing_permissions(rules, resources, verbs) + def test_rule_with_wildcard_verbs(self): + rules = [Rule(resources=["pods"], verbs=["*"], apiGroups=[""])] + assert is_rule_allowing_permissions(rules, ["pods"], ["get"]) From 703a33108cc69de6c06ea5277a4a88e6d73b8261 Mon Sep 17 00:00:00 2001 From: Pepe Fagoaga Date: Tue, 5 May 2026 08:47:28 +0200 Subject: [PATCH 20/29] chore(changelog): prepare for v5.25.2 (#10991) --- api/CHANGELOG.md | 2 +- prowler/CHANGELOG.md | 4 ++-- ui/CHANGELOG.md | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/api/CHANGELOG.md b/api/CHANGELOG.md index 8c57285025..c03906d7e8 100644 --- a/api/CHANGELOG.md +++ b/api/CHANGELOG.md @@ -6,7 +6,7 @@ All notable changes to the **Prowler API** are documented in this file. ### 🚀 Added -- New `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929) +- `scan-reset-ephemeral-resources` post-scan task zeroes `failed_findings_count` for resources missing from the latest full-scope scan, keeping ephemeral resources from polluting the Resources page sort [(#10929)](https://github.com/prowler-cloud/prowler/pull/10929) --- diff --git a/prowler/CHANGELOG.md b/prowler/CHANGELOG.md index 6e4f231ec2..3945d1621a 100644 --- a/prowler/CHANGELOG.md +++ b/prowler/CHANGELOG.md @@ -14,9 +14,8 @@ All notable changes to the **Prowler SDK** are documented in this file. 
### 🔄 Changed -- `route53_dangling_ip_subdomain_takeover` now also flags `CNAME` records pointing to S3 website endpoints whose buckets are missing from the account [(#10920)](https://github.com/prowler-cloud/prowler/pull/10920) - Azure Network Watcher flow log checks now require workspace-backed Traffic Analytics for `network_flow_log_captured_sent` and align metadata with VNet-compatible flow log guidance [(#10645)](https://github.com/prowler-cloud/prowler/pull/10645) -- Azure compliance entries for legacy Network Watcher flow log controls now use retirement-aware guidance and point new deployments to VNet flow logs +- Azure compliance entries for legacy Network Watcher flow log controls now use retirement-aware guidance and point new deployments to VNet flow logs [(#10937)](https://github.com/prowler-cloud/prowler/pull/10937) - AWS CodeBuild service now batches `BatchGetProjects` and `BatchGetBuilds` calls per region (up to 100 items per call) to reduce API call volume and prevent throttling-induced false positives in `codebuild_project_not_publicly_accessible` [(#10639)](https://github.com/prowler-cloud/prowler/pull/10639) - `display_compliance_table` dispatch switched from substring `in` checks to `startswith` to prevent false matches between similarly named frameworks (e.g. `cisa` vs `cis`) [(#10301)](https://github.com/prowler-cloud/prowler/pull/10301) @@ -36,6 +35,7 @@ All notable changes to the **Prowler SDK** are documented in this file. 
### 🐞 Fixed +- `route53_dangling_ip_subdomain_takeover` now also flags `CNAME` records pointing to S3 website endpoints whose buckets are missing from the account [(#10920)](https://github.com/prowler-cloud/prowler/pull/10920) - Duplicate Kubernetes RBAC findings when the same User or Group subject appeared in multiple ClusterRoleBindings [(#10242)](https://github.com/prowler-cloud/prowler/pull/10242) - Match K8s RBAC rules by `apiGroup` [(#10969)](https://github.com/prowler-cloud/prowler/pull/10969) - Return a compact actor name from CloudTrail `userIdentity` events [(#10986)](https://github.com/prowler-cloud/prowler/pull/10986) diff --git a/ui/CHANGELOG.md b/ui/CHANGELOG.md index 4b63c9cd84..8b0f8d17e6 100644 --- a/ui/CHANGELOG.md +++ b/ui/CHANGELOG.md @@ -2,7 +2,7 @@ All notable changes to the **Prowler UI** are documented in this file. -## [1.25.2] (Prowler UNRELEASED) +## [1.25.2] (Prowler v5.25.2) ### 🔄 Changed From 786059bfb24abe690bbcba2cdc9f4ef8431acf5e Mon Sep 17 00:00:00 2001 From: Prowler Bot Date: Tue, 5 May 2026 10:45:07 +0200 Subject: [PATCH 21/29] chore(docs): Bump version to v5.25.2 (#10993) Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com> --- docs/getting-started/installation/prowler-app.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/getting-started/installation/prowler-app.mdx b/docs/getting-started/installation/prowler-app.mdx index 0b9d15852b..cf88d1aded 100644 --- a/docs/getting-started/installation/prowler-app.mdx +++ b/docs/getting-started/installation/prowler-app.mdx @@ -121,8 +121,8 @@ To update the environment file: Edit the `.env` file and change version values: ```env -PROWLER_UI_VERSION="5.25.1" -PROWLER_API_VERSION="5.25.1" +PROWLER_UI_VERSION="5.25.2" +PROWLER_API_VERSION="5.25.2" ``` From d23c2f3b5317ca8aa1bb3031093b7ba85941f172 Mon Sep 17 00:00:00 2001 From: "Pablo Fernandez Guerra (PFE)" <148432447+pfe-nazaries@users.noreply.github.com> Date: Tue, 5 May 2026 14:39:54 
+0200 Subject: [PATCH 22/29] refactor(ui): standardize "Providers" wording across UI and docs (#10971) Co-authored-by: Pablo F.G Co-authored-by: Claude Opus 4.7 (1M context) --- docs/developer-guide/provider.mdx | 2 +- .../basic-usage/prowler-app.mdx | 6 +-- .../getting-started-alibabacloud.mdx | 8 +-- .../providers/aws/getting-started-aws.mdx | 8 +-- .../providers/azure/getting-started-azure.mdx | 8 +-- .../cloudflare/getting-started-cloudflare.mdx | 8 +-- .../providers/gcp/getting-started-gcp.mdx | 8 +-- .../providers/github/authentication.mdx | 2 +- .../github/getting-started-github.mdx | 8 +-- .../getting-started-googleworkspace.mdx | 8 +-- .../providers/iac/getting-started-iac.mdx | 8 +-- .../providers/image/getting-started-image.mdx | 8 +-- .../kubernetes/getting-started-k8s.mdx | 8 +-- .../microsoft365/getting-started-m365.mdx | 8 +-- .../getting-started-mongodbatlas.mdx | 2 +- .../providers/oci/getting-started-oci.mdx | 4 +- .../openstack/getting-started-openstack.mdx | 2 +- .../vercel/getting-started-vercel.mdx | 8 +-- .../user-guide/tutorials/prowler-app-rbac.mdx | 4 +- .../tutorials/prowler-app-s3-integration.mdx | 2 +- docs/user-guide/tutorials/prowler-app.mdx | 4 +- .../prowler-cloud-aws-organizations.mdx | 4 +- ui/CHANGELOG.md | 8 +++ ui/actions/manage-groups/manage-groups.ts | 6 +-- .../_components/accounts-selector.tsx | 6 +-- .../_components/provider-type-selector.tsx | 3 +- .../_components/scan-list-table.tsx | 2 +- ui/app/(prowler)/manage-groups/page.tsx | 4 +- ui/app/(prowler)/providers/page.test.ts | 2 +- ui/app/(prowler)/providers/page.tsx | 22 ++++---- ...ontent.tsx => provider-groups-content.tsx} | 10 ++-- .../providers/provider-page-tabs.shared.ts | 8 +-- .../providers/provider-page-tabs.test.tsx | 51 +++++++++---------- .../providers/provider-page-tabs.tsx | 24 ++++----- .../compliance/compliance-card.test.tsx | 4 +- ui/components/filters/data-filters.ts | 4 +- .../table/column-standalone-findings.tsx | 2 +- 
.../manage-groups/forms/delete-group-form.tsx | 2 +- .../manage-groups/forms/edit-group-form.tsx | 4 +- .../manage-groups/manage-groups-button.tsx | 2 +- .../table/data-table-row-actions.tsx | 6 +-- .../table/skeleton-table-new-findings.tsx | 2 +- .../providers/table/column-providers.tsx | 6 +-- .../table/skeleton-table-provider.tsx | 8 +-- .../wizard/provider-wizard-modal.tsx | 2 +- .../provider-wizard-modal.utils.test.ts | 2 +- .../wizard/provider-wizard-modal.utils.ts | 4 +- .../providers/wizard/wizard-stepper.tsx | 2 +- .../resources/table/column-resources.tsx | 2 +- .../launch-workflow/select-scan-provider.tsx | 6 +-- ui/components/scans/no-providers-added.tsx | 8 +-- .../scans/no-providers-connected.tsx | 12 ++--- .../scans/table/scans/column-get-scans.tsx | 2 +- ui/lib/helper.ts | 5 +- ui/lib/menu-list.ts | 2 +- ui/tests/providers/providers-page.ts | 10 ++-- ui/tests/scans/scans-page.ts | 4 +- .../manage-cloud-providers.auth.setup.ts | 2 +- 58 files changed, 191 insertions(+), 186 deletions(-) rename ui/app/(prowler)/providers/{account-groups-content.tsx => provider-groups-content.tsx} (93%) diff --git a/docs/developer-guide/provider.mdx b/docs/developer-guide/provider.mdx index ec3e106150..f7a1057d11 100644 --- a/docs/developer-guide/provider.mdx +++ b/docs/developer-guide/provider.mdx @@ -1003,7 +1003,7 @@ class ProwlerArgumentParser: formatter_class=RawTextHelpFormatter, usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,nhn,dashboard,iac,your_provider} ...", epilog=""" -Available Cloud Providers: +Available Providers: {aws,azure,gcp,kubernetes,m365,github,iac,nhn,your_provider} aws AWS Provider azure Azure Provider diff --git a/docs/getting-started/basic-usage/prowler-app.mdx b/docs/getting-started/basic-usage/prowler-app.mdx index 26d7d8ee2e..bc39353dcc 100644 --- a/docs/getting-started/basic-usage/prowler-app.mdx +++ b/docs/getting-started/basic-usage/prowler-app.mdx @@ -32,11 +32,11 @@ Access Prowler App by logging in with **email 
and password**. Log In -## Add Cloud Provider +## Add Provider -Configure a cloud provider for scanning: +Configure a provider for scanning: -1. Navigate to `Settings > Cloud Providers` and click `Add Account`. +1. Navigate to `Settings > Providers` and click `Add Provider`. 2. Select the cloud provider. 3. Enter the provider's identifier (Optional: Add an alias): - **AWS**: Account ID diff --git a/docs/user-guide/providers/alibabacloud/getting-started-alibabacloud.mdx b/docs/user-guide/providers/alibabacloud/getting-started-alibabacloud.mdx index a3de5fc8fa..36520a8f8c 100644 --- a/docs/user-guide/providers/alibabacloud/getting-started-alibabacloud.mdx +++ b/docs/user-guide/providers/alibabacloud/getting-started-alibabacloud.mdx @@ -40,13 +40,13 @@ Before you begin, make sure you have: ### Step 2: Access Prowler Cloud 1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app) -2. Go to "Configuration" > "Cloud Providers" +2. Go to "Configuration" > "Providers" - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider" +3. Click "Add Provider" - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Alibaba Cloud" diff --git a/docs/user-guide/providers/aws/getting-started-aws.mdx b/docs/user-guide/providers/aws/getting-started-aws.mdx index 8127e873e7..f0c5ab882a 100644 --- a/docs/user-guide/providers/aws/getting-started-aws.mdx +++ b/docs/user-guide/providers/aws/getting-started-aws.mdx @@ -19,13 +19,13 @@ title: 'Getting Started With AWS on Prowler' ### Step 2: Access Prowler Cloud 1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app) -2. Go to "Configuration" > "Cloud Providers" +2. 
Go to "Configuration" > "Providers" - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider" +3. Click "Add Provider" - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Amazon Web Services" diff --git a/docs/user-guide/providers/azure/getting-started-azure.mdx b/docs/user-guide/providers/azure/getting-started-azure.mdx index 456c226aab..66b3b14e3a 100644 --- a/docs/user-guide/providers/azure/getting-started-azure.mdx +++ b/docs/user-guide/providers/azure/getting-started-azure.mdx @@ -35,13 +35,13 @@ For detailed instructions on how to create the Service Principal and configure p ### Step 2: Access Prowler Cloud 1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app) -2. Navigate to `Configuration` > `Cloud Providers` +2. Navigate to `Configuration` > `Providers` - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click on `Add Cloud Provider` +3. Click on `Add Provider` - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select `Microsoft Azure` diff --git a/docs/user-guide/providers/cloudflare/getting-started-cloudflare.mdx b/docs/user-guide/providers/cloudflare/getting-started-cloudflare.mdx index 2bc18bcca8..00320bd34f 100644 --- a/docs/user-guide/providers/cloudflare/getting-started-cloudflare.mdx +++ b/docs/user-guide/providers/cloudflare/getting-started-cloudflare.mdx @@ -42,13 +42,13 @@ The Account ID is a 32-character hexadecimal string (e.g., `372e67954025e0ba6aaa ### Step 2: Open Prowler Cloud 1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app). -2. 
Navigate to "Configuration" > "Cloud Providers". +2. Navigate to "Configuration" > "Providers". - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider". +3. Click "Add Provider". - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Cloudflare". diff --git a/docs/user-guide/providers/gcp/getting-started-gcp.mdx b/docs/user-guide/providers/gcp/getting-started-gcp.mdx index c70250c8a9..e16114e19a 100644 --- a/docs/user-guide/providers/gcp/getting-started-gcp.mdx +++ b/docs/user-guide/providers/gcp/getting-started-gcp.mdx @@ -14,13 +14,13 @@ title: 'Getting Started With GCP on Prowler' ### Step 2: Access Prowler Cloud 1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app) -2. Go to "Configuration" > "Cloud Providers" +2. Go to "Configuration" > "Providers" - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider" +3. Click "Add Provider" - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Google Cloud Platform" diff --git a/docs/user-guide/providers/github/authentication.mdx b/docs/user-guide/providers/github/authentication.mdx index adbfd235a8..0b5ab6b720 100644 --- a/docs/user-guide/providers/github/authentication.mdx +++ b/docs/user-guide/providers/github/authentication.mdx @@ -275,7 +275,7 @@ For step-by-step setup instructions for Prowler Cloud, see the [Getting Started ### Using Personal Access Token -1. In Prowler Cloud, navigate to **Configuration** > **Cloud Providers** > **Add Cloud Provider** > **GitHub**. +1. 
In Prowler Cloud, navigate to **Configuration** > **Providers** > **Add Provider** > **GitHub**. 2. Enter your GitHub Account ID (username or organization name). diff --git a/docs/user-guide/providers/github/getting-started-github.mdx b/docs/user-guide/providers/github/getting-started-github.mdx index 3211d7058d..e946660b51 100644 --- a/docs/user-guide/providers/github/getting-started-github.mdx +++ b/docs/user-guide/providers/github/getting-started-github.mdx @@ -49,13 +49,13 @@ Before adding GitHub to Prowler Cloud/App, ensure you have: ### Step 1: Access Prowler Cloud/App 1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app) -2. Go to **Configuration** → **Cloud Providers** +2. Go to **Configuration** → **Providers** - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click **Add Cloud Provider** +3. Click **Add Provider** - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select **GitHub** diff --git a/docs/user-guide/providers/googleworkspace/getting-started-googleworkspace.mdx b/docs/user-guide/providers/googleworkspace/getting-started-googleworkspace.mdx index af09ab75c3..8931c43ebd 100644 --- a/docs/user-guide/providers/googleworkspace/getting-started-googleworkspace.mdx +++ b/docs/user-guide/providers/googleworkspace/getting-started-googleworkspace.mdx @@ -43,13 +43,13 @@ The Customer ID starts with the letter "C" followed by alphanumeric characters ( ### Step 2: Open Prowler Cloud 1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app). -2. Navigate to "Configuration" > "Cloud Providers". +2. Navigate to "Configuration" > "Providers". 
- ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider". +3. Click "Add Provider". - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Google Workspace". diff --git a/docs/user-guide/providers/iac/getting-started-iac.mdx b/docs/user-guide/providers/iac/getting-started-iac.mdx index 2a1e588018..d1e978dc75 100644 --- a/docs/user-guide/providers/iac/getting-started-iac.mdx +++ b/docs/user-guide/providers/iac/getting-started-iac.mdx @@ -42,13 +42,13 @@ Scanner selection is not configurable in Prowler App. Default scanners, misconfi ### Step 1: Access Prowler Cloud/App 1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app) -2. Go to "Configuration" > "Cloud Providers" +2. Go to "Configuration" > "Providers" - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider" +3. Click "Add Provider" - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Infrastructure as Code" diff --git a/docs/user-guide/providers/image/getting-started-image.mdx b/docs/user-guide/providers/image/getting-started-image.mdx index 9a3d67258d..b9c305d0ef 100644 --- a/docs/user-guide/providers/image/getting-started-image.mdx +++ b/docs/user-guide/providers/image/getting-started-image.mdx @@ -34,13 +34,13 @@ Prowler Cloud does not support scanner selection. The vulnerability, secret, and ### Step 1: Access Prowler Cloud 1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app) -2. Navigate to "Configuration" > "Cloud Providers" +2. 
Navigate to "Configuration" > "Providers" - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider" +3. Click "Add Provider" - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Container Registry" diff --git a/docs/user-guide/providers/kubernetes/getting-started-k8s.mdx b/docs/user-guide/providers/kubernetes/getting-started-k8s.mdx index aff63b81a3..9ca791110f 100644 --- a/docs/user-guide/providers/kubernetes/getting-started-k8s.mdx +++ b/docs/user-guide/providers/kubernetes/getting-started-k8s.mdx @@ -7,13 +7,13 @@ title: 'Getting Started with Kubernetes' ### Step 1: Access Prowler Cloud/App 1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app) -2. Go to "Configuration" > "Cloud Providers" +2. Go to "Configuration" > "Providers" - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider" +3. Click "Add Provider" - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Kubernetes" diff --git a/docs/user-guide/providers/microsoft365/getting-started-m365.mdx b/docs/user-guide/providers/microsoft365/getting-started-m365.mdx index 1e6830c722..a21b796b5c 100644 --- a/docs/user-guide/providers/microsoft365/getting-started-m365.mdx +++ b/docs/user-guide/providers/microsoft365/getting-started-m365.mdx @@ -42,13 +42,13 @@ Set up authentication for Microsoft 365 with the [Microsoft 365 Authentication]( ### Step 2: Open Prowler Cloud 1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app). -2. Navigate to "Configuration" > "Cloud Providers". +2. 
Navigate to "Configuration" > "Providers". - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider". +3. Click "Add Provider". - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Microsoft 365". diff --git a/docs/user-guide/providers/mongodbatlas/getting-started-mongodbatlas.mdx b/docs/user-guide/providers/mongodbatlas/getting-started-mongodbatlas.mdx index c10c6aac30..c68bfac9c2 100644 --- a/docs/user-guide/providers/mongodbatlas/getting-started-mongodbatlas.mdx +++ b/docs/user-guide/providers/mongodbatlas/getting-started-mongodbatlas.mdx @@ -38,7 +38,7 @@ If **Require IP Access List for the Atlas Administration API** is enabled in you ### Step 1: Add the provider -1. Navigate to **Cloud Providers** and click **Add Cloud Provider**. +1. Navigate to **Providers** and click **Add Provider**. ![Add provider list](./img/add-provider-list.png) 2. Select **MongoDB Atlas** from the provider list. 3. Enter your **Organization ID** (24 hex characters). This value is visible in the Atlas UI under **Organization Settings**. diff --git a/docs/user-guide/providers/oci/getting-started-oci.mdx b/docs/user-guide/providers/oci/getting-started-oci.mdx index 459d9ad685..d3024bc1c1 100644 --- a/docs/user-guide/providers/oci/getting-started-oci.mdx +++ b/docs/user-guide/providers/oci/getting-started-oci.mdx @@ -16,8 +16,8 @@ The following steps apply to Prowler Cloud and the self-hosted Prowler App. ### Step 2: Access Prowler Cloud 1. Navigate to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app). -2. Go to **Configuration** → **Cloud Providers** and click **Add Cloud Provider**. -![Add OCI Cloud Provider](./images/oci-add-cloud-provider.png) +2. Go to **Configuration** → **Providers** and click **Add Provider**. 
+![Add OCI Provider](./images/oci-add-cloud-provider.png) 3. Select **Oracle Cloud** and enter the **Tenancy OCID** and an optional alias, then choose **Next**. ![Add OCI Cloud Tenancy](./images/oci-add-tenancy.png) diff --git a/docs/user-guide/providers/openstack/getting-started-openstack.mdx b/docs/user-guide/providers/openstack/getting-started-openstack.mdx index b80ebe0e9f..1ff80c3e2e 100644 --- a/docs/user-guide/providers/openstack/getting-started-openstack.mdx +++ b/docs/user-guide/providers/openstack/getting-started-openstack.mdx @@ -34,7 +34,7 @@ Before running Prowler with the OpenStack provider, ensure you have: ### Step 1: Add the Provider -1. Navigate to "Cloud Providers" and click "Add Cloud Provider". +1. Navigate to "Providers" and click "Add Provider". ![Providers List](./images/select-provider.png) 2. Select "OpenStack" from the provider list. 3. Enter the "Project ID" from the OpenStack provider. diff --git a/docs/user-guide/providers/vercel/getting-started-vercel.mdx b/docs/user-guide/providers/vercel/getting-started-vercel.mdx index 8a6fdecc22..c39c5f1e6a 100644 --- a/docs/user-guide/providers/vercel/getting-started-vercel.mdx +++ b/docs/user-guide/providers/vercel/getting-started-vercel.mdx @@ -29,13 +29,13 @@ Set up authentication for Vercel with the [Vercel Authentication](/user-guide/pr ### Step 1: Add the Provider 1. Go to [Prowler Cloud](https://cloud.prowler.com/) or launch [Prowler App](/user-guide/tutorials/prowler-app). -2. Navigate to "Configuration" > "Cloud Providers". +2. Navigate to "Configuration" > "Providers". - ![Cloud Providers Page](/images/prowler-app/cloud-providers-page.png) + ![Providers Page](/images/prowler-app/cloud-providers-page.png) -3. Click "Add Cloud Provider". +3. Click "Add Provider". - ![Add a Cloud Provider](/images/prowler-app/add-cloud-provider.png) + ![Add a Provider](/images/prowler-app/add-cloud-provider.png) 4. Select "Vercel". 
diff --git a/docs/user-guide/tutorials/prowler-app-rbac.mdx b/docs/user-guide/tutorials/prowler-app-rbac.mdx index 31fd4a730e..cccbbc27cc 100644 --- a/docs/user-guide/tutorials/prowler-app-rbac.mdx +++ b/docs/user-guide/tutorials/prowler-app-rbac.mdx @@ -123,7 +123,7 @@ The Roles section in Prowler is designed to facilitate the assignment of custom ### Provider Groups -Provider Groups control visibility across specific providers. When creating a new role, you can assign specific groups to define their Cloud Provider visibility. This ensures that users with that role have access only to the Cloud Providers that are required. +Provider Groups control visibility across specific providers. When creating a new role, you can assign specific groups to define their Provider visibility. This ensures that users with that role have access only to the Providers that are required. By default, a new user role does not have visibility into any group. @@ -223,7 +223,7 @@ Assign administrative permissions by selecting from the following options: | Invite and Manage Users | All | Invite new users and manage existing ones. | | Manage Account | All | Adjust account settings, delete users and read/manage users permissions. | | Manage Scans | All | Run and review scans. | -| Manage Cloud Providers | All | Add or modify connected cloud providers. | +| Manage Providers | All | Add or modify connected providers. | | Manage Integrations | All | Add or modify the Prowler Integrations. | | Manage Ingestions | Prowler Cloud | Allow or deny the ability to submit findings ingestion batches via the API. | | Manage Billing | Prowler Cloud | Access and manage billing settings and subscription information. 
| diff --git a/docs/user-guide/tutorials/prowler-app-s3-integration.mdx b/docs/user-guide/tutorials/prowler-app-s3-integration.mdx index 728e4a4354..284b10eaaf 100644 --- a/docs/user-guide/tutorials/prowler-app-s3-integration.mdx +++ b/docs/user-guide/tutorials/prowler-app-s3-integration.mdx @@ -320,7 +320,7 @@ Once the required permissions are set up, proceed to configure the S3 integratio ![Add integration button](/images/prowler-app/s3/s3-integration-ui-3.png) 4. Complete the configuration form with the following details: - - **Cloud Providers:** Select the providers whose scan results should be exported to this S3 bucket + - **Providers:** Select the providers whose scan results should be exported to this S3 bucket - **Bucket Name:** Enter the name of the target S3 bucket (e.g., `my-security-findings-bucket`) - **Output Directory:** Specify the directory path within the bucket (e.g., `/prowler-findings/`, defaults to `output`) diff --git a/docs/user-guide/tutorials/prowler-app.mdx b/docs/user-guide/tutorials/prowler-app.mdx index 0b368c5ad4..5e99a41ae7 100644 --- a/docs/user-guide/tutorials/prowler-app.mdx +++ b/docs/user-guide/tutorials/prowler-app.mdx @@ -72,8 +72,8 @@ To perform security scans, link a cloud provider account. Prowler supports the f Steps to add a provider: -1. Navigate to `Settings > Cloud Providers`. -2. Click `Add Account` to set up a new provider and provide your credentials. +1. Navigate to `Settings > Providers`. +2. Click `Add Provider` to set up a new provider and provide your credentials. Add Provider diff --git a/docs/user-guide/tutorials/prowler-cloud-aws-organizations.mdx b/docs/user-guide/tutorials/prowler-cloud-aws-organizations.mdx index e56627ee20..d9c17aaa5f 100644 --- a/docs/user-guide/tutorials/prowler-cloud-aws-organizations.mdx +++ b/docs/user-guide/tutorials/prowler-cloud-aws-organizations.mdx @@ -246,10 +246,10 @@ Now that both roles are deployed — the management account role (Step 1) and th ### Open the Wizard -1. 
Navigate to **Cloud Providers** and click **Add Cloud Provider**. +1. Navigate to **Providers** and click **Add Provider**. - Cloud Providers page showing the Add Cloud Provider button + Providers page showing the Add Provider button 2. Select **Amazon Web Services** as the provider. diff --git a/ui/CHANGELOG.md b/ui/CHANGELOG.md index 8b0f8d17e6..994c5c9116 100644 --- a/ui/CHANGELOG.md +++ b/ui/CHANGELOG.md @@ -2,6 +2,14 @@ All notable changes to the **Prowler UI** are documented in this file. +## [1.26.0] (Prowler UNRELEASED) + +### 🔄 Changed + +- Standardized "Providers" wording across UI and documentation, replacing legacy "Cloud Providers" / "Accounts" / "Account Groups" copy [(#10971)](https://github.com/prowler-cloud/prowler/pull/10971) + +--- + ## [1.25.2] (Prowler v5.25.2) ### 🔄 Changed diff --git a/ui/actions/manage-groups/manage-groups.ts b/ui/actions/manage-groups/manage-groups.ts index 8758b66f68..933dabbdbc 100644 --- a/ui/actions/manage-groups/manage-groups.ts +++ b/ui/actions/manage-groups/manage-groups.ts @@ -23,7 +23,7 @@ export const getProviderGroups = async ({ const headers = await getAuthHeaders({ contentType: false }); if (isNaN(Number(page)) || page < 1) - redirect("/providers?tab=account-groups"); + redirect("/providers?tab=provider-groups"); const url = new URL(`${apiBaseUrl}/provider-groups`); @@ -112,7 +112,7 @@ export const createProviderGroup = async (formData: FormData) => { body, }); - return await handleApiResponse(response, "/providers?tab=account-groups"); + return await handleApiResponse(response, "/providers?tab=provider-groups"); } catch (error) { handleApiError(error); } @@ -169,7 +169,7 @@ export const deleteProviderGroup = async (formData: FormData) => { if (!providerGroupId) { return { - errors: [{ detail: "Account Group ID is required." }], + errors: [{ detail: "Provider Group ID is required." 
}], }; } diff --git a/ui/app/(prowler)/_overview/_components/accounts-selector.tsx b/ui/app/(prowler)/_overview/_components/accounts-selector.tsx index e134625d98..7d20c96355 100644 --- a/ui/app/(prowler)/_overview/_components/accounts-selector.tsx +++ b/ui/app/(prowler)/_overview/_components/accounts-selector.tsx @@ -146,7 +146,7 @@ export function AccountsSelector({ const filterDescription = selectedProviderTypes && selectedProviderTypes.length > 0 ? `Accounts for ${selectedProviderTypes.map(getProviderDisplayName).join(", ")}` - : "All connected cloud provider accounts"; + : "All connected provider accounts"; return (
@@ -155,8 +155,8 @@ export function AccountsSelector({ className="sr-only" id="accounts-label" > - Filter by cloud provider account. {filterDescription}. Select one or - more accounts to view findings. + Filter by provider account. {filterDescription}. Select one or more + accounts to view findings. - Filter by cloud provider type. Select one or more providers to view - findings. + Filter by provider type. Select one or more providers to view findings. ( - + ), cell: ({ row }) => ( { ); const source = readFileSync(columnsPath, "utf8"); - // Account is fixed, Account Groups is fluid (no explicit size) + // Provider is fixed, Provider Groups is fluid (no explicit size) expect(source).toContain("size: 420"); expect(source).toContain("size: 160"); expect(source).toContain("size: 140"); diff --git a/ui/app/(prowler)/providers/page.tsx b/ui/app/(prowler)/providers/page.tsx index 8cbdfb3459..cb2dae2e8b 100644 --- a/ui/app/(prowler)/providers/page.tsx +++ b/ui/app/(prowler)/providers/page.tsx @@ -7,7 +7,7 @@ import { ContentLayout } from "@/components/ui"; import { FilterTransitionWrapper } from "@/contexts"; import { SearchParamsProps } from "@/types"; -import { AccountGroupsContent } from "./account-groups-content"; +import { ProviderGroupsContent } from "./provider-groups-content"; import { ProviderPageTabs } from "./provider-page-tabs"; import { getProviderTab } from "./provider-page-tabs.shared"; import { loadProvidersAccountsViewData } from "./providers-page.utils"; @@ -25,24 +25,24 @@ export default async function Providers({ const searchParamsKey = JSON.stringify(paramsWithoutTab); return ( - + } > - + } - accountGroupsContent={ + providerGroupsContent={ } + fallback={} > - + } /> @@ -59,7 +59,7 @@ const ProvidersTableFallback = () => { {/* Organizations filter */} - {/* Account Groups filter */} + {/* Provider Groups filter */} {/* Status filter */} @@ -74,7 +74,7 @@ const ProvidersTableFallback = () => { ); }; -const AccountGroupsFallback = () => { +const 
ProviderGroupsFallback = () => { return (
@@ -95,7 +95,7 @@ const AccountGroupsFallback = () => { ); }; -const ProvidersAccountsContent = async ({ +const ProvidersTabContent = async ({ searchParams, }: { searchParams: SearchParamsProps; diff --git a/ui/app/(prowler)/providers/account-groups-content.tsx b/ui/app/(prowler)/providers/provider-groups-content.tsx similarity index 93% rename from ui/app/(prowler)/providers/account-groups-content.tsx rename to ui/app/(prowler)/providers/provider-groups-content.tsx index f171f521f9..2bb4cd9ae8 100644 --- a/ui/app/(prowler)/providers/account-groups-content.tsx +++ b/ui/app/(prowler)/providers/provider-groups-content.tsx @@ -9,7 +9,7 @@ import { ColumnGroups } from "@/components/manage-groups/table"; import { DataTable } from "@/components/ui/table"; import { ProviderProps, Role, SearchParamsProps } from "@/types"; -export const AccountGroupsContent = async ({ +export const ProviderGroupsContent = async ({ searchParams, }: { searchParams: SearchParamsProps; @@ -57,10 +57,10 @@ export const AccountGroupsContent = async ({ ) : (

- Create a new account group + Create a new provider group

- Create a new account group to manage the providers and roles. + Create a new provider group to manage the providers and roles.

@@ -127,9 +127,9 @@ const EditGroupSection = ({ return (
-

Edit account group

+

Edit provider group

- Edit the account group to manage the providers and roles. + Edit the provider group to manage the providers and roles.

{ pushMock.mockClear(); }); - it("falls back to accounts when tab search params are invalid", () => { - expect(getProviderTab(undefined)).toBe(PROVIDER_TAB.ACCOUNTS); - expect(getProviderTab(["account-groups"])).toBe(PROVIDER_TAB.ACCOUNTS); - expect(getProviderTab("invalid-tab")).toBe(PROVIDER_TAB.ACCOUNTS); - expect(getProviderTab(PROVIDER_TAB.ACCOUNT_GROUPS)).toBe( - PROVIDER_TAB.ACCOUNT_GROUPS, + it("falls back to providers when tab search params are invalid", () => { + expect(getProviderTab(undefined)).toBe(PROVIDER_TAB.PROVIDERS); + expect(getProviderTab(["provider-groups"])).toBe(PROVIDER_TAB.PROVIDERS); + expect(getProviderTab("invalid-tab")).toBe(PROVIDER_TAB.PROVIDERS); + expect(getProviderTab(PROVIDER_TAB.PROVIDER_GROUPS)).toBe( + PROVIDER_TAB.PROVIDER_GROUPS, ); }); - it("shows the accounts tab when the route changes back to accounts", () => { + it("shows the providers tab when the route changes back to providers", () => { const { rerender } = render( Accounts content
} - accountGroupsContent={
Account groups content
} + activeTab={PROVIDER_TAB.PROVIDER_GROUPS} + providersContent={
Providers content
} + providerGroupsContent={
Provider groups content
} />, ); - expect(screen.getByRole("tab", { name: "Account Groups" })).toHaveAttribute( - "data-state", - "active", - ); + expect( + screen.getByRole("tab", { name: "Provider Groups" }), + ).toHaveAttribute("data-state", "active"); rerender( Accounts content
} - accountGroupsContent={
Account groups content
} + activeTab={PROVIDER_TAB.PROVIDERS} + providersContent={
Providers content
} + providerGroupsContent={
Provider groups content
} />, ); - expect(screen.getByRole("tab", { name: "Accounts" })).toHaveAttribute( + expect(screen.getByRole("tab", { name: "Providers" })).toHaveAttribute( "data-state", "active", ); - expect(screen.getByText("Accounts content")).toBeVisible(); + expect(screen.getByText("Providers content")).toBeVisible(); }); it("does not switch the active tab before navigation updates the route", async () => { @@ -63,21 +62,21 @@ describe("ProviderPageTabs", () => { render( Accounts content
} - accountGroupsContent={
Account groups content
} + activeTab={PROVIDER_TAB.PROVIDERS} + providersContent={
Providers content
} + providerGroupsContent={
Provider groups content
} />, ); - await user.click(screen.getByRole("tab", { name: "Account Groups" })); + await user.click(screen.getByRole("tab", { name: "Provider Groups" })); - expect(pushMock).toHaveBeenCalledWith("/providers?tab=account-groups"); - expect(screen.getByRole("tab", { name: "Accounts" })).toHaveAttribute( + expect(pushMock).toHaveBeenCalledWith("/providers?tab=provider-groups"); + expect(screen.getByRole("tab", { name: "Providers" })).toHaveAttribute( "data-state", "active", ); expect( - screen.getByRole("tab", { name: "Account Groups" }), + screen.getByRole("tab", { name: "Provider Groups" }), ).not.toHaveAttribute("data-state", "active"); }); }); diff --git a/ui/app/(prowler)/providers/provider-page-tabs.tsx b/ui/app/(prowler)/providers/provider-page-tabs.tsx index 437c8b6da0..61934fbdf0 100644 --- a/ui/app/(prowler)/providers/provider-page-tabs.tsx +++ b/ui/app/(prowler)/providers/provider-page-tabs.tsx @@ -9,14 +9,14 @@ import { PROVIDER_TAB, type ProviderTab } from "./provider-page-tabs.shared"; interface ProviderPageTabsProps { activeTab: ProviderTab; - accountsContent: ReactNode; - accountGroupsContent: ReactNode; + providersContent: ReactNode; + providerGroupsContent: ReactNode; } export const ProviderPageTabs = ({ activeTab, - accountsContent, - accountGroupsContent, + providersContent, + providerGroupsContent, }: ProviderPageTabsProps) => { const router = useRouter(); @@ -27,7 +27,7 @@ export const ProviderPageTabs = ({ return; } - if (typedTab === PROVIDER_TAB.ACCOUNTS) { + if (typedTab === PROVIDER_TAB.PROVIDERS) { router.push("/providers"); } else { router.push(`/providers?tab=${typedTab}`); @@ -41,18 +41,18 @@ export const ProviderPageTabs = ({ className="flex w-full flex-col gap-6" > - Accounts - - Account Groups + Providers + + Provider Groups - - {accountsContent} + + {providersContent} - - {accountGroupsContent} + + {providerGroupsContent} ); diff --git a/ui/components/compliance/compliance-card.test.tsx 
b/ui/components/compliance/compliance-card.test.tsx index c7a199a7fc..996c8cb979 100644 --- a/ui/components/compliance/compliance-card.test.tsx +++ b/ui/components/compliance/compliance-card.test.tsx @@ -13,9 +13,9 @@ describe("ComplianceCard", () => { expect(source).toContain('variant="base"'); }); - it("uses a responsive stacked layout for narrow screens", () => { + it("uses a single-column stacked layout", () => { expect(source).toContain("flex-col"); - expect(source).toContain("sm:flex-row"); + expect(source).not.toContain("sm:flex-row"); }); it("uses the shadcn progress component instead of Hero UI", () => { diff --git a/ui/components/filters/data-filters.ts b/ui/components/filters/data-filters.ts index 0baf89a3df..2457b1a8b2 100644 --- a/ui/components/filters/data-filters.ts +++ b/ui/components/filters/data-filters.ts @@ -24,7 +24,7 @@ export const filterProviders: FilterOption[] = [ }, { key: "provider__in", - labelCheckboxGroup: "Cloud Provider", + labelCheckboxGroup: "Provider", values: [...PROVIDER_TYPES], valueLabelMapping: PROVIDER_TYPE_MAPPING, }, @@ -34,7 +34,7 @@ export const filterProviders: FilterOption[] = [ export const filterScans = [ { key: "provider_type__in", - labelCheckboxGroup: "Cloud Provider", + labelCheckboxGroup: "Provider", values: [...PROVIDER_TYPES], valueLabelMapping: PROVIDER_TYPE_MAPPING, index: 0, diff --git a/ui/components/findings/table/column-standalone-findings.tsx b/ui/components/findings/table/column-standalone-findings.tsx index 59e30ea6a3..0f457140f1 100644 --- a/ui/components/findings/table/column-standalone-findings.tsx +++ b/ui/components/findings/table/column-standalone-findings.tsx @@ -169,7 +169,7 @@ export function getStandaloneFindingColumns({ { accessorKey: "provider", header: ({ column }) => ( - + ), cell: ({ row }) => { const provider = getProviderData(row, "provider"); diff --git a/ui/components/manage-groups/forms/delete-group-form.tsx b/ui/components/manage-groups/forms/delete-group-form.tsx index 
7f79817440..0504178cb7 100644 --- a/ui/components/manage-groups/forms/delete-group-form.tsx +++ b/ui/components/manage-groups/forms/delete-group-form.tsx @@ -48,7 +48,7 @@ export const DeleteGroupForm = ({ title: "Success!", description: "The provider group was removed successfully.", }); - router.push("/providers?tab=account-groups"); + router.push("/providers?tab=provider-groups"); } setIsOpen(false); // Close the modal on success } diff --git a/ui/components/manage-groups/forms/edit-group-form.tsx b/ui/components/manage-groups/forms/edit-group-form.tsx index 907e372b0e..02503940f1 100644 --- a/ui/components/manage-groups/forms/edit-group-form.tsx +++ b/ui/components/manage-groups/forms/edit-group-form.tsx @@ -130,7 +130,7 @@ export const EditGroupForm = ({ title: "Success!", description: "The group was updated successfully.", }); - router.push("/providers?tab=account-groups"); + router.push("/providers?tab=provider-groups"); } } catch (_error) { toast({ @@ -263,7 +263,7 @@ export const EditGroupForm = ({ type="button" variant="ghost" onClick={() => { - router.push("/providers?tab=account-groups"); + router.push("/providers?tab=provider-groups"); }} disabled={isLoading} > diff --git a/ui/components/manage-groups/manage-groups-button.tsx b/ui/components/manage-groups/manage-groups-button.tsx index 3b08f5d8eb..48dc1d42c1 100644 --- a/ui/components/manage-groups/manage-groups-button.tsx +++ b/ui/components/manage-groups/manage-groups-button.tsx @@ -10,7 +10,7 @@ export const ManageGroupsButton = () => { ); diff --git a/ui/components/manage-groups/table/data-table-row-actions.tsx b/ui/components/manage-groups/table/data-table-row-actions.tsx index 6787897450..e05a16c807 100644 --- a/ui/components/manage-groups/table/data-table-row-actions.tsx +++ b/ui/components/manage-groups/table/data-table-row-actions.tsx @@ -41,15 +41,15 @@ export function DataTableRowActions({ } - label="Edit Account Group" + label="Edit Provider Group" onSelect={() => - 
router.push(`/providers?tab=account-groups&groupId=${groupId}`) + router.push(`/providers?tab=provider-groups&groupId=${groupId}`) } /> } - label="Delete Account Group" + label="Delete Provider Group" destructive onSelect={() => setIsDeleteOpen(true)} /> diff --git a/ui/components/overview/new-findings-table/table/skeleton-table-new-findings.tsx b/ui/components/overview/new-findings-table/table/skeleton-table-new-findings.tsx index df5017712e..76c7851d30 100644 --- a/ui/components/overview/new-findings-table/table/skeleton-table-new-findings.tsx +++ b/ui/components/overview/new-findings-table/table/skeleton-table-new-findings.tsx @@ -85,7 +85,7 @@ export const SkeletonTableNewFindings = () => { - {/* Cloud Provider */} + {/* Provider */} diff --git a/ui/components/providers/table/column-providers.tsx b/ui/components/providers/table/column-providers.tsx index 1203234a31..ec5766f491 100644 --- a/ui/components/providers/table/column-providers.tsx +++ b/ui/components/providers/table/column-providers.tsx @@ -140,7 +140,7 @@ export function getColumnProviders(
@@ -200,7 +200,7 @@ export function getColumnProviders( accessorKey: "groupNames", size: 160, header: ({ column }) => ( - + ), cell: ({ row }) => { if (isProvidersOrganizationRow(row.original)) { @@ -251,7 +251,7 @@ export function getColumnProviders( if (isProvidersOrganizationRow(row.original)) { return ( - {row.original.providerCount} Accounts + {row.original.providerCount} Providers ); } diff --git a/ui/components/providers/table/skeleton-table-provider.tsx b/ui/components/providers/table/skeleton-table-provider.tsx index 7d57112291..1fe557b808 100644 --- a/ui/components/providers/table/skeleton-table-provider.tsx +++ b/ui/components/providers/table/skeleton-table-provider.tsx @@ -3,7 +3,7 @@ import { Skeleton } from "@/components/shadcn/skeleton/skeleton"; const SkeletonTableRow = () => { return ( - {/* Account: provider logo + alias + UID */} + {/* Provider: logo + alias + UID */}
@@ -13,7 +13,7 @@ const SkeletonTableRow = () => {
- {/* Account Groups: badge chips */} + {/* Provider Groups: badge chips */}
@@ -68,11 +68,11 @@ export const SkeletonTableProviders = () => { - {/* Account */} + {/* Provider */} - {/* Account Groups */} + {/* Provider Groups */} diff --git a/ui/components/providers/wizard/provider-wizard-modal.tsx b/ui/components/providers/wizard/provider-wizard-modal.tsx index 706045a829..d731ca5786 100644 --- a/ui/components/providers/wizard/provider-wizard-modal.tsx +++ b/ui/components/providers/wizard/provider-wizard-modal.tsx @@ -93,7 +93,7 @@ export function ProviderWizardModal({
- For assistance connecting a Cloud Provider visit + For assistance connecting a Provider visit