Compare commits
1 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| d711222e1c |
@@ -61,12 +61,12 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
uses: github/codeql-action/init@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4.35.4
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/api-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
uses: github/codeql-action/analyze@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4.35.4
|
||||
with:
|
||||
category: '/language:${{ matrix.language }}'
|
||||
|
||||
@@ -66,12 +66,12 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
uses: github/codeql-action/init@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4.35.4
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/sdk-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
uses: github/codeql-action/analyze@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4.35.4
|
||||
with:
|
||||
category: '/language:${{ matrix.language }}'
|
||||
|
||||
@@ -62,12 +62,12 @@ jobs:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
uses: github/codeql-action/init@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4.35.4
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
config-file: ./.github/codeql/ui-codeql-config.yml
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
|
||||
uses: github/codeql-action/analyze@68bde559dea0fdcac2102bfdf6230c5f70eb485e # v4.35.4
|
||||
with:
|
||||
category: '/language:${{ matrix.language }}'
|
||||
|
||||
@@ -7,17 +7,12 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
### 🚀 Added
|
||||
|
||||
- GIN index on `findings(categories, resource_services, resource_regions, resource_types)` to speed up `/api/v1/finding-groups` array filters [(#11001)](https://github.com/prowler-cloud/prowler/pull/11001)
|
||||
- `GET /api/v1/scans/{id}/compliance/{name}/ocsf` endpoint to download the per-framework OCSF JSON export for universal compliance frameworks (DORA, CSA CCM) [(#11131)](https://github.com/prowler-cloud/prowler/pull/11131)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
- Replace `poetry` with `uv` (`0.11.14`) as the API package manager; migrate `pyproject.toml` to `[dependency-groups]` and regenerate as `uv.lock` [(#10775)](https://github.com/prowler-cloud/prowler/pull/10775)
|
||||
- Remove orphaned `gin_resources_search_idx` declaration from `Resource.Meta.indexes` (DB index dropped in `0072_drop_unused_indexes`) [(#11001)](https://github.com/prowler-cloud/prowler/pull/11001)
|
||||
|
||||
### 🐞 Fixed
|
||||
|
||||
- Universal compliance CSV and OCSF outputs (DORA, CSA CCM) were truncated to the first batch on multi-batch scans; they now stream every finding [(#11131)](https://github.com/prowler-cloud/prowler/pull/11131)
|
||||
|
||||
---
|
||||
|
||||
## [1.27.2] (Prowler UNRELEASED)
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
from collections.abc import Iterable, Mapping
|
||||
|
||||
from api.models import Provider
|
||||
from prowler.lib.check.compliance_models import (
|
||||
get_bulk_compliance_frameworks_universal,
|
||||
)
|
||||
from prowler.lib.check.compliance_models import Compliance
|
||||
from prowler.lib.check.models import CheckMetadata
|
||||
|
||||
AVAILABLE_COMPLIANCE_FRAMEWORKS = {}
|
||||
@@ -96,22 +94,25 @@ PROWLER_CHECKS = LazyChecksMapping()
|
||||
|
||||
|
||||
def get_compliance_frameworks(provider_type: Provider.ProviderChoices) -> list[str]:
|
||||
"""List compliance framework identifiers available for `provider_type`.
|
||||
"""List compliance frameworks the API can load for `provider_type`.
|
||||
|
||||
Includes both per-provider frameworks and universal top-level frameworks
|
||||
(e.g. ``dora``, ``csa_ccm_4.0``).
|
||||
The list is sourced from `Compliance.get_bulk` so that the names
|
||||
returned here are guaranteed to be loadable by the bulk loader. This
|
||||
prevents downstream key mismatches (e.g. CSV report generation iterating
|
||||
framework names and looking them up in the bulk dict).
|
||||
|
||||
Args:
|
||||
provider_type (Provider.ProviderChoices): The cloud provider type
|
||||
(e.g., "aws", "azure", "gcp", "m365").
|
||||
provider_type (Provider.ProviderChoices): The cloud provider type for which to retrieve
|
||||
available compliance frameworks (e.g., "aws", "azure", "gcp", "m365").
|
||||
|
||||
Returns:
|
||||
list[str]: Framework identifiers (e.g., "cis_1.4_aws", "dora").
|
||||
list[str]: A list of framework identifiers (e.g., "cis_1.4_aws", "mitre_attack_azure") available
|
||||
for the given provider.
|
||||
"""
|
||||
global AVAILABLE_COMPLIANCE_FRAMEWORKS
|
||||
if provider_type not in AVAILABLE_COMPLIANCE_FRAMEWORKS:
|
||||
AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type] = list(
|
||||
get_bulk_compliance_frameworks_universal(provider_type).keys()
|
||||
Compliance.get_bulk(provider_type).keys()
|
||||
)
|
||||
|
||||
return AVAILABLE_COMPLIANCE_FRAMEWORKS[provider_type]
|
||||
@@ -138,14 +139,18 @@ def get_prowler_provider_compliance(provider_type: Provider.ProviderChoices) ->
|
||||
"""
|
||||
Retrieve the Prowler compliance data for a specified provider type.
|
||||
|
||||
This function fetches the compliance frameworks and their associated
|
||||
requirements for the given cloud provider.
|
||||
|
||||
Args:
|
||||
provider_type (Provider.ProviderChoices): The provider type
|
||||
(e.g., 'aws', 'azure') for which to retrieve compliance data.
|
||||
|
||||
Returns:
|
||||
dict: Mapping of framework name to `ComplianceFramework` for the provider.
|
||||
dict: A dictionary mapping compliance framework names to their respective
|
||||
Compliance objects for the specified provider.
|
||||
"""
|
||||
return get_bulk_compliance_frameworks_universal(provider_type)
|
||||
return Compliance.get_bulk(provider_type)
|
||||
|
||||
|
||||
def _load_provider_assets(provider_type: Provider.ProviderChoices) -> tuple[dict, dict]:
|
||||
@@ -204,8 +209,8 @@ def load_prowler_checks(
|
||||
for compliance_name, compliance_data in prowler_compliance.get(
|
||||
provider_type, {}
|
||||
).items():
|
||||
for requirement in compliance_data.requirements:
|
||||
for check in requirement.checks.get(provider_type, []):
|
||||
for requirement in compliance_data.Requirements:
|
||||
for check in requirement.Checks:
|
||||
try:
|
||||
checks[provider_type][check].add(compliance_name)
|
||||
except KeyError:
|
||||
@@ -285,33 +290,24 @@ def generate_compliance_overview_template(
|
||||
requirements_status = {"passed": 0, "failed": 0, "manual": 0}
|
||||
total_requirements = 0
|
||||
|
||||
for requirement in compliance_data.requirements:
|
||||
for requirement in compliance_data.Requirements:
|
||||
total_requirements += 1
|
||||
provider_check_list = list(requirement.checks.get(provider_type, []))
|
||||
total_checks = len(provider_check_list)
|
||||
checks_dict = {check: None for check in provider_check_list}
|
||||
total_checks = len(requirement.Checks)
|
||||
checks_dict = {check: None for check in requirement.Checks}
|
||||
|
||||
req_status_val = "MANUAL" if total_checks == 0 else "PASS"
|
||||
|
||||
# Normalize `attributes` to a list — downstream consumers iterate it.
|
||||
if isinstance(requirement.attributes, dict):
|
||||
attributes_payload = (
|
||||
[dict(requirement.attributes)] if requirement.attributes else []
|
||||
)
|
||||
else:
|
||||
attributes_payload = [
|
||||
dict(attribute) for attribute in requirement.attributes
|
||||
]
|
||||
|
||||
# Build requirement dictionary
|
||||
requirement_dict = {
|
||||
"name": requirement.name or requirement.id,
|
||||
"description": requirement.description,
|
||||
"tactics": requirement.tactics or [],
|
||||
"subtechniques": requirement.sub_techniques or [],
|
||||
"platforms": requirement.platforms or [],
|
||||
"technique_url": requirement.technique_url or "",
|
||||
"attributes": attributes_payload,
|
||||
"name": requirement.Name or requirement.Id,
|
||||
"description": requirement.Description,
|
||||
"tactics": getattr(requirement, "Tactics", []),
|
||||
"subtechniques": getattr(requirement, "SubTechniques", []),
|
||||
"platforms": getattr(requirement, "Platforms", []),
|
||||
"technique_url": getattr(requirement, "TechniqueURL", ""),
|
||||
"attributes": [
|
||||
dict(attribute) for attribute in requirement.Attributes
|
||||
],
|
||||
"checks": checks_dict,
|
||||
"checks_status": {
|
||||
"pass": 0,
|
||||
@@ -329,15 +325,15 @@ def generate_compliance_overview_template(
|
||||
requirements_status["passed"] += 1
|
||||
|
||||
# Add requirement to compliance requirements
|
||||
compliance_requirements[requirement.id] = requirement_dict
|
||||
compliance_requirements[requirement.Id] = requirement_dict
|
||||
|
||||
# Build compliance dictionary
|
||||
compliance_dict = {
|
||||
"framework": compliance_data.framework,
|
||||
"name": compliance_data.name,
|
||||
"version": compliance_data.version,
|
||||
"framework": compliance_data.Framework,
|
||||
"name": compliance_data.Name,
|
||||
"version": compliance_data.Version,
|
||||
"provider": provider_type,
|
||||
"description": compliance_data.description,
|
||||
"description": compliance_data.Description,
|
||||
"requirements": compliance_requirements,
|
||||
"requirements_status": requirements_status,
|
||||
"total_requirements": total_requirements,
|
||||
|
||||
@@ -13037,59 +13037,8 @@ paths:
|
||||
responses:
|
||||
'200':
|
||||
description: CSV file containing the compliance report
|
||||
'202':
|
||||
description: The task is in progress
|
||||
'403':
|
||||
description: There is a problem with credentials
|
||||
'404':
|
||||
description: Compliance report not found, or the scan has no reports yet
|
||||
/api/v1/scans/{id}/compliance/{name}/ocsf:
|
||||
get:
|
||||
operationId: scans_compliance_ocsf_retrieve
|
||||
description: Download a specific compliance report as an OCSF JSON file. Only
|
||||
universal frameworks that declare an output configuration produce this artifact
|
||||
(currently 'dora' and 'csa_ccm_4.0'); any other framework returns 404.
|
||||
summary: Retrieve compliance report as OCSF JSON
|
||||
parameters:
|
||||
- in: query
|
||||
name: fields[scan-reports]
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
enum:
|
||||
- id
|
||||
- name
|
||||
description: endpoint return only specific fields in the response on a per-type
|
||||
basis by including a fields[TYPE] query parameter.
|
||||
explode: false
|
||||
- in: path
|
||||
name: id
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
description: A UUID string identifying this scan.
|
||||
required: true
|
||||
- in: path
|
||||
name: name
|
||||
schema:
|
||||
type: string
|
||||
description: The compliance report name, like 'dora'
|
||||
required: true
|
||||
tags:
|
||||
- Scan
|
||||
security:
|
||||
- JWT or API Key: []
|
||||
responses:
|
||||
'200':
|
||||
description: OCSF JSON file containing the compliance report
|
||||
'202':
|
||||
description: The task is in progress
|
||||
'403':
|
||||
description: There is a problem with credentials
|
||||
'404':
|
||||
description: Compliance report not found, the framework does not provide
|
||||
an OCSF export, or the scan has no reports yet
|
||||
description: Compliance report not found
|
||||
/api/v1/scans/{id}/csa:
|
||||
get:
|
||||
operationId: scans_csa_retrieve
|
||||
|
||||
@@ -115,7 +115,6 @@ from api.base_views import BaseRLSViewSet, BaseTenantViewset, BaseUserViewset
|
||||
from api.compliance import (
|
||||
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
|
||||
get_compliance_frameworks,
|
||||
get_prowler_provider_compliance,
|
||||
)
|
||||
from api.constants import SEVERITY_ORDER
|
||||
from api.db_router import MainRouter
|
||||
@@ -1849,42 +1848,7 @@ class ProviderViewSet(DisablePaginationMixin, BaseRLSViewSet):
|
||||
200: OpenApiResponse(
|
||||
description="CSV file containing the compliance report"
|
||||
),
|
||||
202: OpenApiResponse(description="The task is in progress"),
|
||||
403: OpenApiResponse(description="There is a problem with credentials"),
|
||||
404: OpenApiResponse(
|
||||
description="Compliance report not found, or the scan has no reports yet"
|
||||
),
|
||||
},
|
||||
request=None,
|
||||
),
|
||||
compliance_ocsf=extend_schema(
|
||||
tags=["Scan"],
|
||||
summary="Retrieve compliance report as OCSF JSON",
|
||||
description=(
|
||||
"Download a specific compliance report as an OCSF JSON file. "
|
||||
"Only universal frameworks that declare an output configuration "
|
||||
"produce this artifact (currently 'dora' and 'csa_ccm_4.0'); any "
|
||||
"other framework returns 404."
|
||||
),
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name="name",
|
||||
type=str,
|
||||
location=OpenApiParameter.PATH,
|
||||
required=True,
|
||||
description="The compliance report name, like 'dora'",
|
||||
),
|
||||
],
|
||||
responses={
|
||||
200: OpenApiResponse(
|
||||
description="OCSF JSON file containing the compliance report"
|
||||
),
|
||||
202: OpenApiResponse(description="The task is in progress"),
|
||||
403: OpenApiResponse(description="There is a problem with credentials"),
|
||||
404: OpenApiResponse(
|
||||
description="Compliance report not found, the framework does "
|
||||
"not provide an OCSF export, or the scan has no reports yet"
|
||||
),
|
||||
404: OpenApiResponse(description="Compliance report not found"),
|
||||
},
|
||||
request=None,
|
||||
),
|
||||
@@ -2041,10 +2005,6 @@ class ScanViewSet(BaseRLSViewSet):
|
||||
if hasattr(self, "response_serializer_class"):
|
||||
return self.response_serializer_class
|
||||
return ScanComplianceReportSerializer
|
||||
elif self.action == "compliance_ocsf":
|
||||
if hasattr(self, "response_serializer_class"):
|
||||
return self.response_serializer_class
|
||||
return ScanComplianceReportSerializer
|
||||
elif self.action == "threatscore":
|
||||
if hasattr(self, "response_serializer_class"):
|
||||
return self.response_serializer_class
|
||||
@@ -2296,16 +2256,20 @@ class ScanViewSet(BaseRLSViewSet):
|
||||
content, filename = loader
|
||||
return self._serve_file(content, filename, "application/x-zip-compressed")
|
||||
|
||||
def _serve_compliance_artifact(self, scan, name, file_extension, content_type):
|
||||
"""Resolve and serve a per-framework compliance artifact from disk/S3.
|
||||
@action(
|
||||
detail=True,
|
||||
methods=["get"],
|
||||
url_path="compliance/(?P<name>[^/]+)",
|
||||
url_name="compliance",
|
||||
)
|
||||
def compliance(self, request, pk=None, name=None):
|
||||
scan = self.get_object()
|
||||
if name not in get_compliance_frameworks(scan.provider.provider):
|
||||
return Response(
|
||||
{"detail": f"Compliance '{name}' not found."},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
Shared by the CSV and OCSF compliance download actions. Both are
|
||||
path-based (no query params) on purpose: ``get_object`` runs
|
||||
``filter_queryset``, which triggers JSON:API's
|
||||
``QueryParameterValidationFilter`` and 400s on any non-JSON:API
|
||||
query param, so a ``?format=`` / ``?type=`` selector is not viable
|
||||
here — the format is encoded in the route instead.
|
||||
"""
|
||||
running_resp = self._get_task_status(scan)
|
||||
if running_resp:
|
||||
return running_resp
|
||||
@@ -2322,66 +2286,25 @@ class ScanViewSet(BaseRLSViewSet):
|
||||
bucket = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET", "")
|
||||
key_prefix = scan.output_location.removeprefix(f"s3://{bucket}/")
|
||||
prefix = os.path.join(
|
||||
os.path.dirname(key_prefix), "compliance", f"{name}.{file_extension}"
|
||||
os.path.dirname(key_prefix), "compliance", f"{name}.csv"
|
||||
)
|
||||
loader = self._load_file(
|
||||
prefix,
|
||||
s3=True,
|
||||
bucket=bucket,
|
||||
list_objects=True,
|
||||
content_type=content_type,
|
||||
content_type="text/csv",
|
||||
)
|
||||
else:
|
||||
base = os.path.dirname(scan.output_location)
|
||||
pattern = os.path.join(base, "compliance", f"*_{name}.{file_extension}")
|
||||
pattern = os.path.join(base, "compliance", f"*_{name}.csv")
|
||||
loader = self._load_file(pattern, s3=False)
|
||||
|
||||
if isinstance(loader, HttpResponseBase):
|
||||
return loader
|
||||
|
||||
content, filename = loader
|
||||
return self._serve_file(content, filename, content_type)
|
||||
|
||||
@action(
|
||||
detail=True,
|
||||
methods=["get"],
|
||||
url_path="compliance/(?P<name>[^/]+)",
|
||||
url_name="compliance",
|
||||
)
|
||||
def compliance(self, request, pk=None, name=None):
|
||||
scan = self.get_object()
|
||||
if name not in get_compliance_frameworks(scan.provider.provider):
|
||||
return Response(
|
||||
{"detail": f"Compliance '{name}' not found."},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
return self._serve_compliance_artifact(scan, name, "csv", "text/csv")
|
||||
|
||||
@action(
|
||||
detail=True,
|
||||
methods=["get"],
|
||||
url_path="compliance/(?P<name>[^/]+)/ocsf",
|
||||
url_name="compliance-ocsf",
|
||||
)
|
||||
def compliance_ocsf(self, request, pk=None, name=None):
|
||||
scan = self.get_object()
|
||||
if name not in get_compliance_frameworks(scan.provider.provider):
|
||||
return Response(
|
||||
{"detail": f"Compliance '{name}' not found."},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
universal_bulk = get_prowler_provider_compliance(scan.provider.provider)
|
||||
framework_obj = universal_bulk.get(name)
|
||||
if not (framework_obj and getattr(framework_obj, "outputs", None)):
|
||||
return Response(
|
||||
{"detail": f"Compliance '{name}' does not provide an OCSF export."},
|
||||
status=status.HTTP_404_NOT_FOUND,
|
||||
)
|
||||
|
||||
return self._serve_compliance_artifact(
|
||||
scan, name, "ocsf.json", "application/json"
|
||||
)
|
||||
return self._serve_file(content, filename, "text/csv")
|
||||
|
||||
@action(
|
||||
detail=True,
|
||||
|
||||
@@ -67,10 +67,7 @@ from tasks.utils import (
|
||||
get_next_execution_datetime,
|
||||
)
|
||||
|
||||
from api.compliance import (
|
||||
get_compliance_frameworks,
|
||||
get_prowler_provider_compliance,
|
||||
)
|
||||
from api.compliance import get_compliance_frameworks
|
||||
from api.db_router import READ_REPLICA_ALIAS
|
||||
from api.db_utils import rls_transaction
|
||||
from api.decorators import handle_provider_deletion, set_tenant
|
||||
@@ -78,9 +75,6 @@ from api.models import Finding, Integration, Provider, Scan, ScanSummary, StateC
|
||||
from api.utils import initialize_prowler_provider
|
||||
from api.v1.serializers import ScanTaskSerializer
|
||||
from prowler.lib.check.compliance_models import Compliance
|
||||
from prowler.lib.outputs.compliance.compliance import (
|
||||
process_universal_compliance_frameworks,
|
||||
)
|
||||
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
|
||||
from prowler.lib.outputs.finding import Finding as FindingOutput
|
||||
|
||||
@@ -498,16 +492,7 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
|
||||
provider_uid = provider_obj.uid
|
||||
provider_type = provider_obj.provider
|
||||
|
||||
# Per-framework exporters in `COMPLIANCE_CLASS_MAP` consume the legacy bulk.
|
||||
frameworks_bulk = Compliance.get_bulk(provider_type)
|
||||
# Universal-only frameworks (top-level JSONs like `dora.json`) are emitted
|
||||
# via `process_universal_compliance_frameworks` below.
|
||||
universal_bulk = get_prowler_provider_compliance(provider_type)
|
||||
universal_only_names = {
|
||||
name
|
||||
for name in universal_bulk
|
||||
if name not in frameworks_bulk and universal_bulk[name].outputs
|
||||
}
|
||||
frameworks_avail = get_compliance_frameworks(provider_type)
|
||||
out_dir, comp_dir = _generate_output_directory(
|
||||
DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
|
||||
@@ -529,10 +514,6 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
|
||||
|
||||
output_writers = {}
|
||||
compliance_writers = {}
|
||||
# Shared across batches so universal writers are created once and reused.
|
||||
universal_compliance_state: dict[str, list] = {"compliance": []}
|
||||
universal_base_dir = os.path.dirname(out_dir)
|
||||
universal_output_filename = os.path.basename(out_dir)
|
||||
|
||||
scan_summary = FindingOutput._transform_findings_stats(
|
||||
ScanSummary.objects.filter(scan_id=scan_id)
|
||||
@@ -587,30 +568,8 @@ def generate_outputs_task(scan_id: str, provider_id: str, tenant_id: str):
|
||||
writer.batch_write_data_to_file(**extra)
|
||||
writer._data.clear()
|
||||
|
||||
# Universal-only frameworks (e.g. `dora.json`).
|
||||
if universal_only_names:
|
||||
process_universal_compliance_frameworks(
|
||||
input_compliance_frameworks=universal_only_names,
|
||||
universal_frameworks=universal_bulk,
|
||||
finding_outputs=fos,
|
||||
output_directory=universal_base_dir,
|
||||
output_filename=universal_output_filename,
|
||||
provider=provider_type,
|
||||
generated_outputs=universal_compliance_state,
|
||||
from_cli=False,
|
||||
is_last=is_last,
|
||||
)
|
||||
|
||||
# Compliance CSVs (per-framework exporters).
|
||||
# Compliance CSVs
|
||||
for name in frameworks_avail:
|
||||
if name in universal_only_names:
|
||||
continue
|
||||
if name not in frameworks_bulk:
|
||||
logger.warning(
|
||||
"Compliance framework '%s' missing from bulk; skipping CSV export",
|
||||
name,
|
||||
)
|
||||
continue
|
||||
compliance_obj = frameworks_bulk[name]
|
||||
|
||||
klass = GenericCompliance
|
||||
|
||||
@@ -152,7 +152,7 @@ These should have been already installed if `uv sync` was already run.
|
||||
|
||||
</Note>
|
||||
- [`bandit`](https://pypi.org/project/bandit/) for code security review.
|
||||
- [`osv-scanner`](https://github.com/google/osv-scanner) and [`dependabot`](https://github.com/features/security) for dependencies.
|
||||
- [`safety`](https://pypi.org/project/safety/) and [`dependabot`](https://github.com/features/security) for dependencies.
|
||||
- [`hadolint`](https://github.com/hadolint/hadolint) and [`dockle`](https://github.com/goodwithtech/dockle) for container security.
|
||||
- [`Snyk`](https://docs.snyk.io/integrations/snyk-container-integrations/container-security-with-docker-hub-integration) for container security in Docker Hub.
|
||||
- [`clair`](https://github.com/quay/clair) for container security in Amazon ECR.
|
||||
|
||||
@@ -2,228 +2,40 @@
|
||||
title: 'Creating a New Security Compliance Framework in Prowler'
|
||||
---
|
||||
|
||||
This guide explains how to add a new security compliance framework to Prowler, end to end. It covers directory layout, the two supported JSON schemas (universal and legacy), the Pydantic models that validate each framework, check mapping conventions, output formatting, local validation, testing, and the pull request process.
|
||||
This guide explains how to add a new security compliance framework to Prowler, end to end. It covers directory layout, the JSON schema, check mapping conventions, the Pydantic models that validate each framework, the CSV output formatter, local validation, testing, and the pull request process.
|
||||
|
||||
## Introduction
|
||||
|
||||
A compliance framework in Prowler maps a public or custom control catalog (for example CIS, NIST 800-53, PCI DSS, HIPAA, ENS, CCC, DORA) to the security checks that Prowler already runs. Each requirement links to zero, one or more Prowler checks. When a scan executes, findings are aggregated per requirement to produce the compliance report rendered by Prowler CLI and Prowler Cloud.
|
||||
A compliance framework in Prowler maps a public or custom control catalog (for example CIS, NIST 800-53, PCI DSS, HIPAA, ENS, CCC) to the security checks that Prowler already runs. Each requirement links to zero, one or more Prowler checks. When a scan executes, findings are aggregated per requirement to produce the compliance report rendered by Prowler CLI and Prowler Cloud.
|
||||
|
||||
Prowler ships 85+ compliance frameworks across all providers. The catalog lives under `prowler/compliance/<provider>/` (legacy, per-provider) or `prowler/compliance/` (universal, multi-provider).
|
||||
Prowler ships with 85+ compliance frameworks across all providers. The catalog lives under `prowler/compliance/<provider>/` (or `prowler/compliance/` for universal compliance frameworks).
|
||||
|
||||
<Warning>
|
||||
A compliance framework must represent the **complete state** of the source catalog. Every requirement defined by the framework has to be present in the JSON file, even when no Prowler check can automate it. In that case, leave the requirement's check list empty, but do not omit the requirement.
|
||||
A compliance framework must represent the **complete state** of the source catalog. Every requirement defined by the framework has to be present in the JSON file, even when none of the existing Prowler checks can automate it. In that case, leave `Checks` as an empty array, but do not omit the requirement.
|
||||
|
||||
Requirement coverage feeds the compliance percentage calculations and the metadata surfaces (dashboards, widgets, exports). Missing requirements skew those metrics and break the report as a faithful snapshot of the framework.
|
||||
</Warning>
|
||||
|
||||
### Two supported schemas
|
||||
|
||||
| Schema | When to use | File location | Discovered as |
|
||||
| --- | --- | --- | --- |
|
||||
| **Universal (recommended for new frameworks)** | Multi-provider frameworks, or single-provider frameworks that benefit from declarative table/PDF rendering | `prowler/compliance/<framework>.json` (top-level) | Available for **every** provider whose key appears in any `requirement.checks` dict |
|
||||
| **Legacy provider-specific** | Single-provider frameworks with framework-specific attribute classes already declared in the codebase (CIS, ENS, ISO 27001, etc.) | `prowler/compliance/<provider>/<framework>_<version>_<provider>.json` | Available only under that provider |
|
||||
|
||||
Auto-discovery happens in `get_bulk_compliance_frameworks_universal(provider)` (`prowler/lib/check/compliance_models.py:915`), which scans **both** the top-level `prowler/compliance/` directory and every per-provider sub-directory. Legacy frameworks are transparently converted to the universal `ComplianceFramework` model via `adapt_legacy_to_universal()` before being returned, so the rest of Prowler — CLI table rendering, CSV/OCSF outputs, PDF generation — works the same regardless of the source schema.
|
||||
|
||||
> The legacy entry-point `Compliance.get_bulk(provider)` (used by older code paths) only scans per-provider sub-directories. Universal top-level files are picked up exclusively via the universal loader; this matters if you are wiring a new code path against the legacy API.
|
||||
|
||||
For **new** frameworks, prefer the universal schema: it requires no Python code changes, supports multiple providers in a single file, and table/PDF rendering is driven entirely from declarative configuration inside the JSON.
|
||||
|
||||
> All Pydantic models in `compliance_models.py` are imported from `pydantic.v1`. Subclasses you add for the legacy schema must use `from pydantic.v1 import BaseModel`.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
Before adding a new framework, complete the following checks:
|
||||
|
||||
- **Verify the framework is not already supported.** Inspect `prowler/compliance/` and every `prowler/compliance/<provider>/` for an existing JSON file matching the name and version.
|
||||
- **Verify the framework is not already supported.** Inspect `prowler/compliance/<provider>/` for an existing JSON file matching the name and version.
|
||||
- **Confirm the required checks exist.** Every requirement that can be automated must point to one or more existing Prowler checks. For each missing check, implement it first by following the [Prowler Checks](/developer-guide/checks) guide.
|
||||
- **Review a reference framework.** Use an existing framework with a similar structure as your template:
|
||||
- Universal: `prowler/compliance/dora.json`, `prowler/compliance/csa_ccm_4.0.json`.
|
||||
- Legacy: `prowler/compliance/aws/cis_2.0_aws.json` (canonical CIS shape), `prowler/compliance/aws/ccc_aws.json`, `prowler/compliance/aws/ens_rd2022_aws.json`, `prowler/compliance/aws/nist_800_53_revision_5_aws.json`.
|
||||
- **Review a reference framework.** Use an existing framework with a similar structure as your template. `cis_2.0_aws.json` is the canonical reference for CIS-style frameworks. `ccc_aws.json`, `ens_rd2022_aws.json`, and `nist_800_53_revision_5_aws.json` illustrate other attribute shapes.
|
||||
|
||||
## Universal Compliance Framework
|
||||
## Four-Layer Architecture
|
||||
|
||||
### Where the file lives
|
||||
A compliance framework spans four layers. A complete contribution must touch each layer that applies.
|
||||
|
||||
Place the file at the top level of the compliance directory:
|
||||
- **Layer 1 – Schema validation:** The Pydantic models in `prowler/lib/check/compliance_models.py` define the canonical schema for each attribute shape (CIS, ENS, Mitre, CCC, C5, CSA CCM, ISO 27001, KISA ISMS-P, AWS Well-Architected, Prowler ThreatScore, and a generic fallback).
|
||||
- **Layer 2 – JSON catalog:** The framework JSON file in `prowler/compliance/<provider>/` lists every requirement and maps it to checks.
|
||||
- **Layer 3 – Output formatter:** The Python module in `prowler/lib/outputs/compliance/<framework>/` builds the CSV row model, the per-provider transformer, and the CLI summary table.
|
||||
- **Layer 4 – Output dispatchers:** The dispatchers in `prowler/lib/outputs/compliance/compliance.py` and `prowler/lib/outputs/compliance/compliance_output.py` route findings to the right formatter based on the framework identifier.
|
||||
|
||||
```
|
||||
prowler/compliance/<framework_name>.json
|
||||
```
|
||||
The rest of this guide walks each layer in order.
|
||||
|
||||
Examples in the repository: `prowler/compliance/csa_ccm_4.0.json`, `prowler/compliance/dora.json`.
|
||||
|
||||
The file is auto-discovered — there is **no** need to register it in any `__init__.py`, modify `prowler/lib/outputs/`, or update any other Python module. The framework key Prowler CLI accepts via `--compliance` is the basename of the JSON file without `.json` (`dora.json` → `dora`).
|
||||
|
||||
### Top-level structure
|
||||
|
||||
```json
|
||||
{
|
||||
"framework": "<short identifier, e.g. \"DORA\" or \"CSA-CCM\">",
|
||||
"name": "<human-readable full name>",
|
||||
"version": "<framework version>",
|
||||
"description": "<one-paragraph description shown in --list-compliance and PDF reports>",
|
||||
"icon": "<short icon slug, optional>",
|
||||
"attributes_metadata": [ /* see below */ ],
|
||||
"outputs": { /* see below — optional */ },
|
||||
"requirements": [ /* see below */ ]
|
||||
}
|
||||
```
|
||||
|
||||
A `provider` field at the top level is **optional**. The framework's effective provider list is derived by `ComplianceFramework.get_providers()` (`compliance_models.py:739`) from the union of all keys appearing in `requirement.checks` across all requirements; the explicit `provider` field is used **only as a fallback** when no requirement carries any `checks` key. This is what enables a single file (e.g. `dora.json`) to cover AWS today and add Azure / GCP / etc. tomorrow without restructuring.
|
||||
|
||||
Provider keys inside `requirement.checks` must match the directory names under `prowler/providers/`. The valid keys at present are: `aws`, `azure`, `gcp`, `m365`, `kubernetes`, `iac`, `github`, `googleworkspace`, `alibabacloud`, `cloudflare`, `mongodbatlas`, `nhn`, `openstack`, `oraclecloud`, `llm`. Comparison in `supports_provider()` is case-insensitive, but lowercase is the convention used everywhere in the repository.
|
||||
|
||||
### `attributes_metadata`
|
||||
|
||||
Declares the shape of the per-requirement `attributes` dict. When this field is present, the root validator `validate_attributes_against_metadata` (`compliance_models.py:669`) enforces the schema at load time and rejects:
|
||||
|
||||
- Missing keys marked `required: true`.
|
||||
- Keys present in `attributes` but not declared in `attributes_metadata` (typo / drift guard).
|
||||
- Values that violate a declared `enum`.
|
||||
- Values whose Python type does not match a declared `int`, `float` or `bool`.
|
||||
|
||||
The runtime type check **only** covers `int`, `float` and `bool`. For `str`, `list_str` and `list_dict` the type is documentation-only — non-conforming values won't fail validation. If `attributes_metadata` is omitted, **no per-requirement validation runs at all**.
|
||||
|
||||
```json
|
||||
"attributes_metadata": [
|
||||
{
|
||||
"key": "Pillar",
|
||||
"label": "Pillar",
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"enum": [
|
||||
"ICT Risk Management",
|
||||
"ICT-Related Incident Reporting",
|
||||
"Digital Operational Resilience Testing",
|
||||
"ICT Third-Party Risk Management",
|
||||
"Information Sharing"
|
||||
],
|
||||
"output_formats": { "csv": true, "ocsf": true }
|
||||
},
|
||||
{
|
||||
"key": "Article",
|
||||
"label": "Article",
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"output_formats": { "csv": true, "ocsf": true }
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
Per attribute:
|
||||
|
||||
- `key` (required): attribute name as it will appear in `requirement.attributes`.
|
||||
- `label`: human-readable label used in CSV headers and PDF.
|
||||
- `type`: one of `str`, `int`, `float`, `bool`, `list_str`, `list_dict`. Defaults to `str`.
|
||||
- `enum`: optional list of allowed values; non-conforming values are rejected at load time.
|
||||
- `required`: if `true`, every requirement must include this key with a non-null value.
|
||||
- `enum_display` / `enum_order`: optional per-enum-value visual metadata (label, abbreviation, color, icon) and explicit ordering for PDF rendering.
|
||||
- `output_formats`: `{ "csv": <bool>, "ocsf": <bool> }` — toggles inclusion in each output format. Both default to `true`.
|
||||
|
||||
### `outputs`
|
||||
|
||||
Optional. Controls how the framework is rendered in the console table and in the generated PDF report. Skipping it falls back to sensible defaults.
|
||||
|
||||
```json
|
||||
"outputs": {
|
||||
"table_config": {
|
||||
"group_by": "Pillar"
|
||||
},
|
||||
"pdf_config": {
|
||||
"language": "en",
|
||||
"primary_color": "#003399",
|
||||
"secondary_color": "#0055A5",
|
||||
"bg_color": "#F0F4FA",
|
||||
"group_by_field": "Pillar",
|
||||
"sections": [ "ICT Risk Management", "ICT-Related Incident Reporting", "..." ],
|
||||
"section_short_names": { "ICT Risk Management": "ICT Risk Mgmt" },
|
||||
"charts": [
|
||||
{
|
||||
"id": "pillar_compliance",
|
||||
"type": "horizontal_bar",
|
||||
"group_by": "Pillar",
|
||||
"title": "Compliance Score by Pillar",
|
||||
"y_label": "Pillar",
|
||||
"x_label": "Compliance %",
|
||||
"value_source": "compliance_percent",
|
||||
"color_mode": "by_value"
|
||||
}
|
||||
],
|
||||
"filter": { "only_failed": true, "include_manual": false }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
`table_config.group_by` must reference an attribute key declared in `attributes_metadata`. The same applies to `pdf_config.group_by_field` and to every `charts[].group_by`.
|
||||
|
||||
For frameworks with weighted scoring (e.g. ThreatScore) declare `pdf_config.scoring` with `risk_field` / `weight_field` / `risk_boost_factor`. For column splitting (e.g. CIS Level 1 vs Level 2) use `table_config.split_by`.
|
||||
|
||||
### `requirements`
|
||||
|
||||
```json
|
||||
"requirements": [
|
||||
{
|
||||
"id": "DORA-Art5",
|
||||
"name": "Governance and organisation",
|
||||
"description": "Financial entities shall have a sound, comprehensive and well-documented ICT internal governance and control framework. ...",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 5",
|
||||
"ArticleTitle": "Governance and organisation"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"iam_avoid_root_usage",
|
||||
"iam_no_root_access_key",
|
||||
"iam_root_mfa_enabled"
|
||||
],
|
||||
"azure": [],
|
||||
"gcp": []
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
Per requirement:
|
||||
|
||||
- `id` (required): unique identifier within the framework.
|
||||
- `description` (required): the requirement text as authored by the framework.
|
||||
- `name`: short title shown alongside the id.
|
||||
- `attributes`: flat dict; keys must conform to `attributes_metadata`.
|
||||
- `checks`: dict keyed by provider name (the same lowercase keys listed in the previous section). Each value is a list of Prowler check names that evidence this requirement for that provider. The list **may be empty** and the dict itself defaults to `{}` if omitted; either way the requirement is still loaded and listed by `--list-compliance-requirements`, it just has zero checks to execute. Note: there is **no automatic check-existence validation** at load time — referencing a non-existent check name will silently produce a requirement with no findings. Validate this yourself (see "Validating Your Framework" below).
|
||||
|
||||
For MITRE-style frameworks, additional optional fields are available on the requirement: `tactics`, `sub_techniques`, `platforms`, `technique_url` (these are populated automatically when adapting a legacy MITRE JSON to the universal model).
|
||||
|
||||
### Multi-provider frameworks
|
||||
|
||||
A single universal file can cover any number of providers. The framework appears under each provider's `--list-compliance` output as long as **at least one** requirement has that provider key in its `checks` dict.
|
||||
|
||||
When extending an existing universal framework with a new provider, the only change required is editing `requirement.checks`:
|
||||
|
||||
```diff
|
||||
"checks": {
|
||||
"aws": ["iam_avoid_root_usage", "iam_no_root_access_key"],
|
||||
+ "azure": ["entra_policy_ensure_mfa_for_admin_roles"]
|
||||
}
|
||||
```
|
||||
|
||||
No code changes, no new file, no registration step.
|
||||
|
||||
## Legacy Provider-Specific Compliance Framework
|
||||
|
||||
The legacy schema is still fully supported and remains the format used by most frameworks shipped today (CIS, NIST, ISO 27001, FedRAMP, PCI DSS, GDPR, HIPAA, ENS, etc.). It binds a framework to a single provider and validates each requirement against a framework-specific Pydantic attribute class.
|
||||
|
||||
The legacy schema spans **four layers** — a complete contribution must touch every layer that applies:
|
||||
|
||||
- **Layer 1 — Schema validation:** the Pydantic models in `prowler/lib/check/compliance_models.py` define the canonical schema for each attribute shape.
|
||||
- **Layer 2 — JSON catalog:** the framework JSON file in `prowler/compliance/<provider>/` lists every requirement and maps it to checks.
|
||||
- **Layer 3 — Output formatter:** the Python module in `prowler/lib/outputs/compliance/<framework>/` builds the CSV row model, the per-provider transformer, and the CLI summary table.
|
||||
- **Layer 4 — Output dispatchers:** the dispatchers in `prowler/lib/outputs/compliance/compliance.py` and `prowler/lib/outputs/compliance/compliance_output.py` route findings to the right formatter based on the framework identifier.
|
||||
|
||||
The universal schema collapses Layers 3 and 4 into declarative configuration inside the JSON — that is the main reason it is preferred for new contributions.
|
||||
|
||||
### Directory structure and file naming
|
||||
## Directory Structure and File Naming
|
||||
|
||||
Compliance frameworks live at:
|
||||
|
||||
@@ -234,8 +46,8 @@ prowler/compliance/<provider>/<framework>_<version>_<provider>.json
|
||||
The filename conventions are:
|
||||
|
||||
- All lowercase, words separated with underscores.
|
||||
- `<provider>` is a supported provider identifier (same lowercase list as the universal section above).
|
||||
- `<version>` is optional but recommended. Omit only when the framework has no versioning (e.g. `ccc_aws.json`).
|
||||
- `<provider>` is a supported provider identifier: `aws`, `azure`, `gcp`, `kubernetes`, `m365`, `github`, `googleworkspace`, `alibabacloud`, `oraclecloud`, `cloudflare`, `mongodbatlas`, `nhn`, `openstack`, `iac`, `llm`.
|
||||
- `<version>` is optional. Omit it when the framework has no versioning, as in `ccc_aws.json`.
|
||||
- The file basename (without `.json`) is the framework key that Prowler CLI accepts via `--compliance`.
|
||||
|
||||
Examples:
|
||||
@@ -250,50 +62,48 @@ The output formatter directory mirrors the framework name:
|
||||
|
||||
```
|
||||
prowler/lib/outputs/compliance/<framework>/
|
||||
├── <framework>.py # CLI summary-table dispatcher
|
||||
├── <framework>.py # CLI summary-table dispatcher
|
||||
├── <framework>_<provider>.py # Per-provider transformer class
|
||||
├── models.py # Pydantic CSV row model
|
||||
└── __init__.py
|
||||
```
|
||||
|
||||
### JSON schema reference
|
||||
## JSON Schema Reference
|
||||
|
||||
Every legacy compliance file is a JSON document with the following top-level keys. `Framework`, `Name` and `Provider` are validated non-empty by the root validator `framework_and_provider_must_not_be_empty` (`compliance_models.py:329`).
|
||||
Every compliance file is a JSON document with the following top-level keys.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|---|---|---|---|
|
||||
| `Framework` | string | Yes | Canonical framework identifier, for example `CIS`, `NIST-800-53-Revision-5`, `ENS`, `CCC`. |
|
||||
| `Name` | string | Yes | Human-readable framework name displayed by Prowler App. |
|
||||
| `Version` | string | Yes (recommended) | Framework version, e.g. `2.0`. See [Version Handling](#version-handling). |
|
||||
| `Version` | string | Yes | Framework version, for example `2.0`. Use an empty string only for frameworks without versioning. See [Version Handling](#version-handling). |
|
||||
| `Provider` | string | Yes | Upper-cased provider identifier: `AWS`, `AZURE`, `GCP`, `KUBERNETES`, `M365`, `GITHUB`, `GOOGLEWORKSPACE`, and so on. |
|
||||
| `Description` | string | Yes | Short description of the framework's scope and purpose. |
|
||||
| `Requirements` | array | Yes | List of [requirement objects](#requirement-object). |
|
||||
|
||||
#### Requirement Object
|
||||
### Requirement Object
|
||||
|
||||
Each entry in `Requirements` describes one control or requirement.
|
||||
|
||||
| Field | Type | Required | Description |
|
||||
|---|---|---|---|
|
||||
| `Id` | string | Yes | Unique identifier within the framework, for example `1.10` or `CCC.Core.CN01.AR01`. |
|
||||
| `Name` | string | No | Optional human-readable name (frameworks like NIST distinguish control name from description). |
|
||||
| `Name` | string | No | Optional human-readable name used by frameworks that distinguish control name from description, such as NIST. |
|
||||
| `Description` | string | Yes | Verbatim description from the source framework. |
|
||||
| `Attributes` | array | Yes | List of [attribute objects](#attribute-objects). The shape depends on the framework. |
|
||||
| `Checks` | array of strings | Yes | Prowler check identifiers that automate the requirement. Leave the list empty when the control cannot be automated. |
|
||||
|
||||
#### Attribute Objects
|
||||
### Attribute Objects
|
||||
|
||||
`Attributes` is parsed against the union declared in `Compliance_Requirement.Attributes` (`compliance_models.py:293`). Pydantic v1 tries each member of the union in declaration order and falls back to `Generic_Compliance_Requirement_Attribute` (the last entry) when nothing else matches — so a brand-new shape that doesn't match any existing class will silently be accepted as Generic, losing its specific fields.
|
||||
Attributes carry the metadata that Prowler App and the CSV output display for each requirement. The object shape is framework-specific and is validated by a dedicated Pydantic model in `prowler/lib/check/compliance_models.py`. The most common shapes are summarized below.
|
||||
|
||||
As of today, the registered attribute classes are: `CIS_Requirement_Attribute`, `ENS_Requirement_Attribute`, `ASDEssentialEight_Requirement_Attribute`, `ISO27001_2013_Requirement_Attribute`, `AWS_Well_Architected_Requirement_Attribute`, `KISA_ISMSP_Requirement_Attribute`, `Prowler_ThreatScore_Requirement_Attribute`, `CCC_Requirement_Attribute`, `C5Germany_Requirement_Attribute`, `CSA_CCM_Requirement_Attribute`, and `Generic_Compliance_Requirement_Attribute` (fallback). MITRE-style frameworks use the separate `Mitre_Requirement` model with `Tactics` / `SubTechniques` / `Platforms` / `TechniqueURL` at the requirement top level. The most common shapes are summarized below.
|
||||
|
||||
##### CIS_Requirement_Attribute
|
||||
#### CIS_Requirement_Attribute
|
||||
|
||||
Used by every CIS benchmark.
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `Section` | string | Yes | Top-level section, e.g. `1 Identity and Access Management`. |
|
||||
| `Section` | string | Yes | Top-level section, for example `1 Identity and Access Management`. |
|
||||
| `SubSection` | string | No | Optional second-level grouping. |
|
||||
| `Profile` | enum | Yes | One of `Level 1`, `Level 2`, `E3 Level 1`, `E3 Level 2`, `E5 Level 1`, `E5 Level 2`. |
|
||||
| `AssessmentStatus` | enum | Yes | `Manual` or `Automated`. |
|
||||
@@ -306,7 +116,7 @@ Used by every CIS benchmark.
|
||||
| `DefaultValue` | string | No | Default configuration value, when relevant. |
|
||||
| `References` | string | Yes | Colon-separated list of reference URLs. |
|
||||
|
||||
##### ENS_Requirement_Attribute
|
||||
#### ENS_Requirement_Attribute
|
||||
|
||||
Used by the Spanish ENS (Esquema Nacional de Seguridad) frameworks.
|
||||
|
||||
@@ -322,13 +132,13 @@ Used by the Spanish ENS (Esquema Nacional de Seguridad) frameworks.
|
||||
| `ModoEjecucion` | string | Yes | Execution mode (`manual`, `automático`, `híbrido`). |
|
||||
| `Dependencias` | array of strings | Yes | Ids of prerequisite controls. Empty list when none. |
|
||||
|
||||
##### CCC_Requirement_Attribute
|
||||
#### CCC_Requirement_Attribute
|
||||
|
||||
Used by the Common Cloud Controls Catalog.
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
| `FamilyName` | string | Yes | Control family, e.g. `Data`. |
|
||||
| `FamilyName` | string | Yes | Control family, for example `Data`. |
|
||||
| `FamilyDescription` | string | Yes | Description of the family. |
|
||||
| `Section` | string | Yes | Section title. |
|
||||
| `SubSection` | string | Yes | Subsection title, or empty string. |
|
||||
@@ -338,9 +148,9 @@ Used by the Common Cloud Controls Catalog.
|
||||
| `SectionThreatMappings` | array of objects | Yes | Each entry has `ReferenceId` and `Identifiers`. |
|
||||
| `SectionGuidelineMappings` | array of objects | Yes | Each entry has `ReferenceId` and `Identifiers`. |
|
||||
|
||||
##### Generic_Compliance_Requirement_Attribute
|
||||
#### Generic_Compliance_Requirement_Attribute
|
||||
|
||||
The fallback attribute model used when no framework-specific schema applies (e.g. NIST 800-53, PCI DSS, GDPR, HIPAA). It is **always the last** element of the `Compliance_Requirement.Attributes` Union; that ordering is load-bearing.
|
||||
The fallback attribute model used when no framework-specific schema applies (for example NIST 800-53, PCI DSS, GDPR, HIPAA).
|
||||
|
||||
| Field | Type | Required | Notes |
|
||||
|---|---|---|---|
|
||||
@@ -348,17 +158,17 @@ The fallback attribute model used when no framework-specific schema applies (e.g
|
||||
| `Section` | string | No | Section name. |
|
||||
| `SubSection` | string | No | Subsection name. |
|
||||
| `SubGroup` | string | No | Subgroup name. |
|
||||
| `Service` | string | No | Affected service, e.g. `iam`. |
|
||||
| `Service` | string | No | Affected service, for example `iam`. |
|
||||
| `Type` | string | No | Control type. |
|
||||
| `Comment` | string | No | Free-form comment. |
|
||||
|
||||
For the remaining attribute classes (`AWS_Well_Architected_Requirement_Attribute`, `ISO27001_2013_Requirement_Attribute`, `Mitre_Requirement_Attribute_<Provider>`, `KISA_ISMSP_Requirement_Attribute`, `Prowler_ThreatScore_Requirement_Attribute`, `C5Germany_Requirement_Attribute`, `CSA_CCM_Requirement_Attribute`) consult `prowler/lib/check/compliance_models.py` for the full field sets.
|
||||
Additional per-framework attribute models exist for `AWS_Well_Architected_Requirement_Attribute`, `ISO27001_2013_Requirement_Attribute`, `Mitre_Requirement_Attribute_<Provider>`, `KISA_ISMSP_Requirement_Attribute`, `Prowler_ThreatScore_Requirement_Attribute`, `C5Germany_Requirement_Attribute`, and `CSA_CCM_Requirement_Attribute`. Consult `prowler/lib/check/compliance_models.py` for their full field sets.
|
||||
|
||||
<Note>
|
||||
The `Attributes` field is a Pydantic `Union`. The generic attribute model **must** remain the last element of that Union — otherwise Pydantic v1 silently coerces every framework into the generic shape and your specialized fields are dropped. Adding a brand-new attribute shape requires inserting the Pydantic class **before** `Generic_Compliance_Requirement_Attribute`.
|
||||
The `Attributes` field is a Pydantic `Union`. The generic attribute model must remain the last element of that Union, otherwise Pydantic v1 silently coerces every framework into the generic shape and your specialized fields are dropped.
|
||||
</Note>
|
||||
|
||||
#### Minimal working example
|
||||
## Minimal Working Example
|
||||
|
||||
The following snippet is a complete, valid framework file named `my_framework_1.0_aws.json`, saved at `prowler/compliance/aws/my_framework_1.0_aws.json`. It uses the generic attribute shape for simplicity.
|
||||
|
||||
@@ -404,26 +214,26 @@ The following snippet is a complete, valid framework file named `my_framework_1.
|
||||
}
|
||||
```
|
||||
|
||||
### Mapping checks to requirements
|
||||
## Mapping Checks to Requirements
|
||||
|
||||
Each requirement links to the Prowler checks that, together, produce a PASS or FAIL verdict for that control.
|
||||
|
||||
- **Include every requirement from the source catalog.** The framework file must mirror the full control list, one-to-one. Compliance percentages, dashboards, and exported metadata are computed against the total requirement count.
|
||||
- List every check by its canonical identifier — the value of `CheckID` inside the check's `.metadata.json` file.
|
||||
- **Include every requirement from the source catalog.** The framework file must mirror the full control list, one-to-one. Compliance percentages, dashboards, and exported metadata are computed against the total requirement count, so omitting an unmappable control inflates coverage and misrepresents the framework.
|
||||
- List every check by its canonical identifier, the value of `CheckID` inside the check's `.metadata.json` file.
|
||||
- One requirement can reference multiple checks. The requirement is evaluated as FAIL when any referenced check produces a FAIL finding for a resource in scope.
|
||||
- Leave `Checks` (legacy) or `checks.<provider>` (universal) as an empty array when the requirement cannot be automated. The requirement still appears in the report and contributes to the total.
|
||||
- Leave `Checks` as an empty array when the requirement cannot be automated. The requirement still appears in the report, contributes to the total, and resolves to `MANUAL`. An empty mapping is valid; a missing requirement is not.
|
||||
- Reuse checks across requirements when the same control applies in multiple places. Do not duplicate check logic to match framework structure.
|
||||
- Avoid referencing checks from a different provider. A legacy compliance file is bound to one provider, and cross-provider checks will never match findings in the scan.
|
||||
- Avoid referencing checks from a different provider. A compliance file is bound to one provider, and cross-provider checks will never match findings in the scan.
|
||||
|
||||
To discover available checks:
|
||||
To discover available checks, run:
|
||||
|
||||
```bash
|
||||
uv run python prowler-cli.py <provider> --list-checks
|
||||
```
|
||||
|
||||
### Supporting multiple providers (legacy)
|
||||
## Supporting Multiple Providers
|
||||
|
||||
The legacy schema binds each file to a single provider. To cover several providers with the same framework, ship one JSON file per provider:
|
||||
Each compliance file targets a single provider. To cover several providers with the same framework (for example CIS across AWS, Azure, and GCP), ship one JSON file per provider:
|
||||
|
||||
```
|
||||
prowler/compliance/aws/cis_2.0_aws.json
|
||||
@@ -431,15 +241,15 @@ prowler/compliance/azure/cis_2.0_azure.json
|
||||
prowler/compliance/gcp/cis_2.0_gcp.json
|
||||
```
|
||||
|
||||
Keep the `Framework` and `Version` values identical across the files so the dispatcher matches them; change only the `Provider`, `Checks`, and provider-specific metadata. The CIS output formatter already supports every provider listed above.
|
||||
Keep the `Framework` and `Version` values identical across the files so the dispatcher matches them, and change only the `Provider`, `Checks`, and provider-specific metadata.
|
||||
|
||||
For a brand-new framework that spans several providers, **prefer the universal schema** — it covers every provider from a single file. If you must use the legacy schema, add one transformer per provider in `prowler/lib/outputs/compliance/<framework>/` and extend the summary-table dispatcher accordingly. See [Output Formatter](#output-formatter).
|
||||
The CIS output formatter already supports every provider listed above. For a brand-new framework that spans several providers, add one transformer per provider in `prowler/lib/outputs/compliance/<framework>/` and extend the summary-table dispatcher accordingly. See [Output Formatter](#output-formatter).
|
||||
|
||||
### Output formatter
|
||||
## Output Formatter
|
||||
|
||||
Legacy frameworks render in two forms: a detailed CSV report written to disk, and a summary table printed in the CLI. Both are produced by the output formatter package for the framework. Universal frameworks do **not** need a Python output formatter — the `outputs` config inside the JSON drives rendering — so this section applies only to the legacy schema.
|
||||
Prowler renders every compliance framework in two forms: a detailed CSV report written to disk, and a summary table printed in the CLI. Both are produced by the output formatter package for the framework.
|
||||
|
||||
For a new legacy framework named `my_framework`, create:
|
||||
For a new framework named `my_framework`, create:
|
||||
|
||||
```
|
||||
prowler/lib/outputs/compliance/my_framework/
|
||||
@@ -449,19 +259,19 @@ prowler/lib/outputs/compliance/my_framework/
|
||||
└── models.py # CSV row Pydantic model
|
||||
```
|
||||
|
||||
#### Step 1 — Define the CSV row model
|
||||
### Step 1 – Define the CSV Row Model
|
||||
|
||||
In `models.py`, declare a Pydantic v1 model with one field per CSV column. Use existing models such as `AWSCISModel` in `prowler/lib/outputs/compliance/cis/models.py` as the reference. Fields typically include `Provider`, `Description`, `AccountId`, `Region`, `AssessmentDate`, `Requirements_Id`, `Requirements_Description`, one `Requirements_Attributes_*` field per attribute key, plus the finding fields `Status`, `StatusExtended`, `ResourceId`, `ResourceName`, `CheckId`, `Muted`, `Framework`, `Name`.
|
||||
|
||||
#### Step 2 — Implement the transformer
|
||||
### Step 2 – Implement the Transformer Class
|
||||
|
||||
In `my_framework_aws.py`, subclass `ComplianceOutput` from `prowler.lib.outputs.compliance.compliance_output` and implement `transform(findings, compliance, compliance_name)`. Iterate over `findings`, match each finding to the requirements it satisfies through `finding.compliance.get(compliance_name, [])`, and append one row per attribute to `self._data`.
|
||||
|
||||
#### Step 3 — Add the summary-table dispatcher
|
||||
### Step 3 – Add the Summary-Table Dispatcher
|
||||
|
||||
In `my_framework.py`, implement `get_my_framework_table(findings, bulk_checks_metadata, compliance_framework, output_filename, output_directory, compliance_overview)` following the pattern in `prowler/lib/outputs/compliance/cis/cis.py`.
|
||||
|
||||
#### Step 4 — Register the framework in the dispatchers
|
||||
### Step 4 – Register the Framework in the Dispatchers
|
||||
|
||||
- Add the dispatcher call in `prowler/lib/outputs/compliance/compliance.py`, inside `display_compliance_table`, with a branch such as `elif "my_framework" in compliance_framework:`.
|
||||
- Register the CSV model and transformer in `prowler/lib/outputs/compliance/compliance_output.py` so the CSV file is emitted during the scan.
|
||||
@@ -470,94 +280,49 @@ In `my_framework.py`, implement `get_my_framework_table(findings, bulk_checks_me
|
||||
For NIST-style catalogs that use `Generic_Compliance_Requirement_Attribute`, no custom formatter is needed. The generic formatter in `prowler/lib/outputs/compliance/generic/` handles them automatically, provided the JSON validates against the generic attribute schema.
|
||||
</Note>
|
||||
|
||||
### Legacy-to-universal adapter
|
||||
|
||||
At load time, every legacy file is transparently adapted to a `ComplianceFramework` via `adapt_legacy_to_universal()` (`compliance_models.py:819`), which: (a) flattens the first element of `Attributes` into a flat `attributes` dict, (b) wraps `Checks` as `{provider_lower: [...]}`, (c) infers `attributes_metadata` from the matched Pydantic class via `_infer_attribute_metadata()`. The rest of Prowler (CSV/OCSF/PDF output, CLI table) then treats both formats identically.
|
||||
|
||||
Loader-error behaviour differs between the two entry points:
|
||||
|
||||
- `load_compliance_framework()` (legacy) is **fail-fast**: it calls `sys.exit(1)` on any `ValidationError` (`compliance_models.py:464`).
|
||||
- `load_compliance_framework_universal()` is more lenient — it logs the error and returns `None`, so `get_bulk_compliance_frameworks_universal()` simply skips the broken file and keeps loading the rest.
|
||||
|
||||
## Version handling
|
||||
## Version Handling
|
||||
|
||||
Prowler matches frameworks by concatenating `Framework` and `Version`. A missing or empty `Version` collapses several frameworks to the same key and breaks CLI filtering with `--compliance`.
|
||||
|
||||
- Always set `Version` (or `version` for universal frameworks) to a non-empty string, even for frameworks that rename editions rather than version them. Use the edition identifier (for example `RD2022`, `v2025.10`, `4.0`, `2022/2554`).
|
||||
- Always set `Version` to a non-empty string, even for frameworks that rename editions rather than version them. Use the edition identifier (for example `RD2022`, `v2025.10`, `4.0`).
|
||||
- When the source catalog has no version, use the first year of adoption or the release date.
|
||||
- For **legacy** files, make sure the version substring embedded in the filename matches `Version`, because the CLI dispatcher reads `compliance_framework.split("_")[1]` to select the correct version.
|
||||
- Make sure the version substring embedded in the filename matches `Version`, because the CLI dispatcher reads `compliance_framework.split("_")[1]` to select the correct version.
|
||||
|
||||
## Validating Your Framework
|
||||
## Validating the Framework Locally
|
||||
|
||||
Before opening a PR, validate the JSON loads cleanly against the model and that every referenced check actually exists.
|
||||
Follow the steps below before opening a pull request.
|
||||
|
||||
### 1. Schema validation
|
||||
|
||||
For **universal** frameworks, load the file and inspect what was parsed. The framework key inside `bulk` is the **basename of the JSON file** (without `.json`); for `prowler/compliance/dora.json` that key is `dora`, for `prowler/compliance/aws/cis_5.0_aws.json` it is `cis_5.0_aws`.
|
||||
|
||||
```python
|
||||
from prowler.lib.check.compliance_models import (
|
||||
load_compliance_framework_universal,
|
||||
get_bulk_compliance_frameworks_universal,
|
||||
)
|
||||
|
||||
fw = load_compliance_framework_universal("prowler/compliance/<your_framework>.json")
|
||||
assert fw is not None, "load returned None — check the logs for the validation error"
|
||||
print(fw.framework, len(fw.requirements), fw.get_providers())
|
||||
|
||||
bulk = get_bulk_compliance_frameworks_universal("aws")
|
||||
assert "<your_framework_filename_without_json>" in bulk
|
||||
```
|
||||
|
||||
### 2. Check existence cross-check
|
||||
|
||||
There is **no automatic check-existence validation** at load time. Cross-check that every check name in your framework maps to a real check directory:
|
||||
|
||||
```python
|
||||
import os
|
||||
real = set()
|
||||
for svc in os.listdir("prowler/providers/aws/services"):
|
||||
svc_path = f"prowler/providers/aws/services/{svc}"
|
||||
if not os.path.isdir(svc_path):
|
||||
continue
|
||||
for entry in os.listdir(svc_path):
|
||||
if os.path.isfile(f"{svc_path}/{entry}/{entry}.metadata.json"):
|
||||
real.add(entry)
|
||||
|
||||
referenced = {c for r in fw.requirements for c in r.checks.get("aws", [])}
|
||||
missing = referenced - real
|
||||
assert not missing, f"checks referenced in framework but not found in repo: {sorted(missing)}"
|
||||
```
|
||||
|
||||
### 3. CLI smoke test
|
||||
### 1. Run the Compliance Model Validator
|
||||
|
||||
```bash
|
||||
uv run python prowler-cli.py <provider> --list-compliance
|
||||
```
|
||||
|
||||
The framework must appear in the output. A validation error indicates a schema mismatch.
|
||||
The framework must appear in the output. A validation error indicates a schema mismatch between the JSON file and the attribute model.
|
||||
|
||||
### 2. Run a Scan Filtered by the Framework
|
||||
|
||||
```bash
|
||||
uv run python prowler-cli.py <provider> \
|
||||
--compliance <framework_key> \
|
||||
--compliance <framework>_<version>_<provider> \
|
||||
--log-level ERROR
|
||||
```
|
||||
|
||||
Verify that:
|
||||
|
||||
- Prowler produces a CSV file under `output/compliance/` with the expected name.
|
||||
- The CLI summary table lists every section / pillar of the framework.
|
||||
- The CLI summary table lists every section in the framework.
|
||||
- Findings roll up under the expected requirements.
|
||||
|
||||
### 4. Inspect the CSV output
|
||||
### 3. Inspect the CSV Output
|
||||
|
||||
Open the generated CSV and confirm:
|
||||
|
||||
- All columns defined in `models.py` (legacy) or in `attributes_metadata` (universal) appear.
|
||||
- Every requirement has at least one row per scanned resource (when there are findings).
|
||||
- Attribute values such as `Requirements_Attributes_Section` reflect the JSON content.
|
||||
- All columns defined in `models.py` appear.
|
||||
- Every requirement has at least one row per scanned resource.
|
||||
- Values such as `Requirements_Attributes_Section` reflect the JSON content.
|
||||
|
||||
### 5. Verify the framework in Prowler App
|
||||
### 4. Verify the Framework in Prowler App
|
||||
|
||||
Launch Prowler App locally (`docker compose up` from the repository root) and run a scan with the new compliance framework. Confirm the compliance page renders the requirements, sections, and status widgets correctly.
|
||||
|
||||
@@ -566,7 +331,7 @@ Launch Prowler App locally (`docker compose up` from the repository root) and ru
|
||||
Compliance contributions require two layers of tests.
|
||||
|
||||
- **Schema tests** exercise the Pydantic models. Extend `tests/lib/check/universal_compliance_models_test.py` with a case that loads the new JSON file and asserts the attribute type matches the expected model.
|
||||
- **Output tests** (legacy frameworks only) exercise the transformer. Mirror the structure under `tests/lib/outputs/compliance/<framework>/` with fixtures that feed synthetic findings through the transformer and assert the resulting CSV rows.
|
||||
- **Output tests** exercise the transformer. Mirror the structure under `tests/lib/outputs/compliance/<framework>/` with fixtures that feed synthetic findings through the transformer and assert the resulting CSV rows.
|
||||
|
||||
Run the suite with:
|
||||
|
||||
@@ -577,20 +342,7 @@ uv run pytest -n auto tests/lib/check/universal_compliance_models_test.py \
|
||||
|
||||
For guidance on writing Prowler SDK tests, refer to [Unit Testing](/developer-guide/unit-testing).
|
||||
|
||||
## Running and listing your framework
|
||||
|
||||
Once the file is in place, the CLI auto-discovers it:
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-compliance # framework appears in the list
|
||||
prowler <provider> --compliance <framework_key> --list-checks
|
||||
prowler <provider> --compliance <framework_key> # full scan + compliance report
|
||||
prowler <provider> --compliance <framework_key> --list-compliance-requirements <framework_key>
|
||||
```
|
||||
|
||||
For end-user-facing tutorials (recommended for high-profile frameworks), add a dedicated page under `docs/user-guide/compliance/tutorials/` and register it in the `"Compliance"` group of `docs/docs.json`. See `docs/user-guide/compliance/tutorials/threatscore.mdx` as a reference.
|
||||
|
||||
## Submitting the pull request
|
||||
## Submitting the Pull Request
|
||||
|
||||
Before opening the pull request:
|
||||
|
||||
@@ -600,31 +352,28 @@ Before opening the pull request:
|
||||
uv run pytest -n auto
|
||||
```
|
||||
2. Add a changelog entry under the `### 🚀 Added` section of `prowler/CHANGELOG.md`, describing the new framework and the providers it covers.
|
||||
3. Follow the [Pull Request Template](https://github.com/prowler-cloud/prowler/blob/master/.github/pull_request_template.md) and set the PR title using Conventional Commits, e.g. `feat(compliance): add My Framework 1.0 for AWS`.
|
||||
3. Follow the [Pull Request Template](https://github.com/prowler-cloud/prowler/blob/master/.github/pull_request_template.md) and set the PR title using Conventional Commits, for example `feat(compliance): add My Framework 1.0 for AWS`.
|
||||
4. Request review from the compliance codeowners listed in `.github/CODEOWNERS`.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
The following issues are the most common when contributing a compliance framework.
|
||||
|
||||
- **`ValidationError: field required` during scan (legacy).** The JSON is missing a required attribute field. Re-check the matching Pydantic model in `prowler/lib/check/compliance_models.py`.
|
||||
- **All attributes collapse to `Generic_Compliance_Requirement_Attribute` values (legacy).** The Pydantic `Union` is ordered incorrectly, or the JSON matches only the generic shape. Keep the generic model in the last Union position and ensure every required field is present in the JSON.
|
||||
- **`attributes_metadata validation failed` (universal).** The root validator in `compliance_models.py:669` rejected the file. The error message lists each offending requirement; common causes are unknown attribute keys (typo or missing entry in `attributes_metadata`), enum violations, or missing required keys.
|
||||
- **`--compliance` filter does not find the framework.** For legacy: the filename does not match `<framework>_<version>_<provider>.json`, the version is empty, or the file lives outside `prowler/compliance/<provider>/`. For universal: the file is not at the top level of `prowler/compliance/` or it loaded as `None` (check logs for the validation error).
|
||||
- **CLI summary table is empty but the CSV is populated (legacy).** The dispatcher branch in `prowler/lib/outputs/compliance/compliance.py` is missing or its substring match does not catch the framework key.
|
||||
- **CSV file is missing after the scan (legacy).** The transformer class is not registered in `prowler/lib/outputs/compliance/compliance_output.py`, or `transform()` raises silently. Run the scan with `--log-level DEBUG`.
|
||||
- **Findings do not roll up under a requirement.** A check listed in `Checks` either does not exist for that provider or is spelled incorrectly. Run `--list-checks | grep <check_name>` to confirm, or run the check-existence cross-check from "Validating Your Framework".
|
||||
- **`ValidationError: field required` during scan.** The JSON is missing a required attribute field. Re-check the matching Pydantic model in `prowler/lib/check/compliance_models.py`.
|
||||
- **All attributes collapse to `Generic_Compliance_Requirement_Attribute` values.** The Pydantic `Union` is ordered incorrectly, or the JSON matches only the generic shape. Move the generic model to the last Union position and ensure every required field is present in the JSON.
|
||||
- **`--compliance` filter does not find the framework.** The filename does not match the expected pattern `<framework>_<version>_<provider>.json`, the version is empty, or the file lives outside `prowler/compliance/<provider>/`.
|
||||
- **CLI summary table is empty but the CSV is populated.** The dispatcher branch in `prowler/lib/outputs/compliance/compliance.py` is missing or its substring match does not catch the framework key.
|
||||
- **CSV file is missing after the scan.** The transformer class is not registered in `prowler/lib/outputs/compliance/compliance_output.py`, or `transform()` raises silently. Run the scan with `--log-level DEBUG`.
|
||||
- **Findings do not roll up under a requirement.** A check listed in `Checks` either does not exist for that provider or is spelled incorrectly. Run `--list-checks | grep <check_name>` to confirm.
|
||||
|
||||
## Reference examples
|
||||
## Reference Examples
|
||||
|
||||
Use the following files as templates when modeling a new contribution.
|
||||
|
||||
- `prowler/compliance/dora.json` — universal schema, single-provider populated (AWS), ready to extend with more providers.
|
||||
- `prowler/compliance/csa_ccm_4.0.json` — universal schema, multi-provider populated (AWS, Azure, GCP, AlibabaCloud, OracleCloud).
|
||||
- `prowler/compliance/aws/cis_2.0_aws.json` — legacy CIS attribute shape.
|
||||
- `prowler/compliance/aws/nist_800_53_revision_5_aws.json` — legacy generic attribute shape.
|
||||
- `prowler/compliance/aws/ccc_aws.json` — legacy CCC attribute shape.
|
||||
- `prowler/compliance/azure/ens_rd2022_azure.json` — legacy ENS attribute shape.
|
||||
- `prowler/lib/check/compliance_models.py` — canonical Pydantic schemas for both formats.
|
||||
- `prowler/lib/outputs/compliance/cis/` — reference implementation of a multi-provider legacy output formatter.
|
||||
- `prowler/lib/outputs/compliance/generic/` — reference implementation of a legacy generic output formatter.
|
||||
- `prowler/compliance/aws/cis_2.0_aws.json` – CIS attribute shape.
|
||||
- `prowler/compliance/aws/nist_800_53_revision_5_aws.json` – Generic attribute shape.
|
||||
- `prowler/compliance/aws/ccc_aws.json` – CCC attribute shape.
|
||||
- `prowler/compliance/azure/ens_rd2022_azure.json` – ENS attribute shape.
|
||||
- `prowler/lib/check/compliance_models.py` – Canonical Pydantic schemas.
|
||||
- `prowler/lib/outputs/compliance/cis/` – Reference implementation of a multi-provider output formatter.
|
||||
- `prowler/lib/outputs/compliance/generic/` – Reference implementation of a generic output formatter.
|
||||
|
||||
@@ -353,8 +353,7 @@
|
||||
"group": "Cookbooks",
|
||||
"pages": [
|
||||
"user-guide/cookbooks/kubernetes-in-cluster",
|
||||
"user-guide/cookbooks/cicd-pipeline",
|
||||
"user-guide/cookbooks/powerbi-cis-benchmarks"
|
||||
"user-guide/cookbooks/cicd-pipeline"
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
|
Before Width: | Height: | Size: 120 KiB |
|
Before Width: | Height: | Size: 93 KiB |
|
Before Width: | Height: | Size: 92 KiB |
|
Before Width: | Height: | Size: 107 KiB |
|
Before Width: | Height: | Size: 153 KiB |
|
Before Width: | Height: | Size: 101 KiB |
|
Before Width: | Height: | Size: 405 KiB |
@@ -39,11 +39,10 @@ Dependencies are continuously monitored for known vulnerabilities with timely up
|
||||
|
||||
### Dependency Vulnerability Scanning
|
||||
|
||||
- **osv-scanner:** Scans lockfiles against the [OSV.dev](https://osv.dev) vulnerability database
|
||||
- Runs in CI on every pull request and push for SDK, API, and UI
|
||||
- Fails the build on `HIGH`, `CRITICAL`, and `UNKNOWN` severity findings
|
||||
- Posts a per-lockfile report as a PR comment
|
||||
- Per-vulnerability ignores (with reason and expiry) live in `osv-scanner.toml` at the repo root
|
||||
- **Safety:** Scans Python dependencies against known vulnerability databases
|
||||
- Runs on every commit via pre-commit hooks
|
||||
- Integrated into CI/CD for SDK and API
|
||||
- Configured with selective ignores for tracked exceptions
|
||||
- **Trivy:** Multi-purpose scanner for containers and dependencies
|
||||
- Scans all container images (UI, API, SDK, MCP Server)
|
||||
- Checks for vulnerabilities in OS packages and application dependencies
|
||||
|
||||
@@ -1,168 +0,0 @@
|
||||
---
|
||||
title: "Visualize Multi-Cloud CIS Benchmarks With Power BI"
|
||||
description: "Ingest Prowler compliance CSV exports into a ready-made Microsoft Power BI template that surfaces CIS Benchmark posture across AWS, Azure, Google Cloud, and Kubernetes."
|
||||
---
|
||||
|
||||
The Multi-Cloud CIS Benchmarks Power BI template turns Prowler compliance CSV exports into an interactive dashboard. The template ingests scan results from Prowler CLI or Prowler Cloud and renders cross-provider CIS Benchmark coverage, profile-level breakdowns, regional drill-downs, and time-series trends. Center for Internet Security (CIS) Benchmarks are industry-standard configuration baselines maintained by CIS.
|
||||
|
||||
The template and its source files live in the Prowler repository under [`contrib/PowerBI/Multicloud CIS Benchmarks`](https://github.com/prowler-cloud/prowler/tree/master/contrib/PowerBI/Multicloud%20CIS%20Benchmarks).
|
||||
|
||||
<img src="/images/powerbi/report-cover.png" alt="Multi-Cloud CIS Benchmarks Power BI report cover showing aggregated compliance posture across providers" width="900" />
|
||||
|
||||
## Prerequisites
|
||||
|
||||
The setup requires the following components:
|
||||
|
||||
* **Microsoft Power BI Desktop:** free download from Microsoft.
|
||||
* **Prowler compliance CSV exports:** produced by Prowler CLI or downloaded from Prowler Cloud or Prowler App.
|
||||
* **Local directory:** holds the CSV exports that the template ingests at load time.
|
||||
|
||||
## Supported CIS Benchmarks
|
||||
|
||||
The template ships with predefined mappings for the following CIS Benchmark versions. Exports must match these versions for the dashboard to populate correctly:
|
||||
|
||||
| Compliance Framework | Version |
|
||||
| ---------------------------------------------- | -------- |
|
||||
| CIS Amazon Web Services Foundations Benchmark | v6.0 |
|
||||
| CIS Microsoft Azure Foundations Benchmark | v5.0 |
|
||||
| CIS Google Cloud Platform Foundation Benchmark | v4.0 |
|
||||
| CIS Kubernetes Benchmark | v1.12.0 |
|
||||
|
||||
<Warning>
|
||||
Other CIS Benchmark versions are not recognized by the template. Confirm the framework version before running the scan or downloading the export.
|
||||
</Warning>
|
||||
|
||||
## Setup
|
||||
|
||||
### Step 1: Install Microsoft Power BI Desktop
|
||||
|
||||
Download and install Microsoft Power BI Desktop from the official Microsoft site. The template is opened with this application.
|
||||
|
||||
### Step 2: Generate Compliance CSV Exports
|
||||
|
||||
Compliance CSV exports can be generated through Prowler CLI or downloaded from Prowler Cloud and Prowler App.
|
||||
|
||||
#### Option A: Prowler CLI
|
||||
|
||||
Run a scan with the `--compliance` flag pointing to the appropriate CIS framework, for example:
|
||||
|
||||
```sh
|
||||
prowler aws --compliance cis_6.0_aws
|
||||
prowler azure --compliance cis_5.0_azure
|
||||
prowler gcp --compliance cis_4.0_gcp
|
||||
prowler kubernetes --compliance cis_1.12_kubernetes
|
||||
```
|
||||
|
||||
The compliance CSV exports are written to `output/compliance/` by default.
|
||||
|
||||
#### Option B: Prowler Cloud or Prowler App
|
||||
|
||||
Open the Compliance section, select the desired CIS Benchmark, and download the CSV export.
|
||||
|
||||
<img src="/images/powerbi/download-compliance-scan.png" alt="Compliance section in Prowler Cloud showing the CSV download option for a CIS Benchmark scan" width="900" />
|
||||
|
||||
### Step 3: Create a Local Directory for the Exports
|
||||
|
||||
Place every CSV export in a single local directory. The template parses filenames to detect the provider, so filenames must keep the provider keyword (`aws`, `azure`, `gcp`, or `kubernetes`).
|
||||
|
||||
<Note>
|
||||
Time-series visualizations such as "Compliance Percent Over Time" require multiple scans from different dates in the same directory.
|
||||
</Note>
|
||||
|
||||
### Step 4: Open the Power BI Template
|
||||
|
||||
Download the template file [`Prowler Multicloud CIS Benchmarks.pbit`](https://github.com/prowler-cloud/prowler/raw/master/contrib/PowerBI/Multicloud%20CIS%20Benchmarks/Prowler%20Multicloud%20CIS%20Benchmarks.pbit) and open it. Power BI Desktop prompts for the full filepath to the directory created in step 3.
|
||||
|
||||
### Step 5: Provide the Directory Filepath
|
||||
|
||||
Enter the absolute filepath without quotation marks. The Windows "copy as path" feature wraps the path in quotation marks automatically; remove them before submitting.
|
||||
|
||||
### Step 6: Save the Report as a `.pbix` File
|
||||
|
||||
Once the filepath is submitted, the template ingests the CSV exports and renders the report. Save the populated report as a `.pbix` file for future use. Re-running the `.pbit` template generates a fresh report against an updated directory.
|
||||
|
||||
## Validation
|
||||
|
||||
To confirm the CSV exports were ingested correctly, open the "Configuration" tab inside the report.
|
||||
|
||||
<img src="/images/powerbi/validation.png" alt="Configuration tab in the Power BI report displaying loaded CIS Benchmarks, the Prowler CSV folder path, and the list of ingested exports" width="900" />
|
||||
|
||||
The "Configuration" tab exposes three tables:
|
||||
|
||||
* **Loaded CIS Benchmarks:** lists the benchmarks and versions supported by the template. This table is defined by the template itself and is not editable. All benchmarks remain listed regardless of which provider exports were supplied.
|
||||
* **Prowler CSV Folder:** displays the absolute path provided during template load.
|
||||
* **Loaded Prowler Exports:** lists every CSV file detected in the directory. A green checkmark identifies the file used as the latest assessment for each provider and benchmark combination.
|
||||
|
||||
## Report Sections
|
||||
|
||||
The report is organized into three navigable pages:
|
||||
|
||||
| Report Page | Purpose |
|
||||
| ----------- | ------------------------------------------------------------------------------------ |
|
||||
| Overview | Aggregates CIS Benchmark posture across AWS, Azure, Google Cloud, and Kubernetes. |
|
||||
| Benchmark | Focuses on a single CIS Benchmark with profile-level and regional filters. |
|
||||
| Requirement | Drill-through page that surfaces details for a single benchmark requirement. |
|
||||
|
||||
### Overview Page
|
||||
|
||||
The Overview page summarizes CIS Benchmark posture across every supported provider.
|
||||
|
||||
<img src="/images/powerbi/overview-page.png" alt="Overview page in the Power BI report aggregating CIS Benchmark posture across AWS, Azure, Google Cloud, and Kubernetes" width="900" />
|
||||
|
||||
The Overview page contains the following components:
|
||||
|
||||
| Component | Description |
|
||||
| ---------------------------------------- | ---------------------------------------------------------------------------- |
|
||||
| CIS Benchmark Overview | Table listing benchmark name, version, and overall compliance percentage. |
|
||||
| Provider by Requirement Status | Bar chart breaking down requirements by status and provider. |
|
||||
| Compliance Percent Heatmap | Heatmap of compliance percentage by benchmark and profile level. |
|
||||
| Profile Level by Requirement Status | Bar chart breaking down requirements by status and profile level. |
|
||||
| Compliance Percent Over Time by Provider | Line chart tracking overall compliance percentage over time by provider. |
|
||||
|
||||
### Benchmark Page
|
||||
|
||||
The Benchmark page focuses on a single CIS Benchmark. The benchmark, profile level, and region can be selected through dropdown filters.
|
||||
|
||||
<img src="/images/powerbi/benchmark-page.png" alt="Benchmark page in the Power BI report showing region heatmap, section breakdown, time-series trend, and the requirements table" width="900" />
|
||||
|
||||
The Benchmark page contains the following components:
|
||||
|
||||
| Component | Description |
|
||||
| ---------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| Compliance Percent Heatmap | Heatmap of compliance percentage by region and profile level. |
|
||||
| Benchmark Section by Requirement Status | Bar chart of requirements grouped by benchmark section and status. |
|
||||
| Compliance Percent Over Time by Region | Line chart tracking compliance percentage over time by region. |
|
||||
| Benchmark Requirements | Table listing requirement section, requirement number, requirement title, number of resources tested, status, and failing checks. |
|
||||
|
||||
### Requirement Page
|
||||
|
||||
The Requirement page is a drill-through view that exposes the full context of a single requirement. To populate the page, right-click a row in the "Benchmark Requirements" table on the Benchmark page and select "Drill through" > "Requirement".
|
||||
|
||||
<img src="/images/powerbi/requirement-page.png" alt="Requirement drill-through page in the Power BI report showing rationale, remediation, regional breakdown, and the resource-level check results" width="900" />
|
||||
|
||||
The Requirement page contains the following components:
|
||||
|
||||
| Component | Description |
|
||||
| ------------------------------------------ | -------------------------------------------------------------------------------------------- |
|
||||
| Title | Requirement title. |
|
||||
| Rationale | Rationale for the requirement. |
|
||||
| Remediation | Remediation guidance for the requirement. |
|
||||
| Region by Check Status | Bar chart of Prowler check results grouped by region and status. |
|
||||
| Resource Checks for Benchmark Requirements | Table listing resource ID, resource name, status, description, and the underlying Prowler check. |
|
||||
|
||||
## Walkthrough Video
|
||||
|
||||
A full walkthrough is available on YouTube:
|
||||
|
||||
[](https://www.youtube.com/watch?v=lfKFkTqBxjU)
|
||||
|
||||
## Related Resources
|
||||
|
||||
<CardGroup cols={2}>
|
||||
<Card title="Compliance Frameworks" icon="shield-check" href="/user-guide/compliance/tutorials/compliance">
|
||||
Review the Compliance workflow across Prowler Cloud, Prowler App, and Prowler CLI.
|
||||
</Card>
|
||||
<Card title="Prowler Dashboard" icon="chart-line" href="/user-guide/cli/tutorials/dashboard">
|
||||
Explore the built-in local dashboard for Prowler CSV exports.
|
||||
</Card>
|
||||
</CardGroup>
|
||||
@@ -18,7 +18,7 @@ Prowler requests the following read-only OAuth 2.0 scopes:
|
||||
| `https://www.googleapis.com/auth/admin.directory.domain.readonly` | Read access to domain information |
|
||||
| `https://www.googleapis.com/auth/admin.directory.customer.readonly` | Read access to customer information (Customer ID) |
|
||||
| `https://www.googleapis.com/auth/admin.directory.orgunit.readonly` | Read access to organizational unit hierarchy (identifies the root OU for policy filtering) |
|
||||
| `https://www.googleapis.com/auth/cloud-identity.policies.readonly` | Read access to domain-level application policies (required for Calendar, Gmail, Chat, and Drive service checks) |
|
||||
| `https://www.googleapis.com/auth/cloud-identity.policies.readonly` | Read access to domain-level application policies (required for Calendar service checks) |
|
||||
| `https://www.googleapis.com/auth/admin.directory.rolemanagement.readonly` | Read access to admin roles and role assignments |
|
||||
|
||||
<Warning>
|
||||
@@ -40,7 +40,7 @@ In the [Google Cloud Console](https://console.cloud.google.com), select the targ
|
||||
| API | Required For |
|
||||
|-----|--------------|
|
||||
| **Admin SDK API** | Directory service checks (users, roles, domains) |
|
||||
| **Cloud Identity API** | Calendar, Gmail, Chat, and Drive service checks (domain-level application policies) |
|
||||
| **Cloud Identity API** | Calendar service checks (domain-level sharing and invitation policies) |
|
||||
|
||||
For each API:
|
||||
|
||||
@@ -49,7 +49,7 @@ For each API:
|
||||
3. Click **Enable**
|
||||
|
||||
<Note>
|
||||
Both APIs must be enabled in the same GCP project that hosts the Service Account. Calendar, Gmail, Chat, and Drive checks will return no findings if the Cloud Identity API is not enabled.
|
||||
Both APIs must be enabled in the same GCP project that hosts the Service Account. Calendar checks will return no findings if the Cloud Identity API is not enabled.
|
||||
</Note>
|
||||
|
||||
### Step 3: Create a Service Account
|
||||
@@ -176,9 +176,9 @@ If Prowler connects but returns empty results or permission errors for specific
|
||||
- Verify all scopes are authorized in the Admin Console
|
||||
- Ensure the delegated user is an active super administrator
|
||||
|
||||
### Policy API Checks Return No Findings
|
||||
### Calendar Checks Return No Findings
|
||||
|
||||
If the Directory checks run successfully but the Calendar, Gmail, Chat, or Drive checks return no findings, the Cloud Identity Policy API is not reachable for this Service Account. Verify:
|
||||
If the Directory checks run successfully but the Calendar checks (e.g., `calendar_external_sharing_primary_calendar`) return no findings, the Cloud Identity Policy API is not reachable for this Service Account. Verify:
|
||||
|
||||
- The **Cloud Identity API** is enabled in the GCP project hosting the Service Account (Step 2)
|
||||
- The scope `https://www.googleapis.com/auth/cloud-identity.policies.readonly` is included in the Domain-Wide Delegation OAuth scopes list in the Admin Console (Step 5)
|
||||
|
||||
@@ -0,0 +1,115 @@
|
||||
# Prowler Multicloud CIS Benchmarks PowerBI Template
|
||||

|
||||
|
||||
## Getting Started
|
||||
|
||||
1. Install Microsoft PowerBI Desktop
|
||||
|
||||
This report requires the Microsoft PowerBI Desktop software which can be downloaded for free from Microsoft.
|
||||
2. Run compliance scans in Prowler
|
||||
|
||||
The report uses compliance csv outputs from Prowler. Compliance scans can be run using either [Prowler CLI](https://docs.prowler.com/projects/prowler-open-source/en/latest/#prowler-cli) or [Prowler Cloud/App](https://cloud.prowler.com/sign-in)
|
||||
1. Prowler CLI -> Run a Prowler scan using the --compliance option
|
||||
2. Prowler Cloud/App -> Navigate to the compliance section to download csv outputs
|
||||

|
||||
|
||||
|
||||
The template supports the following CIS Benchmarks only:
|
||||
|
||||
| Compliance Framework | Version |
|
||||
| ---------------------------------------------- | ------- |
|
||||
| CIS Amazon Web Services Foundations Benchmark | v4.0.1 |
|
||||
| CIS Google Cloud Platform Foundation Benchmark | v3.0.0 |
|
||||
| CIS Microsoft Azure Foundations Benchmark | v3.0.0 |
|
||||
| CIS Kubernetes Benchmark | v1.10.0 |
|
||||
|
||||
Ensure you run or download the correct benchmark versions.
|
||||
3. Create a local directory to store Prowler csv outputs
|
||||
|
||||
Once downloaded, place your csv outputs in a directory on your local machine. If you rename the files, they must maintain the provider in the filename.
|
||||
|
||||
To use time-series capabilities such as "compliance percent over time" you'll need scans from multiple dates.
|
||||
4. Download and run the PowerBI template file (.pbit)
|
||||
|
||||
Running the .pbit file will open PowerBI Desktop and prompt you for the full filepath to the local directory
|
||||
5. Enter the full filepath to the directory created in step 3
|
||||
|
||||
Provide the full filepath from the root directory.
|
||||
|
||||
Ensure that the filepath is not wrapped in quotation marks (""). If you use Windows' "copy as path" feature, it will automatically include quotation marks.
|
||||
6. Save the report as a PowerBI file (.pbix)
|
||||
|
||||
Once the filepath is entered, the template will automatically ingest and populate the report. You can then save this file as a new PowerBI report. If you'd like to generate another report, simply re-run the template file (.pbit) from step 4.
|
||||
|
||||
## Validation
|
||||
|
||||
After setting up your dashboard, you may want to validate the Prowler csv files were ingested correctly. To do this, navigate to the "Configuration" tab.
|
||||
|
||||
The "Loaded CIS Benchmarks" table shows the supported benchmarks and versions. This is defined by the template file and not editable by the user. All benchmarks will be loaded regardless of which providers you provided csv outputs for.
|
||||
|
||||
The "Prowler CSV Folder" shows the path to the local directory you provided.
|
||||
|
||||
The "Loaded Prowler Exports" table shows the ingested csv files from the local directory. It will mark files that are treated as the latest assessment with a green checkmark.
|
||||
|
||||

|
||||
|
||||
## Report Sections
|
||||
|
||||
The PowerBI Report is broken into three main report pages
|
||||
|
||||
| Report Page | Description |
|
||||
| ----------- | ----------------------------------------------------------------------------------- |
|
||||
| Overview    | Provides general CIS Benchmark overview across AWS, Azure, GCP, and Kubernetes      |
|
||||
| Benchmark | Provides overview of a single CIS Benchmark |
|
||||
| Requirement | Drill-through page to view details of a single requirement |
|
||||
|
||||
|
||||
### Overview Page
|
||||
|
||||
The overview page is a general CIS Benchmark overview across AWS, Azure, GCP, and Kubernetes.
|
||||
|
||||

|
||||
|
||||
The page has the following components:
|
||||
|
||||
| Component | Description |
|
||||
| ---------------------------------------- | ------------------------------------------------------------------------ |
|
||||
| CIS Benchmark Overview | Table with benchmark name, Version, and overall compliance percentage |
|
||||
| Provider by Requirement Status | Bar chart showing benchmark requirements by status by provider |
|
||||
| Compliance Percent Heatmap | Heatmap showing compliance percent by benchmark and profile level |
|
||||
| Profile level by Requirement Status | Bar chart showing requirements by status and profile level |
|
||||
| Compliance Percent Over Time by Provider | Line chart showing overall compliance percentage over time by provider. |
|
||||
|
||||
### Benchmark Page
|
||||
|
||||
The benchmark page provides an overview of a single CIS Benchmark. You can select the benchmark from the dropdown as well as scope down to specific profile levels or regions.
|
||||
|
||||

|
||||
|
||||
The page has the following components:
|
||||
|
||||
| Component | Description |
|
||||
| --------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| Compliance Percent Heatmap | Heatmap showing compliance percent by region and profile level |
|
||||
| Benchmark Section by Requirement Status | Bar chart showing benchmark requirements by benchmark section and status                                                                   |
|
||||
| Compliance percent Over Time by Region | Line chart showing overall compliance percentage over time by region |
|
||||
| Benchmark Requirements                  | Table showing requirement section, requirement number, requirement title, number of resources tested, status, and number of failing checks |
|
||||
|
||||
### Requirement Page
|
||||
|
||||
The requirement page is a drill-through page to view details of a single requirement. To populate the requirement page right click on a requirement from the "Benchmark Requirements" table on the benchmark page and select "Drill through" -> "Requirement".
|
||||
|
||||

|
||||
|
||||
The requirement page has the following components:
|
||||
|
||||
| Component | Description |
|
||||
| ------------------------------------------ | --------------------------------------------------------------------------------- |
|
||||
| Title | Title of the requirement |
|
||||
| Rationale | Rationale of the requirement |
|
||||
| Remediation                                | Remediation guidance for the requirement                                          |
|
||||
| Region by Check Status | Bar chart showing Prowler checks by region and status |
|
||||
| Resource Checks for Benchmark Requirements | Table showing Resource ID, Resource Name, Status, Description, and Prowler Check  |
|
||||
|
||||
## Walkthrough Video
|
||||
[](https://www.youtube.com/watch?v=lfKFkTqBxjU)
|
||||
@@ -6,13 +6,11 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
|
||||
### 🚀 Added
|
||||
|
||||
- 6 Chat file sharing, external messaging, spaces, and apps access checks for Google Workspace provider using the Cloud Identity Policy API [(#11126)](https://github.com/prowler-cloud/prowler/pull/11126)
|
||||
- `entra_service_principal_no_secrets_for_permanent_tier0_roles` check for M365 provider [(#10788)](https://github.com/prowler-cloud/prowler/pull/10788)
|
||||
- `iam_user_access_not_stale_to_sagemaker` check for AWS provider with configurable `max_unused_sagemaker_access_days` (default 90) [(#11000)](https://github.com/prowler-cloud/prowler/pull/11000)
|
||||
- `cloudtrail_bedrock_logging_enabled` check for AWS provider [(#10858)](https://github.com/prowler-cloud/prowler/pull/10858)
|
||||
- Okta provider with OAuth 2.0 authentication and `signon_global_session_idle_timeout_15min` check [(#11079)](https://github.com/prowler-cloud/prowler/pull/11079)
|
||||
- `sagemaker_domain_sso_configured` check for AWS provider [(#11094)](https://github.com/prowler-cloud/prowler/pull/11094)
|
||||
- DORA (Digital Operational Resilience Act, Regulation (EU) 2022/2554) universal compliance framework with AWS provider coverage across the five DORA pillars [(#11131)](https://github.com/prowler-cloud/prowler/pull/11131)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
@@ -33,7 +31,6 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
### 🐞 Fixed
|
||||
|
||||
- `entra_users_mfa_capable` and `entra_break_glass_account_fido2_security_key_registered` report a preventive FAIL per affected user (with the missing permission named) when the M365 service principal lacks `AuditLog.Read.All`, instead of mass false positives [(#10907)](https://github.com/prowler-cloud/prowler/pull/10907)
|
||||
- Update duplicated GCP CIS requirements IDs [(#11180)](https://github.com/prowler-cloud/prowler/pull/11180)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -1,597 +0,0 @@
|
||||
{
|
||||
"framework": "DORA",
|
||||
"name": "Digital Operational Resilience Act (Regulation (EU) 2022/2554)",
|
||||
"version": "2022/2554",
|
||||
"description": "The Digital Operational Resilience Act (DORA) is a European Union regulation (Regulation (EU) 2022/2554) that sets a uniform framework for the digital operational resilience of the EU financial sector. Mandatory since 17 January 2025, it applies to financial entities (banks, insurers, investment firms, payment institutions, etc.) and to ICT third-party service providers. DORA is structured around five pillars: ICT risk management, ICT-related incident reporting, digital operational resilience testing, ICT third-party risk management, and information sharing. This Prowler mapping covers the technical controls auditable from cloud configuration; the organisational, contractual and supervisory obligations defined in DORA must be addressed outside of Prowler.",
|
||||
"icon": "dora",
|
||||
"attributes_metadata": [
|
||||
{
|
||||
"key": "Pillar",
|
||||
"label": "Pillar",
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"enum": [
|
||||
"ICT Risk Management",
|
||||
"ICT-Related Incident Reporting",
|
||||
"Digital Operational Resilience Testing",
|
||||
"ICT Third-Party Risk Management",
|
||||
"Information Sharing"
|
||||
],
|
||||
"output_formats": {
|
||||
"csv": true,
|
||||
"ocsf": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"key": "Article",
|
||||
"label": "Article",
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"output_formats": {
|
||||
"csv": true,
|
||||
"ocsf": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"key": "ArticleTitle",
|
||||
"label": "Article Title",
|
||||
"type": "str",
|
||||
"required": true,
|
||||
"output_formats": {
|
||||
"csv": true,
|
||||
"ocsf": true
|
||||
}
|
||||
}
|
||||
],
|
||||
"outputs": {
|
||||
"table_config": {
|
||||
"group_by": "Pillar"
|
||||
},
|
||||
"pdf_config": {
|
||||
"language": "en",
|
||||
"primary_color": "#003399",
|
||||
"secondary_color": "#0055A5",
|
||||
"bg_color": "#F0F4FA",
|
||||
"group_by_field": "Pillar",
|
||||
"sections": [
|
||||
"ICT Risk Management",
|
||||
"ICT-Related Incident Reporting",
|
||||
"Digital Operational Resilience Testing",
|
||||
"ICT Third-Party Risk Management",
|
||||
"Information Sharing"
|
||||
],
|
||||
"section_short_names": {
|
||||
"ICT Risk Management": "ICT Risk Mgmt",
|
||||
"ICT-Related Incident Reporting": "Incident Reporting",
|
||||
"Digital Operational Resilience Testing": "Resilience Testing",
|
||||
"ICT Third-Party Risk Management": "Third-Party Risk",
|
||||
"Information Sharing": "Info Sharing"
|
||||
},
|
||||
"charts": [
|
||||
{
|
||||
"id": "pillar_compliance",
|
||||
"type": "horizontal_bar",
|
||||
"group_by": "Pillar",
|
||||
"title": "Compliance Score by DORA Pillar",
|
||||
"y_label": "Pillar",
|
||||
"x_label": "Compliance %",
|
||||
"value_source": "compliance_percent",
|
||||
"color_mode": "by_value"
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"only_failed": true,
|
||||
"include_manual": false
|
||||
}
|
||||
}
|
||||
},
|
||||
"requirements": [
|
||||
{
|
||||
"id": "DORA-Art5",
|
||||
"name": "Governance and organisation",
|
||||
"description": "Financial entities shall have a sound, comprehensive and well-documented ICT internal governance and control framework. Senior management is accountable for ICT risk and shall enforce strong identity, authentication and least-privilege policies for privileged identities, including the root account.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 5",
|
||||
"ArticleTitle": "Governance and organisation"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"iam_avoid_root_usage",
|
||||
"iam_no_root_access_key",
|
||||
"iam_root_mfa_enabled",
|
||||
"iam_root_hardware_mfa_enabled",
|
||||
"iam_root_credentials_management_enabled",
|
||||
"iam_password_policy_minimum_length_14",
|
||||
"iam_password_policy_lowercase",
|
||||
"iam_password_policy_uppercase",
|
||||
"iam_password_policy_number",
|
||||
"iam_password_policy_symbol",
|
||||
"iam_password_policy_reuse_24",
|
||||
"iam_password_policy_expires_passwords_within_90_days_or_less",
|
||||
"iam_securityaudit_role_created",
|
||||
"iam_support_role_created",
|
||||
"organizations_account_part_of_organizations",
|
||||
"iam_user_mfa_enabled_console_access",
|
||||
"iam_user_hardware_mfa_enabled"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art6",
|
||||
"name": "ICT risk management framework",
|
||||
"description": "Financial entities shall have an ICT risk management framework that is sound, comprehensive and well-documented, enabling them to address ICT risk quickly, efficiently and comprehensively and to ensure a high level of digital operational resilience. This includes continuous configuration recording, security findings aggregation and an enterprise-wide visibility plane.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 6",
|
||||
"ArticleTitle": "ICT risk management framework"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"config_recorder_all_regions_enabled",
|
||||
"config_recorder_using_aws_service_role",
|
||||
"securityhub_enabled",
|
||||
"accessanalyzer_enabled",
|
||||
"accessanalyzer_enabled_without_findings",
|
||||
"organizations_delegated_administrators",
|
||||
"guardduty_centrally_managed",
|
||||
"guardduty_delegated_admin_enabled_all_regions"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art7",
|
||||
"name": "ICT systems, protocols and tools",
|
||||
"description": "Financial entities shall use and maintain updated ICT systems, protocols and tools that are appropriate to the magnitude of operations supporting ICT functions, technologically resilient, and adequately equipped to securely process data. Cryptographic primitives, certificate hygiene and network segmentation are core to this requirement.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 7",
|
||||
"ArticleTitle": "ICT systems, protocols and tools"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"acm_certificates_with_secure_key_algorithms",
|
||||
"acm_certificates_transparency_logs_enabled",
|
||||
"acm_certificates_expiration_check",
|
||||
"ec2_ebs_default_encryption",
|
||||
"kms_cmk_rotation_enabled",
|
||||
"s3_bucket_secure_transport_policy",
|
||||
"s3_bucket_default_encryption",
|
||||
"s3_bucket_kms_encryption",
|
||||
"vpc_subnet_separate_private_public",
|
||||
"vpc_subnet_no_public_ip_by_default",
|
||||
"elb_insecure_ssl_ciphers",
|
||||
"elbv2_insecure_ssl_ciphers",
|
||||
"elb_ssl_listeners",
|
||||
"elbv2_ssl_listeners",
|
||||
"cloudfront_distributions_using_deprecated_ssl_protocols",
|
||||
"cloudfront_distributions_https_enabled",
|
||||
"rds_instance_transport_encrypted"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art8",
|
||||
"name": "Identification",
|
||||
"description": "Financial entities shall identify, classify and adequately document all ICT supported business functions, roles and responsibilities, the information assets and ICT assets supporting them, and their interdependencies. They shall on a continuous basis identify all sources of ICT risk, in particular the risk exposure to and from other financial entities.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 8",
|
||||
"ArticleTitle": "Identification"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"accessanalyzer_enabled",
|
||||
"accessanalyzer_enabled_without_findings",
|
||||
"macie_is_enabled",
|
||||
"macie_automated_sensitive_data_discovery_enabled",
|
||||
"ec2_securitygroup_not_used",
|
||||
"ec2_elastic_ip_unassigned",
|
||||
"ec2_networkacl_unused",
|
||||
"secretsmanager_secret_unused"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art9",
|
||||
"name": "Protection and prevention",
|
||||
"description": "Financial entities shall continuously monitor and control the security and functioning of ICT systems and tools and minimise the impact of ICT risk by deploying appropriate ICT security tools, policies and procedures. Encryption at rest and in transit, blocking of public exposure, network access controls, secret management and instance hardening are central to this article.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 9",
|
||||
"ArticleTitle": "Protection and prevention"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"kms_key_not_publicly_accessible",
|
||||
"ec2_ebs_volume_encryption",
|
||||
"ec2_ebs_snapshots_encrypted",
|
||||
"ec2_ebs_public_snapshot",
|
||||
"ec2_ebs_snapshot_account_block_public_access",
|
||||
"s3_account_level_public_access_blocks",
|
||||
"s3_bucket_level_public_access_block",
|
||||
"s3_bucket_public_access",
|
||||
"s3_bucket_policy_public_write_access",
|
||||
"s3_bucket_public_write_acl",
|
||||
"s3_bucket_public_list_acl",
|
||||
"s3_bucket_acl_prohibited",
|
||||
"s3_access_point_public_access_block",
|
||||
"ec2_securitygroup_default_restrict_traffic",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
|
||||
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
|
||||
"rds_instance_storage_encrypted",
|
||||
"rds_cluster_storage_encrypted",
|
||||
"rds_instance_no_public_access",
|
||||
"rds_snapshots_public_access",
|
||||
"secretsmanager_not_publicly_accessible",
|
||||
"secretsmanager_has_restrictive_resource_policy",
|
||||
"secretsmanager_automatic_rotation_enabled",
|
||||
"dynamodb_tables_kms_cmk_encryption_enabled",
|
||||
"sns_topics_kms_encryption_at_rest_enabled",
|
||||
"sns_topics_not_publicly_accessible",
|
||||
"ec2_instance_imdsv2_enabled",
|
||||
"ec2_instance_account_imdsv2_enabled",
|
||||
"efs_encryption_at_rest_enabled",
|
||||
"awslambda_function_not_publicly_accessible"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art10",
|
||||
"name": "Detection",
|
||||
"description": "Financial entities shall have in place mechanisms to promptly detect anomalous activities, including ICT network performance issues and ICT-related incidents, and to identify potential single points of failure. Threat detection across compute, identity, storage and the API control plane is required for timely detection.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 10",
|
||||
"ArticleTitle": "Detection"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"guardduty_is_enabled",
|
||||
"guardduty_no_high_severity_findings",
|
||||
"guardduty_ec2_malware_protection_enabled",
|
||||
"guardduty_lambda_protection_enabled",
|
||||
"guardduty_rds_protection_enabled",
|
||||
"guardduty_s3_protection_enabled",
|
||||
"guardduty_eks_audit_log_enabled",
|
||||
"guardduty_eks_runtime_monitoring_enabled",
|
||||
"securityhub_enabled",
|
||||
"cloudtrail_threat_detection_enumeration",
|
||||
"cloudtrail_threat_detection_llm_jacking",
|
||||
"cloudtrail_threat_detection_privilege_escalation",
|
||||
"cloudtrail_insights_exist",
|
||||
"inspector2_is_enabled",
|
||||
"inspector2_active_findings_exist",
|
||||
"ec2_elastic_ip_shodan"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art11",
|
||||
"name": "Response and recovery",
|
||||
"description": "Financial entities shall put in place a comprehensive ICT business continuity policy, including ICT response and recovery plans, that ensures the continuity of ICT-supported critical or important functions. Operational alarming, automated event routing and tested recovery actions are essential.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 11",
|
||||
"ArticleTitle": "Response and recovery"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"cloudwatch_alarm_actions_enabled",
|
||||
"cloudwatch_alarm_actions_alarm_state_configured",
|
||||
"eventbridge_global_endpoint_event_replication_enabled",
|
||||
"sns_subscription_not_using_http_endpoints",
|
||||
"backup_plans_exist",
|
||||
"backup_vaults_exist",
|
||||
"rds_instance_critical_event_subscription",
|
||||
"rds_cluster_critical_event_subscription"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art12",
|
||||
"name": "Backup policies and procedures, restoration and recovery procedures and methods",
|
||||
"description": "Financial entities shall develop and document backup policies and procedures specifying the scope of data subject to backup and the minimum frequency of the backup, as well as restoration and recovery procedures and methods. Backups must be encrypted, retained, and resources must be designed for recoverability across availability zones and regions.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 12",
|
||||
"ArticleTitle": "Backup policies and procedures, restoration and recovery procedures and methods"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"backup_plans_exist",
|
||||
"backup_vaults_exist",
|
||||
"backup_vaults_encrypted",
|
||||
"backup_recovery_point_encrypted",
|
||||
"backup_reportplans_exist",
|
||||
"rds_instance_backup_enabled",
|
||||
"rds_cluster_protected_by_backup_plan",
|
||||
"rds_instance_protected_by_backup_plan",
|
||||
"rds_instance_multi_az",
|
||||
"rds_cluster_multi_az",
|
||||
"rds_cluster_backtrack_enabled",
|
||||
"rds_instance_deletion_protection",
|
||||
"rds_cluster_deletion_protection",
|
||||
"rds_snapshots_encrypted",
|
||||
"s3_bucket_object_versioning",
|
||||
"s3_bucket_object_lock",
|
||||
"s3_bucket_cross_region_replication",
|
||||
"s3_bucket_no_mfa_delete",
|
||||
"dynamodb_tables_pitr_enabled",
|
||||
"dynamodb_table_deletion_protection_enabled",
|
||||
"ec2_ebs_volume_protected_by_backup_plan",
|
||||
"ec2_ebs_volume_snapshots_exists",
|
||||
"autoscaling_group_multiple_az",
|
||||
"elb_is_in_multiple_az",
|
||||
"elbv2_is_in_multiple_az",
|
||||
"cloudfront_distributions_multiple_origin_failover_configured",
|
||||
"dynamodb_table_protected_by_backup_plan"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art13",
|
||||
"name": "Learning and evolving",
|
||||
"description": "Financial entities shall have in place capabilities and staff to gather information on vulnerabilities and cyber threats, perform post ICT-related incident reviews, and continuously feed lessons learnt back into the ICT risk assessment process. Findings aggregation and continuous insights drive this cycle.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 13",
|
||||
"ArticleTitle": "Learning and evolving"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"securityhub_enabled",
|
||||
"guardduty_no_high_severity_findings",
|
||||
"inspector2_active_findings_exist",
|
||||
"accessanalyzer_enabled_without_findings",
|
||||
"cloudtrail_insights_exist"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art14",
|
||||
"name": "Communication",
|
||||
"description": "As part of the ICT risk management framework, financial entities shall have in place crisis communication plans enabling a responsible disclosure of ICT-related incidents or major vulnerabilities to clients, counterparts and the public. Reliable, encrypted and access-controlled notification channels are required.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Risk Management",
|
||||
"Article": "Article 14",
|
||||
"ArticleTitle": "Communication"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"sns_topics_kms_encryption_at_rest_enabled",
|
||||
"sns_topics_not_publicly_accessible",
|
||||
"sns_subscription_not_using_http_endpoints",
|
||||
"eventbridge_bus_exposed",
|
||||
"eventbridge_bus_cross_account_access",
|
||||
"eventbridge_schema_registry_cross_account_access",
|
||||
"cloudwatch_alarm_actions_enabled",
|
||||
"cloudwatch_alarm_actions_alarm_state_configured"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art17",
|
||||
"name": "ICT-related incident management process",
|
||||
"description": "Financial entities shall define, establish and implement an ICT-related incident management process to detect, manage and notify ICT-related incidents. Comprehensive trail logging, log integrity protection, retention and centralisation of ICT events are foundational requirements.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT-Related Incident Reporting",
|
||||
"Article": "Article 17",
|
||||
"ArticleTitle": "ICT-related incident management process"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"cloudtrail_multi_region_enabled",
|
||||
"cloudtrail_multi_region_enabled_logging_management_events",
|
||||
"cloudtrail_kms_encryption_enabled",
|
||||
"cloudtrail_log_file_validation_enabled",
|
||||
"cloudtrail_cloudwatch_logging_enabled",
|
||||
"cloudtrail_logs_s3_bucket_access_logging_enabled",
|
||||
"cloudtrail_logs_s3_bucket_is_not_publicly_accessible",
|
||||
"cloudtrail_s3_dataevents_read_enabled",
|
||||
"cloudtrail_s3_dataevents_write_enabled",
|
||||
"cloudtrail_bucket_requires_mfa_delete",
|
||||
"cloudtrail_bedrock_logging_enabled",
|
||||
"cloudwatch_log_group_retention_policy_specific_days_enabled",
|
||||
"cloudwatch_log_group_kms_encryption_enabled",
|
||||
"cloudwatch_log_group_no_secrets_in_logs",
|
||||
"cloudwatch_log_group_not_publicly_accessible",
|
||||
"vpc_flow_logs_enabled",
|
||||
"ec2_client_vpn_endpoint_connection_logging_enabled",
|
||||
"route53_public_hosted_zones_cloudwatch_logging_enabled",
|
||||
"elb_logging_enabled",
|
||||
"elbv2_logging_enabled",
|
||||
"cloudfront_distributions_logging_enabled",
|
||||
"s3_bucket_server_access_logging_enabled"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art18",
|
||||
"name": "Classification of ICT-related incidents and cyber threats",
|
||||
"description": "Financial entities shall classify ICT-related incidents and shall determine their impact based on criteria such as the number of clients affected, duration, geographical spread, data losses, and criticality of the services affected. Severity-aware threat detection across the estate underpins this classification.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT-Related Incident Reporting",
|
||||
"Article": "Article 18",
|
||||
"ArticleTitle": "Classification of ICT-related incidents and cyber threats"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"guardduty_no_high_severity_findings",
|
||||
"guardduty_centrally_managed",
|
||||
"guardduty_delegated_admin_enabled_all_regions",
|
||||
"securityhub_enabled",
|
||||
"inspector2_active_findings_exist",
|
||||
"accessanalyzer_enabled_without_findings",
|
||||
"cloudtrail_threat_detection_enumeration",
|
||||
"cloudtrail_threat_detection_llm_jacking",
|
||||
"cloudtrail_threat_detection_privilege_escalation"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art19",
|
||||
"name": "Reporting of major ICT-related incidents and voluntary notification of significant cyber threats",
|
||||
"description": "Financial entities shall report major ICT-related incidents to the relevant competent authority and may, on a voluntary basis, notify significant cyber threats. Detective metric filters, change-tracking alarms and reliable notification topics are needed to surface and route reportable events.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT-Related Incident Reporting",
|
||||
"Article": "Article 19",
|
||||
"ArticleTitle": "Reporting of major ICT-related incidents and voluntary notification of significant cyber threats"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"cloudwatch_log_metric_filter_authentication_failures",
|
||||
"cloudwatch_log_metric_filter_unauthorized_api_calls",
|
||||
"cloudwatch_log_metric_filter_root_usage",
|
||||
"cloudwatch_log_metric_filter_sign_in_without_mfa",
|
||||
"cloudwatch_log_metric_filter_disable_or_scheduled_deletion_of_kms_cmk",
|
||||
"cloudwatch_log_metric_filter_for_s3_bucket_policy_changes",
|
||||
"cloudwatch_log_metric_filter_policy_changes",
|
||||
"cloudwatch_log_metric_filter_security_group_changes",
|
||||
"cloudwatch_log_metric_filter_aws_organizations_changes",
|
||||
"cloudwatch_log_metric_filter_and_alarm_for_aws_config_configuration_changes_enabled",
|
||||
"cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled",
|
||||
"cloudwatch_changes_to_network_acls_alarm_configured",
|
||||
"cloudwatch_changes_to_network_gateways_alarm_configured",
|
||||
"cloudwatch_changes_to_network_route_tables_alarm_configured",
|
||||
"cloudwatch_changes_to_vpcs_alarm_configured",
|
||||
"sns_subscription_not_using_http_endpoints"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art24",
|
||||
"name": "General requirements for the performance of digital operational resilience testing",
|
||||
"description": "Financial entities shall establish, maintain and review a sound and comprehensive digital operational resilience testing programme, as an integral part of the ICT risk management framework. Continuous vulnerability discovery, configuration assessment and instance manageability are foundational.",
|
||||
"attributes": {
|
||||
"Pillar": "Digital Operational Resilience Testing",
|
||||
"Article": "Article 24",
|
||||
"ArticleTitle": "General requirements for the performance of digital operational resilience testing"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"inspector2_is_enabled",
|
||||
"inspector2_active_findings_exist",
|
||||
"securityhub_enabled",
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ec2_instance_with_outdated_ami",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art25",
|
||||
"name": "Testing of ICT tools and systems",
|
||||
"description": "Financial entities shall ensure that tests are undertaken on ICT tools and systems, on critical ICT systems supporting all critical or important functions, at least yearly. Vulnerability assessments, deprecated component detection and certificate hygiene must be tracked.",
|
||||
"attributes": {
|
||||
"Pillar": "Digital Operational Resilience Testing",
|
||||
"Article": "Article 25",
|
||||
"ArticleTitle": "Testing of ICT tools and systems"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"inspector2_is_enabled",
|
||||
"inspector2_active_findings_exist",
|
||||
"guardduty_is_enabled",
|
||||
"guardduty_no_high_severity_findings",
|
||||
"config_recorder_all_regions_enabled",
|
||||
"ec2_instance_with_outdated_ami",
|
||||
"ec2_instance_managed_by_ssm",
|
||||
"ec2_instance_paravirtual_type",
|
||||
"rds_instance_deprecated_engine_version",
|
||||
"acm_certificates_expiration_check",
|
||||
"rds_instance_certificate_expiration",
|
||||
"iam_no_expired_server_certificates_stored",
|
||||
"ssm_managed_compliant_patching"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art28",
|
||||
"name": "General principles (ICT third-party risk)",
|
||||
"description": "Financial entities shall manage ICT third-party risk as an integral component of ICT risk within their ICT risk management framework. Cross-account access, trust boundaries, organization-level controls and dependency visibility are critical to monitor third-party exposure on AWS.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Third-Party Risk Management",
|
||||
"Article": "Article 28",
|
||||
"ArticleTitle": "General principles (ICT third-party risk)"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"iam_role_cross_service_confused_deputy_prevention",
|
||||
"iam_role_cross_account_readonlyaccess_policy",
|
||||
"iam_no_custom_policy_permissive_role_assumption",
|
||||
"accessanalyzer_enabled",
|
||||
"accessanalyzer_enabled_without_findings",
|
||||
"s3_bucket_cross_account_access",
|
||||
"dynamodb_table_cross_account_access",
|
||||
"eventbridge_bus_cross_account_access",
|
||||
"eventbridge_schema_registry_cross_account_access",
|
||||
"cloudwatch_cross_account_sharing_disabled",
|
||||
"organizations_delegated_administrators",
|
||||
"organizations_account_part_of_organizations",
|
||||
"organizations_scp_check_deny_regions",
|
||||
"vpc_endpoint_connections_trust_boundaries",
|
||||
"vpc_endpoint_services_allowed_principals_trust_boundaries",
|
||||
"vpc_peering_routing_tables_with_least_privilege",
|
||||
"awslambda_function_using_cross_account_layers"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art30",
|
||||
"name": "Key contractual provisions",
|
||||
"description": "Contractual arrangements with ICT third-party service providers shall be set out in writing and include, at minimum, agreed service levels and clear allocation of rights and obligations. Privilege boundaries, least-privilege policies and absence of administrative wildcards are the technical guardrails that enforce these contractual constraints inside AWS.",
|
||||
"attributes": {
|
||||
"Pillar": "ICT Third-Party Risk Management",
|
||||
"Article": "Article 30",
|
||||
"ArticleTitle": "Key contractual provisions"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"iam_aws_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_attached_policy_no_administrative_privileges",
|
||||
"iam_customer_unattached_policy_no_administrative_privileges",
|
||||
"iam_inline_policy_no_administrative_privileges",
|
||||
"iam_inline_policy_allows_privilege_escalation",
|
||||
"iam_policy_allows_privilege_escalation",
|
||||
"iam_inline_policy_no_full_access_to_cloudtrail",
|
||||
"iam_inline_policy_no_full_access_to_kms",
|
||||
"iam_policy_no_full_access_to_cloudtrail",
|
||||
"iam_policy_no_full_access_to_kms",
|
||||
"iam_role_administratoraccess_policy",
|
||||
"iam_user_administrator_access_policy",
|
||||
"iam_group_administrator_access_policy",
|
||||
"iam_administrator_access_with_mfa",
|
||||
"iam_policy_attached_only_to_group_or_roles",
|
||||
"accessanalyzer_enabled"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "DORA-Art45",
|
||||
"name": "Information-sharing arrangements on cyber threat information and intelligence",
|
||||
"description": "Financial entities may exchange amongst themselves cyber threat information and intelligence, including indicators of compromise, tactics, techniques and procedures, cyber security alerts and configuration tools. Centralised threat detection, sensitive data discovery and trail-based intelligence enable participation in such information-sharing arrangements.",
|
||||
"attributes": {
|
||||
"Pillar": "Information Sharing",
|
||||
"Article": "Article 45",
|
||||
"ArticleTitle": "Information-sharing arrangements on cyber threat information and intelligence"
|
||||
},
|
||||
"checks": {
|
||||
"aws": [
|
||||
"guardduty_is_enabled",
|
||||
"guardduty_centrally_managed",
|
||||
"securityhub_enabled",
|
||||
"macie_is_enabled",
|
||||
"macie_automated_sensitive_data_discovery_enabled",
|
||||
"cloudtrail_threat_detection_enumeration",
|
||||
"cloudtrail_threat_detection_llm_jacking",
|
||||
"cloudtrail_threat_detection_privilege_escalation",
|
||||
"accessanalyzer_enabled_without_findings"
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -914,7 +914,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "3.10",
|
||||
"Id": "3.1",
|
||||
"Description": "Use Identity Aware Proxy (IAP) to Ensure Only Traffic From Google IP Addresses are 'Allowed'",
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
@@ -1132,7 +1132,7 @@
|
||||
]
|
||||
},
|
||||
{
|
||||
"Id": "4.10",
|
||||
"Id": "4.1",
|
||||
"Description": "Ensure That App Engine Applications Enforce HTTPS Connections",
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
|
||||
@@ -1084,9 +1084,7 @@
|
||||
{
|
||||
"Id": "3.1.4.1.1",
|
||||
"Description": "Ensure external filesharing in Google Chat and Hangouts is disabled",
|
||||
"Checks": [
|
||||
"chat_external_file_sharing_disabled"
|
||||
],
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1107,9 +1105,7 @@
|
||||
{
|
||||
"Id": "3.1.4.1.2",
|
||||
"Description": "Ensure internal filesharing in Google Chat and Hangouts is disabled",
|
||||
"Checks": [
|
||||
"chat_internal_file_sharing_disabled"
|
||||
],
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1130,9 +1126,7 @@
|
||||
{
|
||||
"Id": "3.1.4.2.1",
|
||||
"Description": "Ensure Google Chat externally is restricted to allowed domains",
|
||||
"Checks": [
|
||||
"chat_external_messaging_restricted"
|
||||
],
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1153,9 +1147,7 @@
|
||||
{
|
||||
"Id": "3.1.4.3.1",
|
||||
"Description": "Ensure external spaces in Google Chat and Hangouts are restricted",
|
||||
"Checks": [
|
||||
"chat_external_spaces_restricted"
|
||||
],
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1176,9 +1168,7 @@
|
||||
{
|
||||
"Id": "3.1.4.4.1",
|
||||
"Description": "Ensure allow users to install Chat apps is disabled",
|
||||
"Checks": [
|
||||
"chat_apps_installation_disabled"
|
||||
],
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
@@ -1199,9 +1189,7 @@
|
||||
{
|
||||
"Id": "3.1.4.4.2",
|
||||
"Description": "Ensure allow users to add and use incoming webhooks is disabled",
|
||||
"Checks": [
|
||||
"chat_incoming_webhooks_disabled"
|
||||
],
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "3 Apps",
|
||||
|
||||
@@ -1466,9 +1466,7 @@
|
||||
{
|
||||
"Id": "GWS.CHAT.2.1",
|
||||
"Description": "External file sharing SHALL be disabled to protect sensitive information from unauthorized or accidental sharing",
|
||||
"Checks": [
|
||||
"chat_external_file_sharing_disabled"
|
||||
],
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Chat",
|
||||
@@ -1494,9 +1492,7 @@
|
||||
{
|
||||
"Id": "GWS.CHAT.4.1",
|
||||
"Description": "External chat messaging SHALL be restricted to allowlisted domains only",
|
||||
"Checks": [
|
||||
"chat_external_messaging_restricted"
|
||||
],
|
||||
"Checks": [],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "Chat",
|
||||
|
||||
@@ -33,28 +33,24 @@ def process_universal_compliance_frameworks(
|
||||
output_filename: str,
|
||||
provider: str,
|
||||
generated_outputs: dict,
|
||||
from_cli: bool = True,
|
||||
is_last: bool = True,
|
||||
) -> set:
|
||||
"""Process universal compliance frameworks, generating CSV and OCSF outputs.
|
||||
|
||||
For each framework in *input_compliance_frameworks* that exists in
|
||||
*universal_frameworks* and has an ``outputs.table_config``, this function
|
||||
writes both a CSV (``UniversalComplianceOutput``) and an OCSF JSON
|
||||
(``OCSFComplianceOutput``) file. OCSF is always generated regardless of
|
||||
*universal_frameworks* and has an outputs.table_config, this function
|
||||
creates both a CSV (UniversalComplianceOutput) and an OCSF JSON
|
||||
(OCSFComplianceOutput) file. OCSF is always generated regardless of
|
||||
the user's ``--output-formats`` flag.
|
||||
|
||||
Streaming-aware: writers are tracked via ``generated_outputs["compliance"]``
|
||||
keyed by ``file_path``. On the first call per framework a new writer is
|
||||
created and emits both findings and manual requirements; subsequent calls
|
||||
reuse the writer, transform only the new ``finding_outputs`` (manual
|
||||
requirements are not re-emitted), and append to the open file. Set
|
||||
``from_cli=False`` and ``is_last=False`` for intermediate batches; pass
|
||||
``is_last=True`` on the final batch to close the file (OCSF is also
|
||||
finalized as a valid JSON array).
|
||||
The function is idempotent: it tracks already-created writers via
|
||||
``generated_outputs["compliance"]`` keyed by ``file_path``. If invoked
|
||||
again for the same framework (e.g. once per streaming batch), it
|
||||
reuses the existing writer instead of recreating it. This guarantees
|
||||
one output writer per framework for the whole execution and keeps
|
||||
the OCSF JSON array valid across multiple calls.
|
||||
|
||||
Returns the set of framework names processed so the caller can subtract
|
||||
them from the legacy per-provider output loop.
|
||||
Returns the set of framework names that were processed so the caller
|
||||
can remove them before entering the legacy per-provider output loop.
|
||||
"""
|
||||
from prowler.lib.outputs.compliance.universal.ocsf_compliance import (
|
||||
OCSFComplianceOutput,
|
||||
@@ -69,13 +65,6 @@ def process_universal_compliance_frameworks(
|
||||
if isinstance(out, (UniversalComplianceOutput, OCSFComplianceOutput))
|
||||
}
|
||||
|
||||
def _flush(writer, framework, label, is_new):
|
||||
if not is_new:
|
||||
writer._transform(finding_outputs, framework, label, include_manual=False)
|
||||
writer.close_file = is_last
|
||||
writer.batch_write_data_to_file()
|
||||
writer._data.clear()
|
||||
|
||||
processed = set()
|
||||
for compliance_name in input_compliance_frameworks:
|
||||
if not (
|
||||
@@ -86,46 +75,37 @@ def process_universal_compliance_frameworks(
|
||||
continue
|
||||
|
||||
fw = universal_frameworks[compliance_name]
|
||||
compliance_label = (
|
||||
fw.framework + "-" + fw.version if fw.version else fw.framework
|
||||
)
|
||||
|
||||
# CSV output
|
||||
csv_path = (
|
||||
f"{output_directory}/compliance/" f"{output_filename}_{compliance_name}.csv"
|
||||
)
|
||||
csv_writer = existing_writers.get(csv_path)
|
||||
csv_is_new = csv_writer is None
|
||||
if csv_is_new:
|
||||
csv_writer = UniversalComplianceOutput(
|
||||
if csv_path not in existing_writers:
|
||||
output = UniversalComplianceOutput(
|
||||
findings=finding_outputs,
|
||||
framework=fw,
|
||||
file_path=csv_path,
|
||||
from_cli=from_cli,
|
||||
provider=provider,
|
||||
)
|
||||
generated_outputs["compliance"].append(csv_writer)
|
||||
existing_writers[csv_path] = csv_writer
|
||||
_flush(csv_writer, fw, compliance_label, csv_is_new)
|
||||
generated_outputs["compliance"].append(output)
|
||||
existing_writers[csv_path] = output
|
||||
output.batch_write_data_to_file()
|
||||
|
||||
# OCSF output (always generated for universal frameworks)
|
||||
ocsf_path = (
|
||||
f"{output_directory}/compliance/"
|
||||
f"{output_filename}_{compliance_name}.ocsf.json"
|
||||
)
|
||||
ocsf_writer = existing_writers.get(ocsf_path)
|
||||
ocsf_is_new = ocsf_writer is None
|
||||
if ocsf_is_new:
|
||||
ocsf_writer = OCSFComplianceOutput(
|
||||
if ocsf_path not in existing_writers:
|
||||
ocsf_output = OCSFComplianceOutput(
|
||||
findings=finding_outputs,
|
||||
framework=fw,
|
||||
file_path=ocsf_path,
|
||||
from_cli=from_cli,
|
||||
provider=provider,
|
||||
)
|
||||
generated_outputs["compliance"].append(ocsf_writer)
|
||||
existing_writers[ocsf_path] = ocsf_writer
|
||||
_flush(ocsf_writer, fw, compliance_label, ocsf_is_new)
|
||||
generated_outputs["compliance"].append(ocsf_output)
|
||||
existing_writers[ocsf_path] = ocsf_output
|
||||
ocsf_output.batch_write_data_to_file()
|
||||
|
||||
processed.add(compliance_name)
|
||||
|
||||
|
||||
@@ -147,14 +147,7 @@ class OCSFComplianceOutput:
|
||||
findings: List["Finding"],
|
||||
framework: ComplianceFramework,
|
||||
compliance_name: str,
|
||||
include_manual: bool = True,
|
||||
) -> None:
|
||||
"""Transform findings into OCSF ComplianceFinding events.
|
||||
|
||||
Manual requirements are emitted only when ``include_manual=True``. The
|
||||
caller must pass ``False`` for subsequent streaming batches so manual
|
||||
events are not duplicated.
|
||||
"""
|
||||
# Build check -> requirements map
|
||||
check_req_map = {}
|
||||
for req in framework.requirements:
|
||||
@@ -177,9 +170,6 @@ class OCSFComplianceOutput:
|
||||
if cf:
|
||||
self._data.append(cf)
|
||||
|
||||
if not include_manual:
|
||||
return
|
||||
|
||||
# Manual requirements (no checks or empty for current provider)
|
||||
for req in framework.requirements:
|
||||
checks = req.checks
|
||||
|
||||
@@ -198,15 +198,8 @@ class UniversalComplianceOutput:
|
||||
findings: list["Finding"],
|
||||
framework: ComplianceFramework,
|
||||
compliance_name: str,
|
||||
include_manual: bool = True,
|
||||
) -> None:
|
||||
"""Transform findings into universal compliance CSV rows.
|
||||
|
||||
Manual requirements (no checks or empty for current provider) are
|
||||
emitted only when ``include_manual=True``. When the writer is reused
|
||||
across streaming batches, the caller should pass ``False`` after the
|
||||
first batch so manual rows are not duplicated.
|
||||
"""
|
||||
"""Transform findings into universal compliance CSV rows."""
|
||||
# Build check -> requirements map (filtered by provider for dict checks)
|
||||
check_req_map = {}
|
||||
for req in framework.requirements:
|
||||
@@ -235,9 +228,6 @@ class UniversalComplianceOutput:
|
||||
except Exception as e:
|
||||
logger.debug(f"Skipping row for {req.id}: {e}")
|
||||
|
||||
if not include_manual:
|
||||
return
|
||||
|
||||
# Manual requirements (no checks or empty dict)
|
||||
for req in framework.requirements:
|
||||
checks = req.checks
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "chat_apps_installation_disabled",
|
||||
"CheckTitle": "Chat apps installation is disabled for users",
|
||||
"CheckType": [],
|
||||
"ServiceName": "chat",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "Google Chat apps connect to external services to look up information, schedule meetings, or complete tasks. Apps are accounts created by Google, users in the organization, or third parties that can access user data including **email addresses**, **conversation content**, and **organizational information**.",
|
||||
"Risk": "Unrestricted Chat app installation allows **unvetted third-party applications** to access user data including conversation content and organizational information. An attacker could distribute a malicious Chat app to **exfiltrate confidential data** or establish **persistent access** to internal communications.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/6089179",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Google Chat and classic Hangouts**\n3. Click **Chat apps**\n4. Under Chat apps access settings, set **Allow users to install Chat apps** to **OFF**\n5. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Disable Chat apps installation to prevent **unvetted third-party applications** from accessing organizational data through the Chat platform.",
|
||||
"Url": "https://hub.prowler.com/check/chat_apps_installation_disabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"chat_incoming_webhooks_disabled"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.chat.chat_client import chat_client
|
||||
|
||||
|
||||
class chat_apps_installation_disabled(Check):
|
||||
"""Check that users cannot install Chat apps.
|
||||
|
||||
This check verifies that the domain-level Chat policy prevents users
|
||||
from installing Chat apps, reducing the risk of data exposure through
|
||||
third-party or unvetted applications.
|
||||
"""
|
||||
|
||||
def execute(self) -> List[CheckReportGoogleWorkspace]:
|
||||
findings = []
|
||||
|
||||
if chat_client.policies_fetched:
|
||||
report = CheckReportGoogleWorkspace(
|
||||
metadata=self.metadata(),
|
||||
resource=chat_client.policies,
|
||||
resource_id="chatPolicies",
|
||||
resource_name="Chat Policies",
|
||||
customer_id=chat_client.provider.identity.customer_id,
|
||||
)
|
||||
|
||||
apps_enabled = chat_client.policies.enable_apps
|
||||
|
||||
if apps_enabled is False:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"Chat apps installation is disabled "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
elif apps_enabled is None:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"Chat apps installation uses Google's secure default "
|
||||
f"configuration (disabled) "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Chat apps installation is enabled "
|
||||
f"in domain {chat_client.provider.identity.domain}. "
|
||||
f"Chat apps installation should be disabled to prevent unvetted apps."
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -1,4 +0,0 @@
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import Chat
|
||||
|
||||
chat_client = Chat(Provider.get_global_provider())
|
||||
@@ -1,40 +0,0 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "chat_external_file_sharing_disabled",
|
||||
"CheckTitle": "External file sharing in Chat is set to no files",
|
||||
"CheckType": [],
|
||||
"ServiceName": "chat",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "Google Chat **external file sharing** controls whether users can share files with people outside the organization via Chat conversations. Files often contain **confidential information**, and organizations in regulated industries need to control the flow of this information outside their boundaries.",
|
||||
"Risk": "Enabled external file sharing allows users to send files containing **confidential information** to external parties through Chat. This creates a **data leakage** channel that bypasses DLP controls, particularly dangerous for organizations handling **regulated data** such as PII, PHI, or financial records.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/9540647",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Google Chat and classic Hangouts**\n3. Click **Chat File Sharing**\n4. Under Setting, set **External filesharing** to **No files**\n5. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Disable **external file sharing** in Chat to prevent users from sharing files with people outside the organization through Chat conversations.",
|
||||
"Url": "https://hub.prowler.com/check/chat_external_file_sharing_disabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"chat_internal_file_sharing_disabled",
|
||||
"drive_sharing_allowlisted_domains"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.chat.chat_client import chat_client
|
||||
|
||||
|
||||
class chat_external_file_sharing_disabled(Check):
|
||||
"""Check that external file sharing in Google Chat is disabled.
|
||||
|
||||
This check verifies that the domain-level Chat policy prevents users
|
||||
from sharing files with people outside the organization via Chat,
|
||||
protecting sensitive information from unauthorized external access.
|
||||
"""
|
||||
|
||||
def execute(self) -> List[CheckReportGoogleWorkspace]:
|
||||
findings = []
|
||||
|
||||
if chat_client.policies_fetched:
|
||||
report = CheckReportGoogleWorkspace(
|
||||
metadata=self.metadata(),
|
||||
resource=chat_client.policies,
|
||||
resource_id="chatPolicies",
|
||||
resource_name="Chat Policies",
|
||||
customer_id=chat_client.provider.identity.customer_id,
|
||||
)
|
||||
|
||||
external_sharing = chat_client.policies.external_file_sharing
|
||||
|
||||
if external_sharing == "NO_FILES":
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"External file sharing in Chat is disabled "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
if external_sharing is None:
|
||||
report.status_extended = (
|
||||
f"External file sharing in Chat is not explicitly configured "
|
||||
f"in domain {chat_client.provider.identity.domain}. "
|
||||
f"External file sharing should be set to No files."
|
||||
)
|
||||
else:
|
||||
report.status_extended = (
|
||||
f"External file sharing in Chat is set to {external_sharing} "
|
||||
f"in domain {chat_client.provider.identity.domain}. "
|
||||
f"External file sharing should be set to No files."
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -1,40 +0,0 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "chat_external_messaging_restricted",
|
||||
"CheckTitle": "External Chat messaging is restricted to allowed domains",
|
||||
"CheckType": [],
|
||||
"ServiceName": "chat",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "Google Chat **external messaging** controls whether users can send messages to people outside the organization. If external messaging is allowed, it can optionally be restricted to only **allowlisted domains** to limit the scope of external communication.",
|
||||
"Risk": "Unrestricted external messaging allows users to communicate freely with **any external party**, increasing the risk of **data exfiltration** through conversation content and **social engineering attacks** from untrusted domains targeting internal users.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/9540647",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Google Chat and classic Hangouts**\n3. Click **External Chat Settings**\n4. Select **Chat externally**\n5. Set **Allow users to send messages outside the organization** to **ON**\n6. Check **Only allow this for allowlisted domains**\n7. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Restrict **external Chat messaging** to **allowlisted domains** only to limit information flow to trusted parties and reduce exposure to external threats.",
|
||||
"Url": "https://hub.prowler.com/check/chat_external_messaging_restricted"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"chat_external_spaces_restricted",
|
||||
"drive_sharing_allowlisted_domains"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -1,59 +0,0 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.chat.chat_client import chat_client
|
||||
|
||||
|
||||
class chat_external_messaging_restricted(Check):
|
||||
"""Check that external Chat messaging is restricted to allowed domains.
|
||||
|
||||
This check verifies that external Chat messaging is either disabled
|
||||
entirely or restricted to allowlisted domains only, preventing
|
||||
unrestricted communication with external users.
|
||||
"""
|
||||
|
||||
def execute(self) -> List[CheckReportGoogleWorkspace]:
|
||||
findings = []
|
||||
|
||||
if chat_client.policies_fetched:
|
||||
report = CheckReportGoogleWorkspace(
|
||||
metadata=self.metadata(),
|
||||
resource=chat_client.policies,
|
||||
resource_id="chatPolicies",
|
||||
resource_name="Chat Policies",
|
||||
customer_id=chat_client.provider.identity.customer_id,
|
||||
)
|
||||
|
||||
allow_external = chat_client.policies.allow_external_chat
|
||||
restriction = chat_client.policies.external_chat_restriction
|
||||
|
||||
if allow_external is False:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"External Chat messaging is disabled "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
elif allow_external is None and restriction is None:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"External Chat messaging uses Google's secure default "
|
||||
f"configuration (disabled) "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
elif restriction == "TRUSTED_DOMAINS":
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"External Chat messaging is restricted to allowed domains "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"External Chat messaging is not restricted to allowed domains "
|
||||
f"in domain {chat_client.provider.identity.domain}. "
|
||||
f"External messaging should be restricted to allowed domains only."
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -1,40 +0,0 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "chat_external_spaces_restricted",
|
||||
"CheckTitle": "External spaces in Chat are restricted to allowed domains",
|
||||
"CheckType": [],
|
||||
"ServiceName": "chat",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "Google Chat **external spaces** allow users to create or join collaborative spaces that include people outside the organization. If external spaces are allowed, they can optionally be restricted to only **allowlisted domains** to limit external participation.",
|
||||
"Risk": "Unrestricted external spaces allow users to add **anyone from any domain** to persistent group conversations. This increases the risk of **confidential information exposure** in shared spaces and enables **unauthorized external access** to ongoing organizational discussions.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/9540647",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Google Chat and classic Hangouts**\n3. Click **External Spaces**\n4. Set **Allow users to create and join spaces with people outside their organization** to **ON**\n5. Check **Only allow users to add people from allowlisted domains**\n6. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Restrict **external spaces** to **allowlisted domains** only to control which external parties can participate in organizational Chat spaces.",
|
||||
"Url": "https://hub.prowler.com/check/chat_external_spaces_restricted"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"chat_external_messaging_restricted",
|
||||
"drive_sharing_allowlisted_domains"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -1,59 +0,0 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.chat.chat_client import chat_client
|
||||
|
||||
|
||||
class chat_external_spaces_restricted(Check):
|
||||
"""Check that external spaces in Google Chat are restricted.
|
||||
|
||||
This check verifies that external spaces are either disabled entirely
|
||||
or restricted to allowlisted domains only, preventing users from
|
||||
creating or joining spaces with unrestricted external participants.
|
||||
"""
|
||||
|
||||
def execute(self) -> List[CheckReportGoogleWorkspace]:
|
||||
findings = []
|
||||
|
||||
if chat_client.policies_fetched:
|
||||
report = CheckReportGoogleWorkspace(
|
||||
metadata=self.metadata(),
|
||||
resource=chat_client.policies,
|
||||
resource_id="chatPolicies",
|
||||
resource_name="Chat Policies",
|
||||
customer_id=chat_client.provider.identity.customer_id,
|
||||
)
|
||||
|
||||
spaces_enabled = chat_client.policies.external_spaces_enabled
|
||||
allowlist_mode = chat_client.policies.external_spaces_domain_allowlist_mode
|
||||
|
||||
if spaces_enabled is False:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"External spaces are disabled "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
elif allowlist_mode == "TRUSTED_DOMAINS":
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"External spaces are restricted to allowed domains "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
if spaces_enabled is None and allowlist_mode is None:
|
||||
report.status_extended = (
|
||||
f"External spaces restriction is not explicitly configured "
|
||||
f"in domain {chat_client.provider.identity.domain}. "
|
||||
f"External spaces should be restricted to allowed domains only."
|
||||
)
|
||||
else:
|
||||
report.status_extended = (
|
||||
f"External spaces are not restricted to allowed domains "
|
||||
f"in domain {chat_client.provider.identity.domain}. "
|
||||
f"External spaces should be restricted to allowed domains only."
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -1,39 +0,0 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "chat_incoming_webhooks_disabled",
|
||||
"CheckTitle": "Incoming webhooks in Chat are disabled for users",
|
||||
"CheckType": [],
|
||||
"ServiceName": "chat",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "**Incoming webhooks** let external applications post asynchronous messages into Google Chat spaces without being a Chat app. When enabled, users can configure webhooks and developers can call them to send content from **external applications**.",
|
||||
"Risk": "Exposed webhook URLs allow **unauthorized content injection** into Chat spaces. Attackers can send **fraudulent or misleading messages** that appear to come from trusted services, creating a vector for **social engineering** and **phishing** within internal communications.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/6089179",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Google Chat and classic Hangouts**\n3. Click **Chat apps**\n4. Under Chat apps access settings, set **Allow users to add and use incoming webhooks** to **OFF**\n5. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Disable **incoming webhooks** to prevent unauthenticated external applications from **injecting content** into internal Chat spaces.",
|
||||
"Url": "https://hub.prowler.com/check/chat_incoming_webhooks_disabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"chat_apps_installation_disabled"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.chat.chat_client import chat_client
|
||||
|
||||
|
||||
class chat_incoming_webhooks_disabled(Check):
|
||||
"""Check that incoming webhooks are disabled in Google Chat.
|
||||
|
||||
This check verifies that the domain-level Chat policy prevents users
|
||||
from adding and using incoming webhooks, reducing the risk of
|
||||
unauthorized content being posted into Chat spaces.
|
||||
"""
|
||||
|
||||
def execute(self) -> List[CheckReportGoogleWorkspace]:
|
||||
findings = []
|
||||
|
||||
if chat_client.policies_fetched:
|
||||
report = CheckReportGoogleWorkspace(
|
||||
metadata=self.metadata(),
|
||||
resource=chat_client.policies,
|
||||
resource_id="chatPolicies",
|
||||
resource_name="Chat Policies",
|
||||
customer_id=chat_client.provider.identity.customer_id,
|
||||
)
|
||||
|
||||
webhooks_enabled = chat_client.policies.enable_webhooks
|
||||
|
||||
if webhooks_enabled is False:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"Incoming webhooks are disabled "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
elif webhooks_enabled is None:
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"Incoming webhooks use Google's secure default "
|
||||
f"configuration (disabled) "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"Incoming webhooks are enabled "
|
||||
f"in domain {chat_client.provider.identity.domain}. "
|
||||
f"Incoming webhooks should be disabled to prevent unauthorized content."
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -1,39 +0,0 @@
|
||||
{
|
||||
"Provider": "googleworkspace",
|
||||
"CheckID": "chat_internal_file_sharing_disabled",
|
||||
"CheckTitle": "Internal file sharing in Chat is set to no files",
|
||||
"CheckType": [],
|
||||
"ServiceName": "chat",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "NotDefined",
|
||||
"ResourceGroup": "collaboration",
|
||||
"Description": "Google Chat **internal file sharing** controls whether users can share files with other people inside the organization via Chat conversations. Organizations in regulated industries may need to **control and audit** all file sharing, even between internal users.",
|
||||
"Risk": "Unrestricted internal file sharing in Chat allows files with **sensitive information** to be distributed freely without passing through approved channels. This undermines **data governance** and **audit trail** requirements, making it harder to track data movement within the organization.",
|
||||
"RelatedUrl": "",
|
||||
"AdditionalURLs": [
|
||||
"https://support.google.com/a/answer/9540647",
|
||||
"https://cloud.google.com/identity/docs/concepts/supported-policy-api-settings"
|
||||
],
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "1. Sign in to the Google **Admin console** at https://admin.google.com\n2. Navigate to **Apps** > **Google Workspace** > **Google Chat and classic Hangouts**\n3. Click **Chat File Sharing**\n4. Under Setting, set **Internal filesharing** to **No files**\n5. Click **Save**",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Disable **internal file sharing** in Chat to enforce file distribution through **approved channels** with proper audit trails and governance controls.",
|
||||
"Url": "https://hub.prowler.com/check/chat_internal_file_sharing_disabled"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"trust-boundaries"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [
|
||||
"chat_external_file_sharing_disabled"
|
||||
],
|
||||
"Notes": ""
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportGoogleWorkspace
|
||||
from prowler.providers.googleworkspace.services.chat.chat_client import chat_client
|
||||
|
||||
|
||||
class chat_internal_file_sharing_disabled(Check):
|
||||
"""Check that internal file sharing in Google Chat is disabled.
|
||||
|
||||
This check verifies that the domain-level Chat policy prevents users
|
||||
from sharing files internally via Chat, providing maximum control over
|
||||
file distribution within the organization.
|
||||
"""
|
||||
|
||||
def execute(self) -> List[CheckReportGoogleWorkspace]:
|
||||
findings = []
|
||||
|
||||
if chat_client.policies_fetched:
|
||||
report = CheckReportGoogleWorkspace(
|
||||
metadata=self.metadata(),
|
||||
resource=chat_client.policies,
|
||||
resource_id="chatPolicies",
|
||||
resource_name="Chat Policies",
|
||||
customer_id=chat_client.provider.identity.customer_id,
|
||||
)
|
||||
|
||||
internal_sharing = chat_client.policies.internal_file_sharing
|
||||
|
||||
if internal_sharing == "NO_FILES":
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"Internal file sharing in Chat is disabled "
|
||||
f"in domain {chat_client.provider.identity.domain}."
|
||||
)
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
if internal_sharing is None:
|
||||
report.status_extended = (
|
||||
f"Internal file sharing in Chat is not explicitly configured "
|
||||
f"in domain {chat_client.provider.identity.domain}. "
|
||||
f"Internal file sharing should be set to No files."
|
||||
)
|
||||
else:
|
||||
report.status_extended = (
|
||||
f"Internal file sharing in Chat is set to {internal_sharing} "
|
||||
f"in domain {chat_client.provider.identity.domain}. "
|
||||
f"Internal file sharing should be set to No files."
|
||||
)
|
||||
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
@@ -1,125 +0,0 @@
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.googleworkspace.lib.service.service import GoogleWorkspaceService
|
||||
|
||||
|
||||
class Chat(GoogleWorkspaceService):
|
||||
"""Google Workspace Chat service for auditing domain-level Chat policies.
|
||||
|
||||
Uses the Cloud Identity Policy API v1 to read Chat file sharing, external
|
||||
messaging, spaces, and apps access settings configured in the Admin Console.
|
||||
"""
|
||||
|
||||
def __init__(self, provider):
|
||||
super().__init__(provider)
|
||||
self.policies = ChatPolicies()
|
||||
self.policies_fetched = False
|
||||
self._fetch_chat_policies()
|
||||
|
||||
def _fetch_chat_policies(self):
|
||||
"""Fetch Chat policies from the Cloud Identity Policy API v1."""
|
||||
logger.info("Chat - Fetching Chat policies...")
|
||||
|
||||
try:
|
||||
service = self._build_service("cloudidentity", "v1")
|
||||
|
||||
if not service:
|
||||
logger.error("Failed to build Cloud Identity service")
|
||||
return
|
||||
|
||||
request = service.policies().list(
|
||||
pageSize=100,
|
||||
filter='setting.type.matches("chat.*")',
|
||||
)
|
||||
fetch_succeeded = True
|
||||
|
||||
while request is not None:
|
||||
try:
|
||||
response = request.execute()
|
||||
|
||||
for policy in response.get("policies", []):
|
||||
if not self._is_customer_level_policy(policy):
|
||||
continue
|
||||
|
||||
setting = policy.get("setting", {})
|
||||
setting_type = setting.get("type", "").removeprefix("settings/")
|
||||
logger.debug(f"Processing setting type: {setting_type}")
|
||||
|
||||
value = setting.get("value", {})
|
||||
|
||||
if setting_type == "chat.chat_file_sharing":
|
||||
self.policies.external_file_sharing = value.get(
|
||||
"externalFileSharing"
|
||||
)
|
||||
self.policies.internal_file_sharing = value.get(
|
||||
"internalFileSharing"
|
||||
)
|
||||
logger.debug("Chat file sharing settings fetched.")
|
||||
|
||||
elif setting_type == "chat.external_chat_restriction":
|
||||
self.policies.allow_external_chat = value.get(
|
||||
"allowExternalChat"
|
||||
)
|
||||
self.policies.external_chat_restriction = value.get(
|
||||
"externalChatRestriction"
|
||||
)
|
||||
logger.debug(
|
||||
"Chat external chat restriction settings fetched."
|
||||
)
|
||||
|
||||
elif setting_type == "chat.chat_external_spaces":
|
||||
self.policies.external_spaces_enabled = value.get("enabled")
|
||||
self.policies.external_spaces_domain_allowlist_mode = (
|
||||
value.get("domainAllowlistMode")
|
||||
)
|
||||
logger.debug("Chat external spaces settings fetched.")
|
||||
|
||||
elif setting_type == "chat.chat_apps_access":
|
||||
self.policies.enable_apps = value.get("enableApps")
|
||||
self.policies.enable_webhooks = value.get("enableWebhooks")
|
||||
logger.debug("Chat apps access settings fetched.")
|
||||
|
||||
request = service.policies().list_next(request, response)
|
||||
|
||||
except Exception as error:
|
||||
self._handle_api_error(
|
||||
error,
|
||||
"fetching Chat policies",
|
||||
self.provider.identity.customer_id,
|
||||
)
|
||||
fetch_succeeded = False
|
||||
break
|
||||
|
||||
self.policies_fetched = fetch_succeeded
|
||||
logger.info("Chat policies fetched successfully.")
|
||||
|
||||
except Exception as error:
|
||||
self._handle_api_error(
|
||||
error,
|
||||
"fetching Chat policies",
|
||||
self.provider.identity.customer_id,
|
||||
)
|
||||
self.policies_fetched = False
|
||||
|
||||
|
||||
class ChatPolicies(BaseModel):
|
||||
"""Model for domain-level Chat policy settings."""
|
||||
|
||||
# chat.chat_file_sharing
|
||||
external_file_sharing: Optional[str] = None
|
||||
internal_file_sharing: Optional[str] = None
|
||||
|
||||
# chat.external_chat_restriction
|
||||
allow_external_chat: Optional[bool] = None
|
||||
external_chat_restriction: Optional[str] = None
|
||||
|
||||
# chat.chat_external_spaces
|
||||
external_spaces_enabled: Optional[bool] = None
|
||||
external_spaces_domain_allowlist_mode: Optional[str] = None
|
||||
|
||||
# chat.chat_apps_access
|
||||
enable_apps: Optional[bool] = None
|
||||
enable_webhooks: Optional[bool] = None
|
||||
@@ -150,212 +150,7 @@ AWS_SECURITY_TOKEN = 'testing'
|
||||
AWS_SESSION_TOKEN = 'testing'
|
||||
|
||||
[tool.uv]
|
||||
# Transitive pins matching the current lock to prevent silent drift on `uv lock`
|
||||
# (e.g. supply chain hijacks via newer releases). Bump deliberately.
|
||||
constraint-dependencies = [
|
||||
"about-time==4.2.1",
|
||||
"aenum==3.1.17",
|
||||
"aiofiles==24.1.0",
|
||||
"aiohappyeyeballs==2.6.1",
|
||||
"aiohttp==3.13.5",
|
||||
"aiosignal==1.4.0",
|
||||
"alibabacloud-actiontrail20200706==2.4.1",
|
||||
"alibabacloud-credentials==1.0.3",
|
||||
"alibabacloud-credentials-api==1.0.0",
|
||||
"alibabacloud-cs20151215==6.1.0",
|
||||
"alibabacloud-darabonba-array==0.1.0",
|
||||
"alibabacloud-darabonba-encode-util==0.0.2",
|
||||
"alibabacloud-darabonba-map==0.0.1",
|
||||
"alibabacloud-darabonba-signature-util==0.0.4",
|
||||
"alibabacloud-darabonba-string==0.0.4",
|
||||
"alibabacloud-darabonba-time==0.0.1",
|
||||
"alibabacloud-ecs20140526==7.2.5",
|
||||
"alibabacloud-endpoint-util==0.0.4",
|
||||
"alibabacloud-gateway-oss==0.0.17",
|
||||
"alibabacloud-gateway-sls==0.4.2",
|
||||
"alibabacloud-gateway-sls-util==0.4.1",
|
||||
"alibabacloud-gateway-spi==0.0.3",
|
||||
"alibabacloud-openapi-util==0.2.4",
|
||||
"alibabacloud-oss-util==0.0.6",
|
||||
"alibabacloud-oss20190517==1.0.6",
|
||||
"alibabacloud-ram20150501==1.2.0",
|
||||
"alibabacloud-sas20181203==6.1.0",
|
||||
"alibabacloud-sts20150401==1.1.6",
|
||||
"alibabacloud-tea==0.4.3",
|
||||
"alibabacloud-tea-openapi==0.4.4",
|
||||
"alibabacloud-tea-util==0.3.14",
|
||||
"alibabacloud-tea-xml==0.0.3",
|
||||
"alibabacloud-vpc20160428==6.13.0",
|
||||
"aliyun-log-fastpb==0.3.0",
|
||||
"annotated-types==0.7.0",
|
||||
"antlr4-python3-runtime==4.13.2",
|
||||
"anyio==4.13.0",
|
||||
"apscheduler==3.11.2",
|
||||
"astroid==3.3.11",
|
||||
"async-timeout==5.0.1",
|
||||
"attrs==26.1.0",
|
||||
"aws-sam-translator==1.109.0",
|
||||
"aws-xray-sdk==2.15.0",
|
||||
"azure-common==1.1.28",
|
||||
"azure-core==1.41.0",
|
||||
"azure-mgmt-core==1.6.0",
|
||||
"bandit==1.8.3",
|
||||
"black==25.1.0",
|
||||
"blinker==1.9.0",
|
||||
"certifi==2026.4.22",
|
||||
"cffi==2.0.0",
|
||||
"cfn-lint==1.51.0",
|
||||
"charset-normalizer==3.4.7",
|
||||
"circuitbreaker==2.1.3",
|
||||
"click==8.3.3",
|
||||
"click-plugins==1.1.1.2",
|
||||
"contextlib2==21.6.0",
|
||||
"coverage==7.6.12",
|
||||
"darabonba-core==1.0.5",
|
||||
"decorator==5.2.1",
|
||||
"dill==0.4.1",
|
||||
"distro==1.9.0",
|
||||
"dnspython==2.8.0",
|
||||
"docker==7.1.0",
|
||||
"dogpile-cache==1.5.0",
|
||||
"durationpy==0.10",
|
||||
"email-validator==2.2.0",
|
||||
"exceptiongroup==1.3.1",
|
||||
"execnet==2.1.2",
|
||||
"filelock==3.20.3",
|
||||
"flake8==7.1.2",
|
||||
"flask==3.1.3",
|
||||
"freezegun==1.5.1",
|
||||
"frozenlist==1.8.0",
|
||||
"google-api-core==2.30.3",
|
||||
"google-auth==2.52.0",
|
||||
"googleapis-common-protos==1.75.0",
|
||||
"graphemeu==0.7.2",
|
||||
"graphql-core==3.2.8",
|
||||
"h11==0.16.0",
|
||||
"hpack==4.1.0",
|
||||
"httpcore==1.0.9",
|
||||
"httplib2==0.31.2",
|
||||
"httpx==0.28.1",
|
||||
"hyperframe==6.1.0",
|
||||
"iamdata==0.1.202605131",
|
||||
"idna==3.15",
|
||||
"importlib-metadata==8.7.1",
|
||||
"iniconfig==2.3.0",
|
||||
"iso8601==2.1.0",
|
||||
"isodate==0.7.2",
|
||||
"isort==6.1.0",
|
||||
"itsdangerous==2.2.0",
|
||||
"jinja2==3.1.6",
|
||||
"jmespath==1.1.0",
|
||||
"joserfc==1.6.5",
|
||||
"jsonpatch==1.33",
|
||||
"jsonpath-ng==1.8.0",
|
||||
"jsonpointer==3.1.1",
|
||||
"jsonschema-path==0.3.4",
|
||||
"jsonschema-specifications==2025.9.1",
|
||||
"jwcrypto==1.5.7",
|
||||
"keystoneauth1==5.14.0",
|
||||
"lazy-object-proxy==1.12.0",
|
||||
"lz4==4.4.5",
|
||||
"markdown-it-py==4.2.0",
|
||||
"markupsafe==3.0.3",
|
||||
"mccabe==0.7.0",
|
||||
"mdurl==0.1.2",
|
||||
"microsoft-kiota-authentication-azure==1.9.2",
|
||||
"microsoft-kiota-http==1.9.2",
|
||||
"microsoft-kiota-serialization-form==1.9.2",
|
||||
"microsoft-kiota-serialization-json==1.9.2",
|
||||
"microsoft-kiota-serialization-multipart==1.9.2",
|
||||
"microsoft-kiota-serialization-text==1.9.2",
|
||||
"mock==5.2.0",
|
||||
"moto==5.1.11",
|
||||
"mpmath==1.3.0",
|
||||
"msal==1.36.0",
|
||||
"msal-extensions==1.3.1",
|
||||
"msgraph-core==1.3.8",
|
||||
"msrest==0.7.1",
|
||||
"multidict==6.7.1",
|
||||
"multipart==1.3.1",
|
||||
"mypy-extensions==1.1.0",
|
||||
"narwhals==2.21.0",
|
||||
"nest-asyncio==1.6.0",
|
||||
"networkx==3.4.2",
|
||||
"oauthlib==3.3.1",
|
||||
"openapi-schema-validator==0.6.3",
|
||||
"openapi-spec-validator==0.7.1",
|
||||
"opentelemetry-api==1.41.1",
|
||||
"opentelemetry-sdk==1.41.1",
|
||||
"opentelemetry-semantic-conventions==0.62b1",
|
||||
"os-service-types==1.8.2",
|
||||
"packaging==26.2",
|
||||
"pathable==0.4.4",
|
||||
"pathspec==1.1.1",
|
||||
"pbr==7.0.3",
|
||||
"platformdirs==4.9.6",
|
||||
"plotly==6.7.0",
|
||||
"pluggy==1.6.0",
|
||||
"prek==0.3.9",
|
||||
"propcache==0.5.2",
|
||||
"proto-plus==1.28.0",
|
||||
"protobuf==7.34.1",
|
||||
"psutil==7.2.2",
|
||||
"py-partiql-parser==0.6.1",
|
||||
"pyasn1==0.6.3",
|
||||
"pyasn1-modules==0.4.2",
|
||||
"pycodestyle==2.12.1",
|
||||
"pycparser==3.0",
|
||||
"pycryptodomex==3.23.0",
|
||||
"pydantic-core==2.41.5",
|
||||
"pydash==8.0.6",
|
||||
"pyflakes==3.2.0",
|
||||
"pygments==2.20.0",
|
||||
"pyjwt==2.12.1",
|
||||
"pylint==3.3.4",
|
||||
"pynacl==1.6.2",
|
||||
"pyopenssl==26.2.0",
|
||||
"pyparsing==3.3.2",
|
||||
"pytest==8.3.5",
|
||||
"pytest-cov==6.0.0",
|
||||
"pytest-env==1.1.5",
|
||||
"pytest-randomly==3.16.0",
|
||||
"pytest-xdist==3.6.1",
|
||||
"pywin32==311",
|
||||
"pyyaml==6.0.3",
|
||||
"referencing==0.36.2",
|
||||
"regex==2026.5.9",
|
||||
"requests==2.34.0",
|
||||
"requests-file==3.0.1",
|
||||
"requests-oauthlib==2.0.0",
|
||||
"requestsexceptions==1.4.0",
|
||||
"responses==0.26.0",
|
||||
"retrying==1.4.2",
|
||||
"rfc3339-validator==0.1.4",
|
||||
"rich==15.0.0",
|
||||
"rpds-py==0.30.0",
|
||||
"s3transfer==0.14.0",
|
||||
"setuptools==82.0.1",
|
||||
"six==1.17.0",
|
||||
"sniffio==1.3.1",
|
||||
"std-uritemplate==2.0.8",
|
||||
"stevedore==5.7.0",
|
||||
"sympy==1.14.0",
|
||||
"tldextract==5.3.1",
|
||||
"tomli==2.4.1",
|
||||
"tomlkit==0.15.0",
|
||||
"typing-extensions==4.15.0",
|
||||
"typing-inspection==0.4.2",
|
||||
"tzdata==2026.2",
|
||||
"uritemplate==4.2.0",
|
||||
"urllib3==2.7.0",
|
||||
"vulture==2.14",
|
||||
"websocket-client==1.9.0",
|
||||
"werkzeug==3.1.8",
|
||||
"wrapt==2.1.2",
|
||||
"xlsxwriter==3.2.9",
|
||||
"xmltodict==1.0.4",
|
||||
"yarl==1.23.0",
|
||||
"zipp==3.23.1",
|
||||
"zstd==1.5.7.3"
|
||||
]
|
||||
# cartography (pulled in via the API) still pins okta<1.0.0 for its (unused-by-prowler)
|
||||
# intel.okta integration; the SDK Okta provider needs okta==3.4.2 (PR #11079). Force the
|
||||
# version prowler needs; cartography's okta module is not imported here.
|
||||
override-dependencies = ["okta==3.4.2"]
|
||||
|
||||
@@ -12,7 +12,6 @@ Also validates that print_compliance_frameworks and print_compliance_requirement
|
||||
work with universal ComplianceFramework objects (dict checks, None provider).
|
||||
"""
|
||||
|
||||
import csv
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
@@ -125,41 +124,6 @@ def _make_universal_framework(name="TestFW", version="1.0", with_table_config=Tr
|
||||
)
|
||||
|
||||
|
||||
def _make_framework_with_manual(name="MixedFW", version="1.0"):
|
||||
"""Framework with one aws-covered requirement and one manual one.
|
||||
|
||||
The manual requirement has no aws checks, so for provider ``aws`` it is
|
||||
emitted as a manual row/event — used to assert manual requirements are
|
||||
not duplicated when the writer is reused across streaming batches.
|
||||
"""
|
||||
reqs = [
|
||||
UniversalComplianceRequirement(
|
||||
id="1.1",
|
||||
description="Covered requirement",
|
||||
attributes={"Section": "IAM"},
|
||||
checks={"aws": ["check_a"]},
|
||||
),
|
||||
UniversalComplianceRequirement(
|
||||
id="2.1",
|
||||
description="Manual requirement",
|
||||
attributes={"Section": "GOV"},
|
||||
checks={"aws": []},
|
||||
),
|
||||
]
|
||||
metadata = [AttributeMetadata(key="Section", type="str")]
|
||||
outputs = OutputsConfig(table_config=TableConfig(group_by="Section"))
|
||||
return ComplianceFramework(
|
||||
framework=name,
|
||||
name=f"{name} Framework",
|
||||
provider="AWS",
|
||||
version=version,
|
||||
description="Test framework",
|
||||
requirements=reqs,
|
||||
attributes_metadata=metadata,
|
||||
outputs=outputs,
|
||||
)
|
||||
|
||||
|
||||
# ── Tests ────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
@@ -764,142 +728,3 @@ class TestIdempotency:
|
||||
# FW1 writer instances unchanged
|
||||
assert second_writers[0] is first_writers[0]
|
||||
assert second_writers[1] is first_writers[1]
|
||||
|
||||
|
||||
class TestStreamingBatches:
|
||||
"""Streaming-aware behaviour: ``from_cli`` / ``is_last`` / ``_flush``.
|
||||
|
||||
Regression coverage for the API streaming path where the helper is
|
||||
invoked once per finding batch: before the fix only the first batch
|
||||
was written (batches 2..N silently dropped) and manual requirements
|
||||
were re-emitted on every batch.
|
||||
"""
|
||||
|
||||
def _run_batches(self, tmp_path, fw, key, batches):
|
||||
"""Invoke the helper once per (findings, is_last) batch, sharing
|
||||
``generated_outputs`` so writers are reused like the API does."""
|
||||
generated = {"compliance": []}
|
||||
for findings, is_last in batches:
|
||||
process_universal_compliance_frameworks(
|
||||
input_compliance_frameworks={key},
|
||||
universal_frameworks={key: fw},
|
||||
finding_outputs=findings,
|
||||
output_directory=str(tmp_path),
|
||||
output_filename="out",
|
||||
provider="aws",
|
||||
generated_outputs=generated,
|
||||
from_cli=False,
|
||||
is_last=is_last,
|
||||
)
|
||||
return generated
|
||||
|
||||
def test_defaults_preserve_cli_single_call(self, tmp_path):
|
||||
"""Defaults (``from_cli=True``, ``is_last=True``): a single call
|
||||
still finalizes a valid, closed OCSF JSON array (CLI unchanged)."""
|
||||
fw = _make_universal_framework()
|
||||
generated = {"compliance": []}
|
||||
process_universal_compliance_frameworks(
|
||||
input_compliance_frameworks={"test_fw_1.0"},
|
||||
universal_frameworks={"test_fw_1.0": fw},
|
||||
finding_outputs=[_make_finding("check_a")],
|
||||
output_directory=str(tmp_path),
|
||||
output_filename="out",
|
||||
provider="aws",
|
||||
generated_outputs=generated,
|
||||
)
|
||||
ocsf_path = tmp_path / "compliance" / "out_test_fw_1.0.ocsf.json"
|
||||
data = json.loads(ocsf_path.read_text())
|
||||
assert isinstance(data, list) and len(data) >= 1
|
||||
|
||||
def test_multibatch_csv_keeps_every_batch(self, tmp_path):
|
||||
"""Findings from batches 2..N must not be dropped (the bug)."""
|
||||
fw = _make_universal_framework()
|
||||
f1 = _make_finding("check_a", status="PASS")
|
||||
f2 = _make_finding("check_a", status="FAIL")
|
||||
generated = self._run_batches(
|
||||
tmp_path, fw, "fw_1.0", [([f1], False), ([f2], True)]
|
||||
)
|
||||
content = (tmp_path / "compliance" / "out_fw_1.0.csv").read_text()
|
||||
assert "check_a is PASS" in content # batch 1
|
||||
assert "check_a is FAIL" in content # batch 2 — regression
|
||||
# writer reused, not recreated: still just 1 CSV + 1 OCSF
|
||||
assert len(generated["compliance"]) == 2
|
||||
|
||||
def test_multibatch_ocsf_valid_array_with_every_batch(self, tmp_path):
|
||||
"""OCSF is a valid (closed) JSON array holding every batch's
|
||||
events only after the ``is_last=True`` call."""
|
||||
fw = _make_universal_framework()
|
||||
f1 = _make_finding("check_a", status="PASS")
|
||||
f2 = _make_finding("check_a", status="FAIL")
|
||||
self._run_batches(tmp_path, fw, "fw_1.0", [([f1], False), ([f2], True)])
|
||||
data = json.loads(
|
||||
(tmp_path / "compliance" / "out_fw_1.0.ocsf.json").read_text()
|
||||
)
|
||||
assert isinstance(data, list)
|
||||
assert len(data) >= 2 # one event per batch finding
|
||||
|
||||
def test_manual_requirement_not_duplicated_across_batches(self, tmp_path):
|
||||
"""Manual requirement is emitted once (first batch, via __init__),
|
||||
never re-emitted when the writer is reused (``include_manual=False``)."""
|
||||
fw = _make_framework_with_manual()
|
||||
f1 = _make_finding("check_a", status="PASS")
|
||||
f2 = _make_finding("check_a", status="FAIL")
|
||||
self._run_batches(tmp_path, fw, "fw_1.0", [([f1], False), ([f2], True)])
|
||||
rows = list(
|
||||
csv.DictReader(
|
||||
(tmp_path / "compliance" / "out_fw_1.0.csv").read_text().splitlines(),
|
||||
delimiter=";",
|
||||
)
|
||||
)
|
||||
manual_rows = [r for r in rows if r["STATUS"] == "MANUAL"]
|
||||
assert len(manual_rows) == 1
|
||||
assert manual_rows[0]["REQUIREMENTS_ID"] == "2.1"
|
||||
|
||||
ocsf = json.loads(
|
||||
(tmp_path / "compliance" / "out_fw_1.0.ocsf.json").read_text()
|
||||
)
|
||||
manual_events = [
|
||||
e
|
||||
for e in ocsf
|
||||
if (e.get("compliance") or {}).get("requirements") == ["2.1"]
|
||||
]
|
||||
assert len(manual_events) == 1
|
||||
|
||||
def test_writer_reused_not_recreated_across_batches(self, tmp_path):
|
||||
"""Three batches still yield exactly one CSV + one OCSF writer,
|
||||
and the same instances are reused throughout."""
|
||||
fw = _make_universal_framework()
|
||||
generated = self._run_batches(
|
||||
tmp_path,
|
||||
fw,
|
||||
"fw_1.0",
|
||||
[
|
||||
([_make_finding("check_a")], False),
|
||||
([_make_finding("check_a")], False),
|
||||
([_make_finding("check_a")], True),
|
||||
],
|
||||
)
|
||||
assert len(generated["compliance"]) == 2
|
||||
assert isinstance(generated["compliance"][0], UniversalComplianceOutput)
|
||||
assert isinstance(generated["compliance"][1], OCSFComplianceOutput)
|
||||
|
||||
def test_label_without_version_still_outputs(self, tmp_path):
|
||||
"""Empty framework version → label is the framework name only;
|
||||
the helper still produces both artifacts without error."""
|
||||
fw = _make_universal_framework(version="")
|
||||
generated = {"compliance": []}
|
||||
processed = process_universal_compliance_frameworks(
|
||||
input_compliance_frameworks={"fw"},
|
||||
universal_frameworks={"fw": fw},
|
||||
finding_outputs=[_make_finding("check_a")],
|
||||
output_directory=str(tmp_path),
|
||||
output_filename="out",
|
||||
provider="aws",
|
||||
generated_outputs=generated,
|
||||
from_cli=False,
|
||||
is_last=True,
|
||||
)
|
||||
assert processed == {"fw"}
|
||||
assert len(generated["compliance"]) == 2
|
||||
assert (tmp_path / "compliance" / "out_fw.csv").exists()
|
||||
assert (tmp_path / "compliance" / "out_fw.ocsf.json").exists()
|
||||
|
||||
@@ -202,26 +202,6 @@ class TestOCSFComplianceOutput:
|
||||
assert cf.status_code == "MANUAL"
|
||||
assert cf.finding_info.uid == "manual-MANUAL-1"
|
||||
|
||||
def test_include_manual_false_skips_manual(self):
|
||||
"""``_transform(..., include_manual=False)`` emits check events but
|
||||
NOT manual requirement events. The streaming caller passes ``False``
|
||||
for batches 2..N so manual events are not duplicated."""
|
||||
covered = _simple_requirement("REQ-1", ["check_a"])
|
||||
manual = _simple_requirement("MANUAL-1", checks=[])
|
||||
fw = _make_framework([covered, manual])
|
||||
findings = [_make_finding("check_a")]
|
||||
|
||||
output = OCSFComplianceOutput(findings=findings, framework=fw, provider="aws")
|
||||
# __init__ transforms with include_manual=True (default) → manual present
|
||||
assert any(cf.status_code == "MANUAL" for cf in output.data)
|
||||
|
||||
# A subsequent batch re-transforms with include_manual=False
|
||||
output._data.clear()
|
||||
output._transform(findings, fw, "TestFW-1.0", include_manual=False)
|
||||
|
||||
assert len(output.data) == 1 # only the check event, no manual
|
||||
assert all(cf.status_code != "MANUAL" for cf in output.data)
|
||||
|
||||
def test_multi_provider_checks_dict(self):
|
||||
req = UniversalComplianceRequirement(
|
||||
id="REQ-1",
|
||||
|
||||
@@ -122,43 +122,6 @@ class TestManualRequirements:
|
||||
assert manual_rows[0].dict()["Requirements_Id"] == "manual-1"
|
||||
assert manual_rows[0].dict()["ResourceId"] == "manual_check"
|
||||
|
||||
def test_include_manual_false_skips_manual_rows(self, tmp_path):
|
||||
"""``_transform(..., include_manual=False)`` emits finding rows but
|
||||
NOT manual requirements. The streaming caller passes ``False`` for
|
||||
batches 2..N so manual rows are not duplicated across batches."""
|
||||
reqs = [
|
||||
UniversalComplianceRequirement(
|
||||
id="1.1",
|
||||
description="test",
|
||||
attributes={"Section": "IAM"},
|
||||
checks={"aws": ["check_a"]},
|
||||
),
|
||||
UniversalComplianceRequirement(
|
||||
id="manual-1",
|
||||
description="manual check",
|
||||
attributes={"Section": "Governance"},
|
||||
checks={},
|
||||
),
|
||||
]
|
||||
metadata = [AttributeMetadata(key="Section", type="str")]
|
||||
fw = _make_framework(reqs, metadata, TableConfig(group_by="Section"))
|
||||
findings = [_make_finding("check_a", "PASS", {"TestFW-1.0": ["1.1"]})]
|
||||
|
||||
output = UniversalComplianceOutput(
|
||||
findings=findings,
|
||||
framework=fw,
|
||||
file_path=str(tmp_path / "t.csv"),
|
||||
)
|
||||
# __init__ transforms with include_manual=True (default) → manual present
|
||||
assert any(r.dict()["Status"] == "MANUAL" for r in output.data)
|
||||
|
||||
# A subsequent batch re-transforms with include_manual=False
|
||||
output._data.clear()
|
||||
output._transform(findings, fw, "TestFW-1.0", include_manual=False)
|
||||
|
||||
assert len(output.data) == 1 # only the finding row, no manual
|
||||
assert all(r.dict()["Status"] != "MANUAL" for r in output.data)
|
||||
|
||||
|
||||
class TestMITREExtraColumns:
|
||||
def test_mitre_columns_present(self, tmp_path):
|
||||
|
||||
@@ -1,119 +0,0 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import ChatPolicies
|
||||
from tests.providers.googleworkspace.googleworkspace_fixtures import (
|
||||
CUSTOMER_ID,
|
||||
set_mocked_googleworkspace_provider,
|
||||
)
|
||||
|
||||
|
||||
class TestChatAppsInstallationDisabled:
|
||||
def test_pass(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_apps_installation_disabled.chat_apps_installation_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_apps_installation_disabled.chat_apps_installation_disabled import (
|
||||
chat_apps_installation_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(enable_apps=False)
|
||||
|
||||
check = chat_apps_installation_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "disabled" in findings[0].status_extended
|
||||
assert findings[0].resource_name == "Chat Policies"
|
||||
assert findings[0].resource_id == "chatPolicies"
|
||||
assert findings[0].customer_id == CUSTOMER_ID
|
||||
assert findings[0].resource == ChatPolicies(enable_apps=False).dict()
|
||||
|
||||
def test_fail_enabled(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_apps_installation_disabled.chat_apps_installation_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_apps_installation_disabled.chat_apps_installation_disabled import (
|
||||
chat_apps_installation_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(enable_apps=True)
|
||||
|
||||
check = chat_apps_installation_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "enabled" in findings[0].status_extended
|
||||
|
||||
def test_pass_no_policy_set(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_apps_installation_disabled.chat_apps_installation_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_apps_installation_disabled.chat_apps_installation_disabled import (
|
||||
chat_apps_installation_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(enable_apps=None)
|
||||
|
||||
check = chat_apps_installation_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "secure default" in findings[0].status_extended
|
||||
|
||||
def test_no_findings_when_fetch_failed(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_apps_installation_disabled.chat_apps_installation_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_apps_installation_disabled.chat_apps_installation_disabled import (
|
||||
chat_apps_installation_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = False
|
||||
mock_client.policies = ChatPolicies()
|
||||
|
||||
check = chat_apps_installation_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 0
|
||||
@@ -1,149 +0,0 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import ChatPolicies
|
||||
from tests.providers.googleworkspace.googleworkspace_fixtures import (
|
||||
CUSTOMER_ID,
|
||||
set_mocked_googleworkspace_provider,
|
||||
)
|
||||
|
||||
|
||||
class TestChatExternalFileSharingDisabled:
|
||||
def test_pass(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled import (
|
||||
chat_external_file_sharing_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(external_file_sharing="NO_FILES")
|
||||
|
||||
check = chat_external_file_sharing_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "disabled" in findings[0].status_extended
|
||||
assert findings[0].resource_name == "Chat Policies"
|
||||
assert findings[0].resource_id == "chatPolicies"
|
||||
assert findings[0].customer_id == CUSTOMER_ID
|
||||
assert (
|
||||
findings[0].resource
|
||||
== ChatPolicies(external_file_sharing="NO_FILES").dict()
|
||||
)
|
||||
|
||||
def test_fail_all_files(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled import (
|
||||
chat_external_file_sharing_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(external_file_sharing="ALL_FILES")
|
||||
|
||||
check = chat_external_file_sharing_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "ALL_FILES" in findings[0].status_extended
|
||||
|
||||
def test_fail_images_only(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled import (
|
||||
chat_external_file_sharing_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(external_file_sharing="IMAGES_ONLY")
|
||||
|
||||
check = chat_external_file_sharing_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "IMAGES_ONLY" in findings[0].status_extended
|
||||
|
||||
def test_fail_no_policy_set(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled import (
|
||||
chat_external_file_sharing_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(external_file_sharing=None)
|
||||
|
||||
check = chat_external_file_sharing_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "not explicitly configured" in findings[0].status_extended
|
||||
|
||||
def test_no_findings_when_fetch_failed(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_file_sharing_disabled.chat_external_file_sharing_disabled import (
|
||||
chat_external_file_sharing_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = False
|
||||
mock_client.policies = ChatPolicies()
|
||||
|
||||
check = chat_external_file_sharing_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 0
|
||||
@@ -1,154 +0,0 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import ChatPolicies
|
||||
from tests.providers.googleworkspace.googleworkspace_fixtures import (
|
||||
CUSTOMER_ID,
|
||||
set_mocked_googleworkspace_provider,
|
||||
)
|
||||
|
||||
|
||||
class TestChatExternalMessagingRestricted:
|
||||
def test_pass_external_chat_disabled(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted import (
|
||||
chat_external_messaging_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(allow_external_chat=False)
|
||||
|
||||
check = chat_external_messaging_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "disabled" in findings[0].status_extended
|
||||
assert findings[0].resource_name == "Chat Policies"
|
||||
assert findings[0].resource_id == "chatPolicies"
|
||||
assert findings[0].customer_id == CUSTOMER_ID
|
||||
assert (
|
||||
findings[0].resource == ChatPolicies(allow_external_chat=False).dict()
|
||||
)
|
||||
|
||||
def test_pass_trusted_domains(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted import (
|
||||
chat_external_messaging_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(
|
||||
allow_external_chat=True,
|
||||
external_chat_restriction="TRUSTED_DOMAINS",
|
||||
)
|
||||
|
||||
check = chat_external_messaging_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "restricted to allowed domains" in findings[0].status_extended
|
||||
|
||||
def test_fail_no_restriction(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted import (
|
||||
chat_external_messaging_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(
|
||||
allow_external_chat=True,
|
||||
external_chat_restriction="NO_RESTRICTION",
|
||||
)
|
||||
|
||||
check = chat_external_messaging_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "not restricted" in findings[0].status_extended
|
||||
|
||||
def test_pass_no_policy_set(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted import (
|
||||
chat_external_messaging_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies()
|
||||
|
||||
check = chat_external_messaging_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "secure default" in findings[0].status_extended
|
||||
|
||||
def test_no_findings_when_fetch_failed(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_messaging_restricted.chat_external_messaging_restricted import (
|
||||
chat_external_messaging_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = False
|
||||
mock_client.policies = ChatPolicies()
|
||||
|
||||
check = chat_external_messaging_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 0
|
||||
@@ -1,155 +0,0 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import ChatPolicies
|
||||
from tests.providers.googleworkspace.googleworkspace_fixtures import (
|
||||
CUSTOMER_ID,
|
||||
set_mocked_googleworkspace_provider,
|
||||
)
|
||||
|
||||
|
||||
class TestChatExternalSpacesRestricted:
|
||||
def test_pass_spaces_disabled(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted import (
|
||||
chat_external_spaces_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(external_spaces_enabled=False)
|
||||
|
||||
check = chat_external_spaces_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "disabled" in findings[0].status_extended
|
||||
assert findings[0].resource_name == "Chat Policies"
|
||||
assert findings[0].resource_id == "chatPolicies"
|
||||
assert findings[0].customer_id == CUSTOMER_ID
|
||||
assert (
|
||||
findings[0].resource
|
||||
== ChatPolicies(external_spaces_enabled=False).dict()
|
||||
)
|
||||
|
||||
def test_pass_trusted_domains(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted import (
|
||||
chat_external_spaces_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(
|
||||
external_spaces_enabled=True,
|
||||
external_spaces_domain_allowlist_mode="TRUSTED_DOMAINS",
|
||||
)
|
||||
|
||||
check = chat_external_spaces_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "restricted to allowed domains" in findings[0].status_extended
|
||||
|
||||
def test_fail_all_domains(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted import (
|
||||
chat_external_spaces_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(
|
||||
external_spaces_enabled=True,
|
||||
external_spaces_domain_allowlist_mode="ALL_DOMAINS",
|
||||
)
|
||||
|
||||
check = chat_external_spaces_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "not restricted" in findings[0].status_extended
|
||||
|
||||
def test_fail_no_policy_set(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted import (
|
||||
chat_external_spaces_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies()
|
||||
|
||||
check = chat_external_spaces_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "not explicitly configured" in findings[0].status_extended
|
||||
|
||||
def test_no_findings_when_fetch_failed(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_external_spaces_restricted.chat_external_spaces_restricted import (
|
||||
chat_external_spaces_restricted,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = False
|
||||
mock_client.policies = ChatPolicies()
|
||||
|
||||
check = chat_external_spaces_restricted()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 0
|
||||
@@ -1,119 +0,0 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import ChatPolicies
|
||||
from tests.providers.googleworkspace.googleworkspace_fixtures import (
|
||||
CUSTOMER_ID,
|
||||
set_mocked_googleworkspace_provider,
|
||||
)
|
||||
|
||||
|
||||
class TestChatIncomingWebhooksDisabled:
|
||||
def test_pass(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_incoming_webhooks_disabled.chat_incoming_webhooks_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_incoming_webhooks_disabled.chat_incoming_webhooks_disabled import (
|
||||
chat_incoming_webhooks_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(enable_webhooks=False)
|
||||
|
||||
check = chat_incoming_webhooks_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "disabled" in findings[0].status_extended
|
||||
assert findings[0].resource_name == "Chat Policies"
|
||||
assert findings[0].resource_id == "chatPolicies"
|
||||
assert findings[0].customer_id == CUSTOMER_ID
|
||||
assert findings[0].resource == ChatPolicies(enable_webhooks=False).dict()
|
||||
|
||||
def test_fail_enabled(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_incoming_webhooks_disabled.chat_incoming_webhooks_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_incoming_webhooks_disabled.chat_incoming_webhooks_disabled import (
|
||||
chat_incoming_webhooks_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(enable_webhooks=True)
|
||||
|
||||
check = chat_incoming_webhooks_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "enabled" in findings[0].status_extended
|
||||
|
||||
def test_pass_no_policy_set(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_incoming_webhooks_disabled.chat_incoming_webhooks_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_incoming_webhooks_disabled.chat_incoming_webhooks_disabled import (
|
||||
chat_incoming_webhooks_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(enable_webhooks=None)
|
||||
|
||||
check = chat_incoming_webhooks_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "secure default" in findings[0].status_extended
|
||||
|
||||
def test_no_findings_when_fetch_failed(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_incoming_webhooks_disabled.chat_incoming_webhooks_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_incoming_webhooks_disabled.chat_incoming_webhooks_disabled import (
|
||||
chat_incoming_webhooks_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = False
|
||||
mock_client.policies = ChatPolicies()
|
||||
|
||||
check = chat_incoming_webhooks_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 0
|
||||
@@ -1,122 +0,0 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import ChatPolicies
|
||||
from tests.providers.googleworkspace.googleworkspace_fixtures import (
|
||||
CUSTOMER_ID,
|
||||
set_mocked_googleworkspace_provider,
|
||||
)
|
||||
|
||||
|
||||
class TestChatInternalFileSharingDisabled:
|
||||
def test_pass(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_internal_file_sharing_disabled.chat_internal_file_sharing_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_internal_file_sharing_disabled.chat_internal_file_sharing_disabled import (
|
||||
chat_internal_file_sharing_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(internal_file_sharing="NO_FILES")
|
||||
|
||||
check = chat_internal_file_sharing_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "PASS"
|
||||
assert "disabled" in findings[0].status_extended
|
||||
assert findings[0].resource_name == "Chat Policies"
|
||||
assert findings[0].resource_id == "chatPolicies"
|
||||
assert findings[0].customer_id == CUSTOMER_ID
|
||||
assert (
|
||||
findings[0].resource
|
||||
== ChatPolicies(internal_file_sharing="NO_FILES").dict()
|
||||
)
|
||||
|
||||
def test_fail_all_files(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_internal_file_sharing_disabled.chat_internal_file_sharing_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_internal_file_sharing_disabled.chat_internal_file_sharing_disabled import (
|
||||
chat_internal_file_sharing_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(internal_file_sharing="ALL_FILES")
|
||||
|
||||
check = chat_internal_file_sharing_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "ALL_FILES" in findings[0].status_extended
|
||||
|
||||
def test_fail_no_policy_set(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_internal_file_sharing_disabled.chat_internal_file_sharing_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_internal_file_sharing_disabled.chat_internal_file_sharing_disabled import (
|
||||
chat_internal_file_sharing_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = True
|
||||
mock_client.policies = ChatPolicies(internal_file_sharing=None)
|
||||
|
||||
check = chat_internal_file_sharing_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 1
|
||||
assert findings[0].status == "FAIL"
|
||||
assert "not explicitly configured" in findings[0].status_extended
|
||||
|
||||
def test_no_findings_when_fetch_failed(self):
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_internal_file_sharing_disabled.chat_internal_file_sharing_disabled.chat_client"
|
||||
) as mock_client,
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_internal_file_sharing_disabled.chat_internal_file_sharing_disabled import (
|
||||
chat_internal_file_sharing_disabled,
|
||||
)
|
||||
|
||||
mock_client.provider = mock_provider
|
||||
mock_client.policies_fetched = False
|
||||
mock_client.policies = ChatPolicies()
|
||||
|
||||
check = chat_internal_file_sharing_disabled()
|
||||
findings = check.execute()
|
||||
|
||||
assert len(findings) == 0
|
||||
@@ -1,440 +0,0 @@
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from googleapiclient.errors import HttpError
|
||||
from httplib2 import Response as HttpResponse
|
||||
|
||||
from tests.providers.googleworkspace.googleworkspace_fixtures import (
|
||||
ROOT_ORG_UNIT_ID,
|
||||
set_mocked_googleworkspace_provider,
|
||||
)
|
||||
|
||||
|
||||
class TestChatService:
|
||||
def test_chat_fetch_policies_all_settings(self):
|
||||
"""Test fetching all 4 Chat policy settings from Cloud Identity API"""
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
mock_provider.audit_config = {}
|
||||
mock_provider.fixer_config = {}
|
||||
mock_credentials = MagicMock()
|
||||
mock_session = MagicMock()
|
||||
mock_session.credentials = mock_credentials
|
||||
mock_provider.session = mock_session
|
||||
|
||||
mock_service = MagicMock()
|
||||
mock_policies_list = MagicMock()
|
||||
mock_policies_list.execute.return_value = {
|
||||
"policies": [
|
||||
{
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_file_sharing",
|
||||
"value": {
|
||||
"externalFileSharing": "NO_FILES",
|
||||
"internalFileSharing": "IMAGES_ONLY",
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"setting": {
|
||||
"type": "settings/chat.external_chat_restriction",
|
||||
"value": {
|
||||
"allowExternalChat": True,
|
||||
"externalChatRestriction": "TRUSTED_DOMAINS",
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_external_spaces",
|
||||
"value": {
|
||||
"enabled": True,
|
||||
"domainAllowlistMode": "TRUSTED_DOMAINS",
|
||||
},
|
||||
}
|
||||
},
|
||||
{
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_apps_access",
|
||||
"value": {
|
||||
"enableApps": False,
|
||||
"enableWebhooks": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
]
|
||||
}
|
||||
mock_service.policies().list.return_value = mock_policies_list
|
||||
mock_service.policies().list_next.return_value = None
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_service.GoogleWorkspaceService._build_service",
|
||||
return_value=mock_service,
|
||||
),
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import (
|
||||
Chat,
|
||||
)
|
||||
|
||||
chat = Chat(mock_provider)
|
||||
|
||||
assert chat.policies_fetched is True
|
||||
assert chat.policies.external_file_sharing == "NO_FILES"
|
||||
assert chat.policies.internal_file_sharing == "IMAGES_ONLY"
|
||||
assert chat.policies.allow_external_chat is True
|
||||
assert chat.policies.external_chat_restriction == "TRUSTED_DOMAINS"
|
||||
assert chat.policies.external_spaces_enabled is True
|
||||
assert (
|
||||
chat.policies.external_spaces_domain_allowlist_mode == "TRUSTED_DOMAINS"
|
||||
)
|
||||
assert chat.policies.enable_apps is False
|
||||
assert chat.policies.enable_webhooks is False
|
||||
|
||||
def test_chat_fetch_policies_empty_response(self):
|
||||
"""Test handling empty policies response"""
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
mock_provider.audit_config = {}
|
||||
mock_provider.fixer_config = {}
|
||||
mock_session = MagicMock()
|
||||
mock_session.credentials = MagicMock()
|
||||
mock_provider.session = mock_session
|
||||
|
||||
mock_service = MagicMock()
|
||||
mock_policies_list = MagicMock()
|
||||
mock_policies_list.execute.return_value = {"policies": []}
|
||||
mock_service.policies().list.return_value = mock_policies_list
|
||||
mock_service.policies().list_next.return_value = None
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_service.GoogleWorkspaceService._build_service",
|
||||
return_value=mock_service,
|
||||
),
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import (
|
||||
Chat,
|
||||
)
|
||||
|
||||
chat = Chat(mock_provider)
|
||||
|
||||
assert chat.policies_fetched is True
|
||||
assert chat.policies.external_file_sharing is None
|
||||
assert chat.policies.allow_external_chat is None
|
||||
assert chat.policies.enable_apps is None
|
||||
assert chat.policies.enable_webhooks is None
|
||||
|
||||
def test_chat_fetch_policies_api_error(self):
|
||||
"""Test handling of API errors during policy fetch"""
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
mock_provider.audit_config = {}
|
||||
mock_provider.fixer_config = {}
|
||||
mock_session = MagicMock()
|
||||
mock_session.credentials = MagicMock()
|
||||
mock_provider.session = mock_session
|
||||
|
||||
mock_service = MagicMock()
|
||||
mock_service.policies().list.side_effect = Exception("API Error")
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_service.GoogleWorkspaceService._build_service",
|
||||
return_value=mock_service,
|
||||
),
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import (
|
||||
Chat,
|
||||
)
|
||||
|
||||
chat = Chat(mock_provider)
|
||||
|
||||
assert chat.policies_fetched is False
|
||||
assert chat.policies.external_file_sharing is None
|
||||
|
||||
def test_chat_fetch_policies_build_service_returns_none(self):
|
||||
"""Test early return when _build_service fails to construct the client"""
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
mock_provider.audit_config = {}
|
||||
mock_provider.fixer_config = {}
|
||||
mock_session = MagicMock()
|
||||
mock_session.credentials = MagicMock()
|
||||
mock_provider.session = mock_session
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_service.GoogleWorkspaceService._build_service",
|
||||
return_value=None,
|
||||
),
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import (
|
||||
Chat,
|
||||
)
|
||||
|
||||
chat = Chat(mock_provider)
|
||||
|
||||
assert chat.policies_fetched is False
|
||||
assert chat.policies.external_file_sharing is None
|
||||
|
||||
def test_chat_fetch_policies_execute_raises(self):
|
||||
"""Test inner except handler when request.execute() raises during pagination"""
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
mock_provider.audit_config = {}
|
||||
mock_provider.fixer_config = {}
|
||||
mock_session = MagicMock()
|
||||
mock_session.credentials = MagicMock()
|
||||
mock_provider.session = mock_session
|
||||
|
||||
mock_service = MagicMock()
|
||||
mock_request = MagicMock()
|
||||
mock_request.execute.side_effect = Exception("Execute failed")
|
||||
mock_service.policies().list.return_value = mock_request
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_service.GoogleWorkspaceService._build_service",
|
||||
return_value=mock_service,
|
||||
),
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import (
|
||||
Chat,
|
||||
)
|
||||
|
||||
chat = Chat(mock_provider)
|
||||
|
||||
assert chat.policies_fetched is False
|
||||
assert chat.policies.external_file_sharing is None
|
||||
|
||||
def test_chat_fetch_policies_ignores_ou_and_group_level(self):
|
||||
"""Test that OU-level and group-level policies are skipped, only customer-level used"""
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
mock_provider.audit_config = {}
|
||||
mock_provider.fixer_config = {}
|
||||
mock_session = MagicMock()
|
||||
mock_session.credentials = MagicMock()
|
||||
mock_provider.session = mock_session
|
||||
|
||||
mock_service = MagicMock()
|
||||
mock_policies_list = MagicMock()
|
||||
mock_policies_list.execute.return_value = {
|
||||
"policies": [
|
||||
{
|
||||
# Customer-level: no policyQuery → should be used
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_apps_access",
|
||||
"value": {"enableApps": False, "enableWebhooks": False},
|
||||
}
|
||||
},
|
||||
{
|
||||
# OU-level: has policyQuery.orgUnit → should be skipped
|
||||
"policyQuery": {"orgUnit": "orgUnits/sales_team"},
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_apps_access",
|
||||
"value": {"enableApps": True, "enableWebhooks": True},
|
||||
},
|
||||
},
|
||||
{
|
||||
# Group-level: has policyQuery.group → should be skipped
|
||||
"policyQuery": {"group": "groups/contractors"},
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_file_sharing",
|
||||
"value": {
|
||||
"externalFileSharing": "ALL_FILES",
|
||||
"internalFileSharing": "ALL_FILES",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
# Customer-level: no policyQuery → should be used
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_file_sharing",
|
||||
"value": {
|
||||
"externalFileSharing": "NO_FILES",
|
||||
"internalFileSharing": "NO_FILES",
|
||||
},
|
||||
}
|
||||
},
|
||||
]
|
||||
}
|
||||
mock_service.policies().list.return_value = mock_policies_list
|
||||
mock_service.policies().list_next.return_value = None
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_service.GoogleWorkspaceService._build_service",
|
||||
return_value=mock_service,
|
||||
),
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import (
|
||||
Chat,
|
||||
)
|
||||
|
||||
chat = Chat(mock_provider)
|
||||
|
||||
assert chat.policies_fetched is True
|
||||
assert chat.policies.enable_apps is False
|
||||
assert chat.policies.external_file_sharing == "NO_FILES"
|
||||
|
||||
def test_chat_fetch_policies_accepts_root_ou(self):
|
||||
"""Test that root-OU-scoped policies are accepted as customer-level"""
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
mock_provider.audit_config = {}
|
||||
mock_provider.fixer_config = {}
|
||||
mock_session = MagicMock()
|
||||
mock_session.credentials = MagicMock()
|
||||
mock_provider.session = mock_session
|
||||
|
||||
mock_service = MagicMock()
|
||||
mock_policies_list = MagicMock()
|
||||
mock_policies_list.execute.return_value = {
|
||||
"policies": [
|
||||
{
|
||||
# Root OU: matches provider's root_org_unit_id → should be accepted
|
||||
"policyQuery": {"orgUnit": f"orgUnits/{ROOT_ORG_UNIT_ID}"},
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_apps_access",
|
||||
"value": {"enableApps": False, "enableWebhooks": True},
|
||||
},
|
||||
},
|
||||
{
|
||||
# Sub-OU: different orgUnit → should be skipped
|
||||
"policyQuery": {"orgUnit": "orgUnits/sub_ou_sales"},
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_file_sharing",
|
||||
"value": {
|
||||
"externalFileSharing": "ALL_FILES",
|
||||
"internalFileSharing": "ALL_FILES",
|
||||
},
|
||||
},
|
||||
},
|
||||
]
|
||||
}
|
||||
mock_service.policies().list.return_value = mock_policies_list
|
||||
mock_service.policies().list_next.return_value = None
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_service.GoogleWorkspaceService._build_service",
|
||||
return_value=mock_service,
|
||||
),
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import (
|
||||
Chat,
|
||||
)
|
||||
|
||||
chat = Chat(mock_provider)
|
||||
|
||||
assert chat.policies_fetched is True
|
||||
# Root OU policy accepted
|
||||
assert chat.policies.enable_apps is False
|
||||
assert chat.policies.enable_webhooks is True
|
||||
# Sub-OU policy skipped
|
||||
assert chat.policies.external_file_sharing is None
|
||||
|
||||
def test_chat_partial_fetch_marks_policies_fetched_false(self):
|
||||
"""Regression: if page 1 returns valid data but page 2 raises an error,
|
||||
policies_fetched must be False even though some policy values were stored."""
|
||||
mock_provider = set_mocked_googleworkspace_provider()
|
||||
mock_provider.audit_config = {}
|
||||
mock_provider.fixer_config = {}
|
||||
mock_session = MagicMock()
|
||||
mock_session.credentials = MagicMock()
|
||||
mock_provider.session = mock_session
|
||||
|
||||
mock_service = MagicMock()
|
||||
|
||||
# Page 1: returns valid Chat data
|
||||
page1_response = {
|
||||
"policies": [
|
||||
{
|
||||
"setting": {
|
||||
"type": "settings/chat.chat_apps_access",
|
||||
"value": {"enableApps": False, "enableWebhooks": False},
|
||||
}
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
# Page 2 request raises HttpError 429
|
||||
page1_request = MagicMock()
|
||||
page1_request.execute.return_value = page1_response
|
||||
|
||||
page2_request = MagicMock()
|
||||
page2_request.execute.side_effect = HttpError(
|
||||
HttpResponse({"status": "429"}), b"Rate limit exceeded"
|
||||
)
|
||||
|
||||
mock_service.policies().list.return_value = page1_request
|
||||
mock_service.policies().list_next.return_value = page2_request
|
||||
|
||||
with (
|
||||
patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=mock_provider,
|
||||
),
|
||||
patch(
|
||||
"prowler.providers.googleworkspace.services.chat.chat_service.GoogleWorkspaceService._build_service",
|
||||
return_value=mock_service,
|
||||
),
|
||||
):
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import (
|
||||
Chat,
|
||||
)
|
||||
|
||||
chat = Chat(mock_provider)
|
||||
|
||||
# Page 1 data was stored
|
||||
assert chat.policies.enable_apps is False
|
||||
# But policies_fetched must be False because page 2 failed
|
||||
assert chat.policies_fetched is False
|
||||
|
||||
def test_chat_policies_model(self):
|
||||
"""Test ChatPolicies Pydantic model"""
|
||||
from prowler.providers.googleworkspace.services.chat.chat_service import (
|
||||
ChatPolicies,
|
||||
)
|
||||
|
||||
policies = ChatPolicies(
|
||||
external_file_sharing="NO_FILES",
|
||||
internal_file_sharing="IMAGES_ONLY",
|
||||
allow_external_chat=True,
|
||||
external_chat_restriction="TRUSTED_DOMAINS",
|
||||
external_spaces_enabled=True,
|
||||
external_spaces_domain_allowlist_mode="TRUSTED_DOMAINS",
|
||||
enable_apps=False,
|
||||
enable_webhooks=False,
|
||||
)
|
||||
|
||||
assert policies.external_file_sharing == "NO_FILES"
|
||||
assert policies.internal_file_sharing == "IMAGES_ONLY"
|
||||
assert policies.allow_external_chat is True
|
||||
assert policies.external_chat_restriction == "TRUSTED_DOMAINS"
|
||||
assert policies.external_spaces_enabled is True
|
||||
assert policies.external_spaces_domain_allowlist_mode == "TRUSTED_DOMAINS"
|
||||
assert policies.enable_apps is False
|
||||
assert policies.enable_webhooks is False
|
||||
@@ -8,7 +8,6 @@ All notable changes to the **Prowler UI** are documented in this file.
|
||||
|
||||
- UI health endpoint at `GET /api/health` for Docker Compose liveness checks [(#11145)](https://github.com/prowler-cloud/prowler/pull/11145)
|
||||
- AWS findings and resource details now expose a "View in AWS Console" link that opens the resource directly in the AWS Console via the universal `/go/view` ARN resolver. The per-provider external link is rendered by a new shared `ExternalResourceLink` component, which also covers the existing IaC repository link [(#9172)](https://github.com/prowler-cloud/prowler/pull/9172)
|
||||
- OCSF JSON download for universal compliance frameworks (DORA, CSA CCM), alongside the existing CSV and PDF actions [(#11131)](https://github.com/prowler-cloud/prowler/pull/11131)
|
||||
|
||||
### 🔄 Changed
|
||||
|
||||
|
||||
@@ -392,27 +392,6 @@ export const getComplianceCsv = async (scanId: string, complianceId: string) =>
|
||||
"compliance report",
|
||||
);
|
||||
|
||||
/**
|
||||
* Get the OCSF JSON export for a universal compliance framework.
|
||||
*
|
||||
* Only universal frameworks that declare an ``outputs`` block (today: DORA,
|
||||
* CSA CCM 4.0) produce a per-framework OCSF artifact. For any other framework
|
||||
* the backend returns 404; callers should gate this download via
|
||||
* ``isOcsfSupported(framework)``.
|
||||
*
|
||||
* NOTE: this is a dedicated path (``compliance/{id}/ocsf``), not a query
|
||||
* param. The API's JSON:API ``QueryParameterValidationFilter`` rejects any
|
||||
* non-JSON:API query param with 400, so ``?type=`` / ``?format=`` is not an
|
||||
* option — the format must be encoded in the route.
|
||||
*/
|
||||
export const getComplianceOcsf = async (scanId: string, complianceId: string) =>
|
||||
_fetchScanBinary(
|
||||
scanId,
|
||||
`compliance/${complianceId}/ocsf`,
|
||||
`scan-${scanId}-compliance-${complianceId}.ocsf.json`,
|
||||
"compliance OCSF report",
|
||||
);
|
||||
|
||||
/**
|
||||
* Get a compliance PDF report for any supported framework.
|
||||
*
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
import { Requirement } from "@/types/compliance";
|
||||
|
||||
import {
|
||||
ComplianceBadge,
|
||||
ComplianceBadgeContainer,
|
||||
ComplianceDetailContainer,
|
||||
ComplianceDetailSection,
|
||||
ComplianceDetailText,
|
||||
} from "./shared-components";
|
||||
|
||||
interface DORADetailsProps {
|
||||
requirement: Requirement;
|
||||
}
|
||||
|
||||
export const DORACustomDetails = ({ requirement }: DORADetailsProps) => {
|
||||
return (
|
||||
<ComplianceDetailContainer>
|
||||
{requirement.description && (
|
||||
<ComplianceDetailSection title="Description">
|
||||
<ComplianceDetailText>{requirement.description}</ComplianceDetailText>
|
||||
</ComplianceDetailSection>
|
||||
)}
|
||||
|
||||
<ComplianceBadgeContainer>
|
||||
{requirement.pillar && (
|
||||
<ComplianceBadge
|
||||
label="Pillar"
|
||||
value={requirement.pillar as string}
|
||||
color="blue"
|
||||
/>
|
||||
)}
|
||||
{requirement.article && (
|
||||
<ComplianceBadge
|
||||
label="Article"
|
||||
value={requirement.article as string}
|
||||
color="indigo"
|
||||
/>
|
||||
)}
|
||||
{requirement.article_title && (
|
||||
<ComplianceBadge
|
||||
label="Article Title"
|
||||
value={requirement.article_title as string}
|
||||
color="gray"
|
||||
/>
|
||||
)}
|
||||
</ComplianceBadgeContainer>
|
||||
</ComplianceDetailContainer>
|
||||
);
|
||||
};
|
||||
@@ -6,19 +6,15 @@ import { render, screen } from "@testing-library/react";
|
||||
import userEvent from "@testing-library/user-event";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const {
|
||||
downloadComplianceCsvMock,
|
||||
downloadComplianceOcsfMock,
|
||||
downloadCompliancePdfMock,
|
||||
} = vi.hoisted(() => ({
|
||||
downloadComplianceCsvMock: vi.fn(),
|
||||
downloadComplianceOcsfMock: vi.fn(),
|
||||
downloadCompliancePdfMock: vi.fn(),
|
||||
}));
|
||||
const { downloadComplianceCsvMock, downloadCompliancePdfMock } = vi.hoisted(
|
||||
() => ({
|
||||
downloadComplianceCsvMock: vi.fn(),
|
||||
downloadCompliancePdfMock: vi.fn(),
|
||||
}),
|
||||
);
|
||||
|
||||
vi.mock("@/lib/helper", () => ({
|
||||
downloadComplianceCsv: downloadComplianceCsvMock,
|
||||
downloadComplianceOcsf: downloadComplianceOcsfMock,
|
||||
downloadCompliancePdf: downloadCompliancePdfMock,
|
||||
}));
|
||||
|
||||
@@ -135,51 +131,4 @@ describe("ComplianceDownloadContainer", () => {
|
||||
{},
|
||||
);
|
||||
});
|
||||
|
||||
it("should hide the OCSF action for frameworks without OCSF support", async () => {
|
||||
const user = userEvent.setup();
|
||||
|
||||
render(
|
||||
<ComplianceDownloadContainer
|
||||
compact
|
||||
presentation="dropdown"
|
||||
scanId="scan-1"
|
||||
complianceId="compliance-1"
|
||||
/>,
|
||||
);
|
||||
|
||||
await user.click(
|
||||
screen.getByRole("button", { name: "Open compliance export actions" }),
|
||||
);
|
||||
|
||||
expect(screen.queryByText("Download OCSF report")).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("should surface and trigger the OCSF download for universal frameworks", async () => {
|
||||
const user = userEvent.setup();
|
||||
|
||||
render(
|
||||
<ComplianceDownloadContainer
|
||||
compact
|
||||
presentation="dropdown"
|
||||
scanId="scan-1"
|
||||
complianceId="dora"
|
||||
/>,
|
||||
);
|
||||
|
||||
await user.click(
|
||||
screen.getByRole("button", { name: "Open compliance export actions" }),
|
||||
);
|
||||
expect(screen.getByText("Download OCSF report")).toBeInTheDocument();
|
||||
|
||||
await user.click(
|
||||
screen.getByRole("menuitem", { name: /Download OCSF report/i }),
|
||||
);
|
||||
|
||||
expect(downloadComplianceOcsfMock).toHaveBeenCalledWith(
|
||||
"scan-1",
|
||||
"dora",
|
||||
{},
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"use client";
|
||||
|
||||
import { DownloadIcon, FileJsonIcon, FileTextIcon } from "lucide-react";
|
||||
import { DownloadIcon, FileTextIcon } from "lucide-react";
|
||||
import { useState } from "react";
|
||||
|
||||
import { Button } from "@/components/shadcn/button/button";
|
||||
@@ -14,15 +14,8 @@ import {
|
||||
TooltipTrigger,
|
||||
} from "@/components/shadcn/tooltip";
|
||||
import { toast } from "@/components/ui";
|
||||
import {
|
||||
type ComplianceReportType,
|
||||
isOcsfSupported,
|
||||
} from "@/lib/compliance/compliance-report-types";
|
||||
import {
|
||||
downloadComplianceCsv,
|
||||
downloadComplianceOcsf,
|
||||
downloadCompliancePdf,
|
||||
} from "@/lib/helper";
|
||||
import type { ComplianceReportType } from "@/lib/compliance/compliance-report-types";
|
||||
import { downloadComplianceCsv, downloadCompliancePdf } from "@/lib/helper";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
interface ComplianceDownloadContainerProps {
|
||||
@@ -47,14 +40,9 @@ export const ComplianceDownloadContainer = ({
|
||||
presentation = "buttons",
|
||||
}: ComplianceDownloadContainerProps) => {
|
||||
const [isDownloadingCsv, setIsDownloadingCsv] = useState(false);
|
||||
const [isDownloadingOcsf, setIsDownloadingOcsf] = useState(false);
|
||||
const [isDownloadingPdf, setIsDownloadingPdf] = useState(false);
|
||||
const isIconWidth = buttonWidth === "icon";
|
||||
const isDropdown = presentation === "dropdown";
|
||||
// Only universal frameworks declaring an ``outputs`` block expose a
|
||||
// per-framework OCSF artifact (today: DORA, CSA CCM 4.0). Hide the
|
||||
// action everywhere else so the user never hits a guaranteed 404.
|
||||
const ocsfAvailable = isOcsfSupported(complianceId);
|
||||
|
||||
const handleDownloadCsv = async () => {
|
||||
if (isDownloadingCsv) return;
|
||||
@@ -66,16 +54,6 @@ export const ComplianceDownloadContainer = ({
|
||||
}
|
||||
};
|
||||
|
||||
const handleDownloadOcsf = async () => {
|
||||
if (!ocsfAvailable || isDownloadingOcsf) return;
|
||||
setIsDownloadingOcsf(true);
|
||||
try {
|
||||
await downloadComplianceOcsf(scanId, complianceId, toast);
|
||||
} finally {
|
||||
setIsDownloadingOcsf(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleDownloadPdf = async () => {
|
||||
if (!reportType || isDownloadingPdf) return;
|
||||
setIsDownloadingPdf(true);
|
||||
@@ -127,18 +105,6 @@ export const ComplianceDownloadContainer = ({
|
||||
onSelect={handleDownloadCsv}
|
||||
disabled={disabled || isDownloadingCsv}
|
||||
/>
|
||||
{ocsfAvailable && (
|
||||
<ActionDropdownItem
|
||||
icon={
|
||||
<FileJsonIcon
|
||||
className={isDownloadingOcsf ? "animate-download-icon" : ""}
|
||||
/>
|
||||
}
|
||||
label="Download OCSF report"
|
||||
onSelect={handleDownloadOcsf}
|
||||
disabled={disabled || isDownloadingOcsf}
|
||||
/>
|
||||
)}
|
||||
{reportType && (
|
||||
<ActionDropdownItem
|
||||
icon={
|
||||
@@ -186,29 +152,6 @@ export const ComplianceDownloadContainer = ({
|
||||
<TooltipContent>Download CSV report</TooltipContent>
|
||||
)}
|
||||
</Tooltip>
|
||||
{ocsfAvailable && (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
size="sm"
|
||||
variant="outline"
|
||||
className={buttonClassName}
|
||||
onClick={handleDownloadOcsf}
|
||||
disabled={disabled || isDownloadingOcsf}
|
||||
aria-label="Download compliance OCSF report"
|
||||
>
|
||||
<FileJsonIcon
|
||||
size={14}
|
||||
className={isDownloadingOcsf ? "animate-download-icon" : ""}
|
||||
/>
|
||||
<span className={labelClassName}>OCSF</span>
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
{showTooltip && (
|
||||
<TooltipContent>Download OCSF report</TooltipContent>
|
||||
)}
|
||||
</Tooltip>
|
||||
)}
|
||||
{reportType && (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
|
||||
@@ -6,7 +6,6 @@ import CCCLogo from "./ccc.svg";
|
||||
import CISLogo from "./cis.svg";
|
||||
import CISALogo from "./cisa.svg";
|
||||
import CSALogo from "./csa.svg";
|
||||
import DORALogo from "./dora.svg";
|
||||
import ENSLogo from "./ens.png";
|
||||
import FedRAMPLogo from "./fedramp.svg";
|
||||
import FFIECLogo from "./ffiec.svg";
|
||||
@@ -68,9 +67,6 @@ const COMPLIANCE_LOGOS = [
|
||||
["c5", C5Logo],
|
||||
["ccc", CCCLogo],
|
||||
["csa", CSALogo],
|
||||
// DORA — universal framework (`prowler/compliance/dora.json`). The
|
||||
// compliance_id is just `dora`, no provider suffix.
|
||||
["dora", DORALogo],
|
||||
["secnumcloud", ANSSILogo],
|
||||
["aws", AWSLogo],
|
||||
] as const;
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 400 170" fill="none">
|
||||
<defs>
|
||||
<linearGradient id="doraGradient" x1="0%" y1="0%" x2="100%" y2="0%">
|
||||
<stop offset="0%" style="stop-color:#003399"/>
|
||||
<stop offset="100%" style="stop-color:#0055A5"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g>
|
||||
<rect x="0" y="20" width="400" height="130" rx="16" fill="url(#doraGradient)"/>
|
||||
<text x="200" y="100" font-family="Helvetica, Arial, sans-serif" font-size="76" font-weight="700" fill="#FFFFFF" text-anchor="middle" letter-spacing="6">DORA</text>
|
||||
<text x="200" y="135" font-family="Helvetica, Arial, sans-serif" font-size="14" font-weight="500" fill="#FFD700" text-anchor="middle" letter-spacing="3">EU 2022/2554</text>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 747 B |
@@ -6,7 +6,6 @@ import { C5CustomDetails } from "@/components/compliance/compliance-custom-detai
|
||||
import { CCCCustomDetails } from "@/components/compliance/compliance-custom-details/ccc-details";
|
||||
import { CISCustomDetails } from "@/components/compliance/compliance-custom-details/cis-details";
|
||||
import { CSACustomDetails } from "@/components/compliance/compliance-custom-details/csa-details";
|
||||
import { DORACustomDetails } from "@/components/compliance/compliance-custom-details/dora-details";
|
||||
import { ENSCustomDetails } from "@/components/compliance/compliance-custom-details/ens-details";
|
||||
import { GenericCustomDetails } from "@/components/compliance/compliance-custom-details/generic-details";
|
||||
import { ISOCustomDetails } from "@/components/compliance/compliance-custom-details/iso-details";
|
||||
@@ -48,10 +47,6 @@ import {
|
||||
mapComplianceData as mapCSAComplianceData,
|
||||
toAccordionItems as toCSAAccordionItems,
|
||||
} from "./csa";
|
||||
import {
|
||||
mapComplianceData as mapDORAComplianceData,
|
||||
toAccordionItems as toDORAAccordionItems,
|
||||
} from "./dora";
|
||||
import {
|
||||
mapComplianceData as mapENSComplianceData,
|
||||
toAccordionItems as toENSAccordionItems,
|
||||
@@ -213,19 +208,6 @@ const getComplianceMappers = (): Record<string, ComplianceMapper> => ({
|
||||
getDetailsComponent: (requirement: Requirement) =>
|
||||
createElement(CSACustomDetails, { requirement }),
|
||||
},
|
||||
// DORA (Regulation (EU) 2022/2554) — universal framework keyed by the
|
||||
// `framework` field of `prowler/compliance/dora.json` ("DORA"). Groups by
|
||||
// Pillar (5 enum values) and surfaces Pillar / Article / ArticleTitle in
|
||||
// the requirement detail drawer.
|
||||
DORA: {
|
||||
mapComplianceData: mapDORAComplianceData,
|
||||
toAccordionItems: toDORAAccordionItems,
|
||||
getTopFailedSections,
|
||||
calculateCategoryHeatmapData: (data: Framework[]) =>
|
||||
calculateCategoryHeatmapData(data),
|
||||
getDetailsComponent: (requirement: Requirement) =>
|
||||
createElement(DORACustomDetails, { requirement }),
|
||||
},
|
||||
});
|
||||
|
||||
/**
|
||||
|
||||
@@ -4,7 +4,6 @@ import {
|
||||
COMPLIANCE_REPORT_TYPES,
|
||||
getReportTypeForCompliance,
|
||||
getReportTypeForFramework,
|
||||
isOcsfSupported,
|
||||
pickLatestCisPerProvider,
|
||||
} from "./compliance-report-types";
|
||||
|
||||
@@ -35,24 +34,6 @@ describe("getReportTypeForFramework", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("isOcsfSupported", () => {
|
||||
it("returns true for universal frameworks shipping an OCSF artifact", () => {
|
||||
expect(isOcsfSupported("dora")).toBe(true);
|
||||
expect(isOcsfSupported("csa_ccm_4.0")).toBe(true);
|
||||
});
|
||||
|
||||
it("returns false for legacy/per-provider frameworks without OCSF output", () => {
|
||||
expect(isOcsfSupported("cis_5.0_aws")).toBe(false);
|
||||
expect(isOcsfSupported("ens_rd2022_aws")).toBe(false);
|
||||
expect(isOcsfSupported("nis2_aws")).toBe(false);
|
||||
});
|
||||
|
||||
it("returns false for missing or empty inputs", () => {
|
||||
expect(isOcsfSupported(undefined)).toBe(false);
|
||||
expect(isOcsfSupported("")).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("pickLatestCisPerProvider", () => {
|
||||
it("returns an empty set for an empty input", () => {
|
||||
const latest = pickLatestCisPerProvider([]);
|
||||
|
||||
@@ -161,30 +161,6 @@ export const pickLatestCisPerProvider = (
|
||||
return latest;
|
||||
};
|
||||
|
||||
/**
|
||||
* Compliance IDs that ship a per-framework OCSF JSON export.
|
||||
*
|
||||
* Only universal compliance frameworks that declare an ``outputs`` block in
|
||||
* their schema (see ``prowler/compliance/<name>.json``) produce a dedicated
|
||||
* OCSF artifact during scan output generation. Today that is DORA and
|
||||
* CSA CCM 4.0. Any other framework only offers CSV (and, for the curated
|
||||
* list above, PDF).
|
||||
*
|
||||
* Keep this Set in lock-step with the backend: ``get_prowler_provider_compliance``
|
||||
* + ``ComplianceFramework.outputs`` is the source of truth. The API will
|
||||
* 404 on ``GET /scans/{id}/compliance/{name}/ocsf`` for any framework not
|
||||
* in this set, so showing the OCSF button for an unsupported framework
|
||||
* would surface a broken download — gate every call site through
|
||||
* ``isOcsfSupported``.
|
||||
*/
|
||||
const OCSF_SUPPORTED_COMPLIANCE_IDS: ReadonlySet<string> = new Set([
|
||||
"dora",
|
||||
"csa_ccm_4.0",
|
||||
]);
|
||||
|
||||
export const isOcsfSupported = (complianceId: string | undefined): boolean =>
|
||||
!!complianceId && OCSF_SUPPORTED_COMPLIANCE_IDS.has(complianceId);
|
||||
|
||||
/**
|
||||
* Resolve the report type for a compliance card.
|
||||
*
|
||||
|
||||
@@ -1,154 +0,0 @@
|
||||
import { ClientAccordionContent } from "@/components/compliance/compliance-accordion/client-accordion-content";
|
||||
import { ComplianceAccordionRequirementTitle } from "@/components/compliance/compliance-accordion/compliance-accordion-requeriment-title";
|
||||
import { ComplianceAccordionTitle } from "@/components/compliance/compliance-accordion/compliance-accordion-title";
|
||||
import { AccordionItemProps } from "@/components/ui/accordion/Accordion";
|
||||
import { FindingStatus } from "@/components/ui/table/status-finding-badge";
|
||||
import {
|
||||
AttributesData,
|
||||
DORAAttributesMetadata,
|
||||
Framework,
|
||||
Requirement,
|
||||
REQUIREMENT_STATUS,
|
||||
RequirementsData,
|
||||
RequirementStatus,
|
||||
} from "@/types/compliance";
|
||||
|
||||
import {
|
||||
calculateFrameworkCounters,
|
||||
createRequirementsMap,
|
||||
findOrCreateCategory,
|
||||
findOrCreateControl,
|
||||
findOrCreateFramework,
|
||||
} from "./commons";
|
||||
|
||||
// Display order for DORA pillars in the accordion and any grouped chart. The
|
||||
// regulation arranges them in this exact order (Articles 5-14, 17-19, 24-25,
|
||||
// 28+30, 45) — preserving it here means the UI always renders pillars in the
|
||||
// "logical" reading order regardless of how the API returns them.
|
||||
export const DORA_PILLAR_ORDER: readonly string[] = [
|
||||
"ICT Risk Management",
|
||||
"ICT-Related Incident Reporting",
|
||||
"Digital Operational Resilience Testing",
|
||||
"ICT Third-Party Risk Management",
|
||||
"Information Sharing",
|
||||
];
|
||||
|
||||
const getStatusCounters = (status: RequirementStatus) => ({
|
||||
pass: status === REQUIREMENT_STATUS.PASS ? 1 : 0,
|
||||
fail: status === REQUIREMENT_STATUS.FAIL ? 1 : 0,
|
||||
manual: status === REQUIREMENT_STATUS.MANUAL ? 1 : 0,
|
||||
});
|
||||
|
||||
export const mapComplianceData = (
|
||||
attributesData: AttributesData,
|
||||
requirementsData: RequirementsData,
|
||||
): Framework[] => {
|
||||
const attributes = attributesData?.data || [];
|
||||
const requirementsMap = createRequirementsMap(requirementsData);
|
||||
const frameworks: Framework[] = [];
|
||||
|
||||
for (const attributeItem of attributes) {
|
||||
const id = attributeItem.id;
|
||||
const metadataArray = attributeItem.attributes?.attributes
|
||||
?.metadata as unknown as DORAAttributesMetadata[];
|
||||
const attrs = metadataArray?.[0];
|
||||
if (!attrs) continue;
|
||||
|
||||
const requirementData = requirementsMap.get(id);
|
||||
if (!requirementData) continue;
|
||||
|
||||
const frameworkName = attributeItem.attributes.framework;
|
||||
// Group by Pillar (top-level accordion section). Article + ArticleTitle
|
||||
// live inside the requirement so they show up on the detail drawer.
|
||||
const categoryName = attrs.Pillar;
|
||||
const requirementName = attributeItem.attributes.name || "";
|
||||
const description = attributeItem.attributes.description;
|
||||
const status = requirementData.attributes.status || "";
|
||||
const checks = attributeItem.attributes.attributes.check_ids || [];
|
||||
|
||||
const framework = findOrCreateFramework(frameworks, frameworkName);
|
||||
const category = findOrCreateCategory(framework.categories, categoryName);
|
||||
// Flat 2-level structure: pillar → requirements (no intermediate control).
|
||||
const control = findOrCreateControl(category.controls, categoryName);
|
||||
|
||||
const finalStatus: RequirementStatus = status as RequirementStatus;
|
||||
const requirement: Requirement = {
|
||||
name: requirementName ? `${id} - ${requirementName}` : id,
|
||||
description,
|
||||
status: finalStatus,
|
||||
check_ids: checks,
|
||||
...getStatusCounters(finalStatus),
|
||||
pillar: attrs.Pillar,
|
||||
article: attrs.Article,
|
||||
article_title: attrs.ArticleTitle,
|
||||
};
|
||||
|
||||
control.requirements.push(requirement);
|
||||
}
|
||||
|
||||
// Sort categories by canonical pillar order so DORA always reads from "ICT
|
||||
// Risk Management" down to "Information Sharing", regardless of map insertion
|
||||
// order driven by the API response.
|
||||
for (const framework of frameworks) {
|
||||
framework.categories.sort((a, b) => {
|
||||
const ia = DORA_PILLAR_ORDER.indexOf(a.name);
|
||||
const ib = DORA_PILLAR_ORDER.indexOf(b.name);
|
||||
// Unknown pillars (defensive — shouldn't happen) sink to the bottom.
|
||||
const orderA = ia === -1 ? DORA_PILLAR_ORDER.length : ia;
|
||||
const orderB = ib === -1 ? DORA_PILLAR_ORDER.length : ib;
|
||||
return orderA - orderB;
|
||||
});
|
||||
}
|
||||
|
||||
calculateFrameworkCounters(frameworks);
|
||||
|
||||
return frameworks;
|
||||
};
|
||||
|
||||
export const toAccordionItems = (
|
||||
data: Framework[],
|
||||
scanId: string | undefined,
|
||||
): AccordionItemProps[] => {
|
||||
const safeId = scanId || "";
|
||||
|
||||
return data.flatMap((framework) =>
|
||||
framework.categories.map((category) => ({
|
||||
key: `${framework.name}-${category.name}`,
|
||||
title: (
|
||||
<ComplianceAccordionTitle
|
||||
label={category.name}
|
||||
pass={category.pass}
|
||||
fail={category.fail}
|
||||
manual={category.manual}
|
||||
isParentLevel={true}
|
||||
/>
|
||||
),
|
||||
content: "",
|
||||
// Pillar → requirements (flat, no intermediate "control" level).
|
||||
items: category.controls.flatMap((control) =>
|
||||
control.requirements.map((requirement, reqIndex) => ({
|
||||
key: `${framework.name}-${category.name}-req-${reqIndex}`,
|
||||
title: (
|
||||
<ComplianceAccordionRequirementTitle
|
||||
type=""
|
||||
name={requirement.name}
|
||||
status={requirement.status as FindingStatus}
|
||||
/>
|
||||
),
|
||||
content: (
|
||||
<ClientAccordionContent
|
||||
key={`content-${framework.name}-${category.name}-req-${reqIndex}`}
|
||||
requirement={requirement}
|
||||
scanId={safeId}
|
||||
framework={framework.name}
|
||||
disableFindings={
|
||||
requirement.check_ids.length === 0 && requirement.manual === 0
|
||||
}
|
||||
/>
|
||||
),
|
||||
items: [],
|
||||
})),
|
||||
),
|
||||
})),
|
||||
);
|
||||
};
|
||||
@@ -1,6 +1,5 @@
|
||||
import {
|
||||
getComplianceCsv,
|
||||
getComplianceOcsf,
|
||||
getCompliancePdfReport,
|
||||
getExportsZip,
|
||||
type ScanBinaryResult,
|
||||
@@ -231,32 +230,6 @@ export const downloadComplianceCsv = async (
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Download the per-framework OCSF JSON export.
|
||||
*
|
||||
* Only universal frameworks declaring an ``outputs`` block produce this
|
||||
* artifact (currently DORA and CSA CCM 4.0); callers must gate the call
|
||||
* via ``isOcsfSupported`` to avoid surfacing a broken download on
|
||||
* frameworks the API will 404 on.
|
||||
*/
|
||||
export const downloadComplianceOcsf = async (
|
||||
scanId: string,
|
||||
complianceId: string,
|
||||
toast: ReturnType<typeof useToast>["toast"],
|
||||
): Promise<void> => {
|
||||
toast({
|
||||
title: "Download Started",
|
||||
description: "Preparing the OCSF report. This may take a moment.",
|
||||
});
|
||||
const result = await getComplianceOcsf(scanId, complianceId);
|
||||
await downloadFile(
|
||||
result,
|
||||
"application/json",
|
||||
"The compliance OCSF report has been downloaded successfully.",
|
||||
toast,
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
* Download a compliance PDF report.
|
||||
*
|
||||
|
||||
@@ -327,31 +327,6 @@ export interface ASDEssentialEightRequirement extends Requirement {
|
||||
references: ASDEssentialEightAttributesMetadata["References"];
|
||||
}
|
||||
|
||||
// DORA (Digital Operational Resilience Act, Regulation (EU) 2022/2554).
// Universal framework — flat attributes dict with Pillar/Article/ArticleTitle.
// `Pillar` is the canonical grouping key for tables and PDF; the enum mirrors
// the five DORA pillars declared in `prowler/compliance/dora.json`.
export const DORA_PILLAR = {
  ICT_RISK_MANAGEMENT: "ICT Risk Management",
  INCIDENT_REPORTING: "ICT-Related Incident Reporting",
  RESILIENCE_TESTING: "Digital Operational Resilience Testing",
  THIRD_PARTY_RISK: "ICT Third-Party Risk Management",
  INFORMATION_SHARING: "Information Sharing",
} as const;

// Union of the pillar display strings above (the object's *values*, not its
// keys) — e.g. "ICT Risk Management" | "Information Sharing" | …
export type DORAPillar = (typeof DORA_PILLAR)[keyof typeof DORA_PILLAR];
|
||||
|
||||
// Raw attribute payload attached to each DORA requirement; capitalized keys
// match the upstream compliance JSON rather than the UI's snake_case style.
export interface DORAAttributesMetadata {
  // Canonical grouping key — one of the five DORA pillar display strings.
  Pillar: DORAPillar;
  // Article identifier within the regulation.
  Article: string;
  // Human-readable title of the referenced article.
  ArticleTitle: string;
}
||||
|
||||
// DORA requirement as consumed by the UI: the base Requirement fields plus
// the attribute metadata lowered onto snake_case properties.
export interface DORARequirement extends Requirement {
  pillar: DORAAttributesMetadata["Pillar"];
  article: DORAAttributesMetadata["Article"];
  article_title: DORAAttributesMetadata["ArticleTitle"];
}
|
||||
|
||||
export interface AttributesItemData {
|
||||
type: "compliance-requirements-attributes";
|
||||
id: string;
|
||||
@@ -374,7 +349,6 @@ export interface AttributesItemData {
|
||||
| CCCAttributesMetadata[]
|
||||
| CSAAttributesMetadata[]
|
||||
| ASDEssentialEightAttributesMetadata[]
|
||||
| DORAAttributesMetadata[]
|
||||
| GenericAttributesMetadata[];
|
||||
check_ids: string[];
|
||||
// MITRE structure
|
||||
|
||||
@@ -8,212 +8,6 @@ resolution-markers = [
|
||||
]
|
||||
|
||||
[manifest]
|
||||
constraints = [
|
||||
{ name = "about-time", specifier = "==4.2.1" },
|
||||
{ name = "aenum", specifier = "==3.1.17" },
|
||||
{ name = "aiofiles", specifier = "==24.1.0" },
|
||||
{ name = "aiohappyeyeballs", specifier = "==2.6.1" },
|
||||
{ name = "aiohttp", specifier = "==3.13.5" },
|
||||
{ name = "aiosignal", specifier = "==1.4.0" },
|
||||
{ name = "alibabacloud-actiontrail20200706", specifier = "==2.4.1" },
|
||||
{ name = "alibabacloud-credentials", specifier = "==1.0.3" },
|
||||
{ name = "alibabacloud-credentials-api", specifier = "==1.0.0" },
|
||||
{ name = "alibabacloud-cs20151215", specifier = "==6.1.0" },
|
||||
{ name = "alibabacloud-darabonba-array", specifier = "==0.1.0" },
|
||||
{ name = "alibabacloud-darabonba-encode-util", specifier = "==0.0.2" },
|
||||
{ name = "alibabacloud-darabonba-map", specifier = "==0.0.1" },
|
||||
{ name = "alibabacloud-darabonba-signature-util", specifier = "==0.0.4" },
|
||||
{ name = "alibabacloud-darabonba-string", specifier = "==0.0.4" },
|
||||
{ name = "alibabacloud-darabonba-time", specifier = "==0.0.1" },
|
||||
{ name = "alibabacloud-ecs20140526", specifier = "==7.2.5" },
|
||||
{ name = "alibabacloud-endpoint-util", specifier = "==0.0.4" },
|
||||
{ name = "alibabacloud-gateway-oss", specifier = "==0.0.17" },
|
||||
{ name = "alibabacloud-gateway-sls", specifier = "==0.4.2" },
|
||||
{ name = "alibabacloud-gateway-sls-util", specifier = "==0.4.1" },
|
||||
{ name = "alibabacloud-gateway-spi", specifier = "==0.0.3" },
|
||||
{ name = "alibabacloud-openapi-util", specifier = "==0.2.4" },
|
||||
{ name = "alibabacloud-oss-util", specifier = "==0.0.6" },
|
||||
{ name = "alibabacloud-oss20190517", specifier = "==1.0.6" },
|
||||
{ name = "alibabacloud-ram20150501", specifier = "==1.2.0" },
|
||||
{ name = "alibabacloud-sas20181203", specifier = "==6.1.0" },
|
||||
{ name = "alibabacloud-sts20150401", specifier = "==1.1.6" },
|
||||
{ name = "alibabacloud-tea", specifier = "==0.4.3" },
|
||||
{ name = "alibabacloud-tea-openapi", specifier = "==0.4.4" },
|
||||
{ name = "alibabacloud-tea-util", specifier = "==0.3.14" },
|
||||
{ name = "alibabacloud-tea-xml", specifier = "==0.0.3" },
|
||||
{ name = "alibabacloud-vpc20160428", specifier = "==6.13.0" },
|
||||
{ name = "aliyun-log-fastpb", specifier = "==0.3.0" },
|
||||
{ name = "annotated-types", specifier = "==0.7.0" },
|
||||
{ name = "antlr4-python3-runtime", specifier = "==4.13.2" },
|
||||
{ name = "anyio", specifier = "==4.13.0" },
|
||||
{ name = "apscheduler", specifier = "==3.11.2" },
|
||||
{ name = "astroid", specifier = "==3.3.11" },
|
||||
{ name = "async-timeout", specifier = "==5.0.1" },
|
||||
{ name = "attrs", specifier = "==26.1.0" },
|
||||
{ name = "aws-sam-translator", specifier = "==1.109.0" },
|
||||
{ name = "aws-xray-sdk", specifier = "==2.15.0" },
|
||||
{ name = "azure-common", specifier = "==1.1.28" },
|
||||
{ name = "azure-core", specifier = "==1.41.0" },
|
||||
{ name = "azure-mgmt-core", specifier = "==1.6.0" },
|
||||
{ name = "bandit", specifier = "==1.8.3" },
|
||||
{ name = "black", specifier = "==25.1.0" },
|
||||
{ name = "blinker", specifier = "==1.9.0" },
|
||||
{ name = "certifi", specifier = "==2026.4.22" },
|
||||
{ name = "cffi", specifier = "==2.0.0" },
|
||||
{ name = "cfn-lint", specifier = "==1.51.0" },
|
||||
{ name = "charset-normalizer", specifier = "==3.4.7" },
|
||||
{ name = "circuitbreaker", specifier = "==2.1.3" },
|
||||
{ name = "click", specifier = "==8.3.3" },
|
||||
{ name = "click-plugins", specifier = "==1.1.1.2" },
|
||||
{ name = "contextlib2", specifier = "==21.6.0" },
|
||||
{ name = "coverage", specifier = "==7.6.12" },
|
||||
{ name = "darabonba-core", specifier = "==1.0.5" },
|
||||
{ name = "decorator", specifier = "==5.2.1" },
|
||||
{ name = "dill", specifier = "==0.4.1" },
|
||||
{ name = "distro", specifier = "==1.9.0" },
|
||||
{ name = "dnspython", specifier = "==2.8.0" },
|
||||
{ name = "docker", specifier = "==7.1.0" },
|
||||
{ name = "dogpile-cache", specifier = "==1.5.0" },
|
||||
{ name = "durationpy", specifier = "==0.10" },
|
||||
{ name = "email-validator", specifier = "==2.2.0" },
|
||||
{ name = "exceptiongroup", specifier = "==1.3.1" },
|
||||
{ name = "execnet", specifier = "==2.1.2" },
|
||||
{ name = "filelock", specifier = "==3.20.3" },
|
||||
{ name = "flake8", specifier = "==7.1.2" },
|
||||
{ name = "flask", specifier = "==3.1.3" },
|
||||
{ name = "freezegun", specifier = "==1.5.1" },
|
||||
{ name = "frozenlist", specifier = "==1.8.0" },
|
||||
{ name = "google-api-core", specifier = "==2.30.3" },
|
||||
{ name = "google-auth", specifier = "==2.52.0" },
|
||||
{ name = "googleapis-common-protos", specifier = "==1.75.0" },
|
||||
{ name = "graphemeu", specifier = "==0.7.2" },
|
||||
{ name = "graphql-core", specifier = "==3.2.8" },
|
||||
{ name = "h11", specifier = "==0.16.0" },
|
||||
{ name = "hpack", specifier = "==4.1.0" },
|
||||
{ name = "httpcore", specifier = "==1.0.9" },
|
||||
{ name = "httplib2", specifier = "==0.31.2" },
|
||||
{ name = "httpx", specifier = "==0.28.1" },
|
||||
{ name = "hyperframe", specifier = "==6.1.0" },
|
||||
{ name = "iamdata", specifier = "==0.1.202605131" },
|
||||
{ name = "idna", specifier = "==3.15" },
|
||||
{ name = "importlib-metadata", specifier = "==8.7.1" },
|
||||
{ name = "iniconfig", specifier = "==2.3.0" },
|
||||
{ name = "iso8601", specifier = "==2.1.0" },
|
||||
{ name = "isodate", specifier = "==0.7.2" },
|
||||
{ name = "isort", specifier = "==6.1.0" },
|
||||
{ name = "itsdangerous", specifier = "==2.2.0" },
|
||||
{ name = "jinja2", specifier = "==3.1.6" },
|
||||
{ name = "jmespath", specifier = "==1.1.0" },
|
||||
{ name = "joserfc", specifier = "==1.6.5" },
|
||||
{ name = "jsonpatch", specifier = "==1.33" },
|
||||
{ name = "jsonpath-ng", specifier = "==1.8.0" },
|
||||
{ name = "jsonpointer", specifier = "==3.1.1" },
|
||||
{ name = "jsonschema-path", specifier = "==0.3.4" },
|
||||
{ name = "jsonschema-specifications", specifier = "==2025.9.1" },
|
||||
{ name = "jwcrypto", specifier = "==1.5.7" },
|
||||
{ name = "keystoneauth1", specifier = "==5.14.0" },
|
||||
{ name = "lazy-object-proxy", specifier = "==1.12.0" },
|
||||
{ name = "lz4", specifier = "==4.4.5" },
|
||||
{ name = "markdown-it-py", specifier = "==4.2.0" },
|
||||
{ name = "markupsafe", specifier = "==3.0.3" },
|
||||
{ name = "mccabe", specifier = "==0.7.0" },
|
||||
{ name = "mdurl", specifier = "==0.1.2" },
|
||||
{ name = "microsoft-kiota-authentication-azure", specifier = "==1.9.2" },
|
||||
{ name = "microsoft-kiota-http", specifier = "==1.9.2" },
|
||||
{ name = "microsoft-kiota-serialization-form", specifier = "==1.9.2" },
|
||||
{ name = "microsoft-kiota-serialization-json", specifier = "==1.9.2" },
|
||||
{ name = "microsoft-kiota-serialization-multipart", specifier = "==1.9.2" },
|
||||
{ name = "microsoft-kiota-serialization-text", specifier = "==1.9.2" },
|
||||
{ name = "mock", specifier = "==5.2.0" },
|
||||
{ name = "moto", specifier = "==5.1.11" },
|
||||
{ name = "mpmath", specifier = "==1.3.0" },
|
||||
{ name = "msal", specifier = "==1.36.0" },
|
||||
{ name = "msal-extensions", specifier = "==1.3.1" },
|
||||
{ name = "msgraph-core", specifier = "==1.3.8" },
|
||||
{ name = "msrest", specifier = "==0.7.1" },
|
||||
{ name = "multidict", specifier = "==6.7.1" },
|
||||
{ name = "multipart", specifier = "==1.3.1" },
|
||||
{ name = "mypy-extensions", specifier = "==1.1.0" },
|
||||
{ name = "narwhals", specifier = "==2.21.0" },
|
||||
{ name = "nest-asyncio", specifier = "==1.6.0" },
|
||||
{ name = "networkx", specifier = "==3.4.2" },
|
||||
{ name = "oauthlib", specifier = "==3.3.1" },
|
||||
{ name = "openapi-schema-validator", specifier = "==0.6.3" },
|
||||
{ name = "openapi-spec-validator", specifier = "==0.7.1" },
|
||||
{ name = "opentelemetry-api", specifier = "==1.41.1" },
|
||||
{ name = "opentelemetry-sdk", specifier = "==1.41.1" },
|
||||
{ name = "opentelemetry-semantic-conventions", specifier = "==0.62b1" },
|
||||
{ name = "os-service-types", specifier = "==1.8.2" },
|
||||
{ name = "packaging", specifier = "==26.2" },
|
||||
{ name = "pathable", specifier = "==0.4.4" },
|
||||
{ name = "pathspec", specifier = "==1.1.1" },
|
||||
{ name = "pbr", specifier = "==7.0.3" },
|
||||
{ name = "platformdirs", specifier = "==4.9.6" },
|
||||
{ name = "plotly", specifier = "==6.7.0" },
|
||||
{ name = "pluggy", specifier = "==1.6.0" },
|
||||
{ name = "prek", specifier = "==0.3.9" },
|
||||
{ name = "propcache", specifier = "==0.5.2" },
|
||||
{ name = "proto-plus", specifier = "==1.28.0" },
|
||||
{ name = "protobuf", specifier = "==7.34.1" },
|
||||
{ name = "psutil", specifier = "==7.2.2" },
|
||||
{ name = "py-partiql-parser", specifier = "==0.6.1" },
|
||||
{ name = "pyasn1", specifier = "==0.6.3" },
|
||||
{ name = "pyasn1-modules", specifier = "==0.4.2" },
|
||||
{ name = "pycodestyle", specifier = "==2.12.1" },
|
||||
{ name = "pycparser", specifier = "==3.0" },
|
||||
{ name = "pycryptodomex", specifier = "==3.23.0" },
|
||||
{ name = "pydantic-core", specifier = "==2.41.5" },
|
||||
{ name = "pydash", specifier = "==8.0.6" },
|
||||
{ name = "pyflakes", specifier = "==3.2.0" },
|
||||
{ name = "pygments", specifier = "==2.20.0" },
|
||||
{ name = "pyjwt", specifier = "==2.12.1" },
|
||||
{ name = "pylint", specifier = "==3.3.4" },
|
||||
{ name = "pynacl", specifier = "==1.6.2" },
|
||||
{ name = "pyopenssl", specifier = "==26.2.0" },
|
||||
{ name = "pyparsing", specifier = "==3.3.2" },
|
||||
{ name = "pytest", specifier = "==8.3.5" },
|
||||
{ name = "pytest-cov", specifier = "==6.0.0" },
|
||||
{ name = "pytest-env", specifier = "==1.1.5" },
|
||||
{ name = "pytest-randomly", specifier = "==3.16.0" },
|
||||
{ name = "pytest-xdist", specifier = "==3.6.1" },
|
||||
{ name = "pywin32", specifier = "==311" },
|
||||
{ name = "pyyaml", specifier = "==6.0.3" },
|
||||
{ name = "referencing", specifier = "==0.36.2" },
|
||||
{ name = "regex", specifier = "==2026.5.9" },
|
||||
{ name = "requests", specifier = "==2.34.0" },
|
||||
{ name = "requests-file", specifier = "==3.0.1" },
|
||||
{ name = "requests-oauthlib", specifier = "==2.0.0" },
|
||||
{ name = "requestsexceptions", specifier = "==1.4.0" },
|
||||
{ name = "responses", specifier = "==0.26.0" },
|
||||
{ name = "retrying", specifier = "==1.4.2" },
|
||||
{ name = "rfc3339-validator", specifier = "==0.1.4" },
|
||||
{ name = "rich", specifier = "==15.0.0" },
|
||||
{ name = "rpds-py", specifier = "==0.30.0" },
|
||||
{ name = "s3transfer", specifier = "==0.14.0" },
|
||||
{ name = "setuptools", specifier = "==82.0.1" },
|
||||
{ name = "six", specifier = "==1.17.0" },
|
||||
{ name = "sniffio", specifier = "==1.3.1" },
|
||||
{ name = "std-uritemplate", specifier = "==2.0.8" },
|
||||
{ name = "stevedore", specifier = "==5.7.0" },
|
||||
{ name = "sympy", specifier = "==1.14.0" },
|
||||
{ name = "tldextract", specifier = "==5.3.1" },
|
||||
{ name = "tomli", specifier = "==2.4.1" },
|
||||
{ name = "tomlkit", specifier = "==0.15.0" },
|
||||
{ name = "typing-extensions", specifier = "==4.15.0" },
|
||||
{ name = "typing-inspection", specifier = "==0.4.2" },
|
||||
{ name = "tzdata", specifier = "==2026.2" },
|
||||
{ name = "uritemplate", specifier = "==4.2.0" },
|
||||
{ name = "urllib3", specifier = "==2.7.0" },
|
||||
{ name = "vulture", specifier = "==2.14" },
|
||||
{ name = "websocket-client", specifier = "==1.9.0" },
|
||||
{ name = "werkzeug", specifier = "==3.1.8" },
|
||||
{ name = "wrapt", specifier = "==2.1.2" },
|
||||
{ name = "xlsxwriter", specifier = "==3.2.9" },
|
||||
{ name = "xmltodict", specifier = "==1.0.4" },
|
||||
{ name = "yarl", specifier = "==1.23.0" },
|
||||
{ name = "zipp", specifier = "==3.23.1" },
|
||||
{ name = "zstd", specifier = "==1.5.7.3" },
|
||||
]
|
||||
overrides = [{ name = "okta", specifier = "==3.4.2" }]
|
||||
|
||||
[[package]]
|
||||
@@ -1414,7 +1208,8 @@ source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aws-sam-translator" },
|
||||
{ name = "jsonpatch" },
|
||||
{ name = "networkx" },
|
||||
{ name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "networkx", version = "3.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" },
|
||||
{ name = "pyyaml" },
|
||||
{ name = "regex" },
|
||||
{ name = "sympy" },
|
||||
@@ -2824,11 +2619,27 @@ wheels = [
|
||||
name = "networkx"
|
||||
version = "3.4.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version < '3.11'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368, upload-time = "2024-10-21T12:39:38.695Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/54/dd730b32ea14ea797530a4479b2ed46a6fb250f682a9cfb997e968bf0261/networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f", size = 1723263, upload-time = "2024-10-21T12:39:36.247Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "networkx"
|
||||
version = "3.6.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.12'",
|
||||
"python_full_version == '3.11.*'",
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6a/51/63fe664f3908c97be9d2e4f1158eb633317598cfa6e1fc14af5383f17512/networkx-3.6.1.tar.gz", hash = "sha256:26b7c357accc0c8cde558ad486283728b65b6a95d85ee1cd66bafab4c8168509", size = 2517025, upload-time = "2025-12-08T17:02:39.908Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/c9/b2622292ea83fbb4ec318f5b9ab867d0a28ab43c5717bb85b0a5f6b3b0a4/networkx-3.6.1-py3-none-any.whl", hash = "sha256:d47fbf302e7d9cbbb9e2555a0d267983d2aa476bac30e90dfbe5669bd57f3762", size = 2068504, upload-time = "2025-12-08T17:02:38.159Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "2.0.2"
|
||||
|
||||