mirror of https://github.com/prowler-cloud/prowler.git
synced 2026-01-25 02:08:11 +00:00

Compare commits: feat/githu... → 5.9.2 (8 commits)
Commits:
- 9a46fca8dd
- 66e5a03f9f
- ccd561f0f1
- 9e1b78e64f
- 536f90ced3
- 5453c02fd4
- 230e11be8a
- 20625954a3
@@ -2,6 +2,20 @@

 All notable changes to the **Prowler API** are documented in this file.

+## [v1.10.2] (Prowler v5.9.2)
+
+### Changed
+
+- Optimized queries for resources views [(#8336)](https://github.com/prowler-cloud/prowler/pull/8336)
+
+---
+
+## [v1.10.1] (Prowler v5.9.1)
+
+### Fixed
+
+- Calculate failed findings during scans to prevent heavy database queries [(#8322)](https://github.com/prowler-cloud/prowler/pull/8322)
+
+---
+
 ## [v1.10.0] (Prowler v5.9.0)

 ### Added

@@ -12,7 +26,7 @@ All notable changes to the **Prowler API** are documented in this file.

 - `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported, to allow muting findings.
 - Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
 - Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
-- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
+- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)

 ### Fixed

 - Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
api/poetry.lock: 3,374 lines changed (generated file; diff suppressed because it is too large)
@@ -24,7 +24,7 @@ dependencies = [
     "drf-spectacular-jsonapi==0.5.1",
     "gunicorn==23.0.0",
     "lxml==5.3.2",
-    "prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
+    "prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.9",
     "psycopg2-binary==2.9.9",
     "pytest-celery[redis] (>=1.0.1,<2.0.0)",
     "sentry-sdk[django] (>=2.20.0,<3.0.0)",

@@ -38,7 +38,7 @@ name = "prowler-api"
 package-mode = false
 # Needed for the SDK compatibility
 requires-python = ">=3.11,<3.13"
-version = "1.10.0"
+version = "1.10.2"

 [project.scripts]
 celery = "src.backend.config.settings.celery"
@@ -0,0 +1,30 @@
+from functools import partial
+
+from django.db import migrations
+
+from api.db_utils import create_index_on_partitions, drop_index_on_partitions
+
+
+class Migration(migrations.Migration):
+    atomic = False
+
+    dependencies = [
+        ("api", "0039_resource_resources_failed_findings_idx"),
+    ]
+
+    operations = [
+        migrations.RunPython(
+            partial(
+                create_index_on_partitions,
+                parent_table="resource_finding_mappings",
+                index_name="rfm_tenant_resource_idx",
+                columns="tenant_id, resource_id",
+                method="BTREE",
+            ),
+            reverse_code=partial(
+                drop_index_on_partitions,
+                parent_table="resource_finding_mappings",
+                index_name="rfm_tenant_resource_idx",
+            ),
+        ),
+    ]
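Note: the `create_index_on_partitions` / `drop_index_on_partitions` helpers live in `api.db_utils` and their bodies are not part of this diff. A minimal, hypothetical sketch of what a partition-aware helper like this typically does, assuming it receives the standard `RunPython` arguments plus the keyword arguments bound by `partial` above (the real signature and SQL may differ):

from django.db import connection


def create_index_on_partitions(
    apps, schema_editor, parent_table, index_name, columns, method="BTREE"
):
    # Hypothetical sketch: enumerate the partitions of the parent table
    # from the PostgreSQL catalog, then index each partition separately.
    with connection.cursor() as cursor:
        cursor.execute(
            "SELECT inhrelid::regclass::text FROM pg_inherits"
            " WHERE inhparent = %s::regclass",
            [parent_table],
        )
        partitions = [row[0] for row in cursor.fetchall()]
    for number, partition in enumerate(partitions):
        with connection.cursor() as cursor:
            # CONCURRENTLY avoids blocking writes on each partition, which
            # is also why the migration above sets `atomic = False`.
            cursor.execute(
                f'CREATE INDEX CONCURRENTLY IF NOT EXISTS "{index_name}_{number}" '
                f"ON {partition} USING {method} ({columns})"
            )

Building the per-partition indexes first means the follow-up `AddIndex` migration on the parent table (below) can attach the already-built partition indexes instead of rebuilding them.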
@@ -0,0 +1,17 @@
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("api", "0040_rfm_tenant_resource_index_partitions"),
+    ]
+
+    operations = [
+        migrations.AddIndex(
+            model_name="resourcefindingmapping",
+            index=models.Index(
+                fields=["tenant_id", "resource_id"],
+                name="rfm_tenant_resource_idx",
+            ),
+        ),
+    ]
@@ -0,0 +1,23 @@
+from django.contrib.postgres.operations import AddIndexConcurrently
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    atomic = False
+
+    dependencies = [
+        ("api", "0041_rfm_tenant_resource_parent_partitions"),
+        ("django_celery_beat", "0019_alter_periodictasks_options"),
+    ]
+
+    operations = [
+        AddIndexConcurrently(
+            model_name="scan",
+            index=models.Index(
+                condition=models.Q(("state", "completed")),
+                fields=["tenant_id", "provider_id", "-inserted_at"],
+                include=("id",),
+                name="scans_prov_ins_desc_idx",
+            ),
+        ),
+    ]
@@ -476,6 +476,13 @@ class Scan(RowLevelSecurityProtectedModel):
                 condition=Q(state=StateChoices.COMPLETED),
                 name="scans_prov_state_ins_desc_idx",
             ),
+            # TODO This might replace `scans_prov_state_ins_desc_idx` completely. Review usage
+            models.Index(
+                fields=["tenant_id", "provider_id", "-inserted_at"],
+                condition=Q(state=StateChoices.COMPLETED),
+                include=["id"],
+                name="scans_prov_ins_desc_idx",
+            ),
         ]

     class JSONAPIMeta:
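For reference, the new index is partial (only completed scans) and covering (`include=["id"]`), so the `id` column can be read from the index without touching the table heap. Roughly the DDL Django emits for it, kept as a string the way this codebase embeds raw SQL; the `scans` table name is an assumption, since it does not appear in this diff:

# Illustrative only: approximate DDL for the new index.
SCANS_PROV_INS_DESC_IDX_SQL = """
CREATE INDEX "scans_prov_ins_desc_idx"
ON "scans" ("tenant_id", "provider_id", "inserted_at" DESC)
INCLUDE ("id")
WHERE "state" = 'completed'
"""

This shape lines up with the "latest completed scan per provider" queryset rewritten in the `ResourceViewSet` hunks below.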
@@ -860,6 +867,10 @@ class ResourceFindingMapping(PostgresPartitionedModel, RowLevelSecurityProtected
                 fields=["tenant_id", "finding_id"],
                 name="rfm_tenant_finding_idx",
             ),
+            models.Index(
+                fields=["tenant_id", "resource_id"],
+                name="rfm_tenant_resource_idx",
+            ),
         ]
         constraints = [
             models.UniqueConstraint(
@@ -1,7 +1,7 @@
 openapi: 3.0.3
 info:
   title: Prowler API
-  version: 1.10.0
+  version: 1.10.2
   description: |-
     Prowler API specification.
@@ -5188,6 +5188,8 @@ class TestComplianceOverviewViewSet:
         assert "description" in attributes
         assert "status" in attributes

+    # TODO: This test may fail randomly because requirements are not ordered
+    @pytest.mark.xfail
     def test_compliance_overview_requirements_manual(
         self, authenticated_client, compliance_requirements_overviews_fixture
     ):
@@ -22,7 +22,7 @@ from django.conf import settings as django_settings
 from django.contrib.postgres.aggregates import ArrayAgg
 from django.contrib.postgres.search import SearchQuery
 from django.db import transaction
-from django.db.models import Count, F, Prefetch, Q, Sum
+from django.db.models import Count, F, Prefetch, Q, Subquery, Sum
 from django.db.models.functions import Coalesce
 from django.http import HttpResponse
 from django.shortcuts import redirect
@@ -292,7 +292,7 @@ class SchemaView(SpectacularAPIView):

     def get(self, request, *args, **kwargs):
         spectacular_settings.TITLE = "Prowler API"
-        spectacular_settings.VERSION = "1.10.0"
+        spectacular_settings.VERSION = "1.10.2"
         spectacular_settings.DESCRIPTION = (
             "Prowler API specification.\n\nThis file is auto-generated."
         )
@@ -1994,6 +1994,21 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
             )
         )

+    def _should_prefetch_findings(self) -> bool:
+        fields_param = self.request.query_params.get("fields[resources]", "")
+        include_param = self.request.query_params.get("include", "")
+        return (
+            fields_param == ""
+            or "findings" in fields_param.split(",")
+            or "findings" in include_param.split(",")
+        )
+
+    def _get_findings_prefetch(self):
+        findings_queryset = Finding.all_objects.defer("scan", "resources").filter(
+            tenant_id=self.request.tenant_id
+        )
+        return [Prefetch("findings", queryset=findings_queryset)]
+
     def get_serializer_class(self):
         if self.action in ["metadata", "metadata_latest"]:
             return ResourceMetadataSerializer
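The two new helpers implement JSON:API sparse fieldsets: findings are only prefetched when the response will actually serialize them. A standalone, runnable illustration of the predicate's behavior, with the same logic as `_should_prefetch_findings` above (inputs are hypothetical):

def should_prefetch_findings(fields_param: str, include_param: str) -> bool:
    # Same rule as the method above: prefetch when the client did not
    # restrict fields, or explicitly asked for findings.
    return (
        fields_param == ""
        or "findings" in fields_param.split(",")
        or "findings" in include_param.split(",")
    )


assert should_prefetch_findings("", "")                # no sparse fieldsets
assert not should_prefetch_findings("name,uid", "")    # findings excluded
assert should_prefetch_findings("name,findings", "")   # requested via fields
assert should_prefetch_findings("name", "findings")    # requested via include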
@@ -2017,7 +2032,11 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
             filtered_queryset,
             manager=Resource.all_objects,
             select_related=["provider"],
-            prefetch_related=["findings"],
+            prefetch_related=(
+                self._get_findings_prefetch()
+                if self._should_prefetch_findings()
+                else []
+            ),
         )

     def retrieve(self, request, *args, **kwargs):
@@ -2042,14 +2061,18 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
         tenant_id = request.tenant_id
         filtered_queryset = self.filter_queryset(self.get_queryset())

-        latest_scan_ids = (
-            Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
+        latest_scans = (
+            Scan.all_objects.filter(
+                tenant_id=tenant_id,
+                state=StateChoices.COMPLETED,
+            )
             .order_by("provider_id", "-inserted_at")
             .distinct("provider_id")
-            .values_list("id", flat=True)
+            .values("provider_id")
         )

         filtered_queryset = filtered_queryset.filter(
-            tenant_id=tenant_id, provider__scan__in=latest_scan_ids
+            provider_id__in=Subquery(latest_scans)
         )

         return self.paginate_by_pk(
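The rewritten queryset leans on PostgreSQL's `DISTINCT ON`: ordering by `provider_id, -inserted_at` and keeping one row per `provider_id` yields each provider's newest completed scan, and `.values("provider_id")` keeps the queryset usable as a `Subquery` instead of materializing scan IDs in Python. Roughly the SQL the subquery compiles to (illustrative; the `scans` table name and quoting are assumptions):

# Illustrative only: approximate SQL of the latest_scans subquery.
LATEST_SCAN_PER_PROVIDER_SQL = """
SELECT DISTINCT ON ("provider_id") "provider_id"
FROM "scans"
WHERE "tenant_id" = %s AND "state" = 'completed'
ORDER BY "provider_id", "inserted_at" DESC
"""

Filtering with `provider_id__in=Subquery(latest_scans)` then replaces the previous `provider__scan__in` join, and the filter plus ordering match the new covering `scans_prov_ins_desc_idx` index added above.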
@@ -2057,7 +2080,11 @@ class ResourceViewSet(PaginateByPkMixin, BaseRLSViewSet):
             filtered_queryset,
             manager=Resource.all_objects,
             select_related=["provider"],
-            prefetch_related=["findings"],
+            prefetch_related=(
+                self._get_findings_prefetch()
+                if self._should_prefetch_findings()
+                else []
+            ),
         )

     @action(detail=False, methods=["get"], url_name="metadata")
@@ -1,11 +1,12 @@
 import json
 import time
+from collections import defaultdict
 from copy import deepcopy
 from datetime import datetime, timezone

 from celery.utils.log import get_task_logger
 from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
-from django.db import IntegrityError, OperationalError, connection
+from django.db import IntegrityError, OperationalError
 from django.db.models import Case, Count, IntegerField, Prefetch, Sum, When
 from tasks.utils import CustomEncoder
@@ -13,7 +14,11 @@ from api.compliance import (
     PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
     generate_scan_compliance,
 )
-from api.db_utils import create_objects_in_batches, rls_transaction
+from api.db_utils import (
+    create_objects_in_batches,
+    rls_transaction,
+    update_objects_in_batches,
+)
 from api.exceptions import ProviderConnectionError
 from api.models import (
     ComplianceRequirementOverview,
@@ -103,7 +108,10 @@ def _store_resources(


 def perform_prowler_scan(
-    tenant_id: str, scan_id: str, provider_id: str, checks_to_execute: list[str] = None
+    tenant_id: str,
+    scan_id: str,
+    provider_id: str,
+    checks_to_execute: list[str] | None = None,
 ):
     """
     Perform a scan using Prowler and store the findings and resources in the database.
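A small typing fix rides along with the reflow: the old annotation claimed `checks_to_execute` was always a `list[str]` while defaulting to `None`; the union makes the optionality explicit. A tiny illustration of the pattern (function and check name here are hypothetical):

def run_checks(checks_to_execute: list[str] | None = None) -> list[str]:
    # Treat "no explicit selection" (None) and an empty list the same way.
    return list(checks_to_execute or [])


assert run_checks() == []
assert run_checks(["example_check_id"]) == ["example_check_id"]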
@@ -175,6 +183,7 @@ def perform_prowler_scan(
         resource_cache = {}
         tag_cache = {}
         last_status_cache = {}
+        resource_failed_findings_cache = defaultdict(int)

         for progress, findings in prowler_scan.scan():
             for finding in findings:
@@ -200,6 +209,9 @@ def perform_prowler_scan(
                         },
                     )
                     resource_cache[resource_uid] = resource_instance
+
+                    # Initialize all processed resources in the cache
+                    resource_failed_findings_cache[resource_uid] = 0
                 else:
                     resource_instance = resource_cache[resource_uid]
@@ -313,6 +325,11 @@ def perform_prowler_scan(
                 )
                 finding_instance.add_resources([resource_instance])

+                # Increment failed_findings_count cache if the finding status is FAIL and not muted
+                if status == FindingStatus.FAIL and not finding.muted:
+                    resource_uid = finding.resource_uid
+                    resource_failed_findings_cache[resource_uid] += 1
+
                 # Update scan resource summaries
                 scan_resource_cache.add(
                     (
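Taken together with the cache initialization above, the counting rule is: every resource seen in the scan gets an entry (so stale counts reset to 0), and only unmuted FAIL findings increment it. A toy, self-contained version of that logic:

from collections import defaultdict

# (resource_uid, status, muted) tuples standing in for scan findings.
findings = [
    ("res-1", "FAIL", False),
    ("res-1", "FAIL", True),   # muted, so it is not counted
    ("res-1", "PASS", False),
    ("res-2", "FAIL", False),
]

failed_counts = defaultdict(int)
for resource_uid, status, muted in findings:
    failed_counts[resource_uid] += 0   # ensure every seen resource has an entry
    if status == "FAIL" and not muted:
        failed_counts[resource_uid] += 1

assert dict(failed_counts) == {"res-1": 1, "res-2": 1}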
@@ -330,6 +347,24 @@ def perform_prowler_scan(

         scan_instance.state = StateChoices.COMPLETED

+        # Update failed_findings_count for all resources in batches if scan completed successfully
+        if resource_failed_findings_cache:
+            resources_to_update = []
+            for resource_uid, failed_count in resource_failed_findings_cache.items():
+                if resource_uid in resource_cache:
+                    resource_instance = resource_cache[resource_uid]
+                    resource_instance.failed_findings_count = failed_count
+                    resources_to_update.append(resource_instance)
+
+            if resources_to_update:
+                update_objects_in_batches(
+                    tenant_id=tenant_id,
+                    model=Resource,
+                    objects=resources_to_update,
+                    fields=["failed_findings_count"],
+                    batch_size=1000,
+                )
+
     except Exception as e:
         logger.error(f"Error performing scan {scan_id}: {e}")
         exception = e
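`update_objects_in_batches` comes from `api.db_utils` and its body is not part of this diff. A minimal, hypothetical sketch of what a batched-update helper like this plausibly does (the real one presumably also enters the tenant's RLS context, given the `rls_transaction` usage elsewhere in this file):

from itertools import islice


def update_objects_in_batches(tenant_id, model, objects, fields, batch_size=1000):
    # Hypothetical sketch: chunk the instances and let bulk_update issue
    # one UPDATE statement per chunk instead of one query per resource.
    iterator = iter(objects)
    while chunk := list(islice(iterator, batch_size)):
        model.objects.bulk_update(chunk, fields)

Either way, the point of the change is visible: failed-finding counts are accumulated in memory during the scan and written back in a handful of bulk statements, replacing the post-scan full-table UPDATE removed further down.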
@@ -376,7 +411,6 @@ def perform_prowler_scan(
 def aggregate_findings(tenant_id: str, scan_id: str):
     """
     Aggregates findings for a given scan and stores the results in the ScanSummary table.
-    Also updates the failed_findings_count for each resource based on the latest findings.

     This function retrieves all findings associated with a given `scan_id` and calculates various
     metrics such as counts of failed, passed, and muted findings, as well as their deltas (new,
@@ -405,8 +439,6 @@ def aggregate_findings(tenant_id: str, scan_id: str):
         - muted_new: Muted findings with a delta of 'new'.
         - muted_changed: Muted findings with a delta of 'changed'.
     """
-    _update_resource_failed_findings_count(tenant_id, scan_id)
-
     with rls_transaction(tenant_id):
         findings = Finding.objects.filter(tenant_id=tenant_id, scan_id=scan_id)
@@ -531,48 +563,6 @@ def aggregate_findings(tenant_id: str, scan_id: str):
     ScanSummary.objects.bulk_create(scan_aggregations, batch_size=3000)


-def _update_resource_failed_findings_count(tenant_id: str, scan_id: str):
-    """
-    Update the failed_findings_count field for resources based on the latest findings.
-
-    This function calculates the number of failed findings for each resource by:
-    1. Getting the latest finding for each finding.uid
-    2. Counting failed findings per resource
-    3. Updating the failed_findings_count field for each resource
-
-    Args:
-        tenant_id (str): The ID of the tenant to which the scan belongs.
-        scan_id (str): The ID of the scan for which to update resource counts.
-    """
-
-    with rls_transaction(tenant_id):
-        scan = Scan.objects.get(pk=scan_id)
-        provider_id = str(scan.provider_id)
-
-    with connection.cursor() as cursor:
-        cursor.execute(
-            """
-            UPDATE resources AS r
-            SET failed_findings_count = COALESCE((
-                SELECT COUNT(*) FROM (
-                    SELECT DISTINCT ON (f.uid) f.uid
-                    FROM findings AS f
-                    JOIN resource_finding_mappings AS rfm
-                        ON rfm.finding_id = f.id
-                    WHERE f.tenant_id = %s
-                        AND f.status = %s
-                        AND f.muted = FALSE
-                        AND rfm.resource_id = r.id
-                    ORDER BY f.uid, f.inserted_at DESC
-                ) AS latest_uids
-            ), 0)
-            WHERE r.tenant_id = %s
-                AND r.provider_id = %s
-            """,
-            [tenant_id, FindingStatus.FAIL, tenant_id, provider_id],
-        )
-
-
 def create_compliance_requirements(tenant_id: str, scan_id: str):
     """
     Create detailed compliance requirement overview records for a scan.
@@ -7,22 +7,14 @@ import pytest
 from tasks.jobs.scan import (
     _create_finding_delta,
     _store_resources,
-    _update_resource_failed_findings_count,
     create_compliance_requirements,
     perform_prowler_scan,
 )
 from tasks.utils import CustomEncoder

 from api.exceptions import ProviderConnectionError
-from api.models import (
-    Finding,
-    Provider,
-    Resource,
-    Scan,
-    Severity,
-    StateChoices,
-    StatusChoices,
-)
+from api.models import Finding, Provider, Resource, Scan, StateChoices, StatusChoices
+from prowler.lib.check.models import Severity


 @pytest.mark.django_db
@@ -182,6 +174,9 @@ class TestPerformScan:
         assert tag_keys == set(finding.resource_tags.keys())
         assert tag_values == set(finding.resource_tags.values())

+        # Assert that failed_findings_count is 0 (finding is PASS and muted)
+        assert scan_resource.failed_findings_count == 0
+
     @patch("tasks.jobs.scan.ProwlerScan")
     @patch(
         "tasks.jobs.scan.initialize_prowler_provider",
@@ -386,6 +381,359 @@ class TestPerformScan:
         assert resource == resource_instance
         assert resource_uid_tuple == (resource_instance.uid, resource_instance.region)

+    def test_perform_prowler_scan_with_failed_findings(
+        self,
+        tenants_fixture,
+        scans_fixture,
+        providers_fixture,
+    ):
+        """Test that failed findings increment the failed_findings_count"""
+        with (
+            patch("api.db_utils.rls_transaction"),
+            patch(
+                "tasks.jobs.scan.initialize_prowler_provider"
+            ) as mock_initialize_prowler_provider,
+            patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
+            patch(
+                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
+                new_callable=dict,
+            ),
+            patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
+        ):
+            # Ensure the database is empty
+            assert Finding.objects.count() == 0
+            assert Resource.objects.count() == 0
+
+            tenant = tenants_fixture[0]
+            scan = scans_fixture[0]
+            provider = providers_fixture[0]
+
+            # Ensure the provider type is 'aws'
+            provider.provider = Provider.ProviderChoices.AWS
+            provider.save()
+
+            tenant_id = str(tenant.id)
+            scan_id = str(scan.id)
+            provider_id = str(provider.id)
+
+            # Mock a FAIL finding that is not muted
+            fail_finding = MagicMock()
+            fail_finding.uid = "fail_finding_uid"
+            fail_finding.status = StatusChoices.FAIL
+            fail_finding.status_extended = "test fail status"
+            fail_finding.severity = Severity.high
+            fail_finding.check_id = "fail_check"
+            fail_finding.get_metadata.return_value = {"key": "value"}
+            fail_finding.resource_uid = "resource_uid_fail"
+            fail_finding.resource_name = "fail_resource"
+            fail_finding.region = "us-east-1"
+            fail_finding.service_name = "ec2"
+            fail_finding.resource_type = "instance"
+            fail_finding.resource_tags = {"env": "test"}
+            fail_finding.muted = False
+            fail_finding.raw = {}
+            fail_finding.resource_metadata = {"test": "metadata"}
+            fail_finding.resource_details = {"details": "test"}
+            fail_finding.partition = "aws"
+            fail_finding.compliance = {"compliance1": "FAIL"}
+
+            # Mock the ProwlerScan instance
+            mock_prowler_scan_instance = MagicMock()
+            mock_prowler_scan_instance.scan.return_value = [(100, [fail_finding])]
+            mock_prowler_scan_class.return_value = mock_prowler_scan_instance
+
+            # Mock prowler_provider
+            mock_prowler_provider_instance = MagicMock()
+            mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
+            mock_initialize_prowler_provider.return_value = (
+                mock_prowler_provider_instance
+            )
+
+            # Call the function under test
+            perform_prowler_scan(tenant_id, scan_id, provider_id, [])
+
+            # Refresh instances from the database
+            scan.refresh_from_db()
+            scan_resource = Resource.objects.get(provider=provider)
+
+            # Assert that failed_findings_count is 1 (one FAIL finding not muted)
+            assert scan_resource.failed_findings_count == 1
+
+    def test_perform_prowler_scan_multiple_findings_same_resource(
+        self,
+        tenants_fixture,
+        scans_fixture,
+        providers_fixture,
+    ):
+        """Test that multiple FAIL findings on the same resource increment the counter correctly"""
+        with (
+            patch("api.db_utils.rls_transaction"),
+            patch(
+                "tasks.jobs.scan.initialize_prowler_provider"
+            ) as mock_initialize_prowler_provider,
+            patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
+            patch(
+                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
+                new_callable=dict,
+            ),
+            patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
+        ):
+            tenant = tenants_fixture[0]
+            scan = scans_fixture[0]
+            provider = providers_fixture[0]
+
+            provider.provider = Provider.ProviderChoices.AWS
+            provider.save()
+
+            tenant_id = str(tenant.id)
+            scan_id = str(scan.id)
+            provider_id = str(provider.id)
+
+            # Create multiple findings for the same resource
+            # Two FAIL findings (not muted) and one PASS finding
+            resource_uid = "shared_resource_uid"
+
+            fail_finding_1 = MagicMock()
+            fail_finding_1.uid = "fail_finding_1"
+            fail_finding_1.status = StatusChoices.FAIL
+            fail_finding_1.status_extended = "fail 1"
+            fail_finding_1.severity = Severity.high
+            fail_finding_1.check_id = "fail_check_1"
+            fail_finding_1.get_metadata.return_value = {"key": "value1"}
+            fail_finding_1.resource_uid = resource_uid
+            fail_finding_1.resource_name = "shared_resource"
+            fail_finding_1.region = "us-east-1"
+            fail_finding_1.service_name = "ec2"
+            fail_finding_1.resource_type = "instance"
+            fail_finding_1.resource_tags = {}
+            fail_finding_1.muted = False
+            fail_finding_1.raw = {}
+            fail_finding_1.resource_metadata = {}
+            fail_finding_1.resource_details = {}
+            fail_finding_1.partition = "aws"
+            fail_finding_1.compliance = {}
+
+            fail_finding_2 = MagicMock()
+            fail_finding_2.uid = "fail_finding_2"
+            fail_finding_2.status = StatusChoices.FAIL
+            fail_finding_2.status_extended = "fail 2"
+            fail_finding_2.severity = Severity.medium
+            fail_finding_2.check_id = "fail_check_2"
+            fail_finding_2.get_metadata.return_value = {"key": "value2"}
+            fail_finding_2.resource_uid = resource_uid
+            fail_finding_2.resource_name = "shared_resource"
+            fail_finding_2.region = "us-east-1"
+            fail_finding_2.service_name = "ec2"
+            fail_finding_2.resource_type = "instance"
+            fail_finding_2.resource_tags = {}
+            fail_finding_2.muted = False
+            fail_finding_2.raw = {}
+            fail_finding_2.resource_metadata = {}
+            fail_finding_2.resource_details = {}
+            fail_finding_2.partition = "aws"
+            fail_finding_2.compliance = {}
+
+            pass_finding = MagicMock()
+            pass_finding.uid = "pass_finding"
+            pass_finding.status = StatusChoices.PASS
+            pass_finding.status_extended = "pass"
+            pass_finding.severity = Severity.low
+            pass_finding.check_id = "pass_check"
+            pass_finding.get_metadata.return_value = {"key": "value3"}
+            pass_finding.resource_uid = resource_uid
+            pass_finding.resource_name = "shared_resource"
+            pass_finding.region = "us-east-1"
+            pass_finding.service_name = "ec2"
+            pass_finding.resource_type = "instance"
+            pass_finding.resource_tags = {}
+            pass_finding.muted = False
+            pass_finding.raw = {}
+            pass_finding.resource_metadata = {}
+            pass_finding.resource_details = {}
+            pass_finding.partition = "aws"
+            pass_finding.compliance = {}
+
+            # Mock the ProwlerScan instance
+            mock_prowler_scan_instance = MagicMock()
+            mock_prowler_scan_instance.scan.return_value = [
+                (100, [fail_finding_1, fail_finding_2, pass_finding])
+            ]
+            mock_prowler_scan_class.return_value = mock_prowler_scan_instance
+
+            # Mock prowler_provider
+            mock_prowler_provider_instance = MagicMock()
+            mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
+            mock_initialize_prowler_provider.return_value = (
+                mock_prowler_provider_instance
+            )
+
+            # Call the function under test
+            perform_prowler_scan(tenant_id, scan_id, provider_id, [])
+
+            # Refresh instances from the database
+            scan_resource = Resource.objects.get(provider=provider, uid=resource_uid)
+
+            # Assert that failed_findings_count is 2 (two FAIL findings, one PASS)
+            assert scan_resource.failed_findings_count == 2
+
+    def test_perform_prowler_scan_with_muted_findings(
+        self,
+        tenants_fixture,
+        scans_fixture,
+        providers_fixture,
+    ):
+        """Test that muted FAIL findings do not increment the failed_findings_count"""
+        with (
+            patch("api.db_utils.rls_transaction"),
+            patch(
+                "tasks.jobs.scan.initialize_prowler_provider"
+            ) as mock_initialize_prowler_provider,
+            patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
+            patch(
+                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
+                new_callable=dict,
+            ),
+            patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
+        ):
+            tenant = tenants_fixture[0]
+            scan = scans_fixture[0]
+            provider = providers_fixture[0]
+
+            provider.provider = Provider.ProviderChoices.AWS
+            provider.save()
+
+            tenant_id = str(tenant.id)
+            scan_id = str(scan.id)
+            provider_id = str(provider.id)
+
+            # Mock a FAIL finding that is muted
+            muted_fail_finding = MagicMock()
+            muted_fail_finding.uid = "muted_fail_finding"
+            muted_fail_finding.status = StatusChoices.FAIL
+            muted_fail_finding.status_extended = "muted fail"
+            muted_fail_finding.severity = Severity.high
+            muted_fail_finding.check_id = "muted_fail_check"
+            muted_fail_finding.get_metadata.return_value = {"key": "value"}
+            muted_fail_finding.resource_uid = "muted_resource_uid"
+            muted_fail_finding.resource_name = "muted_resource"
+            muted_fail_finding.region = "us-east-1"
+            muted_fail_finding.service_name = "ec2"
+            muted_fail_finding.resource_type = "instance"
+            muted_fail_finding.resource_tags = {}
+            muted_fail_finding.muted = True
+            muted_fail_finding.raw = {}
+            muted_fail_finding.resource_metadata = {}
+            muted_fail_finding.resource_details = {}
+            muted_fail_finding.partition = "aws"
+            muted_fail_finding.compliance = {}
+
+            # Mock the ProwlerScan instance
+            mock_prowler_scan_instance = MagicMock()
+            mock_prowler_scan_instance.scan.return_value = [(100, [muted_fail_finding])]
+            mock_prowler_scan_class.return_value = mock_prowler_scan_instance
+
+            # Mock prowler_provider
+            mock_prowler_provider_instance = MagicMock()
+            mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
+            mock_initialize_prowler_provider.return_value = (
+                mock_prowler_provider_instance
+            )
+
+            # Call the function under test
+            perform_prowler_scan(tenant_id, scan_id, provider_id, [])
+
+            # Refresh instances from the database
+            scan_resource = Resource.objects.get(provider=provider)
+
+            # Assert that failed_findings_count is 0 (FAIL finding is muted)
+            assert scan_resource.failed_findings_count == 0
+
+    def test_perform_prowler_scan_reset_failed_findings_count(
+        self,
+        tenants_fixture,
+        providers_fixture,
+        resources_fixture,
+    ):
+        """Test that failed_findings_count is reset to 0 at the beginning of each scan"""
+        # Use existing resource from fixture and set initial failed_findings_count
+        tenant = tenants_fixture[0]
+        provider = providers_fixture[0]
+        resource = resources_fixture[0]
+
+        # Set a non-zero failed_findings_count initially
+        resource.failed_findings_count = 5
+        resource.save()
+
+        # Create a new scan
+        scan = Scan.objects.create(
+            name="Reset Test Scan",
+            provider=provider,
+            trigger=Scan.TriggerChoices.MANUAL,
+            state=StateChoices.AVAILABLE,
+            tenant_id=tenant.id,
+        )
+
+        with (
+            patch("api.db_utils.rls_transaction"),
+            patch(
+                "tasks.jobs.scan.initialize_prowler_provider"
+            ) as mock_initialize_prowler_provider,
+            patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
+            patch(
+                "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
+                new_callable=dict,
+            ),
+            patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
+        ):
+            provider.provider = Provider.ProviderChoices.AWS
+            provider.save()
+
+            tenant_id = str(tenant.id)
+            scan_id = str(scan.id)
+            provider_id = str(provider.id)
+
+            # Mock a PASS finding for the existing resource
+            pass_finding = MagicMock()
+            pass_finding.uid = "reset_test_finding"
+            pass_finding.status = StatusChoices.PASS
+            pass_finding.status_extended = "reset test pass"
+            pass_finding.severity = Severity.low
+            pass_finding.check_id = "reset_test_check"
+            pass_finding.get_metadata.return_value = {"key": "value"}
+            pass_finding.resource_uid = resource.uid
+            pass_finding.resource_name = resource.name
+            pass_finding.region = resource.region
+            pass_finding.service_name = resource.service
+            pass_finding.resource_type = resource.type
+            pass_finding.resource_tags = {}
+            pass_finding.muted = False
+            pass_finding.raw = {}
+            pass_finding.resource_metadata = {}
+            pass_finding.resource_details = {}
+            pass_finding.partition = "aws"
+            pass_finding.compliance = {}
+
+            # Mock the ProwlerScan instance
+            mock_prowler_scan_instance = MagicMock()
+            mock_prowler_scan_instance.scan.return_value = [(100, [pass_finding])]
+            mock_prowler_scan_class.return_value = mock_prowler_scan_instance
+
+            # Mock prowler_provider
+            mock_prowler_provider_instance = MagicMock()
+            mock_prowler_provider_instance.get_regions.return_value = [resource.region]
+            mock_initialize_prowler_provider.return_value = (
+                mock_prowler_provider_instance
+            )
+
+            # Call the function under test
+            perform_prowler_scan(tenant_id, scan_id, provider_id, [])
+
+            # Refresh resource from the database
+            resource.refresh_from_db()
+
+            # Assert that failed_findings_count was reset to 0 during the scan
+            assert resource.failed_findings_count == 0


 # TODO Add tests for aggregations
@@ -697,68 +1045,3 @@ class TestCreateComplianceRequirements:

         assert "requirements_created" in result
         assert result["requirements_created"] >= 0
-
-
-@pytest.mark.django_db
-class TestUpdateResourceFailedFindingsCount:
-    def test_execute_sql_update(
-        self, tenants_fixture, scans_fixture, providers_fixture, resources_fixture
-    ):
-        resource = resources_fixture[0]
-        tenant_id = resource.tenant_id
-        scan_id = resource.provider.scans.first().id
-
-        # Common kwargs for all failing findings
-        base_kwargs = {
-            "tenant_id": tenant_id,
-            "scan_id": scan_id,
-            "delta": None,
-            "status": StatusChoices.FAIL,
-            "status_extended": "test status extended",
-            "impact": Severity.critical,
-            "impact_extended": "test impact extended",
-            "severity": Severity.critical,
-            "raw_result": {
-                "status": StatusChoices.FAIL,
-                "impact": Severity.critical,
-                "severity": Severity.critical,
-            },
-            "tags": {"test": "dev-qa"},
-            "check_id": "test_check_id",
-            "check_metadata": {
-                "CheckId": "test_check_id",
-                "Description": "test description apple sauce",
-                "servicename": "ec2",
-            },
-            "first_seen_at": "2024-01-02T00:00:00Z",
-        }
-
-        # UIDs to create (two with same UID, one unique)
-        uids = ["test_finding_uid_1", "test_finding_uid_1", "test_finding_uid_2"]
-
-        # Create findings and associate with the resource
-        for uid in uids:
-            finding = Finding.objects.create(uid=uid, **base_kwargs)
-            finding.add_resources([resource])
-
-        resource.refresh_from_db()
-        assert resource.failed_findings_count == 0
-
-        _update_resource_failed_findings_count(tenant_id=tenant_id, scan_id=scan_id)
-        resource.refresh_from_db()
-
-        # Only two since two findings share the same UID
-        assert resource.failed_findings_count == 2
-
-    @patch("tasks.jobs.scan.Scan.objects.get")
-    def test_scan_not_found(
-        self,
-        mock_scan_get,
-    ):
-        mock_scan_get.side_effect = Scan.DoesNotExist
-
-        with pytest.raises(Scan.DoesNotExist):
-            _update_resource_failed_findings_count(
-                "8614ca97-8370-4183-a7f7-e96a6c7d2c93",
-                "4705bed5-8782-4e8b-bab6-55e8043edaa6",
-            )
poetry.lock: 791 lines changed (generated file; diff suppressed because it is too large)
@@ -2,6 +2,13 @@

 All notable changes to the **Prowler SDK** are documented in this file.

+## [v5.9.2] (Prowler v5.9.2)
+
+### Fixed
+
+- Use the correct resource name in `defender_domain_dkim_enabled` check [(#8334)](https://github.com/prowler-cloud/prowler/pull/8334)
+
+---
+
 ## [v5.9.0] (Prowler v5.9.0)

 ### Added
@@ -12,7 +12,7 @@ from prowler.lib.logger import logger

 timestamp = datetime.today()
 timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
-prowler_version = "5.9.0"
+prowler_version = "5.9.2"
 html_logo_url = "https://github.com/prowler-cloud/prowler/"
 square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
 aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -26,7 +26,7 @@ class defender_domain_dkim_enabled(Check):
             report = CheckReportM365(
                 metadata=self.metadata(),
                 resource=config,
-                resource_name="DKIM Configuration",
+                resource_name=config.id,
                 resource_id=config.id,
             )
             report.status = "FAIL"
@@ -71,7 +71,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
 name = "prowler"
 readme = "README.md"
 requires-python = ">3.9.1,<3.13"
-version = "5.9.0"
+version = "5.9.2"

 [project.scripts]
 prowler = "prowler.__main__:prowler"
@@ -43,7 +43,7 @@ class Test_defender_domain_dkim_enabled:
                 == "DKIM is enabled for domain with ID domain1."
             )
             assert result[0].resource == defender_client.dkim_configurations[0].dict()
-            assert result[0].resource_name == "DKIM Configuration"
+            assert result[0].resource_name == "domain1"
             assert result[0].resource_id == "domain1"
             assert result[0].location == "global"
@@ -86,7 +86,7 @@ class Test_defender_domain_dkim_enabled:
                 == "DKIM is not enabled for domain with ID domain2."
            )
             assert result[0].resource == defender_client.dkim_configurations[0].dict()
-            assert result[0].resource_name == "DKIM Configuration"
+            assert result[0].resource_name == "domain2"
             assert result[0].resource_id == "domain2"
             assert result[0].location == "global"