Compare commits

...

18 Commits

Author SHA1 Message Date
Pepe Fagoaga
2c89489ee3 chore: .env comment 2025-08-07 21:19:06 +05:45
Amit Sharma
34bf308195 fix: updated posthog init function to mask everything in session replays 2025-08-04 19:48:35 -07:00
Amit Sharma
7e176a620e fix: updated posthog init function to mask everything in session replays 2025-08-04 19:47:09 -07:00
Amit Sharma
9ccb9430d2 fix: resolved linting errors 2025-07-30 16:49:36 -07:00
Amit Sharma
f7fe55d95a fix: updated per @paabloLC request to move Posthog initialization outside of providers.tsx 2025-07-30 16:45:30 -07:00
Amit Sharma
c7e4c3d839 fix: resolved linting and prettier issues for formating 2025-07-30 16:02:30 -07:00
Amit Sharma
d0d0ae8716 Update ui/CHANGELOG.md
Co-authored-by: Alejandro Bailo <59607668+alejandrobailo@users.noreply.github.com>
2025-07-29 23:55:26 -07:00
Amit Sharma
bc34c9df4a feat: centralize PostHog analytics with proper error handling
- Create lib/analytics.ts with centralized tracking functions
- Use arrow functions and camelCase naming per code standards
- Add error handling to prevent analytics failures from crashing app
- Update auth forms and provider workflows to use new analytics lib
- Export TypeScript interfaces for type-safe analytics payloads

Addresses PR review comments:
- Convert to arrow functions (requested by @paabloLC)
- Use camelCase for event and property names
- Centralize analytics logic for better maintainability
2025-07-29 19:49:12 -07:00
Amit Sharma
255df2dbc1 fix: let users bring their own posthog host 2025-07-24 14:09:22 -07:00
Amit Sharma
998f32f451 fix: update package-lock.json to sync with PostHog dependencies 2025-07-24 14:06:32 -07:00
Amit Sharma
8a65efa441 feat: add PostHog analytics integration
- Add PostHog client configuration in ui/lib/posthog.ts
- Integrate PostHog tracking in auth forms and provider workflows
- Add PostHog dependencies (posthog-js and posthog-node)
- Update Next.js config for PostHog integration
- Add environment variables for PostHog configuration
- Update changelog with PostHog integration entry
2025-07-23 15:08:21 -07:00
César Arroba
aa843b823c chore(gha): fix action version (#8327) 2025-07-18 15:00:32 +02:00
Víctor Fernández Poyatos
020edc0d1d fix(tasks): calculate failed findings for resources during scan (#8322) 2025-07-18 13:19:22 +02:00
César Arroba
036da81bbd chore(gha): fix api prowler version (#8323) 2025-07-18 12:43:38 +02:00
sumit-tft
4428bcb2c0 feat(ui): update step title and description in cloud provider update … (#8303)
Co-authored-by: alejandrobailo <alejandrobailo94@gmail.com>
2025-07-18 10:11:44 +02:00
Prowler Bot
21de9a2f6f chore(release): Bump version to v5.10.0 (#8314)
Co-authored-by: prowler-bot <179230569+prowler-bot@users.noreply.github.com>
2025-07-17 19:38:28 +02:00
Alejandro Bailo
231d933b9e chore(docs): SAML documentation (#8137)
Co-authored-by: Adrián Jesús Peña Rodríguez <adrianjpr@gmail.com>
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-07-17 23:22:49 +05:45
Alejandro Bailo
2ad360a7f9 docs(ui): Mutelist documentation (#8201) 2025-07-17 23:15:20 +05:45
51 changed files with 1022 additions and 256 deletions

4
.env
View File

@@ -150,3 +150,7 @@ LANGSMITH_TRACING=false
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
LANGSMITH_API_KEY=""
LANGCHAIN_PROJECT=""
# PostHog integration
NEXT_PUBLIC_POSTHOG_KEY=""
NEXT_PUBLIC_POSTHOG_HOST=""

View File

@@ -19,12 +19,23 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout code
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
with:
python-version: '3.12'
- name: Install Poetry
run: |
python3 -m pip install --user poetry
echo "$HOME/.local/bin" >> $GITHUB_PATH
- name: Parse version and determine branch
run: |
# Validate version format (reusing pattern from sdk-bump-version.yml)
@@ -107,11 +118,12 @@ jobs:
echo "✓ api/pyproject.toml version: $CURRENT_API_VERSION"
- name: Verify prowler dependency in api/pyproject.toml
if: ${{ env.PATCH_VERSION != '0' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
PROWLER_VERSION_TRIMMED=$(echo "$PROWLER_VERSION" | tr -d '[:space:]')
if [ "$CURRENT_PROWLER_REF" != "$PROWLER_VERSION_TRIMMED" ]; then
echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$PROWLER_VERSION_TRIMMED', found: '$CURRENT_PROWLER_REF')"
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
if [ "$CURRENT_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
echo "ERROR: Prowler dependency mismatch in api/pyproject.toml (expected: '$BRANCH_NAME_TRIMMED', found: '$CURRENT_PROWLER_REF')"
exit 1
fi
echo "✓ api/pyproject.toml prowler dependency: $CURRENT_PROWLER_REF"
@@ -136,6 +148,36 @@ jobs:
fi
git checkout -b "$BRANCH_NAME"
- name: Update prowler dependency in api/pyproject.toml
if: ${{ env.PATCH_VERSION == '0' }}
run: |
CURRENT_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
BRANCH_NAME_TRIMMED=$(echo "$BRANCH_NAME" | tr -d '[:space:]')
# Minor release: update the dependency to use the new branch
echo "Minor release detected - updating prowler dependency from '$CURRENT_PROWLER_REF' to '$BRANCH_NAME_TRIMMED'"
sed -i "s|prowler @ git+https://github.com/prowler-cloud/prowler.git@[^\"]*\"|prowler @ git+https://github.com/prowler-cloud/prowler.git@$BRANCH_NAME_TRIMMED\"|" api/pyproject.toml
# Verify the change was made
UPDATED_PROWLER_REF=$(grep 'prowler @ git+https://github.com/prowler-cloud/prowler.git@' api/pyproject.toml | sed -E 's/.*@([^"]+)".*/\1/' | tr -d '[:space:]')
if [ "$UPDATED_PROWLER_REF" != "$BRANCH_NAME_TRIMMED" ]; then
echo "ERROR: Failed to update prowler dependency in api/pyproject.toml"
exit 1
fi
# Update poetry lock file
echo "Updating poetry.lock file..."
cd api
poetry lock --no-update
cd ..
# Commit and push the changes
git add api/pyproject.toml api/poetry.lock
git commit -m "chore(api): update prowler dependency to $BRANCH_NAME_TRIMMED for release $PROWLER_VERSION"
git push origin "$BRANCH_NAME"
echo "✓ api/pyproject.toml prowler dependency updated to: $UPDATED_PROWLER_REF"
- name: Extract changelog entries
run: |
set -e

View File

@@ -2,6 +2,13 @@
All notable changes to the **Prowler API** are documented in this file.
## [v1.10.1] (Prowler v5.9.1)
### Fixed
- Calculate failed findings during scans to prevent heavy database queries [(#8322)](https://github.com/prowler-cloud/prowler/pull/8322)
---
## [v1.10.0] (Prowler v5.9.0)
### Added
@@ -12,7 +19,7 @@ All notable changes to the **Prowler API** are documented in this file.
- `/processors` endpoints to post-process findings. Currently, only the Mutelist processor is supported to allow to mute findings.
- Optimized the underlying queries for resources endpoints [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
- Optimized include parameters for resources view [(#8229)](https://github.com/prowler-cloud/prowler/pull/8229)
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
- Optimized overview background tasks [(#8300)](https://github.com/prowler-cloud/prowler/pull/8300)
### Fixed
- Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)

View File

@@ -38,7 +38,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
version = "1.10.0"
version = "1.10.1"
[project.scripts]
celery = "src.backend.config.settings.celery"

View File

@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.10.0
version: 1.10.1
description: |-
Prowler API specification.

View File

@@ -5188,6 +5188,8 @@ class TestComplianceOverviewViewSet:
assert "description" in attributes
assert "status" in attributes
# TODO: This test may fail randomly because requirements are not ordered
@pytest.mark.xfail
def test_compliance_overview_requirements_manual(
self, authenticated_client, compliance_requirements_overviews_fixture
):

View File

@@ -292,7 +292,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.10.0"
spectacular_settings.VERSION = "1.10.1"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)

View File

@@ -1,11 +1,12 @@
import json
import time
from collections import defaultdict
from copy import deepcopy
from datetime import datetime, timezone
from celery.utils.log import get_task_logger
from config.settings.celery import CELERY_DEADLOCK_ATTEMPTS
from django.db import IntegrityError, OperationalError, connection
from django.db import IntegrityError, OperationalError
from django.db.models import Case, Count, IntegerField, Prefetch, Sum, When
from tasks.utils import CustomEncoder
@@ -13,7 +14,11 @@ from api.compliance import (
PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE,
generate_scan_compliance,
)
from api.db_utils import create_objects_in_batches, rls_transaction
from api.db_utils import (
create_objects_in_batches,
rls_transaction,
update_objects_in_batches,
)
from api.exceptions import ProviderConnectionError
from api.models import (
ComplianceRequirementOverview,
@@ -103,7 +108,10 @@ def _store_resources(
def perform_prowler_scan(
tenant_id: str, scan_id: str, provider_id: str, checks_to_execute: list[str] = None
tenant_id: str,
scan_id: str,
provider_id: str,
checks_to_execute: list[str] | None = None,
):
"""
Perform a scan using Prowler and store the findings and resources in the database.
@@ -175,6 +183,7 @@ def perform_prowler_scan(
resource_cache = {}
tag_cache = {}
last_status_cache = {}
resource_failed_findings_cache = defaultdict(int)
for progress, findings in prowler_scan.scan():
for finding in findings:
@@ -200,6 +209,9 @@ def perform_prowler_scan(
},
)
resource_cache[resource_uid] = resource_instance
# Initialize all processed resources in the cache
resource_failed_findings_cache[resource_uid] = 0
else:
resource_instance = resource_cache[resource_uid]
@@ -313,6 +325,11 @@ def perform_prowler_scan(
)
finding_instance.add_resources([resource_instance])
# Increment failed_findings_count cache if the finding status is FAIL and not muted
if status == FindingStatus.FAIL and not finding.muted:
resource_uid = finding.resource_uid
resource_failed_findings_cache[resource_uid] += 1
# Update scan resource summaries
scan_resource_cache.add(
(
@@ -330,6 +347,24 @@ def perform_prowler_scan(
scan_instance.state = StateChoices.COMPLETED
# Update failed_findings_count for all resources in batches if scan completed successfully
if resource_failed_findings_cache:
resources_to_update = []
for resource_uid, failed_count in resource_failed_findings_cache.items():
if resource_uid in resource_cache:
resource_instance = resource_cache[resource_uid]
resource_instance.failed_findings_count = failed_count
resources_to_update.append(resource_instance)
if resources_to_update:
update_objects_in_batches(
tenant_id=tenant_id,
model=Resource,
objects=resources_to_update,
fields=["failed_findings_count"],
batch_size=1000,
)
except Exception as e:
logger.error(f"Error performing scan {scan_id}: {e}")
exception = e
@@ -376,7 +411,6 @@ def perform_prowler_scan(
def aggregate_findings(tenant_id: str, scan_id: str):
"""
Aggregates findings for a given scan and stores the results in the ScanSummary table.
Also updates the failed_findings_count for each resource based on the latest findings.
This function retrieves all findings associated with a given `scan_id` and calculates various
metrics such as counts of failed, passed, and muted findings, as well as their deltas (new,
@@ -405,8 +439,6 @@ def aggregate_findings(tenant_id: str, scan_id: str):
- muted_new: Muted findings with a delta of 'new'.
- muted_changed: Muted findings with a delta of 'changed'.
"""
_update_resource_failed_findings_count(tenant_id, scan_id)
with rls_transaction(tenant_id):
findings = Finding.objects.filter(tenant_id=tenant_id, scan_id=scan_id)
@@ -531,48 +563,6 @@ def aggregate_findings(tenant_id: str, scan_id: str):
ScanSummary.objects.bulk_create(scan_aggregations, batch_size=3000)
def _update_resource_failed_findings_count(tenant_id: str, scan_id: str):
"""
Update the failed_findings_count field for resources based on the latest findings.
This function calculates the number of failed findings for each resource by:
1. Getting the latest finding for each finding.uid
2. Counting failed findings per resource
3. Updating the failed_findings_count field for each resource
Args:
tenant_id (str): The ID of the tenant to which the scan belongs.
scan_id (str): The ID of the scan for which to update resource counts.
"""
with rls_transaction(tenant_id):
scan = Scan.objects.get(pk=scan_id)
provider_id = str(scan.provider_id)
with connection.cursor() as cursor:
cursor.execute(
"""
UPDATE resources AS r
SET failed_findings_count = COALESCE((
SELECT COUNT(*) FROM (
SELECT DISTINCT ON (f.uid) f.uid
FROM findings AS f
JOIN resource_finding_mappings AS rfm
ON rfm.finding_id = f.id
WHERE f.tenant_id = %s
AND f.status = %s
AND f.muted = FALSE
AND rfm.resource_id = r.id
ORDER BY f.uid, f.inserted_at DESC
) AS latest_uids
), 0)
WHERE r.tenant_id = %s
AND r.provider_id = %s
""",
[tenant_id, FindingStatus.FAIL, tenant_id, provider_id],
)
def create_compliance_requirements(tenant_id: str, scan_id: str):
"""
Create detailed compliance requirement overview records for a scan.

View File

@@ -7,22 +7,14 @@ import pytest
from tasks.jobs.scan import (
_create_finding_delta,
_store_resources,
_update_resource_failed_findings_count,
create_compliance_requirements,
perform_prowler_scan,
)
from tasks.utils import CustomEncoder
from api.exceptions import ProviderConnectionError
from api.models import (
Finding,
Provider,
Resource,
Scan,
Severity,
StateChoices,
StatusChoices,
)
from api.models import Finding, Provider, Resource, Scan, StateChoices, StatusChoices
from prowler.lib.check.models import Severity
@pytest.mark.django_db
@@ -182,6 +174,9 @@ class TestPerformScan:
assert tag_keys == set(finding.resource_tags.keys())
assert tag_values == set(finding.resource_tags.values())
# Assert that failed_findings_count is 0 (finding is PASS and muted)
assert scan_resource.failed_findings_count == 0
@patch("tasks.jobs.scan.ProwlerScan")
@patch(
"tasks.jobs.scan.initialize_prowler_provider",
@@ -386,6 +381,359 @@ class TestPerformScan:
assert resource == resource_instance
assert resource_uid_tuple == (resource_instance.uid, resource_instance.region)
def test_perform_prowler_scan_with_failed_findings(
    self,
    tenants_fixture,
    scans_fixture,
    providers_fixture,
):
    """A single unmuted FAIL finding sets failed_findings_count to 1.

    Mocks ProwlerScan to emit one FAIL, non-muted finding and asserts that
    the created Resource ends the scan with failed_findings_count == 1.
    """
    with (
        # Patch RLS handling and the scan/provider entry points so no real
        # provider connection or Prowler execution happens.
        patch("api.db_utils.rls_transaction"),
        patch(
            "tasks.jobs.scan.initialize_prowler_provider"
        ) as mock_initialize_prowler_provider,
        patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
        patch(
            "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
            new_callable=dict,
        ),
        patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
    ):
        # Ensure the database is empty
        assert Finding.objects.count() == 0
        assert Resource.objects.count() == 0
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        provider = providers_fixture[0]
        # Ensure the provider type is 'aws'
        provider.provider = Provider.ProviderChoices.AWS
        provider.save()
        tenant_id = str(tenant.id)
        scan_id = str(scan.id)
        provider_id = str(provider.id)
        # Mock a FAIL finding that is not muted
        fail_finding = MagicMock()
        fail_finding.uid = "fail_finding_uid"
        fail_finding.status = StatusChoices.FAIL
        fail_finding.status_extended = "test fail status"
        fail_finding.severity = Severity.high
        fail_finding.check_id = "fail_check"
        fail_finding.get_metadata.return_value = {"key": "value"}
        fail_finding.resource_uid = "resource_uid_fail"
        fail_finding.resource_name = "fail_resource"
        fail_finding.region = "us-east-1"
        fail_finding.service_name = "ec2"
        fail_finding.resource_type = "instance"
        fail_finding.resource_tags = {"env": "test"}
        fail_finding.muted = False
        fail_finding.raw = {}
        fail_finding.resource_metadata = {"test": "metadata"}
        fail_finding.resource_details = {"details": "test"}
        fail_finding.partition = "aws"
        fail_finding.compliance = {"compliance1": "FAIL"}
        # Mock the ProwlerScan instance: one progress step yielding the
        # single FAIL finding.
        mock_prowler_scan_instance = MagicMock()
        mock_prowler_scan_instance.scan.return_value = [(100, [fail_finding])]
        mock_prowler_scan_class.return_value = mock_prowler_scan_instance
        # Mock prowler_provider
        mock_prowler_provider_instance = MagicMock()
        mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
        mock_initialize_prowler_provider.return_value = (
            mock_prowler_provider_instance
        )
        # Call the function under test
        perform_prowler_scan(tenant_id, scan_id, provider_id, [])
        # Refresh instances from the database
        scan.refresh_from_db()
        scan_resource = Resource.objects.get(provider=provider)
        # Assert that failed_findings_count is 1 (one FAIL finding not muted)
        assert scan_resource.failed_findings_count == 1
def test_perform_prowler_scan_multiple_findings_same_resource(
    self,
    tenants_fixture,
    scans_fixture,
    providers_fixture,
):
    """Multiple FAIL findings on one resource accumulate in the counter.

    Emits two unmuted FAIL findings and one PASS finding, all sharing the
    same resource_uid, and asserts failed_findings_count == 2 (the PASS
    finding does not count).
    """
    with (
        # Patch RLS handling and the scan/provider entry points so no real
        # provider connection or Prowler execution happens.
        patch("api.db_utils.rls_transaction"),
        patch(
            "tasks.jobs.scan.initialize_prowler_provider"
        ) as mock_initialize_prowler_provider,
        patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
        patch(
            "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
            new_callable=dict,
        ),
        patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
    ):
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        provider = providers_fixture[0]
        provider.provider = Provider.ProviderChoices.AWS
        provider.save()
        tenant_id = str(tenant.id)
        scan_id = str(scan.id)
        provider_id = str(provider.id)
        # Create multiple findings for the same resource
        # Two FAIL findings (not muted) and one PASS finding
        resource_uid = "shared_resource_uid"
        fail_finding_1 = MagicMock()
        fail_finding_1.uid = "fail_finding_1"
        fail_finding_1.status = StatusChoices.FAIL
        fail_finding_1.status_extended = "fail 1"
        fail_finding_1.severity = Severity.high
        fail_finding_1.check_id = "fail_check_1"
        fail_finding_1.get_metadata.return_value = {"key": "value1"}
        fail_finding_1.resource_uid = resource_uid
        fail_finding_1.resource_name = "shared_resource"
        fail_finding_1.region = "us-east-1"
        fail_finding_1.service_name = "ec2"
        fail_finding_1.resource_type = "instance"
        fail_finding_1.resource_tags = {}
        fail_finding_1.muted = False
        fail_finding_1.raw = {}
        fail_finding_1.resource_metadata = {}
        fail_finding_1.resource_details = {}
        fail_finding_1.partition = "aws"
        fail_finding_1.compliance = {}
        fail_finding_2 = MagicMock()
        fail_finding_2.uid = "fail_finding_2"
        fail_finding_2.status = StatusChoices.FAIL
        fail_finding_2.status_extended = "fail 2"
        fail_finding_2.severity = Severity.medium
        fail_finding_2.check_id = "fail_check_2"
        fail_finding_2.get_metadata.return_value = {"key": "value2"}
        fail_finding_2.resource_uid = resource_uid
        fail_finding_2.resource_name = "shared_resource"
        fail_finding_2.region = "us-east-1"
        fail_finding_2.service_name = "ec2"
        fail_finding_2.resource_type = "instance"
        fail_finding_2.resource_tags = {}
        fail_finding_2.muted = False
        fail_finding_2.raw = {}
        fail_finding_2.resource_metadata = {}
        fail_finding_2.resource_details = {}
        fail_finding_2.partition = "aws"
        fail_finding_2.compliance = {}
        pass_finding = MagicMock()
        pass_finding.uid = "pass_finding"
        pass_finding.status = StatusChoices.PASS
        pass_finding.status_extended = "pass"
        pass_finding.severity = Severity.low
        pass_finding.check_id = "pass_check"
        pass_finding.get_metadata.return_value = {"key": "value3"}
        pass_finding.resource_uid = resource_uid
        pass_finding.resource_name = "shared_resource"
        pass_finding.region = "us-east-1"
        pass_finding.service_name = "ec2"
        pass_finding.resource_type = "instance"
        pass_finding.resource_tags = {}
        pass_finding.muted = False
        pass_finding.raw = {}
        pass_finding.resource_metadata = {}
        pass_finding.resource_details = {}
        pass_finding.partition = "aws"
        pass_finding.compliance = {}
        # Mock the ProwlerScan instance: all three findings arrive in a
        # single scan batch.
        mock_prowler_scan_instance = MagicMock()
        mock_prowler_scan_instance.scan.return_value = [
            (100, [fail_finding_1, fail_finding_2, pass_finding])
        ]
        mock_prowler_scan_class.return_value = mock_prowler_scan_instance
        # Mock prowler_provider
        mock_prowler_provider_instance = MagicMock()
        mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
        mock_initialize_prowler_provider.return_value = (
            mock_prowler_provider_instance
        )
        # Call the function under test
        perform_prowler_scan(tenant_id, scan_id, provider_id, [])
        # Refresh instances from the database
        scan_resource = Resource.objects.get(provider=provider, uid=resource_uid)
        # Assert that failed_findings_count is 2 (two FAIL findings, one PASS)
        assert scan_resource.failed_findings_count == 2
def test_perform_prowler_scan_with_muted_findings(
    self,
    tenants_fixture,
    scans_fixture,
    providers_fixture,
):
    """A muted FAIL finding does not increment failed_findings_count.

    Emits a single FAIL finding with muted=True and asserts the resource
    ends the scan with failed_findings_count == 0.
    """
    with (
        # Patch RLS handling and the scan/provider entry points so no real
        # provider connection or Prowler execution happens.
        patch("api.db_utils.rls_transaction"),
        patch(
            "tasks.jobs.scan.initialize_prowler_provider"
        ) as mock_initialize_prowler_provider,
        patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
        patch(
            "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
            new_callable=dict,
        ),
        patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
    ):
        tenant = tenants_fixture[0]
        scan = scans_fixture[0]
        provider = providers_fixture[0]
        provider.provider = Provider.ProviderChoices.AWS
        provider.save()
        tenant_id = str(tenant.id)
        scan_id = str(scan.id)
        provider_id = str(provider.id)
        # Mock a FAIL finding that is muted
        muted_fail_finding = MagicMock()
        muted_fail_finding.uid = "muted_fail_finding"
        muted_fail_finding.status = StatusChoices.FAIL
        muted_fail_finding.status_extended = "muted fail"
        muted_fail_finding.severity = Severity.high
        muted_fail_finding.check_id = "muted_fail_check"
        muted_fail_finding.get_metadata.return_value = {"key": "value"}
        muted_fail_finding.resource_uid = "muted_resource_uid"
        muted_fail_finding.resource_name = "muted_resource"
        muted_fail_finding.region = "us-east-1"
        muted_fail_finding.service_name = "ec2"
        muted_fail_finding.resource_type = "instance"
        muted_fail_finding.resource_tags = {}
        muted_fail_finding.muted = True
        muted_fail_finding.raw = {}
        muted_fail_finding.resource_metadata = {}
        muted_fail_finding.resource_details = {}
        muted_fail_finding.partition = "aws"
        muted_fail_finding.compliance = {}
        # Mock the ProwlerScan instance
        mock_prowler_scan_instance = MagicMock()
        mock_prowler_scan_instance.scan.return_value = [(100, [muted_fail_finding])]
        mock_prowler_scan_class.return_value = mock_prowler_scan_instance
        # Mock prowler_provider
        mock_prowler_provider_instance = MagicMock()
        mock_prowler_provider_instance.get_regions.return_value = ["us-east-1"]
        mock_initialize_prowler_provider.return_value = (
            mock_prowler_provider_instance
        )
        # Call the function under test
        perform_prowler_scan(tenant_id, scan_id, provider_id, [])
        # Refresh instances from the database
        scan_resource = Resource.objects.get(provider=provider)
        # Assert that failed_findings_count is 0 (FAIL finding is muted)
        assert scan_resource.failed_findings_count == 0
def test_perform_prowler_scan_reset_failed_findings_count(
    self,
    tenants_fixture,
    providers_fixture,
    resources_fixture,
):
    """failed_findings_count is reset to 0 at the start of each scan.

    Seeds an existing resource with failed_findings_count=5, runs a scan
    whose only finding for that resource is PASS, and asserts the counter
    is back to 0 afterwards.
    """
    # Use existing resource from fixture and set initial failed_findings_count
    tenant = tenants_fixture[0]
    provider = providers_fixture[0]
    resource = resources_fixture[0]
    # Set a non-zero failed_findings_count initially
    resource.failed_findings_count = 5
    resource.save()
    # Create a new scan
    scan = Scan.objects.create(
        name="Reset Test Scan",
        provider=provider,
        trigger=Scan.TriggerChoices.MANUAL,
        state=StateChoices.AVAILABLE,
        tenant_id=tenant.id,
    )
    with (
        # Patch RLS handling and the scan/provider entry points so no real
        # provider connection or Prowler execution happens.
        patch("api.db_utils.rls_transaction"),
        patch(
            "tasks.jobs.scan.initialize_prowler_provider"
        ) as mock_initialize_prowler_provider,
        patch("tasks.jobs.scan.ProwlerScan") as mock_prowler_scan_class,
        patch(
            "tasks.jobs.scan.PROWLER_COMPLIANCE_OVERVIEW_TEMPLATE",
            new_callable=dict,
        ),
        patch("api.compliance.PROWLER_CHECKS", new_callable=dict),
    ):
        provider.provider = Provider.ProviderChoices.AWS
        provider.save()
        tenant_id = str(tenant.id)
        scan_id = str(scan.id)
        provider_id = str(provider.id)
        # Mock a PASS finding for the existing resource, reusing the
        # fixture resource's identifying attributes so the scan matches it.
        pass_finding = MagicMock()
        pass_finding.uid = "reset_test_finding"
        pass_finding.status = StatusChoices.PASS
        pass_finding.status_extended = "reset test pass"
        pass_finding.severity = Severity.low
        pass_finding.check_id = "reset_test_check"
        pass_finding.get_metadata.return_value = {"key": "value"}
        pass_finding.resource_uid = resource.uid
        pass_finding.resource_name = resource.name
        pass_finding.region = resource.region
        pass_finding.service_name = resource.service
        pass_finding.resource_type = resource.type
        pass_finding.resource_tags = {}
        pass_finding.muted = False
        pass_finding.raw = {}
        pass_finding.resource_metadata = {}
        pass_finding.resource_details = {}
        pass_finding.partition = "aws"
        pass_finding.compliance = {}
        # Mock the ProwlerScan instance
        mock_prowler_scan_instance = MagicMock()
        mock_prowler_scan_instance.scan.return_value = [(100, [pass_finding])]
        mock_prowler_scan_class.return_value = mock_prowler_scan_instance
        # Mock prowler_provider
        mock_prowler_provider_instance = MagicMock()
        mock_prowler_provider_instance.get_regions.return_value = [resource.region]
        mock_initialize_prowler_provider.return_value = (
            mock_prowler_provider_instance
        )
        # Call the function under test
        perform_prowler_scan(tenant_id, scan_id, provider_id, [])
        # Refresh resource from the database
        resource.refresh_from_db()
        # Assert that failed_findings_count was reset to 0 during the scan
        assert resource.failed_findings_count == 0
# TODO Add tests for aggregations
@@ -697,68 +1045,3 @@ class TestCreateComplianceRequirements:
assert "requirements_created" in result
assert result["requirements_created"] >= 0
@pytest.mark.django_db
class TestUpdateResourceFailedFindingsCount:
def test_execute_sql_update(
self, tenants_fixture, scans_fixture, providers_fixture, resources_fixture
):
resource = resources_fixture[0]
tenant_id = resource.tenant_id
scan_id = resource.provider.scans.first().id
# Common kwargs for all failing findings
base_kwargs = {
"tenant_id": tenant_id,
"scan_id": scan_id,
"delta": None,
"status": StatusChoices.FAIL,
"status_extended": "test status extended",
"impact": Severity.critical,
"impact_extended": "test impact extended",
"severity": Severity.critical,
"raw_result": {
"status": StatusChoices.FAIL,
"impact": Severity.critical,
"severity": Severity.critical,
},
"tags": {"test": "dev-qa"},
"check_id": "test_check_id",
"check_metadata": {
"CheckId": "test_check_id",
"Description": "test description apple sauce",
"servicename": "ec2",
},
"first_seen_at": "2024-01-02T00:00:00Z",
}
# UIDs to create (two with same UID, one unique)
uids = ["test_finding_uid_1", "test_finding_uid_1", "test_finding_uid_2"]
# Create findings and associate with the resource
for uid in uids:
finding = Finding.objects.create(uid=uid, **base_kwargs)
finding.add_resources([resource])
resource.refresh_from_db()
assert resource.failed_findings_count == 0
_update_resource_failed_findings_count(tenant_id=tenant_id, scan_id=scan_id)
resource.refresh_from_db()
# Only two since two findings share the same UID
assert resource.failed_findings_count == 2
@patch("tasks.jobs.scan.Scan.objects.get")
def test_scan_not_found(
self,
mock_scan_get,
):
mock_scan_get.side_effect = Scan.DoesNotExist
with pytest.raises(Scan.DoesNotExist):
_update_resource_failed_findings_count(
"8614ca97-8370-4183-a7f7-e96a6c7d2c93",
"4705bed5-8782-4e8b-bab6-55e8043edaa6",
)

BIN
docs/img/mutelist-ui-1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 321 KiB

BIN
docs/img/mutelist-ui-2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 276 KiB

BIN
docs/img/mutelist-ui-3.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 326 KiB

BIN
docs/img/mutelist-ui-4.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 260 KiB

BIN
docs/img/mutelist-ui-5.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 269 KiB

BIN
docs/img/mutelist-ui-6.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 234 KiB

BIN
docs/img/mutelist-ui-7.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 273 KiB

BIN
docs/img/mutelist-ui-8.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 243 KiB

BIN
docs/img/mutelist-ui-9.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 649 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 121 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 41 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 124 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 351 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 139 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 144 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 119 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 103 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 117 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 117 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 359 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 354 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 261 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 38 KiB

View File

@@ -0,0 +1,59 @@
# Mute Findings (Mutelist)
Prowler App allows users to mute specific findings to focus on the most critical security issues. This comprehensive guide demonstrates how to effectively use the Mutelist feature to manage and prioritize security findings.
## What Is the Mutelist Feature?
The Mutelist feature enables users to:
- **Suppress specific findings** from appearing in future scans
- **Focus on critical issues** by hiding resolved or accepted risks
- **Maintain audit trails** of muted findings for compliance purposes
- **Streamline security workflows** by reducing noise from non-critical findings
## Prerequisites
Before muting findings, ensure:
- Valid access to Prowler App with appropriate permissions
- A provider added to the Prowler App
- Understanding of the security implications of muting specific findings
???+ warning
Muting findings does not resolve underlying security issues. Review each finding carefully before muting to ensure it represents an acceptable risk or has been properly addressed.
## Step 1: Add a provider
To configure Mutelist:
1. Log into Prowler App
2. Navigate to the providers page
![Add provider](../img/mutelist-ui-1.png)
3. Add a provider. The "Configure Muted Findings" button will then be enabled on both the providers page and the scans page
![Button enabled in providers page](../img/mutelist-ui-2.png)
![Button enabled in scans page](../img/mutelist-ui-3.png)
## Step 2: Configure Mutelist
1. Open the modal by clicking "Configure Muted Findings" button
![Open modal](../img/mutelist-ui-4.png)
1. Provide a valid Mutelist in `YAML` format. For more details about the Mutelist format, see the [Mutelist documentation](../tutorials/mutelist.md)
![Valid YAML configuration](../img/mutelist-ui-5.png)
If the YAML configuration is invalid, an error message will be displayed
![Wrong YAML configuration](../img/mutelist-ui-7.png)
![Wrong YAML configuration 2](../img/mutelist-ui-8.png)
## Step 3: Review the Mutelist
1. Once added, the configuration can be removed or updated
![Remove or update configuration](../img/mutelist-ui-6.png)
## Step 4: Check muted findings in the scan results
1. Run a new scan
2. Check the muted findings in the scan results
![Check muted findings](../img/mutelist-ui-9.png)
???+ note
The Mutelist configuration takes effect on the next scans.

View File

@@ -0,0 +1,43 @@
# Entra ID Configuration
This page provides instructions for creating and configuring a Microsoft Entra ID (formerly Azure AD) application to use SAML SSO with Prowler App.
## Creating and Configuring the Enterprise Application
1. From the "Enterprise Applications" page in the Azure Portal, click "+ New application".
![New application](../img/saml/saml-sso-azure-1.png)
2. At the top of the page, click "+ Create your own application".
![Create application](../img/saml/saml-sso-azure-2.png)
3. Enter a name for the application and select the "Integrate any other application you don't find in the gallery (Non-gallery)" option.
![Enter name](../img/saml/saml-sso-azure-3.png)
4. Assign users and groups to the application, then proceed to "Set up single sign on" and select "SAML" as the method.
![Select SAML](../img/saml/saml-sso-azure-4.png)
5. In the "Basic SAML Configuration" section, click "Edit".
![Edit](../img/saml/saml-sso-azure-5.png)
6. Enter the "Identifier (Entity ID)" and "Reply URL (Assertion Consumer Service URL)". These values can be obtained from the SAML SSO integration setup in Prowler App. For detailed instructions, refer to the [SAML SSO Configuration](./prowler-app-sso.md) page.
![Enter data](../img/saml/saml-sso-azure-6.png)
7. In the "SAML Certificates" section, click "Edit".
![Edit](../img/saml/saml-sso-azure-7.png)
8. For the "Signing Option," select "Sign SAML response and assertion", and then click "Save".
![Signing options](../img/saml/saml-sso-azure-8.png)
9. Once the changes are saved, the metadata XML can be downloaded from the "App Federation Metadata Url".
![Metadata XML](../img/saml/saml-sso-azure-9.png)
10. Save the downloaded Metadata XML to a file. To complete the setup, upload this file during the Prowler App integration. (See the [SAML SSO Configuration](./prowler-app-sso.md) page for details).

View File

@@ -1,175 +1,203 @@
# Configuring SAML Single Sign-On (SSO) in Prowler
# SAML Single Sign-On (SSO) Configuration
This guide explains how to enable and test SAML SSO integration in Prowler. It includes environment setup, API endpoints, and how to configure Okta as your Identity Provider (IdP).
This guide provides comprehensive instructions to configure SAML-based Single Sign-On (SSO) in Prowler App. This configuration allows users to authenticate using the organization's Identity Provider (IdP).
This document is divided into two main sections:
- **User Guide**: For organization administrators to configure SAML SSO through Prowler App.
- **Developer and Administrator Guide**: For developers and system administrators running self-hosted Prowler App instances, providing technical details on environment configuration, API usage, and testing.
---
## Environment Configuration
## User Guide: Configuring SAML SSO in Prowler App
### `DJANGO_ALLOWED_HOSTS`
Follow these steps to enable and configure SAML SSO for an organization.
Update this variable to specify which domains Django should accept incoming requests from. This typically includes:
### Key Features
- `localhost` for local development
- container hostnames (e.g. `prowler-api`)
- public-facing domains or tunnels (e.g. ngrok)
Prowler can be integrated with SAML SSO identity providers such as Okta to enable single sign-on for the organization's users. The Prowler SAML integration currently supports the following features:
**Example**:
- **IdP-Initiated SSO**: Users can initiate login from their Identity Provider's dashboard.
- **SP-Initiated SSO**: Users can initiate login directly from the Prowler login page.
- **Just-in-Time Provisioning**: Users from the organization signing into Prowler for the first time will be automatically created.
```env
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api,mycompany.prowler
```
???+ warning "Deactivate SAML"
If the SAML configuration is removed, users who previously authenticated via SAML will need to reset their password to regain access using standard login. This is because their accounts no longer have valid authentication credentials without the SAML integration.
# SAML Configuration API
### Prerequisites
You can manage SAML settings via the API. Prowler provides full CRUD support for tenant-specific SAML configuration.
- Administrator access to the Prowler organization is required.
- Administrative access to the SAML 2.0 compliant Identity Provider (e.g., Okta, Azure AD, Google Workspace) is necessary.
- GET /api/v1/saml-config: Retrieve the current configuration
### Configuration Steps
- POST /api/v1/saml-config: Create a new configuration
#### Step 1: Access Profile Settings
- PATCH /api/v1/saml-config: Update the existing configuration
To access the account settings, click the "Account" button in the top-right corner of Prowler App, or navigate directly to `https://cloud.prowler.com/profile` (or `http://localhost:3000/profile` for local setups).
- DELETE /api/v1/saml-config: Remove the current configuration
![Access Profile Settings](../img/saml/saml-step-1.png)
#### Step 2: Enable SAML Integration
???+ note "API Note"
SSO with SAML API documentation.[Prowler API Reference - Upload SAML configuration](https://api.prowler.com/api/v1/docs#tag/SAML/operation/saml_config_create)
On the profile page, find the "SAML SSO Integration" card and click "Enable" to begin the configuration process.
# SAML Initiate
![Enable SAML Integration](../img/saml/saml-step-2.png)
### Description
#### Step 3: Configure the Identity Provider (IdP)
This endpoint receives an email and checks if there is an active SAML configuration for the associated domain (i.e., the part after the @). If a configuration exists it responds with an HTTP 302 redirect to the appropriate saml_login endpoint for the organization.
The Prowler SAML configuration panel displays the information needed to configure the IdP. This information must be used to create a new SAML application in the IdP.
- POST /api/v1/accounts/saml/initiate/
1. **Assertion Consumer Service (ACS) URL**: The endpoint in Prowler that will receive the SAML assertion from the IdP.
2. **Audience URI (Entity ID)**: A unique identifier for the Prowler application (Service Provider).
???+ note
Important: This endpoint is intended to be used from a browser, as it returns a 302 redirect that needs to be followed to continue the SAML authentication flow. For testing purposes, it is better to use a browser or a tool that follows redirects (such as Postman) rather than relying on unit tests that cannot capture the redirect behavior.
To configure the IdP, copy the **ACS URL** and **Audience URI** from Prowler and use them to set up a new SAML application.
### Expected payload
```
{
"email_domain": "user@domain.com"
}
```
![IdP configuration](../img/saml/idp_config.png)
### Possible responses
???+ info "IdP Configuration"
The exact steps for configuring an IdP vary depending on the provider (Okta, Azure AD, etc.). Please refer to the IdP's documentation for instructions on creating a SAML application. For SSO integration with Azure AD / Entra ID, see our [Entra ID configuration instructions](./prowler-app-sso-entra.md).
• 302 FOUND: Redirects to the SAML login URL associated with the organization.
#### Step 4: Configure Attribute Mapping in the IdP
• 403 FORBIDDEN: The domain is not authorized.
For Prowler to correctly identify and provision users, the IdP must be configured to send the following attributes in the SAML assertion:
### Validation logic
| Attribute Name | Description | Required |
|----------------|---------------------------------------------------------------------------------------------------------|----------|
| `firstName` | The user's first name. | Yes |
| `lastName` | The user's last name. | Yes |
| `userType` | The Prowler role to be assigned to the user (e.g., `admin`, `auditor`). If a role with that name already exists, it will be used; otherwise, a new role called `no_permissions` will be created with minimal permissions. You can then edit the permissions for that role in the [RBAC Management tab](./prowler-app-rbac.md). | No |
| `companyName` | The user's company name. This is automatically populated if the IdP sends an `organization` attribute. | No |
• Looks up the domain in SAMLDomainIndex.
???+ info "IdP Attribute Mapping"
Note that the attribute name is just an example and may be different in your IdP. For instance, if your IdP provides a 'division' attribute, you can map it to 'userType'.
![IdP configuration](../img/saml/saml_attribute_statements.png)
• Retrieves the related SAMLConfiguration object via tenant_id.
???+ warning "Dynamic Updates"
These attributes are updated in Prowler each time a user logs in. Any changes made in the identity provider (IdP) will be reflected the next time the user logs in again.
#### Step 5: Upload IdP Metadata to Prowler
# SAML Integration: Testing Guide
Once the IdP is configured, it provides a **metadata XML file**. This file contains the IdP's configuration information, such as its public key and login URL.
This document outlines the process for testing the SAML integration functionality.
To complete the Prowler-side configuration:
1. Return to the Prowler SAML configuration page.
2. Enter the **email domain** for the organization (e.g., `mycompany.com`). Prowler uses this to identify users who should authenticate via SAML.
3. Upload the **metadata XML file** downloaded from the IdP.
![Configure Prowler with IdP Metadata](../img/saml/saml-step-3.png)
#### Step 6: Save and Verify Configuration
Click the "Save" button to complete the setup. The "SAML Integration" card will now show an "Active" status, indicating that the configuration is complete and enabled.
![Verify Integration Status](../img/saml/saml-step-4.png)
???+ info "IdP Configuration"
The exact steps for configuring an IdP vary depending on the provider (Okta, Azure AD, etc.). Please refer to the IdP's documentation for instructions on creating a SAML application.
##### Remove SAML Configuration
You can disable SAML SSO by removing the existing configuration from the integration panel.
![Remove SAML configuration](../img/saml/saml-step-remove.png)
### Signing in with SAML SSO
Once SAML SSO is enabled, users from the configured domain can sign in by entering their email address on the login page and clicking "Continue with SAML SSO". They will be redirected to the IdP to authenticate and then returned to Prowler.
![Sign in with SAML SSO](../img/saml/saml-step-5.png)
---
## 1. Start Ngrok and Update ALLOWED_HOSTS
## Developer and Administrator Guide
Start ngrok on port 8080:
```
This section provides technical details for developers and administrators of self-hosted Prowler instances.
### Environment Configuration
For self-hosted deployments, several environment variables must be configured to ensure SAML SSO functions correctly. These variables are typically set in an `.env` file.
| Variable | Description | Example |
|---------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------|
| `API_BASE_URL` | The base URL of the Prowler API instance. | `http://mycompany.prowler/api/v1` |
| `DJANGO_ALLOWED_HOSTS` | A comma-separated list of hostnames that the Django backend will accept requests from. Include any domains used to access the Prowler API. | `localhost,127.0.0.1,prowler-api,mycompany.prowler` |
| `AUTH_URL` | The base URL of the Prowler web UI. This is used to construct the callback URL after authentication. | `http://mycompany.prowler` |
| `SAML_SSO_CALLBACK_URL` | The full callback URL where users are redirected after authenticating with the IdP. It is typically constructed using the `AUTH_URL`. | `${AUTH_URL}/api/auth/callback/saml` |
After modifying these variables, the Prowler API must be restarted for the changes to take effect.
### SAML API Reference
Prowler provides a REST API to manage SAML configurations programmatically.
- **Endpoint**: `/api/v1/saml-config`
- **Methods**:
- `GET`: Retrieve the current SAML configuration for the tenant.
- `POST`: Create a new SAML configuration.
- `PATCH`: Update an existing SAML configuration.
- `DELETE`: Remove the SAML configuration.
???+ note "API Documentation"
For detailed information on using the API, refer to the [Prowler API Reference](https://api.prowler.com/api/v1/docs#tag/SAML/operation/saml_config_create).
#### SAML Initiate Endpoint
- **Endpoint**: `POST /api/v1/accounts/saml/initiate/`
- **Description**: This endpoint initiates the SAML login flow. It takes an email address, determines if the domain has a SAML configuration, and redirects the user to the appropriate IdP login page. It is primarily designed for browser-based flows.
### Testing SAML Integration
Follow these steps to test a SAML integration in a development environment.
#### 1. Expose the Local Environment
Since the IdP needs to send requests to the local Prowler instance, it must be exposed to the internet. A tool like `ngrok` can be used for this purpose.
To start ngrok, run the following command:
```bash
ngrok http 8080
```
This command provides a public URL (e.g., `https://<random-string>.ngrok.io`) that forwards to the local server on port 8080.
Then, copy the generated ngrok URL and include it in the ALLOWED_HOSTS setting. If you're using the development environment, it usually defaults to *, but in some cases this may not work properly, like in my tests (investigate):
#### 2. Update `DJANGO_ALLOWED_HOSTS`
```
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["*"])
To allow requests from ngrok, add its URL to the `DJANGO_ALLOWED_HOSTS` environment variable.
```env
DJANGO_ALLOWED_HOSTS=localhost,127.0.0.1,prowler-api,*.ngrok.io
```
## 2. Configure the Identity Provider (IdP)
#### 3. Configure the IdP
Start your environment and configure your IdP. You will need to download the IdP's metadata XML file.
When configuring the IdP for testing, use the ngrok URL for the ACS URL:
`https://<your-ngrok-url>/api/v1/accounts/saml/<YOUR_DOMAIN>/acs/`
Your Assertion Consumer Service (ACS) URL must follow this format:
#### 4. Configure Prowler via API
```
https://<PROXY_URL>/api/v1/accounts/saml/<CONFIGURED_DOMAIN>/acs/
```
## 3. IdP Attribute Mapping
The following fields are expected from the IdP:
- firstName
- lastName
- userType (this is the name of the role the user should be assigned)
- companyName (this is filled automatically if the IdP includes an "organization" field)
These values are dynamic. If the values change in the IdP, they will be updated on the next login.
## 4. SAML Configuration API (POST)
SAML configuration is managed via a CRUD API. Use the following POST request to create a new configuration:
To create a SAML configuration for testing, use `curl`. Make sure to replace placeholders with actual data.
```bash
curl --location 'http://localhost:8080/api/v1/saml-config' \
--header 'Content-Type: application/vnd.api+json' \
--header 'Accept: application/vnd.api+json' \
--header 'Authorization: Bearer <TOKEN>' \
--header 'Authorization: Bearer <YOUR_API_TOKEN>' \
--data '{
"data": {
"type": "saml-configurations",
"attributes": {
"email_domain": "prowler.com",
"metadata_xml": "<XML>"
"email_domain": "yourdomain.com",
"metadata_xml": "<PASTE_YOUR_IDP_METADATA_XML_HERE>"
}
}
}'
```
## 5. SAML SSO Callback Configuration
#### 5. Initiate Login Flow
### Environment Variable Configuration
To test the end-to-end flow, construct the login URL and open it in a browser. This will start the IdP-initiated login flow.
The SAML authentication flow requires proper callback URL configuration to handle post-authentication redirects. Configure the following environment variables:
`https://<your-ngrok-url>/api/v1/accounts/saml/<YOUR_DOMAIN>/login/`
#### `SAML_SSO_CALLBACK_URL`
Specifies the callback endpoint that will be invoked upon successful SAML authentication completion. This URL directs users back to the web application interface.
```env
SAML_SSO_CALLBACK_URL="${AUTH_URL}/api/auth/callback/saml"
```
#### `AUTH_URL`
Defines the base URL of the web user interface application that serves as the authentication callback destination.
```env
AUTH_URL="<WEB_UI_URL>"
```
### Configuration Notes
- The `SAML_SSO_CALLBACK_URL` dynamically references the `AUTH_URL` variable to construct the complete callback endpoint
- Ensure the `AUTH_URL` points to the correct web UI deployment (development, staging, or production)
- The callback endpoint `/api/auth/callback/saml` must be accessible and properly configured to handle SAML authentication responses
- Both environment variables are required for proper SAML SSO functionality
- Verify that the `NEXT_PUBLIC_API_BASE_URL` environment variable is properly configured to reference the correct API server base URL corresponding to your target deployment environment. This ensures proper routing of SAML callback requests to the appropriate backend services.
## 6. Start SAML Login Flow
Once everything is configured, start the SAML login process by visiting the following URL:
```
https://<PROXY_IP>/api/v1/accounts/saml/<CONFIGURED_DOMAIN>/login/?email=<USER_EMAIL>
```
At the end you will get a valid access and refresh token
## 7. Notes on the initiate Endpoint
The initiate endpoint is not strictly required. It was created to allow extra checks or behavior modifications (like enumeration mitigation). It also simplifies UI integration with SAML, but again, it's optional.
If successful, the user will be redirected back to the Prowler application with a valid session.

View File

@@ -54,6 +54,7 @@ nav:
- Role-Based Access Control: tutorials/prowler-app-rbac.md
- Social Login: tutorials/prowler-app-social-login.md
- SSO with SAML: tutorials/prowler-app-sso.md
- Mute findings: tutorials/prowler-app-mute-findings.md
- Lighthouse: tutorials/prowler-app-lighthouse.md
- CLI:
- Miscellaneous: tutorials/misc.md

View File

@@ -12,7 +12,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "5.9.0"
prowler_version = "5.10.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"

View File

@@ -71,7 +71,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">3.9.1,<3.13"
version = "5.9.0"
version = "5.10.0"
[project.scripts]
prowler = "prowler.__main__:prowler"

View File

@@ -6,6 +6,7 @@ All notable changes to the **Prowler UI** are documented in this file.
### 🚀 Added
- PostHog analytics integration [(#8357)](https://github.com/prowler-cloud/prowler/pull/8357)
- Mutelist configuration form [(#8190)](https://github.com/prowler-cloud/prowler/pull/8190)
- SAML login integration [(#8203)](https://github.com/prowler-cloud/prowler/pull/8203)
- Resource view [(#7760)](https://github.com/prowler-cloud/prowler/pull/7760)
@@ -20,6 +21,7 @@ All notable changes to the **Prowler UI** are documented in this file.
### 🔄 Changed
- Upgrade to Next.js 14.2.30 and lock TypeScript to 5.5.4 for ESLint compatibility [(#8189)](https://github.com/prowler-cloud/prowler/pull/8189)
- Improved active step highlighting and updated step titles and descriptions in the Cloud Provider credentials update flow [(#8303)](https://github.com/prowler-cloud/prowler/pull/8303)
### 🐞 Fixed

View File

@@ -5,8 +5,12 @@ import { useRouter } from "next/navigation";
import { SessionProvider } from "next-auth/react";
import { ThemeProvider as NextThemesProvider } from "next-themes";
import { ThemeProviderProps } from "next-themes/dist/types";
import posthog from "posthog-js";
import { PostHogProvider as PHProvider } from "posthog-js/react";
import * as React from "react";
import { initializePostHog } from "@/lib/analytics";
export interface ProvidersProps {
children: React.ReactNode;
themeProps?: ThemeProviderProps;
@@ -15,10 +19,17 @@ export interface ProvidersProps {
export function Providers({ children, themeProps }: ProvidersProps) {
const router = useRouter();
// Initialize PostHog
React.useMemo(() => {
initializePostHog();
}, []);
return (
<SessionProvider>
<NextUIProvider navigate={router.push}>
<NextThemesProvider {...themeProps}>{children}</NextThemesProvider>
<NextThemesProvider {...themeProps}>
<PHProvider client={posthog}>{children}</PHProvider>
</NextThemesProvider>
</NextUIProvider>
</SessionProvider>
);

View File

@@ -21,6 +21,11 @@ import {
FormField,
FormMessage,
} from "@/components/ui/form";
import {
initializeSession,
trackUserLogin,
trackUserRegistration,
} from "@/lib/analytics";
import { ApiError, authFormSchema } from "@/types";
export const AuthForm = ({
@@ -80,6 +85,8 @@ export const AuthForm = ({
const isSamlMode = form.watch("isSamlMode");
const onSubmit = async (data: z.infer<typeof formSchema>) => {
// Start a fresh PostHog session before authenticating
initializeSession();
if (type === "sign-in") {
if (data.isSamlMode) {
const email = data.email.toLowerCase();
@@ -107,6 +114,7 @@ export const AuthForm = ({
password: data.password,
});
if (result?.message === "Success") {
trackUserLogin({ email: data.email });
router.push("/");
} else if (result?.errors && "credentials" in result.errors) {
form.setError("email", {
@@ -128,6 +136,11 @@ export const AuthForm = ({
const newUser = await createNewUser(data);
if (!newUser.errors) {
trackUserRegistration({
email: data.email,
fullName: data.name,
company: data.company,
});
toast({
title: "Success!",
description: "The user was registered successfully.",

View File

@@ -19,6 +19,7 @@ import { CheckIcon, RocketIcon } from "@/components/icons";
import { useToast } from "@/components/ui";
import { CustomButton } from "@/components/ui/custom";
import { Form } from "@/components/ui/form";
import { trackCloudConnectionSuccess } from "@/lib/analytics";
import { checkTaskStatus } from "@/lib/helper";
import { ProviderType } from "@/types";
import { ApiError, testConnectionFormSchema } from "@/types";
@@ -145,6 +146,12 @@ export const TestConnectionForm = ({
});
} else {
setIsRedirecting(true);
// Track cloud connection success event
trackCloudConnectionSuccess({
providerType: providerType,
providerAlias: providerData.data.attributes.alias,
scanType: form.watch("runOnce") ? "single" : "scheduled",
});
router.push("/scans");
}
} catch (error) {

View File

@@ -27,14 +27,38 @@ const steps = [
},
];
const ROUTE_CONFIG: Record<
string,
{
stepIndex: number;
stepOverride?: { index: number; title: string; description: string };
}
> = {
"/providers/connect-account": { stepIndex: 0 },
"/providers/add-credentials": { stepIndex: 1 },
"/providers/test-connection": { stepIndex: 2 },
"/providers/update-credentials": {
stepIndex: 1,
stepOverride: {
index: 2,
title: "Make sure the new credentials are valid",
description: "Valid credentials will take you back to the providers page",
},
},
};
export const WorkflowAddProvider = () => {
const pathname = usePathname();
// Calculate current step based on pathname
const currentStepIndex = steps.findIndex((step) =>
pathname.endsWith(step.href),
);
const currentStep = currentStepIndex === -1 ? 0 : currentStepIndex;
const config = ROUTE_CONFIG[pathname] || { stepIndex: 0 };
const currentStep = config.stepIndex;
const updatedSteps = steps.map((step, index) => {
if (config.stepOverride && index === config.stepOverride.index) {
return { ...step, ...config.stepOverride };
}
return step;
});
return (
<section className="max-w-sm">
@@ -63,7 +87,7 @@ export const WorkflowAddProvider = () => {
hideProgressBars
currentStep={currentStep}
stepClassName="border border-default-200 dark:border-default-50 aria-[current]:bg-default-100 dark:aria-[current]:bg-prowler-blue-800 cursor-default"
steps={steps}
steps={updatedSteps}
/>
<Spacer y={4} />
</section>

177
ui/lib/analytics.ts Normal file
View File

@@ -0,0 +1,177 @@
import posthog from "posthog-js";
// Initialize PostHog
/**
 * Initialize the PostHog browser client.
 *
 * No-ops on the server (posthog-js requires a browser environment) and when
 * no project key is configured, so analytics stays strictly optional.
 * Session replays mask all inputs and all text; autocapture and automatic
 * pageview/pageleave events are disabled (pageviews are tracked manually
 * via `trackPageView`).
 */
export const initializePostHog = (): void => {
  if (typeof window === "undefined") return; // Don't initialize on server side

  // Fix: the original used `NEXT_PUBLIC_POSTHOG_KEY!`, passing `undefined`
  // to posthog.init when the env var is unset. Bail out cleanly instead.
  const posthogKey = process.env.NEXT_PUBLIC_POSTHOG_KEY;
  if (!posthogKey) return;

  try {
    posthog.init(posthogKey, {
      api_host: process.env.NEXT_PUBLIC_POSTHOG_HOST,
      ui_host: process.env.NEXT_PUBLIC_POSTHOG_HOST,
      autocapture: false,
      defaults: "2025-05-24",
      capture_exceptions: true,
      capture_pageview: false,
      capture_pageleave: false,
      session_recording: {
        maskAllInputs: true,
        maskTextSelector: "*",
      },
    });
  } catch (error) {
    // Analytics must never crash the app.
    console.error("Failed to initialize PostHog:", error);
  }
};
// Type definitions for tracking payloads

/** Payload for the `userLogin` event (see `trackUserLogin`). */
export interface UserLoginPayload {
  // User email; lowercased before being sent to PostHog.
  email: string;
}

/**
 * Payload for the `userRegistered` event (see `trackUserRegistration`).
 * Either pass `firstName`/`lastName` directly, or pass `fullName` and let
 * the tracker split it into first and last name.
 */
export interface UserRegistrationPayload {
  email: string;
  firstName?: string;
  lastName?: string;
  company?: string;
  fullName?: string;
}

/** Payload for the `cloudConnectionSuccess` event (see `trackCloudConnectionSuccess`). */
export interface CloudConnectionPayload {
  // Cloud provider type (e.g. aws, azure, gcp) — value comes from the caller.
  providerType: string;
  providerAlias: string;
  // "single" for a one-off scan, "scheduled" for recurring scans.
  scanType: "single" | "scheduled";
}
/**
 * Reset PostHog state so a fresh anonymous session begins.
 * Called before authentication so events are not attributed to a
 * previously identified user. Errors are swallowed (logged only).
 */
export const initializeSession = (): void => {
  try {
    posthog.reset(true);
  } catch (err) {
    console.error("Failed to initialize PostHog session:", err);
  }
};
/**
 * Associate the current PostHog session with a user.
 * The lowercased email is used as the distinct ID.
 */
export const identifyUser = (email: string): void => {
  try {
    const distinctId = email.toLowerCase();
    posthog.identify(distinctId);
  } catch (err) {
    console.error("Failed to identify user in PostHog:", err);
  }
};
/**
 * Capture a `userLogin` event.
 * Identifies the user first (lowercased email as distinct ID), then sends
 * the event with a client-side timestamp. Failures are logged, never thrown.
 */
export const trackUserLogin = ({ email }: UserLoginPayload): void => {
  try {
    const lowered = email.toLowerCase();
    identifyUser(lowered);

    const eventProps = {
      email: lowered,
      timestamp: Date.now(),
    };
    posthog.capture("userLogin", eventProps);
  } catch (err) {
    console.error("Failed to track user login:", err);
  }
};
/**
 * Capture a `userRegistered` event.
 *
 * Name resolution: explicitly provided `firstName`/`lastName` always win.
 * Only the parts that are missing are derived from `fullName` (first token
 * → first name, remaining tokens → last name).
 *
 * Fix: the original recomputed BOTH parts from `fullName` whenever either
 * one was missing, silently discarding an explicitly supplied
 * `firstName` or `lastName`. Splitting now also tolerates repeated spaces.
 */
export const trackUserRegistration = ({
  email,
  firstName = "",
  lastName = "",
  company = "",
  fullName = "",
}: UserRegistrationPayload): void => {
  try {
    let first = firstName;
    let last = lastName;
    if (fullName && (!first || !last)) {
      const nameParts = fullName.trim().split(/\s+/);
      if (!first) first = nameParts[0] || "";
      if (!last) last = nameParts.slice(1).join(" ") || "";
    }
    posthog.capture("userRegistered", {
      email: email.toLowerCase(),
      firstName: first,
      lastName: last,
      company: company,
      timestamp: Date.now(),
    });
  } catch (error) {
    console.error("Failed to track user registration:", error);
  }
};
/**
 * Capture a `cloudConnectionSuccess` event after a provider's credentials
 * pass the connection test. Failures are logged, never thrown.
 */
export const trackCloudConnectionSuccess = ({
  providerType,
  providerAlias,
  scanType,
}: CloudConnectionPayload): void => {
  try {
    posthog.capture("cloudConnectionSuccess", {
      providerType,
      providerAlias,
      scanType,
      timestamp: Date.now(),
    });
  } catch (err) {
    console.error("Failed to track cloud connection success:", err);
  }
};
/**
 * Capture an arbitrary custom event.
 * A `timestamp` property is always appended to the supplied properties.
 */
export const trackEvent = (
  eventName: string,
  properties?: Record<string, any>,
): void => {
  try {
    const payload = {
      ...properties,
      timestamp: Date.now(),
    };
    posthog.capture(eventName, payload);
  } catch (err) {
    console.error(`Failed to track event "${eventName}":`, err);
  }
};
/**
 * Capture a manual `$pageview` event (automatic pageviews are disabled in
 * `initializePostHog`). Includes the current URL plus any extra properties.
 *
 * Fix: the original read `window.location.href` without a server-side
 * guard, so an SSR call threw and was only swallowed by the catch block.
 * Now it no-ops outside the browser, matching `initializePostHog`.
 */
export const trackPageView = (
  pageName: string,
  properties?: Record<string, any>,
): void => {
  if (typeof window === "undefined") return; // No pageviews on the server
  try {
    posthog.capture("$pageview", {
      $current_url: window.location.href,
      pageName,
      ...properties,
    });
  } catch (error) {
    console.error("Failed to track page view:", error);
  }
};
/**
 * Attach properties to the currently identified PostHog user profile.
 * Failures are logged, never thrown.
 */
export const setUserProperties = (properties: Record<string, any>): void => {
  try {
    posthog.people.set(properties);
  } catch (err) {
    console.error("Failed to set user properties:", err);
  }
};
/**
 * Whether PostHog is loaded and ready to receive events.
 * Returns false on the server, when posthog is unavailable, or on any error.
 */
export const isAnalyticsReady = (): boolean => {
  try {
    if (typeof window === "undefined") return false;
    if (typeof posthog === "undefined") return false;
    return posthog.__loaded === true;
  } catch {
    return false;
  }
};

13
ui/lib/posthog.ts Normal file
View File

@@ -0,0 +1,13 @@
import { PostHog } from "posthog-node";
// NOTE: This is a Node.js client, so you can use it for sending events from the server side to PostHog.
/**
 * Factory for a server-side PostHog client.
 * `flushAt: 1` / `flushInterval: 0` send each event immediately instead of
 * batching, so short-lived server handlers don't drop events.
 * Assumes NEXT_PUBLIC_POSTHOG_KEY is set — TODO confirm callers guard this.
 */
const PostHogClient = () => {
  return new PostHog(process.env.NEXT_PUBLIC_POSTHOG_KEY!, {
    host: process.env.NEXT_PUBLIC_POSTHOG_HOST,
    flushAt: 1,
    flushInterval: 0,
  });
};
export default PostHogClient;

View File

@@ -4,12 +4,12 @@
// 'unsafe-eval' is configured under `script-src` because it is required by NextJS for development mode
const cspHeader = `
default-src 'self';
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://js.stripe.com https://www.googletagmanager.com;
connect-src 'self' https://api.iconify.design https://api.simplesvg.com https://api.unisvg.com https://js.stripe.com https://www.googletagmanager.com;
img-src 'self' https://www.google-analytics.com https://www.googletagmanager.com;
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://js.stripe.com https://www.googletagmanager.com https://*.posthog.com;
connect-src 'self' https://api.iconify.design https://api.simplesvg.com https://api.unisvg.com https://js.stripe.com https://www.googletagmanager.com https://*.posthog.com;
img-src 'self' https://www.google-analytics.com https://www.googletagmanager.com https://*.posthog.com;
font-src 'self';
style-src 'self' 'unsafe-inline';
frame-src 'self' https://js.stripe.com https://www.googletagmanager.com;
frame-src 'self' https://js.stripe.com https://www.googletagmanager.com https://*.posthog.com;
frame-ancestors 'none';
`;

58
ui/package-lock.json generated
View File

@@ -47,6 +47,8 @@
"next": "^14.2.30",
"next-auth": "^5.0.0-beta.25",
"next-themes": "^0.2.1",
"posthog-js": "^1.256.1",
"posthog-node": "^5.1.1",
"radix-ui": "^1.1.3",
"react": "^18.3.1",
"react-dom": "^18.3.1",
@@ -8322,6 +8324,17 @@
"simple-wcswidth": "^1.0.1"
}
},
"node_modules/core-js": {
"version": "3.44.0",
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.44.0.tgz",
"integrity": "sha512-aFCtd4l6GvAXwVEh3XbbVqJGHDJt0OZRa+5ePGx3LLwi12WfexqQxcsohb2wgsa/92xtl19Hd66G/L+TaAxDMw==",
"hasInstallScript": true,
"license": "MIT",
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/core-js"
}
},
"node_modules/cosmiconfig": {
"version": "8.3.6",
"resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz",
@@ -9969,6 +9982,12 @@
"node": ">= 8"
}
},
"node_modules/fflate": {
"version": "0.4.8",
"resolved": "https://registry.npmjs.org/fflate/-/fflate-0.4.8.tgz",
"integrity": "sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==",
"license": "MIT"
},
"node_modules/file-entry-cache": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
@@ -13913,6 +13932,39 @@
"integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==",
"license": "MIT"
},
"node_modules/posthog-js": {
"version": "1.258.2",
"resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.258.2.tgz",
"integrity": "sha512-XBSeiN4HjiYsy3tW5zss8WOJF2JXTQXAYw2wZ+zjqQuzzi7kkLEXjIgsVrBnt5Opwhqn0krZVsb0ZBw34dIiyQ==",
"license": "SEE LICENSE IN LICENSE",
"dependencies": {
"core-js": "^3.38.1",
"fflate": "^0.4.8",
"preact": "^10.19.3",
"web-vitals": "^4.2.4"
},
"peerDependencies": {
"@rrweb/types": "2.0.0-alpha.17",
"rrweb-snapshot": "2.0.0-alpha.17"
},
"peerDependenciesMeta": {
"@rrweb/types": {
"optional": true
},
"rrweb-snapshot": {
"optional": true
}
}
},
"node_modules/posthog-node": {
"version": "5.6.0",
"resolved": "https://registry.npmjs.org/posthog-node/-/posthog-node-5.6.0.tgz",
"integrity": "sha512-MVXxKmqAYp2cPBrN1YMhnhYsJYIu6yc6wumbHz1dbo67wZBf2WtMm67Uh+4VCrp07049qierWlxQqz1W5zGDeg==",
"license": "MIT",
"engines": {
"node": ">=20"
}
},
"node_modules/preact": {
"version": "10.24.3",
"resolved": "https://registry.npmjs.org/preact/-/preact-10.24.3.tgz",
@@ -16547,6 +16599,12 @@
"node": ">= 14"
}
},
"node_modules/web-vitals": {
"version": "4.2.4",
"resolved": "https://registry.npmjs.org/web-vitals/-/web-vitals-4.2.4.tgz",
"integrity": "sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw==",
"license": "Apache-2.0"
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",

View File

@@ -39,6 +39,8 @@
"next": "^14.2.30",
"next-auth": "^5.0.0-beta.25",
"next-themes": "^0.2.1",
"posthog-js": "^1.256.1",
"posthog-node": "^5.1.1",
"radix-ui": "^1.1.3",
"react": "^18.3.1",
"react-dom": "^18.3.1",