Compare commits

..

4 Commits

Author SHA1 Message Date
Rubén De la Torre Vico
809fd3dcf3 Merge branch 'master' into api-add-missing-map 2025-06-23 10:48:00 +02:00
Rubén De la Torre Vico
a470b7c9d8 docs(changelog): change fix from version 2025-06-20 10:00:39 +02:00
Rubén De la Torre Vico
eba5f8e621 docs(changelog): update PR link for new fix 2025-06-20 09:58:23 +02:00
Adrián Jesús Peña Rodríguez
56443518d6 fix(export): add missing m365 iso27001 mapping 2025-06-18 18:20:25 +02:00
71 changed files with 456 additions and 3602 deletions

View File

@@ -9,8 +9,6 @@ jobs:
if: contains(github.event.pull_request.labels.*.name, 'no-changelog') == false
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
pull-requests: write
env:
MONITORED_FOLDERS: "api ui prowler"
@@ -47,7 +45,6 @@ jobs:
echo "EOF" >> $GITHUB_OUTPUT
- name: Find existing changelog comment
if: github.event.pull_request.head.repo.full_name == github.repository
id: find_comment
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e #v3.1.0
with:
@@ -56,7 +53,7 @@ jobs:
body-includes: '<!-- changelog-check -->'
- name: Comment on PR if changelog is missing
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs != ''
if: steps.check_folders.outputs.missing_changelogs != ''
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}
@@ -70,7 +67,7 @@ jobs:
Please add an entry to the corresponding `CHANGELOG.md` file to maintain a clear history of changes.
- name: Comment on PR if all changelogs are present
if: github.event.pull_request.head.repo.full_name == github.repository && steps.check_folders.outputs.missing_changelogs == ''
if: steps.check_folders.outputs.missing_changelogs == ''
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4.0.0
with:
issue-number: ${{ github.event.pull_request.number }}

View File

@@ -14,7 +14,7 @@ All notable changes to the **Prowler API** are documented in this file.
- Reworked `GET /compliance-overviews` to return proper requirement metrics [(#7877)](https://github.com/prowler-cloud/prowler/pull/7877)
### Fixed
- Scheduled scans are no longer deleted when their daily schedule run is disabled [(#8082)](https://github.com/prowler-cloud/prowler/pull/8082)
- Add missing mapping for ISO 27001 compliance for M365 provider [(#8069)](https://github.com/prowler-cloud/prowler/pull/8069)
---

View File

@@ -43,20 +43,10 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
if provider == "saml":
# Handle SAML-specific logic
user.first_name = (
extra.get("firstName", [""])[0] if extra.get("firstName") else ""
)
user.last_name = (
extra.get("lastName", [""])[0] if extra.get("lastName") else ""
)
user.company_name = (
extra.get("organization", [""])[0]
if extra.get("organization")
else ""
)
user.first_name = extra.get("firstName", [""])[0]
user.last_name = extra.get("lastName", [""])[0]
user.company_name = extra.get("organization", [""])[0]
user.name = f"{user.first_name} {user.last_name}".strip()
if user.name == "":
user.name = "N/A"
user.save(using=MainRouter.admin_db)
email_domain = user.email.split("@")[-1]
@@ -67,11 +57,7 @@ class ProwlerSocialAccountAdapter(DefaultSocialAccountAdapter):
)
with rls_transaction(str(tenant.id)):
role_name = (
extra.get("userType", ["saml_default_role"])[0].strip()
if extra.get("userType")
else "saml_default_role"
)
role_name = extra.get("userType", ["saml_default_role"])[0].strip()
try:
role = Role.objects.using(MainRouter.admin_db).get(

View File

@@ -54,7 +54,6 @@ class Migration(migrations.Migration):
("gpt-4o-mini-2024-07-18", "GPT-4o Mini v2024-07-18"),
("gpt-4o-mini", "GPT-4o Mini Default"),
],
default="gpt-4o-2024-08-06",
help_text="Must be one of the supported model names",
max_length=50,
),

View File

@@ -1,24 +0,0 @@
# Generated by Django 5.1.10 on 2025-06-23 10:04
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("api", "0031_lighthouseconfiguration"),
("django_celery_beat", "0019_alter_periodictasks_options"),
]
operations = [
migrations.AlterField(
model_name="scan",
name="scheduler_task",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="django_celery_beat.periodictask",
),
),
]

View File

@@ -437,7 +437,7 @@ class Scan(RowLevelSecurityProtectedModel):
completed_at = models.DateTimeField(null=True, blank=True)
next_scan_at = models.DateTimeField(null=True, blank=True)
scheduler_task = models.ForeignKey(
PeriodicTask, on_delete=models.SET_NULL, null=True, blank=True
PeriodicTask, on_delete=models.CASCADE, null=True, blank=True
)
output_location = models.CharField(blank=True, null=True, max_length=200)

View File

@@ -63,12 +63,6 @@ class TestProwlerSocialAccountAdapter:
adapter = ProwlerSocialAccountAdapter()
request = rf.get("/")
saml_sociallogin.user.email = saml_setup["email"]
saml_sociallogin.account.extra_data = {
"firstName": [],
"lastName": [],
"organization": [],
"userType": [],
}
tenant = Tenant.objects.using(MainRouter.admin_db).get(
id=saml_setup["tenant_id"]
@@ -80,8 +74,6 @@ class TestProwlerSocialAccountAdapter:
user = adapter.save_user(request, saml_sociallogin)
assert user.name == "N/A"
assert user.company_name == ""
assert user.email == saml_setup["email"]
assert (
Membership.objects.using(MainRouter.admin_db)

View File

@@ -509,16 +509,10 @@ class TenantFinishACSView(FinishACSView):
return response
extra = social_account.extra_data
user.first_name = (
extra.get("firstName", [""])[0] if extra.get("firstName") else ""
)
user.last_name = extra.get("lastName", [""])[0] if extra.get("lastName") else ""
user.company_name = (
extra.get("organization", [""])[0] if extra.get("organization") else ""
)
user.first_name = extra.get("firstName", [""])[0]
user.last_name = extra.get("lastName", [""])[0]
user.company_name = extra.get("organization", [""])[0]
user.name = f"{user.first_name} {user.last_name}".strip()
if user.name == "":
user.name = "N/A"
user.save()
email_domain = user.email.split("@")[-1]
@@ -527,11 +521,7 @@ class TenantFinishACSView(FinishACSView):
.get(email_domain=email_domain)
.tenant
)
role_name = (
extra.get("userType", ["saml_default_role"])[0].strip()
if extra.get("userType")
else "saml_default_role"
)
role_name = extra.get("userType", ["saml_default_role"])[0].strip()
try:
role = Role.objects.using(MainRouter.admin_db).get(
name=role_name, tenant=tenant
@@ -3048,9 +3038,9 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
return Response(serializer.data, status=status.HTTP_200_OK)
@extend_schema(tags=["Overview"])
@extend_schema_view(
list=extend_schema(
tags=["Overview"],
summary="Get aggregated provider data",
description=(
"Retrieve an aggregated overview of findings and resources grouped by providers. "
@@ -3297,7 +3287,6 @@ class OverviewViewSet(BaseRLSViewSet):
return Response(serializer.data, status=status.HTTP_200_OK)
@extend_schema(tags=["Schedule"])
@extend_schema_view(
daily=extend_schema(
summary="Create a daily schedule scan for a given provider",

View File

@@ -31,6 +31,7 @@ from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_kubernetes import (
KubernetesISO27001,
)
from prowler.lib.outputs.compliance.iso27001.iso27001_m365 import M365ISO27001
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
@@ -88,6 +89,7 @@ COMPLIANCE_CLASS_MAP = {
(lambda name: name.startswith("iso27001_"), KubernetesISO27001),
],
"m365": [
(lambda name: name.startswith("iso27001_"), M365ISO27001),
(lambda name: name.startswith("cis_"), M365CIS),
(lambda name: name == "prowler_threatscore_m365", ProwlerThreatScoreM365),
],

View File

@@ -55,22 +55,3 @@ class TestScheduleProviderScan:
assert "There is already a scheduled scan for this provider." in str(
exc_info.value
)
def test_remove_periodic_task(self, providers_fixture):
provider_instance = providers_fixture[0]
assert Scan.objects.count() == 0
with patch("tasks.tasks.perform_scheduled_scan_task.apply_async"):
schedule_provider_scan(provider_instance)
assert Scan.objects.count() == 1
scan = Scan.objects.first()
periodic_task = scan.scheduler_task
assert periodic_task is not None
periodic_task.delete()
scan.refresh_from_db()
# Assert the scan still exists but its scheduler_task is set to None
# Otherwise, Scan.DoesNotExist would be raised
assert Scan.objects.get(id=scan.id).scheduler_task is None

View File

@@ -1,128 +0,0 @@
import random
from collections import defaultdict
import requests
from locust import events, task
from utils.helpers import APIUserBase, get_api_token, get_auth_headers
GLOBAL = {
"token": None,
"available_scans_info": {},
}
SUPPORTED_COMPLIANCE_IDS = {
"aws": ["ens_rd2022", "cis_2.0", "prowler_threatscore", "soc2"],
"gcp": ["ens_rd2022", "cis_2.0", "prowler_threatscore", "soc2"],
"azure": ["ens_rd2022", "cis_2.0", "prowler_threatscore", "soc2"],
"m365": ["cis_4.0", "iso27001_2022", "prowler_threatscore"],
}
def _get_random_scan() -> tuple:
provider_type = random.choice(list(GLOBAL["available_scans_info"].keys()))
scan_info = random.choice(GLOBAL["available_scans_info"][provider_type])
return provider_type, scan_info
def _get_random_compliance_id(provider: str) -> str:
return f"{random.choice(SUPPORTED_COMPLIANCE_IDS[provider])}_{provider}"
def _get_compliance_available_scans_by_provider_type(host: str, token: str) -> dict:
excluded_providers = ["kubernetes"]
response_dict = defaultdict(list)
provider_response = requests.get(
f"{host}/providers?fields[providers]=id,provider&filter[connected]=true",
headers=get_auth_headers(token),
)
for provider in provider_response.json()["data"]:
provider_id = provider["id"]
provider_type = provider["attributes"]["provider"]
if provider_type in excluded_providers:
continue
scan_response = requests.get(
f"{host}/scans?fields[scans]=id&filter[provider]={provider_id}&filter[state]=completed",
headers=get_auth_headers(token),
)
scan_data = scan_response.json()["data"]
if not scan_data:
continue
scan_id = scan_data[0]["id"]
response_dict[provider_type].append(scan_id)
return response_dict
def _get_compliance_regions_from_scan(host: str, token: str, scan_id: str) -> list:
response = requests.get(
f"{host}/compliance-overviews/metadata?filter[scan_id]={scan_id}",
headers=get_auth_headers(token),
)
assert response.status_code == 200, f"Failed to get scan: {response.text}"
return response.json()["data"]["attributes"]["regions"]
@events.test_start.add_listener
def on_test_start(environment, **kwargs):
GLOBAL["token"] = get_api_token(environment.host)
scans_by_provider = _get_compliance_available_scans_by_provider_type(
environment.host, GLOBAL["token"]
)
scan_info = defaultdict(list)
for provider, scans in scans_by_provider.items():
for scan in scans:
scan_info[provider].append(
{
"scan_id": scan,
"regions": _get_compliance_regions_from_scan(
environment.host, GLOBAL["token"], scan
),
}
)
GLOBAL["available_scans_info"] = scan_info
class APIUser(APIUserBase):
def on_start(self):
self.token = GLOBAL["token"]
@task(3)
def compliance_overviews_default(self):
provider_type, scan_info = _get_random_scan()
name = f"/compliance-overviews ({provider_type})"
endpoint = f"/compliance-overviews?" f"filter[scan_id]={scan_info['scan_id']}"
self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
@task(2)
def compliance_overviews_region(self):
provider_type, scan_info = _get_random_scan()
name = f"/compliance-overviews?filter[region] ({provider_type})"
endpoint = (
f"/compliance-overviews"
f"?filter[scan_id]={scan_info['scan_id']}"
f"&filter[region]={random.choice(scan_info['regions'])}"
)
self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
@task(2)
def compliance_overviews_requirements(self):
provider_type, scan_info = _get_random_scan()
compliance_id = _get_random_compliance_id(provider_type)
name = f"/compliance-overviews/requirements ({compliance_id})"
endpoint = (
f"/compliance-overviews/requirements"
f"?filter[scan_id]={scan_info['scan_id']}"
f"&filter[compliance_id]={compliance_id}"
)
self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)
@task
def compliance_overviews_attributes(self):
provider_type, _ = _get_random_scan()
compliance_id = _get_random_compliance_id(provider_type)
name = f"/compliance-overviews/attributes ({compliance_id})"
endpoint = (
f"/compliance-overviews/attributes"
f"?filter[compliance_id]={compliance_id}"
)
self.client.get(endpoint, headers=get_auth_headers(self.token), name=name)

17
poetry.lock generated
View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -1318,14 +1318,14 @@ files = [
[[package]]
name = "checkov"
version = "3.2.445"
version = "3.2.442"
description = "Infrastructure as code static analysis"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "checkov-3.2.445-py3-none-any.whl", hash = "sha256:c0cd1109cbcf1c764198a7c64b911845b83776587a65e0677880ed67a39d1f73"},
{file = "checkov-3.2.445.tar.gz", hash = "sha256:78705d34a9c7234bd7076c1970daedd18e823f79e7d83d2998bb84695cc33ebc"},
{file = "checkov-3.2.442-py3-none-any.whl", hash = "sha256:e94a3283bff9b4a81e54e57b4a00b02259dec0d85b17bf17e00652d137bd1a6d"},
{file = "checkov-3.2.442.tar.gz", hash = "sha256:e5206872de63d389cfb1b7c1212ce4b5a147986152e890461d87251726b4b0e7"},
]
[package.dependencies]
@@ -2653,6 +2653,8 @@ python-versions = "*"
groups = ["dev"]
files = [
{file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"},
{file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"},
{file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"},
]
[package.dependencies]
@@ -5586,6 +5588,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
{file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},
@@ -5594,6 +5597,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
{file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},
@@ -5602,6 +5606,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
{file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},
@@ -5610,6 +5615,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
{file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},
@@ -5618,6 +5624,7 @@ files = [
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
{file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
{file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -6596,4 +6603,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">3.9.1,<3.13"
content-hash = "c442552635c8e904d1c7a50f4787c8e90ec90787960ee1867f2235a7aa2205f0"
content-hash = "d72c55b52949ba94f0c68004d5b778edb69514a05bbb7aba8d641b5058a99fd5"

View File

@@ -37,29 +37,14 @@ All notable changes to the **Prowler SDK** are documented in this file.
- Azure Databricks service integration for Azure provider, including the `databricks_workspace_vnet_injection_enabled` check [(#8008)](https://github.com/prowler-cloud/prowler/pull/8008)
- Azure Databricks check `databricks_workspace_cmk_encryption_enabled` to ensure workspaces use customer-managed keys (CMK) for encryption at rest [(#8017)](https://github.com/prowler-cloud/prowler/pull/8017)
- Add `storage_account_default_to_entra_authorization_enabled` check for Azure provider. [(#7981)](https://github.com/prowler-cloud/prowler/pull/7981)
- `keyvault_ensure_public_network_access_disabled` check for Azure provider. [(#8072)](https://github.com/prowler-cloud/prowler/pull/8072)
- New check `monitor_alert_service_health_exists` for Azure provider [(#8067)](https://github.com/prowler-cloud/prowler/pull/8067)
- Replace `Domain.Read.All` with `Directory.Read.All` in Azure and M365 docs [(#8075)](https://github.com/prowler-cloud/prowler/pull/8075)
- Refactor IaC provider to use Checkov as Python library [(#8093)](https://github.com/prowler-cloud/prowler/pull/8093)
### Fixed
- Consolidate Azure Storage file service properties to the account level, improving the accuracy of the `storage_ensure_file_shares_soft_delete_is_enabled` check [(#8087)](https://github.com/prowler-cloud/prowler/pull/8087)
### Removed
- OCSF version number references to point always to the latest [(#8064)](https://github.com/prowler-cloud/prowler/pull/8064)
---
## [v5.7.6] (Prowler UNRELEASED)
### Fixed
- `organizations_scp_check_deny_regions` check to pass when SCP policies have no statements [(#8091)](https://github.com/prowler-cloud/prowler/pull/8091)
- Fix logic in VPC and ELBv2 checks [(#8077)](https://github.com/prowler-cloud/prowler/pull/8077)
- Retrieve correctly ECS Container insights settings [(#8097)](https://github.com/prowler-cloud/prowler/pull/8097)
---
## [v5.7.5] (Prowler 5.7.5)
## [v5.7.5] (Prowler UNRELEASED)
### Fixed
- Use unified timestamp for all requirements [(#8059)](https://github.com/prowler-cloud/prowler/pull/8059)

View File

@@ -28,7 +28,6 @@ Mutelist:
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
- "environment=prod" # Will ignore every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod
"*":
Checks:
@@ -47,6 +46,9 @@ Mutelist:
- "*"
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
"*":
Checks:
"ecs_task_definitions_no_environment_secrets":
Regions:
- "*"
@@ -58,3 +60,16 @@ Mutelist:
Regions:
- "eu-west-1"
- "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
"123456789012":
Checks:
"*":
Regions:
- "*"
Resources:
- "*"
Exceptions:
Resources:
- "test"
Tags:
- "environment=prod" # Will ignore every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod

View File

@@ -28,6 +28,9 @@ Mutelist:
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in Azure subscription 1 and every location
"*":
Checks:
"admincenter_*":
Regions:
- "*"

View File

@@ -7,7 +7,6 @@ from dataclasses import asdict, dataclass, is_dataclass
from enum import Enum
from typing import Any, Dict, Optional, Set
from checkov.common.output.record import Record
from pydantic.v1 import BaseModel, ValidationError, validator
from prowler.config.config import Provider
@@ -442,8 +441,6 @@ class Check_Report:
self.resource = resource.to_dict()
elif is_dataclass(resource):
self.resource = asdict(resource)
elif hasattr(resource, "__dict__"):
self.resource = resource.__dict__
else:
logger.error(
f"Resource metadata {type(resource)} in {self.check_metadata.CheckID} could not be converted to dict"
@@ -625,7 +622,7 @@ class CheckReportIAC(Check_Report):
resource_path: str
resource_line_range: str
def __init__(self, metadata: dict = {}, resource: Record = None) -> None:
def __init__(self, metadata: dict = {}, finding: dict = {}) -> None:
"""
Initialize the IAC Check's finding information from a Checkov failed_check dict.
@@ -633,10 +630,11 @@ class CheckReportIAC(Check_Report):
metadata (Dict): Optional check metadata (can be None).
failed_check (dict): A single failed_check result from Checkov's JSON output.
"""
super().__init__(metadata, resource)
self.resource_name = resource.resource
self.resource_path = resource.file_path
self.resource_line_range = resource.file_line_range
super().__init__(metadata, finding)
self.resource_name = getattr(finding, "resource", "")
self.resource_path = getattr(finding, "file_path", "")
self.resource_line_range = getattr(finding, "file_line_range", "")
@dataclass

View File

@@ -439,13 +439,12 @@ class Mutelist(ABC):
return False
@staticmethod
def validate_mutelist(mutelist: dict, raise_on_exception: bool = False) -> dict:
def validate_mutelist(mutelist: dict) -> dict:
"""
Validate the mutelist against the schema.
Args:
mutelist (dict): The mutelist to be validated.
raise_on_exception (bool): Whether to raise an exception if the mutelist is invalid.
Returns:
dict: The mutelist itself.
@@ -454,10 +453,7 @@ class Mutelist(ABC):
validate(mutelist, schema=mutelist_schema)
return mutelist
except Exception as error:
if raise_on_exception:
raise error
else:
logger.error(
f"{error.__class__.__name__} -- Mutelist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]"
)
logger.error(
f"{error.__class__.__name__} -- Mutelist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]"
)
return {}

View File

@@ -286,8 +286,8 @@ class Finding(BaseModel):
output_data["auth_method"] = "local" # Until we support remote repos
output_data["account_uid"] = "iac"
output_data["account_name"] = "iac"
output_data["resource_name"] = check_output.resource_name
output_data["resource_uid"] = check_output.resource_name
output_data["resource_name"] = check_output.resource["resource"]
output_data["resource_uid"] = check_output.resource["resource"]
output_data["region"] = check_output.resource_path
output_data["resource_line_range"] = check_output.resource_line_range
output_data["framework"] = (

View File

@@ -785,10 +785,7 @@
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ap-southeast-5",
"ap-southeast-7",
"ca-central-1",
"ca-west-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
@@ -3400,7 +3397,6 @@
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"sa-east-1",
"us-east-1",
@@ -6235,7 +6231,6 @@
"aws": [
"af-south-1",
"ap-east-1",
"ap-east-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
@@ -7830,18 +7825,13 @@
"regions": {
"aws": [
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-1",
@@ -8113,8 +8103,6 @@
"regions": {
"aws": [
"ap-northeast-1",
"ap-northeast-3",
"ap-south-1",
"ap-southeast-1",
"eu-central-1",
"eu-west-1",
@@ -8217,10 +8205,7 @@
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
]
"aws-us-gov": []
}
},
"personalize": {
@@ -8858,7 +8843,6 @@
"aws": [
"af-south-1",
"ap-east-1",
"ap-east-2",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",

View File

@@ -13,10 +13,11 @@ class ecs_cluster_container_insights_enabled(Check):
)
if cluster.settings:
for setting in cluster.settings:
if setting["name"] == "containerInsights" and (
setting["value"] == "enabled" or setting["value"] == "enhanced"
if (
setting["name"] == "containerInsights"
and setting["value"] == "enabled"
):
report.status = "PASS"
report.status_extended = f"ECS cluster {cluster.name} has container insights {setting['value']}."
report.status_extended = f"ECS cluster {cluster.name} has container insights enabled."
findings.append(report)
return findings

View File

@@ -175,7 +175,6 @@ class ECS(AWSService):
clusters=[cluster.arn],
include=[
"TAGS",
"SETTINGS",
],
)
cluster.settings = response["clusters"][0].get("settings", [])

View File

@@ -12,7 +12,7 @@ class elbv2_desync_mitigation_mode(Check):
report.status_extended = f"ELBv2 ALB {lb.name} is configured with correct desync mitigation mode."
if (
lb.desync_mitigation_mode != "strictest"
and lb.desync_mitigation_mode != "defensive"
or lb.desync_mitigation_mode != "defensive"
):
if lb.drop_invalid_header_fields == "false":
report.status = "FAIL"

View File

@@ -34,9 +34,9 @@ class organizations_scp_check_deny_regions(Check):
"SERVICE_CONTROL_POLICY", []
):
# Statements are not always list
statements = policy.content.get("Statement", [])
if type(statements) is not list:
statements = [statements]
statements = policy.content.get("Statement")
if type(policy.content["Statement"]) is not list:
statements = [policy.content.get("Statement")]
for statement in statements:
# Deny if Condition = {"StringNotEquals": {"aws:RequestedRegion": [region1, region2]}}

View File

@@ -9,10 +9,10 @@ class vpc_endpoint_multi_az_enabled(Check):
if endpoint.vpc_id in vpc_client.vpcs and endpoint.type == "Interface":
report = Check_Report_AWS(metadata=self.metadata(), resource=endpoint)
report.status = "FAIL"
report.status_extended = f"VPC Endpoint {endpoint.id} in VPC {endpoint.vpc_id} does not have subnets in different AZs."
report.status_extended = f"VPC Endpoint {endpoint.id} in VPC {endpoint.vpc_id} has subnets in different AZs."
if len(endpoint.subnet_ids) > 1:
report.status = "PASS"
report.status_extended = f"VPC Endpoint {endpoint.id} in VPC {endpoint.vpc_id} has subnets in different AZs."
report.status_extended = f"VPC Endpoint {endpoint.id} in VPC {endpoint.vpc_id} does not have subnets in different AZs."
findings.append(report)

View File

@@ -1,30 +0,0 @@
{
"Provider": "azure",
"CheckID": "keyvault_access_only_through_private_endpoints",
"CheckTitle": "Ensure that public network access when using private endpoint is disabled.",
"CheckType": [],
"ServiceName": "keyvault",
"SubServiceName": "",
"ResourceIdTemplate": "/subscriptions/{subscription_id}/resourceGroups/{resource_group}/providers/Microsoft.KeyVault/vaults/{vault_name}",
"Severity": "high",
"ResourceType": "KeyVault",
"Description": "Checks if Key Vaults with private endpoints have public network access disabled.",
"Risk": "Allowing public network access to Key Vault when using private endpoint can expose sensitive data to unauthorized access.",
"RelatedUrl": "https://learn.microsoft.com/en-us/azure/key-vault/general/network-security",
"Remediation": {
"Code": {
"CLI": "az keyvault update --resource-group <resource_group> --name <vault_name> --public-network-access disabled",
"NativeIaC": "{\n \"type\": \"Microsoft.KeyVault/vaults\",\n \"apiVersion\": \"2022-07-01\",\n \"properties\": {\n \"publicNetworkAccess\": \"disabled\"\n }\n}",
"Terraform": "resource \"azurerm_key_vault\" \"example\" {\n # ... other configuration ...\n\n public_network_access_enabled = false\n}",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/azure/KeyVault/use-private-endpoints.html"
},
"Recommendation": {
"Text": "Disable public network access for Key Vaults that use private endpoint to ensure network traffic only flows through the private endpoint.",
"Url": "https://learn.microsoft.com/en-us/azure/private-link/private-endpoint-overview"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -1,37 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_Azure
from prowler.providers.azure.services.keyvault.keyvault_client import keyvault_client
class keyvault_access_only_through_private_endpoints(Check):
    """Ensure that Public Network Access when using Private Endpoint is disabled.

    Walks every Key Vault known to ``keyvault_client`` and, for vaults that
    have at least one private endpoint connection configured, reports:

    - PASS: public network access is disabled, so traffic only flows through
      the private endpoint.
    - FAIL: public network access is still enabled alongside the private
      endpoint.

    Vaults without private endpoint connections produce no finding.
    """

    def execute(self) -> Check_Report_Azure:
        findings = []
        for subscription, key_vaults in keyvault_client.key_vaults.items():
            for keyvault in key_vaults:
                properties = keyvault.properties
                # Only vaults that actually use private endpoints are in scope.
                if not (properties and properties.private_endpoint_connections):
                    continue
                report = Check_Report_Azure(
                    metadata=self.metadata(), resource=keyvault
                )
                report.subscription = subscription
                if properties.public_network_access_disabled:
                    report.status = "PASS"
                    report.status_extended = f"Keyvault {keyvault.name} from subscription {subscription} has public network access disabled and is using private endpoints."
                else:
                    report.status = "FAIL"
                    report.status_extended = f"Keyvault {keyvault.name} from subscription {subscription} has public network access enabled while using private endpoints."
                findings.append(report)
        return findings

View File

@@ -70,14 +70,6 @@ class KeyVault(AzureService):
"enable_purge_protection",
False,
),
public_network_access_disabled=(
getattr(
keyvault_properties,
"public_network_access",
"Enabled",
)
== "Disabled"
),
),
keys=keys,
secrets=secrets,
@@ -255,7 +247,6 @@ class VaultProperties:
private_endpoint_connections: List[PrivateEndpointConnection]
enable_soft_delete: bool
enable_purge_protection: bool
public_network_access_disabled: bool = False
@dataclass

View File

@@ -1,30 +0,0 @@
{
"Provider": "azure",
"CheckID": "monitor_alert_service_health_exists",
"CheckTitle": "Ensure that an Activity Log Alert exists for Service Health",
"CheckType": [],
"ServiceName": "monitor",
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": "high",
"ResourceType": "Monitor",
"Description": "Ensure that an Azure activity log alert is configured to trigger when Service Health events occur within your Microsoft Azure cloud account. The alert should activate when new events match the specified conditions in the alert rule configuration.",
"Risk": "Lack of monitoring for Service Health events may result in missing critical service issues, planned maintenance, security advisories, or other changes that could impact Azure services and regions in use.",
"RelatedUrl": "https://learn.microsoft.com/en-us/azure/service-health/overview",
"Remediation": {
"Code": {
"CLI": "az monitor activity-log alert create --subscription <subscription-id> --resource-group <resource-group> --name <alert-rule> --condition category=ServiceHealth and properties.incidentType=Incident --scope /subscriptions/<subscription-id> --action-group <action-group>",
"NativeIaC": "",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/azure/ActivityLog/service-health-alert.html",
"Terraform": ""
},
"Recommendation": {
"Text": "Create an activity log alert for Service Health events and configure an action group to notify appropriate personnel.",
"Url": "https://learn.microsoft.com/en-us/azure/service-health/alerts-activity-log-service-notifications-portal"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": "By default, in your Azure subscription there will not be any activity log alerts configured for Service Health events."
}

View File

@@ -1,48 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_Azure
from prowler.providers.azure.services.monitor.monitor_client import monitor_client
class monitor_alert_service_health_exists(Check):
    """Ensure that an Activity Log Alert exists for Service Health.

    For every subscription, looks for an enabled activity log alert rule whose
    conditions include both ``category == "ServiceHealth"`` and
    ``properties.incidentType == "Incident"``. Emits exactly one finding per
    subscription: PASS if such a rule exists, FAIL otherwise.
    """

    def execute(self) -> list[Check_Report_Azure]:
        findings = []
        for (
            subscription_name,
            activity_log_alerts,
        ) in monitor_client.alert_rules.items():
            for alert_rule in activity_log_alerts:
                # Check if alert rule is enabled and has required Service Health conditions
                if alert_rule.enabled:
                    has_service_health_category = False
                    has_incident_type_incident = False
                    for element in alert_rule.condition.all_of:
                        if (
                            element.field == "category"
                            and element.equals == "ServiceHealth"
                        ):
                            has_service_health_category = True
                        if (
                            element.field == "properties.incidentType"
                            and element.equals == "Incident"
                        ):
                            has_incident_type_incident = True
                    if has_service_health_category and has_incident_type_incident:
                        report = Check_Report_Azure(
                            metadata=self.metadata(), resource=alert_rule
                        )
                        report.subscription = subscription_name
                        report.status = "PASS"
                        report.status_extended = f"There is an activity log alert for Service Health in subscription {subscription_name}."
                        # One matching rule is enough for this subscription.
                        break
            else:
                # for/else: the inner loop finished without break, so no
                # matching enabled rule was found in this subscription.
                report = Check_Report_Azure(metadata=self.metadata(), resource={})
                report.subscription = subscription_name
                report.resource_name = "Monitor"
                report.resource_id = "Monitor"
                report.status = "FAIL"
                report.status_extended = f"There is no activity log alert for Service Health in subscription {subscription_name}."
            findings.append(report)
        return findings

View File

@@ -1,5 +1,5 @@
from dataclasses import dataclass
from typing import List, Optional
from typing import List
from azure.mgmt.monitor import MonitorManagementClient
@@ -131,4 +131,4 @@ class AlertRule:
name: str
condition: AlertRuleAllOfCondition
enabled: bool
description: Optional[str]
description: str

View File

@@ -7,24 +7,29 @@ class storage_ensure_file_shares_soft_delete_is_enabled(Check):
findings = []
for subscription, storage_accounts in storage_client.storage_accounts.items():
for storage_account in storage_accounts:
if getattr(storage_account, "file_service_properties", None):
report = Check_Report_Azure(
metadata=self.metadata(),
resource=storage_account.file_service_properties,
)
report.subscription = subscription
report.resource_name = storage_account.name
report.location = storage_account.location
if (
storage_account.file_service_properties.share_delete_retention_policy.enabled
):
report.status = "PASS"
report.status_extended = f"File share soft delete is enabled for storage account {storage_account.name} with a retention period of {storage_account.file_service_properties.share_delete_retention_policy.days} days."
else:
report.status = "FAIL"
report.status_extended = f"File share soft delete is not enabled for storage account {storage_account.name}."
findings.append(report)
if (
hasattr(storage_account, "file_shares")
and storage_account.file_shares
):
for file_share in storage_account.file_shares:
report = Check_Report_Azure(
metadata=self.metadata(), resource=storage_account
)
report.subscription = subscription
report.resource_id = file_share.name
if file_share.soft_delete_enabled:
report.status = "PASS"
report.status_extended = (
f"File share {file_share.name} in storage account {storage_account.name} "
f"from subscription {subscription} has soft delete enabled with a retention period of "
f"{file_share.retention_days} days."
)
else:
report.status = "FAIL"
report.status_extended = (
f"File share {file_share.name} in storage account {storage_account.name} "
f"from subscription {subscription} does not have soft delete enabled or has an invalid "
f"retention period."
)
findings.append(report)
return findings

View File

@@ -3,7 +3,6 @@ from enum import Enum
from typing import List, Optional
from azure.mgmt.storage import StorageManagementClient
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.providers.azure.azure_provider import AzureProvider
@@ -148,33 +147,43 @@ class Storage(AzureService):
client = self.clients[subscription]
for account in accounts:
try:
file_service_properties = (
client.file_services.get_service_properties(
account.resouce_group_name, account.name
service_properties = client.file_services.get_service_properties(
account.resouce_group_name, account.name
)
soft_delete_enabled = False
retention_days = 0
if (
hasattr(service_properties, "share_delete_retention_policy")
and service_properties.share_delete_retention_policy
):
soft_delete_enabled = getattr(
service_properties.share_delete_retention_policy,
"enabled",
False,
)
)
share_delete_retention_policy = getattr(
file_service_properties,
"share_delete_retention_policy",
None,
)
account.file_service_properties = FileServiceProperties(
id=file_service_properties.id,
name=file_service_properties.name,
type=file_service_properties.type,
share_delete_retention_policy=DeleteRetentionPolicy(
enabled=getattr(
share_delete_retention_policy,
"enabled",
False,
),
days=getattr(
share_delete_retention_policy,
retention_days = (
getattr(
service_properties.share_delete_retention_policy,
"days",
0,
),
),
)
if soft_delete_enabled
else 0
)
file_shares = client.file_shares.list(
account.resouce_group_name, account.name
)
account.file_shares = []
for file_share in file_shares:
account.file_shares.append(
FileShare(
id=file_share.id,
name=file_share.name,
soft_delete_enabled=soft_delete_enabled,
retention_days=retention_days,
)
)
except Exception as error:
logger.error(
f"Subscription name: {subscription} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -221,13 +230,6 @@ class ReplicationSettings(Enum):
STANDARD_RAGZRS = "Standard_RAGZRS"
class FileServiceProperties(BaseModel):
id: str
name: str
type: str
share_delete_retention_policy: DeleteRetentionPolicy
@dataclass
class Account:
id: str
@@ -247,4 +249,12 @@ class Account:
allow_shared_key_access: bool = True
blob_properties: Optional[BlobProperties] = None
default_to_entra_authorization: bool = False
file_service_properties: Optional[FileServiceProperties] = None
file_shares: list = None
@dataclass
class FileShare:
id: str
name: str
soft_delete_enabled: bool
retention_days: int

View File

@@ -246,8 +246,6 @@ class Provider(ABC):
elif "iac" in provider_class_name.lower():
provider_class(
scan_path=arguments.scan_path,
frameworks=arguments.frameworks,
exclude_path=arguments.exclude_path,
config_path=arguments.config_file,
fixer_config=fixer_config,
)

View File

@@ -1,39 +1,8 @@
import json
import subprocess
import sys
from typing import List
from checkov.ansible.runner import Runner as AnsibleRunner
from checkov.argo_workflows.runner import Runner as ArgoWorkflowsRunner
from checkov.arm.runner import Runner as ArmRunner
from checkov.azure_pipelines.runner import Runner as AzurePipelinesRunner
from checkov.bicep.runner import Runner as BicepRunner
from checkov.bitbucket.runner import Runner as BitbucketRunner
from checkov.bitbucket_pipelines.runner import Runner as BitbucketPipelinesRunner
from checkov.cdk.runner import CdkRunner
from checkov.circleci_pipelines.runner import Runner as CircleciPipelinesRunner
from checkov.cloudformation.runner import Runner as CfnRunner
from checkov.common.output.record import Record
from checkov.common.output.report import Report
from checkov.common.runners.runner_registry import RunnerRegistry
from checkov.dockerfile.runner import Runner as DockerfileRunner
from checkov.github.runner import Runner as GithubRunner
from checkov.github_actions.runner import Runner as GithubActionsRunner
from checkov.gitlab.runner import Runner as GitlabRunner
from checkov.gitlab_ci.runner import Runner as GitlabCiRunner
from checkov.helm.runner import Runner as HelmRunner
from checkov.json_doc.runner import Runner as JsonDocRunner
from checkov.kubernetes.runner import Runner as K8sRunner
from checkov.kustomize.runner import Runner as KustomizeRunner
from checkov.openapi.runner import Runner as OpenapiRunner
from checkov.runner_filter import RunnerFilter
from checkov.sast.runner import Runner as SastRunner
from checkov.sca_image.runner import Runner as ScaImageRunner
from checkov.sca_package_2.runner import Runner as ScaPackage2Runner
from checkov.secrets.runner import Runner as SecretsRunner
from checkov.serverless.runner import Runner as ServerlessRunner
from checkov.terraform.runner import Runner as TerraformRunner
from checkov.terraform_json.runner import TerraformJsonRunner
from checkov.yaml_doc.runner import Runner as YamlDocRunner
from colorama import Fore, Style
from prowler.config.config import (
@@ -54,8 +23,6 @@ class IacProvider(Provider):
def __init__(
self,
scan_path: str = ".",
frameworks: list[str] = ["all"],
exclude_path: list[str] = [],
config_path: str = None,
config_content: dict = None,
fixer_config: dict = {},
@@ -63,8 +30,6 @@ class IacProvider(Provider):
logger.info("Instantiating IAC Provider...")
self.scan_path = scan_path
self.frameworks = frameworks
self.exclude_path = exclude_path
self.region = "global"
self.audited_account = "local-iac"
self._session = None
@@ -121,9 +86,7 @@ class IacProvider(Provider):
"""IAC provider doesn't need a session since it uses Checkov directly"""
return None
def _process_check(
self, finding: Report, check: Record, status: str
) -> CheckReportIAC:
def _process_check(self, finding: dict, check: dict, status: str) -> CheckReportIAC:
"""
Process a single check (failed or passed) and create a CheckReportIAC object.
@@ -135,110 +98,109 @@ class IacProvider(Provider):
Returns:
CheckReportIAC: The processed check report
"""
try:
metadata_dict = {
"Provider": "iac",
"CheckID": check.check_id,
"CheckTitle": check.check_name,
"CheckType": ["Infrastructure as Code"],
"ServiceName": finding.check_type,
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": (check.severity.lower() if check.severity else "low"),
"ResourceType": finding.check_type,
"Description": check.check_name,
"Risk": "",
"RelatedUrl": (check.guideline if check.guideline else ""),
"Remediation": {
"Code": {
"NativeIaC": "",
"Terraform": "",
"CLI": "",
"Other": "",
},
"Recommendation": {
"Text": "",
"Url": (check.guideline if check.guideline else ""),
},
metadata_dict = {
"Provider": "iac",
"CheckID": check.get("check_id", ""),
"CheckTitle": check.get("check_name", ""),
"CheckType": ["Infrastructure as Code"],
"ServiceName": finding["check_type"],
"SubServiceName": "",
"ResourceIdTemplate": "",
"Severity": (
check.get("severity", "low").lower() if check.get("severity") else "low"
),
"ResourceType": "iac",
"Description": check.get("check_name", ""),
"Risk": "",
"RelatedUrl": (
check.get("guideline", "") if check.get("guideline") else ""
),
"Remediation": {
"Code": {
"NativeIaC": "",
"Terraform": "",
"CLI": "",
"Other": "",
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": "",
}
"Recommendation": {
"Text": "",
"Url": (
check.get("guideline", "") if check.get("guideline") else ""
),
},
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": "",
}
# Convert metadata dict to JSON string
metadata = json.dumps(metadata_dict)
# Convert metadata dict to JSON string
metadata = json.dumps(metadata_dict)
report = CheckReportIAC(metadata=metadata, resource=check)
report.status = status
report.resource_tags = check.entity_tags
report.status_extended = check.check_name
if status == "MUTED":
report.muted = True
return report
except Exception as error:
logger.critical(
f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
)
sys.exit(1)
report = CheckReportIAC(metadata=metadata, finding=check)
report.status = status
report.resource_tags = check.get("entity_tags", {})
report.status_extended = check.get("check_name", "")
if status == "MUTED":
report.muted = True
return report
def run(self) -> List[CheckReportIAC]:
return self.run_scan(self.scan_path, self.frameworks, self.exclude_path)
return self.run_scan(self.scan_path)
def run_scan(
self, directory: str, frameworks: list[str], exclude_path: list[str]
) -> List[CheckReportIAC]:
def run_scan(self, directory: str) -> List[CheckReportIAC]:
try:
logger.info(f"Running IaC scan on {directory}...")
runners = [
TerraformRunner(),
CfnRunner(),
K8sRunner(),
ArmRunner(),
ServerlessRunner(),
DockerfileRunner(),
YamlDocRunner(),
OpenapiRunner(),
SastRunner(),
ScaImageRunner(),
ScaPackage2Runner(),
SecretsRunner(),
AnsibleRunner(),
ArgoWorkflowsRunner(),
BitbucketRunner(),
BitbucketPipelinesRunner(),
CdkRunner(),
CircleciPipelinesRunner(),
GithubRunner(),
GithubActionsRunner(),
GitlabRunner(),
GitlabCiRunner(),
HelmRunner(),
JsonDocRunner(),
TerraformJsonRunner(),
KustomizeRunner(),
AzurePipelinesRunner(),
BicepRunner(),
]
runner_filter = RunnerFilter(
framework=frameworks, excluded_paths=exclude_path
# Run Checkov with JSON output
process = subprocess.run(
["checkov", "-d", directory, "-o", "json"],
capture_output=True,
text=True,
)
# Log Checkov's error output if any
if process.stderr:
logger.error(process.stderr)
registry = RunnerRegistry("", runner_filter, *runners)
checkov_reports = registry.run(root_folder=directory)
try:
output = json.loads(process.stdout)
if not output:
logger.warning("No findings returned from Checkov scan")
return []
except Exception as error:
logger.critical(
f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
)
sys.exit(1)
reports: List[CheckReportIAC] = []
for report in checkov_reports:
reports = []
for failed in report.failed_checks:
reports.append(self._process_check(report, failed, "FAIL"))
# If only one framework has findings, the output is a dict, otherwise it's a list of dicts
if isinstance(output, dict):
output = [output]
for passed in report.passed_checks:
reports.append(self._process_check(report, passed, "PASS"))
# Process all frameworks findings
for finding in output:
results = finding.get("results", {})
for skipped in report.skipped_checks:
reports.append(self._process_check(report, skipped, "MUTED"))
# Process failed checks
failed_checks = results.get("failed_checks", [])
for failed_check in failed_checks:
report = self._process_check(finding, failed_check, "FAIL")
reports.append(report)
# Process passed checks
passed_checks = results.get("passed_checks", [])
for passed_check in passed_checks:
report = self._process_check(finding, passed_check, "PASS")
reports.append(report)
# Process skipped checks (muted)
skipped_checks = results.get("skipped_checks", [])
for skipped_check in skipped_checks:
report = self._process_check(finding, skipped_check, "MUTED")
reports.append(report)
return reports
@@ -252,12 +214,5 @@ class IacProvider(Provider):
report_lines = [
f"Directory: {Fore.YELLOW}{self.scan_path}{Style.RESET_ALL}",
]
if self.exclude_path:
report_lines.append(
f"Excluded paths: {Fore.YELLOW}{', '.join(self.exclude_path)}{Style.RESET_ALL}"
)
report_lines.append(
f"Frameworks: {Fore.YELLOW}{', '.join(self.frameworks)}{Style.RESET_ALL}"
)
report_title = f"{Style.BRIGHT}Scanning local IaC directory:{Style.RESET_ALL}"
print_boxes(report_lines, report_title)

View File

@@ -1,36 +1,3 @@
# Framework names accepted by the IaC provider's --frameworks CLI flag.
# NOTE(review): these appear to mirror Checkov's runner/framework identifiers;
# confirm against the pinned Checkov version before extending this list.
FRAMEWORK_CHOICES = [
    "ansible",
    "argo_workflows",
    "arm",
    "azure_pipelines",
    "bicep",
    "bitbucket",
    "bitbucket_pipelines",
    "cdk",
    "circleci_pipelines",
    "cloudformation",
    "dockerfile",
    "github",
    "github_actions",
    "gitlab",
    "gitlab_ci",
    "helm",
    "json_doc",
    "kubernetes",
    "kustomize",
    "openapi",
    "policies_3d",
    "sast",
    "sca_image",
    "sca_package_2",
    "secrets",
    "serverless",
    "terraform",
    "terraform_json",
    "yaml_doc",
]
def init_parser(self):
"""Init the IAC Provider CLI parser"""
iac_parser = self.subparsers.add_parser(
@@ -46,20 +13,3 @@ def init_parser(self):
default=".",
help="Path to the folder containing your infrastructure-as-code files. Default: current directory",
)
iac_scan_subparser.add_argument(
"--frameworks",
"-f",
"--framework",
dest="frameworks",
nargs="+",
default=["all"],
choices=FRAMEWORK_CHOICES,
help="Comma-separated list of frameworks to scan. Default: all",
)
iac_scan_subparser.add_argument(
"--exclude-path",
dest="exclude_path",
nargs="+",
default=[],
help="Comma-separated list of paths to exclude from the scan. Default: none",
)

View File

@@ -59,7 +59,7 @@ dependencies = [
"slack-sdk==3.34.0",
"tabulate==0.9.0",
"tzlocal==5.3.1",
"checkov==3.2.445",
"checkov (>=3.2.434,<4.0.0)",
"py-iam-expand==0.1.0"
]
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."

View File

@@ -7,7 +7,6 @@ import yaml
from boto3 import client, resource
from mock import MagicMock, patch
from moto import mock_aws
import pytest
from prowler.config.config import encoding_format_utf_8
from prowler.providers.aws.lib.mutelist.mutelist import AWSMutelist
@@ -322,30 +321,6 @@ class TestAWSMutelist:
assert mutelist.mutelist == {}
assert mutelist.mutelist_file_path is None
def test_validate_mutelist_raise_on_exception(self):
mutelist_path = MUTELIST_FIXTURE_PATH
with open(mutelist_path) as f:
mutelist_fixture = yaml.safe_load(f)["Mutelist"]
# Create an invalid mutelist by adding an invalid key
invalid_mutelist = mutelist_fixture.copy()
invalid_mutelist["Accounts1"] = invalid_mutelist["Accounts"]
del invalid_mutelist["Accounts"]
mutelist = AWSMutelist(mutelist_content=mutelist_fixture)
# Test that it raises an exception when raise_on_exception=True
with pytest.raises(Exception):
mutelist.validate_mutelist(invalid_mutelist, raise_on_exception=True)
# Test that it doesn't raise an exception when raise_on_exception=False (default)
result = mutelist.validate_mutelist(invalid_mutelist, raise_on_exception=False)
assert result == {}
# Test that it doesn't raise an exception when raise_on_exception is not specified
result = mutelist.validate_mutelist(invalid_mutelist)
assert result == {}
def test_mutelist_findings_only_wildcard(self):
# Mutelist
mutelist_content = {

View File

@@ -109,45 +109,6 @@ class Test_ecs_clusters_container_insights_enabled:
== f"ECS cluster {CLUSTER_NAME} has container insights enabled."
)
@mock_aws
def test_cluster_enhanced_container_insights(self):
ecs_client = client("ecs", region_name=AWS_REGION_US_EAST_1)
cluster_settings = [
{"name": "containerInsights", "value": "enhanced"},
]
cluster_arn = ecs_client.create_cluster(
clusterName=CLUSTER_NAME,
settings=cluster_settings,
)["cluster"]["clusterArn"]
from prowler.providers.aws.services.ecs.ecs_service import ECS
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.ecs.ecs_cluster_container_insights_enabled.ecs_cluster_container_insights_enabled.ecs_client",
new=ECS(aws_provider),
),
):
from prowler.providers.aws.services.ecs.ecs_cluster_container_insights_enabled.ecs_cluster_container_insights_enabled import (
ecs_cluster_container_insights_enabled,
)
check = ecs_cluster_container_insights_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "PASS"
assert result[0].resource_arn == cluster_arn
assert (
result[0].status_extended
== f"ECS cluster {CLUSTER_NAME} has container insights enhanced."
)
@mock_aws
def test_cluster_disabled_container_insights(self):
ecs_client = client("ecs", region_name=AWS_REGION_US_EAST_1)

View File

@@ -17,10 +17,6 @@ def scp_restrict_regions_with_deny():
return '{"Version":"2012-10-17","Statement":{"Effect":"Deny","NotAction":"s3:*","Resource":"*","Condition":{"StringNotEquals":{"aws:RequestedRegion":["eu-central-1","eu-west-1"]}}}}'
def scp_restrict_regions_without_statement():
return '{"Version":"2012-10-17"}'
class Test_organizations_scp_check_deny_regions:
@mock_aws
def test_no_organization(self):
@@ -281,74 +277,3 @@ class Test_organizations_scp_check_deny_regions:
result = check.execute()
assert len(result) == 0
@mock_aws
def test_organizations_scp_check_deny_regions_without_statement(self):
aws_provider = set_mocked_aws_provider([AWS_REGION_EU_WEST_1])
# Create Organization
conn = client("organizations", region_name=AWS_REGION_EU_WEST_1)
response = conn.describe_organization()
# Delete the default FullAWSAccess policy created by Moto
policies = conn.list_policies(Filter="SERVICE_CONTROL_POLICY")["Policies"]
for policy in policies:
if policy["Name"] == "FullAWSAccess":
policy_id = policy["Id"]
# Detach from all roots
roots = conn.list_roots()["Roots"]
for root in roots:
conn.detach_policy(PolicyId=policy_id, TargetId=root["Id"])
# Detach from all OUs
ous = conn.list_organizational_units_for_parent(
ParentId=roots[0]["Id"]
)["OrganizationalUnits"]
for ou in ous:
conn.detach_policy(PolicyId=policy_id, TargetId=ou["Id"])
# Detach from all accounts
accounts = conn.list_accounts()["Accounts"]
for account in accounts:
conn.detach_policy(PolicyId=policy_id, TargetId=account["Id"])
# Now delete
conn.delete_policy(PolicyId=policy_id)
break
# Create Policy
response_policy = conn.create_policy(
Content=scp_restrict_regions_without_statement(),
Description="Test",
Name="Test",
Type="SERVICE_CONTROL_POLICY",
)
org_id = response["Organization"]["Id"]
policy_id = response_policy["Policy"]["PolicySummary"]["Id"]
# Set config variable
aws_provider._audit_config = {"organizations_enabled_regions": ["us-east-1"]}
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
):
with mock.patch(
"prowler.providers.aws.services.organizations.organizations_scp_check_deny_regions.organizations_scp_check_deny_regions.organizations_client",
new=Organizations(aws_provider),
):
# Test Check
from prowler.providers.aws.services.organizations.organizations_scp_check_deny_regions.organizations_scp_check_deny_regions import (
organizations_scp_check_deny_regions,
)
check = organizations_scp_check_deny_regions()
result = check.execute()
assert len(result) == 1
assert result[0].status == "FAIL"
assert result[0].resource_id == response["Organization"]["Id"]
assert (
"arn:aws:organizations::123456789012:organization/o-"
in result[0].resource_arn
)
assert (
result[0].status_extended
== f"AWS Organization {org_id} has SCP policies but don't restrict AWS Regions."
)
assert result[0].region == AWS_REGION_EU_WEST_1

View File

@@ -87,7 +87,7 @@ class Test_vpc_endpoint_for_multi_az:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"VPC Endpoint {vpc_endpoint['VpcEndpointId']} in VPC {vpc['VpcId']} does not have subnets in different AZs."
== f"VPC Endpoint {vpc_endpoint['VpcEndpointId']} in VPC {vpc['VpcId']} has subnets in different AZs."
)
assert (
result[0].resource_arn
@@ -158,7 +158,7 @@ class Test_vpc_endpoint_for_multi_az:
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"VPC Endpoint {vpc_endpoint['VpcEndpointId']} in VPC {vpc['VpcId']} has subnets in different AZs."
== f"VPC Endpoint {vpc_endpoint['VpcEndpointId']} in VPC {vpc['VpcId']} does not have subnets in different AZs."
)
assert (
result[0].resource_arn

View File

@@ -1,193 +0,0 @@
from unittest import mock
from uuid import uuid4
from tests.providers.azure.azure_fixtures import (
AZURE_SUBSCRIPTION_ID,
set_mocked_azure_provider,
)
class Test_keyvault_access_only_through_private_endpoints:
    """Unit tests for the keyvault_access_only_through_private_endpoints check.

    Each test patches the global provider and the check's module-level
    ``keyvault_client`` with a MagicMock whose ``key_vaults`` mapping is set
    to the scenario under test, then runs the check and inspects the findings.
    """

    def test_no_key_vaults(self):
        # No vaults at all -> the check yields no findings.
        # NOTE(review): MagicMock (the class, not an instance) is used as a
        # bare attribute namespace here — matches the other tests' style.
        keyvault_client = mock.MagicMock
        keyvault_client.key_vaults = {}

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.keyvault.keyvault_access_only_through_private_endpoints.keyvault_access_only_through_private_endpoints.keyvault_client",
                new=keyvault_client,
            ),
        ):
            # Import inside the patch context so the module picks up the mock.
            from prowler.providers.azure.services.keyvault.keyvault_access_only_through_private_endpoints.keyvault_access_only_through_private_endpoints import (
                keyvault_access_only_through_private_endpoints,
            )

            check = keyvault_access_only_through_private_endpoints()
            result = check.execute()
            assert len(result) == 0

    def test_key_vaults_no_private_endpoints(self):
        # A vault without private endpoint connections is out of scope: no finding.
        keyvault_client = mock.MagicMock
        keyvault_name = "Keyvault Name"
        keyvault_id = str(uuid4())

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.keyvault.keyvault_access_only_through_private_endpoints.keyvault_access_only_through_private_endpoints.keyvault_client",
                new=keyvault_client,
            ),
        ):
            from prowler.providers.azure.services.keyvault.keyvault_access_only_through_private_endpoints.keyvault_access_only_through_private_endpoints import (
                keyvault_access_only_through_private_endpoints,
            )
            from prowler.providers.azure.services.keyvault.keyvault_service import (
                KeyVaultInfo,
                VaultProperties,
            )

            keyvault_client.key_vaults = {
                AZURE_SUBSCRIPTION_ID: [
                    KeyVaultInfo(
                        id=keyvault_id,
                        name=keyvault_name,
                        location="westeurope",
                        resource_group="resource_group",
                        properties=VaultProperties(
                            tenant_id="tenantid",
                            enable_rbac_authorization=False,
                            # Empty list: vault does not use private endpoints.
                            private_endpoint_connections=[],
                            enable_soft_delete=True,
                            enable_purge_protection=True,
                            public_network_access_disabled=False,
                        ),
                    )
                ]
            }

            check = keyvault_access_only_through_private_endpoints()
            result = check.execute()
            assert len(result) == 0

    def test_key_vaults_with_private_endpoints_public_access_enabled(self):
        # Private endpoint present but public access still enabled -> FAIL.
        keyvault_client = mock.MagicMock
        keyvault_name = "Keyvault Name"
        keyvault_id = str(uuid4())

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.keyvault.keyvault_access_only_through_private_endpoints.keyvault_access_only_through_private_endpoints.keyvault_client",
                new=keyvault_client,
            ),
        ):
            from prowler.providers.azure.services.keyvault.keyvault_access_only_through_private_endpoints.keyvault_access_only_through_private_endpoints import (
                keyvault_access_only_through_private_endpoints,
            )
            from prowler.providers.azure.services.keyvault.keyvault_service import (
                KeyVaultInfo,
                PrivateEndpointConnection,
                VaultProperties,
            )

            keyvault_client.key_vaults = {
                AZURE_SUBSCRIPTION_ID: [
                    KeyVaultInfo(
                        id=keyvault_id,
                        name=keyvault_name,
                        location="westeurope",
                        resource_group="resource_group",
                        properties=VaultProperties(
                            tenant_id="tenantid",
                            enable_rbac_authorization=True,
                            private_endpoint_connections=[
                                PrivateEndpointConnection(id="id1")
                            ],
                            enable_soft_delete=True,
                            enable_purge_protection=True,
                            public_network_access_disabled=False,
                        ),
                    )
                ]
            }

            check = keyvault_access_only_through_private_endpoints()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == f"Keyvault {keyvault_name} from subscription {AZURE_SUBSCRIPTION_ID} has public network access enabled while using private endpoints."
            )
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            assert result[0].resource_name == keyvault_name
            assert result[0].resource_id == keyvault_id
            assert result[0].location == "westeurope"

    def test_key_vaults_with_private_endpoints_public_access_disabled(self):
        # Private endpoint present and public access disabled -> PASS.
        keyvault_client = mock.MagicMock
        keyvault_name = "Keyvault Name"
        keyvault_id = str(uuid4())

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.keyvault.keyvault_access_only_through_private_endpoints.keyvault_access_only_through_private_endpoints.keyvault_client",
                new=keyvault_client,
            ),
        ):
            from prowler.providers.azure.services.keyvault.keyvault_access_only_through_private_endpoints.keyvault_access_only_through_private_endpoints import (
                keyvault_access_only_through_private_endpoints,
            )
            from prowler.providers.azure.services.keyvault.keyvault_service import (
                KeyVaultInfo,
                PrivateEndpointConnection,
                VaultProperties,
            )

            keyvault_client.key_vaults = {
                AZURE_SUBSCRIPTION_ID: [
                    KeyVaultInfo(
                        id=keyvault_id,
                        name=keyvault_name,
                        location="westeurope",
                        resource_group="resource_group",
                        properties=VaultProperties(
                            tenant_id="tenantid",
                            enable_rbac_authorization=True,
                            private_endpoint_connections=[
                                PrivateEndpointConnection(id="id1")
                            ],
                            enable_soft_delete=True,
                            enable_purge_protection=True,
                            public_network_access_disabled=True,
                        ),
                    )
                ]
            }

            check = keyvault_access_only_through_private_endpoints()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "PASS"
            assert (
                result[0].status_extended
                == f"Keyvault {keyvault_name} from subscription {AZURE_SUBSCRIPTION_ID} has public network access disabled and is using private endpoints."
            )
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            assert result[0].resource_name == keyvault_name
            assert result[0].resource_id == keyvault_id
            assert result[0].location == "westeurope"

View File

@@ -1,164 +0,0 @@
from unittest import mock
from tests.providers.azure.azure_fixtures import (
AZURE_SUBSCRIPTION_ID,
set_mocked_azure_provider,
)
class Test_monitor_alert_service_health_exists:
    """Unit tests for the ``monitor_alert_service_health_exists`` check.

    Each test patches the global provider and the check module's
    ``monitor_client`` before importing the check, so the check binds to the
    mocked client. The check reports per subscription whether an *enabled*
    activity log alert for Service Health exists.
    """
    def test_monitor_alert_service_health_exists_no_subscriptions(self):
        """No subscriptions in ``alert_rules`` -> the check emits no findings."""
        monitor_client = mock.MagicMock()
        monitor_client.alert_rules = {}
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.monitor.monitor_alert_service_health_exists.monitor_alert_service_health_exists.monitor_client",
                new=monitor_client,
            ),
        ):
            # Import after patching so the module-level client is the mock.
            from prowler.providers.azure.services.monitor.monitor_alert_service_health_exists.monitor_alert_service_health_exists import (
                monitor_alert_service_health_exists,
            )
            check = monitor_alert_service_health_exists()
            result = check.execute()
            assert len(result) == 0
    def test_no_alert_rules(self):
        """Subscription present but with an empty rule list -> one FAIL finding."""
        monitor_client = mock.MagicMock()
        monitor_client.alert_rules = {AZURE_SUBSCRIPTION_ID: []}
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.monitor.monitor_alert_service_health_exists.monitor_alert_service_health_exists.monitor_client",
                new=monitor_client,
            ),
        ):
            from prowler.providers.azure.services.monitor.monitor_alert_service_health_exists.monitor_alert_service_health_exists import (
                monitor_alert_service_health_exists,
            )
            check = monitor_alert_service_health_exists()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            # With no matching rule the finding falls back to the generic
            # "Monitor" resource name/id.
            assert result[0].resource_name == "Monitor"
            assert result[0].resource_id == "Monitor"
            assert (
                result[0].status_extended
                == f"There is no activity log alert for Service Health in subscription {AZURE_SUBSCRIPTION_ID}."
            )
    def test_alert_rules_configured(self):
        """An enabled ServiceHealth/Incident alert rule -> one PASS finding."""
        monitor_client = mock.MagicMock()
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.monitor.monitor_alert_service_health_exists.monitor_alert_service_health_exists.monitor_client",
                new=monitor_client,
            ),
        ):
            from prowler.providers.azure.services.monitor.monitor_alert_service_health_exists.monitor_alert_service_health_exists import (
                monitor_alert_service_health_exists,
            )
            from prowler.providers.azure.services.monitor.monitor_service import (
                AlertRule,
                AlertRuleAllOfCondition,
                AlertRuleAnyOfOrLeafCondition,
            )
            # Rule matching both conditions the check looks for:
            # category == ServiceHealth and incidentType == Incident.
            monitor_client.alert_rules = {
                AZURE_SUBSCRIPTION_ID: [
                    AlertRule(
                        id="id1",
                        name="name1",
                        condition=AlertRuleAllOfCondition(
                            all_of=[
                                AlertRuleAnyOfOrLeafCondition(
                                    field="category", equals="ServiceHealth"
                                ),
                                AlertRuleAnyOfOrLeafCondition(
                                    field="properties.incidentType", equals="Incident"
                                ),
                            ]
                        ),
                        enabled=True,
                        description="desc1",
                    ),
                ]
            }
            check = monitor_alert_service_health_exists()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "PASS"
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            # PASS finding is attributed to the matching rule itself.
            assert result[0].resource_name == "name1"
            assert result[0].resource_id == "id1"
            assert (
                result[0].status_extended
                == f"There is an activity log alert for Service Health in subscription {AZURE_SUBSCRIPTION_ID}."
            )
    def test_alert_rules_configured_but_disabled(self):
        """A matching rule with ``enabled=False`` does not count -> FAIL."""
        monitor_client = mock.MagicMock()
        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.monitor.monitor_alert_service_health_exists.monitor_alert_service_health_exists.monitor_client",
                new=monitor_client,
            ),
        ):
            from prowler.providers.azure.services.monitor.monitor_alert_service_health_exists.monitor_alert_service_health_exists import (
                monitor_alert_service_health_exists,
            )
            from prowler.providers.azure.services.monitor.monitor_service import (
                AlertRule,
                AlertRuleAllOfCondition,
                AlertRuleAnyOfOrLeafCondition,
            )
            # Same matching conditions as the PASS test, but the rule is
            # disabled, so it must be ignored by the check.
            monitor_client.alert_rules = {
                AZURE_SUBSCRIPTION_ID: [
                    AlertRule(
                        id="id1",
                        name="name1",
                        condition=AlertRuleAllOfCondition(
                            all_of=[
                                AlertRuleAnyOfOrLeafCondition(
                                    field="category", equals="ServiceHealth"
                                ),
                                AlertRuleAnyOfOrLeafCondition(
                                    field="properties.incidentType", equals="Incident"
                                ),
                            ]
                        ),
                        enabled=False,
                        description="desc1",
                    ),
                ]
            }
            check = monitor_alert_service_health_exists()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            assert result[0].resource_name == "Monitor"
            assert result[0].resource_id == "Monitor"
            assert (
                result[0].status_extended
                == f"There is no activity log alert for Service Health in subscription {AZURE_SUBSCRIPTION_ID}."
            )

View File

@@ -1,11 +1,7 @@
from unittest import mock
from uuid import uuid4
from prowler.providers.azure.services.storage.storage_service import (
Account,
DeleteRetentionPolicy,
FileServiceProperties,
)
from prowler.providers.azure.services.storage.storage_service import Account, FileShare
from tests.providers.azure.azure_fixtures import (
AZURE_SUBSCRIPTION_ID,
set_mocked_azure_provider,
@@ -35,7 +31,7 @@ class Test_storage_ensure_file_shares_soft_delete_is_enabled:
result = check.execute()
assert len(result) == 0
def test_storage_account_no_file_properties(self):
def test_no_file_shares(self):
storage_account_id = str(uuid4())
storage_account_name = "Test Storage Account"
storage_client = mock.MagicMock
@@ -54,7 +50,7 @@ class Test_storage_ensure_file_shares_soft_delete_is_enabled:
key_expiration_period_in_days=None,
location="westeurope",
private_endpoint_connections=None,
file_service_properties=None,
file_shares=[],
)
]
}
@@ -80,14 +76,13 @@ class Test_storage_ensure_file_shares_soft_delete_is_enabled:
def test_file_share_soft_delete_disabled(self):
storage_account_id = str(uuid4())
storage_account_name = "Test Storage Account"
storage_client = mock.MagicMock
retention_policy = DeleteRetentionPolicy(enabled=False, days=0)
file_service_properties = FileServiceProperties(
id=f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/resourceGroups/prowler-resource-group/providers/Microsoft.Storage/storageAccounts/{storage_account_name}/fileServices/default",
name="default",
type="Microsoft.Storage/storageAccounts/fileServices",
share_delete_retention_policy=retention_policy,
file_share = FileShare(
id="fs1",
name="share1",
soft_delete_enabled=False,
retention_days=0,
)
storage_client = mock.MagicMock
storage_client.storage_accounts = {
AZURE_SUBSCRIPTION_ID: [
Account(
@@ -103,7 +98,7 @@ class Test_storage_ensure_file_shares_soft_delete_is_enabled:
key_expiration_period_in_days=None,
location="westeurope",
private_endpoint_connections=None,
file_service_properties=file_service_properties,
file_shares=[file_share],
)
]
}
@@ -128,24 +123,23 @@ class Test_storage_ensure_file_shares_soft_delete_is_enabled:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"File share soft delete is not enabled for storage account {storage_account_name}."
== f"File share {file_share.name} in storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} does not have soft delete enabled or has an invalid retention period."
)
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
assert result[0].resource_name == storage_account_name
assert result[0].resource_id == file_service_properties.id
assert result[0].resource_id == file_share.name
assert result[0].location == "westeurope"
assert result[0].resource_name == storage_account_name
def test_file_share_soft_delete_enabled(self):
storage_account_id = str(uuid4())
storage_account_name = "Test Storage Account"
storage_client = mock.MagicMock
retention_policy = DeleteRetentionPolicy(enabled=True, days=7)
file_service_properties = FileServiceProperties(
id=f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/resourceGroups/prowler-resource-group/providers/Microsoft.Storage/storageAccounts/{storage_account_name}/fileServices/default",
name="default",
type="Microsoft.Storage/storageAccounts/fileServices",
share_delete_retention_policy=retention_policy,
file_share = FileShare(
id="fs2",
name="share2",
soft_delete_enabled=True,
retention_days=7,
)
storage_client = mock.MagicMock
storage_client.storage_accounts = {
AZURE_SUBSCRIPTION_ID: [
Account(
@@ -161,7 +155,7 @@ class Test_storage_ensure_file_shares_soft_delete_is_enabled:
key_expiration_period_in_days=None,
location="westeurope",
private_endpoint_connections=None,
file_service_properties=file_service_properties,
file_shares=[file_share],
)
]
}
@@ -186,9 +180,9 @@ class Test_storage_ensure_file_shares_soft_delete_is_enabled:
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"File share soft delete is enabled for storage account {storage_account_name} with a retention period of {retention_policy.days} days."
== f"File share {file_share.name} in storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} has soft delete enabled with a retention period of {file_share.retention_days} days."
)
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
assert result[0].resource_name == storage_account_name
assert result[0].resource_id == file_service_properties.id
assert result[0].resource_id == file_share.name
assert result[0].location == "westeurope"
assert result[0].resource_name == storage_account_name

View File

@@ -3,8 +3,7 @@ from unittest.mock import patch
from prowler.providers.azure.services.storage.storage_service import (
Account,
BlobProperties,
DeleteRetentionPolicy,
FileServiceProperties,
FileShare,
ReplicationSettings,
Storage,
)
@@ -22,13 +21,10 @@ def mock_storage_get_storage_accounts(_):
default_service_version=None,
container_delete_retention_policy=None,
)
retention_policy = DeleteRetentionPolicy(enabled=True, days=7)
file_service_properties = FileServiceProperties(
id="id",
name="name",
type="type",
share_delete_retention_policy=retention_policy,
)
file_shares = [
FileShare(id="fs1", name="share1", soft_delete_enabled=True, retention_days=7),
FileShare(id="fs2", name="share2", soft_delete_enabled=False, retention_days=0),
]
return {
AZURE_SUBSCRIPTION_ID: [
Account(
@@ -49,7 +45,7 @@ def mock_storage_get_storage_accounts(_):
replication_settings=ReplicationSettings.STANDARD_LRS,
allow_cross_tenant_replication=True,
allow_shared_key_access=True,
file_service_properties=file_service_properties,
file_shares=file_shares,
)
]
}
@@ -176,12 +172,14 @@ class Test_Storage_Service:
is None
)
def test_get_file_service_properties(self):
def test_get_file_shares_properties(self):
storage = Storage(set_mocked_azure_provider())
account = storage.storage_accounts[AZURE_SUBSCRIPTION_ID][0]
assert hasattr(account, "file_service_properties")
assert (
account.file_service_properties.share_delete_retention_policy.enabled
is True
)
assert account.file_service_properties.share_delete_retention_policy.days == 7
assert hasattr(account, "file_shares")
assert len(account.file_shares) == 2
assert account.file_shares[0].name == "share1"
assert account.file_shares[0].soft_delete_enabled is True
assert account.file_shares[0].retention_days == 7
assert account.file_shares[1].name == "share2"
assert account.file_shares[1].soft_delete_enabled is False
assert account.file_shares[1].retention_days == 0

View File

@@ -1,257 +1,67 @@
from checkov.common.models.enums import CheckResult
from checkov.common.output.record import Record
from checkov.common.output.report import Report
# IAC Provider Constants
DEFAULT_SCAN_PATH = "."
# Sample Checkov Output
SAMPLE_CHECKOV_OUTPUT = [
{
"check_type": "terraform",
"results": {
"failed_checks": [
{
"check_id": "CKV_AWS_1",
"check_name": "Ensure S3 bucket has encryption enabled",
"guideline": "https://docs.bridgecrew.io/docs/s3_1-s3-bucket-has-encryption-enabled",
"severity": "low",
},
{
"check_id": "CKV_AWS_2",
"check_name": "Ensure S3 bucket has public access blocked",
"guideline": "https://docs.bridgecrew.io/docs/s3_2-s3-bucket-has-public-access-blocked",
"severity": "low",
},
],
"passed_checks": [
{
"check_id": "CKV_AWS_3",
"check_name": "Ensure S3 bucket has versioning enabled",
"guideline": "https://docs.bridgecrew.io/docs/s3_3-s3-bucket-has-versioning-enabled",
"severity": "low",
}
],
},
}
]
# Sample Finding Data
SAMPLE_FINDING = Report(check_type="terraform")
SAMPLE_FAILED_CHECK = Record(
check_id="CKV_AWS_1",
check_name="Ensure S3 bucket has encryption enabled",
severity="low",
file_path="test.tf",
file_line_range=[1, 2],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.FAILED,
code_block=[],
file_abs_path="test.tf",
)
SAMPLE_FAILED_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/s3_1-s3-bucket-has-encryption-enabled"
)
SAMPLE_FINDING = SAMPLE_CHECKOV_OUTPUT[0]
SAMPLE_PASSED_CHECK = Record(
check_id="CKV_AWS_3",
check_name="Ensure S3 bucket has versioning enabled",
severity="low",
file_path="test.tf",
file_line_range=[1, 2],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.PASSED,
code_block=[],
file_abs_path="test.tf",
)
SAMPLE_PASSED_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/s3_3-s3-bucket-has-versioning-enabled"
)
SAMPLE_FAILED_CHECK = {
"check_id": "CKV_AWS_1",
"check_name": "Ensure S3 bucket has encryption enabled",
"guideline": "https://docs.bridgecrew.io/docs/s3_1-s3-bucket-has-encryption-enabled",
"severity": "low",
}
# Additional test fixtures for comprehensive testing
SAMPLE_SKIPPED_CHECK = Record(
check_id="CKV_AWS_2",
check_name="Ensure S3 bucket has public access blocked",
severity="high",
file_path="test.tf",
file_line_range=[3, 4],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.SKIPPED,
code_block=[],
file_abs_path="test.tf",
)
SAMPLE_SKIPPED_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/s3_2-s3-bucket-has-public-access-blocked"
)
SAMPLE_PASSED_CHECK = {
"check_id": "CKV_AWS_3",
"check_name": "Ensure S3 bucket has versioning enabled",
"guideline": "https://docs.bridgecrew.io/docs/s3_3-s3-bucket-has-versioning-enabled",
"severity": "low",
}
SAMPLE_HIGH_SEVERITY_CHECK = Record(
check_id="CKV_AWS_4",
check_name="Ensure S3 bucket has logging enabled",
severity="HIGH",
file_path="test.tf",
file_line_range=[5, 6],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.FAILED,
code_block=[],
file_abs_path="test.tf",
)
SAMPLE_HIGH_SEVERITY_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/s3_4-s3-bucket-has-logging-enabled"
)
SAMPLE_KUBERNETES_CHECK = Record(
check_id="CKV_K8S_1",
check_name="Ensure API server has audit logging enabled",
severity="medium",
file_path="deployment.yaml",
file_line_range=[1, 10],
resource="kubernetes_deployment.test_deployment",
evaluations=[],
check_class="kubernetes",
check_result=CheckResult.FAILED,
code_block=[],
file_abs_path="deployment.yaml",
)
SAMPLE_KUBERNETES_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/k8s_1-api-server-has-audit-logging-enabled"
)
def get_sample_checkov_json_output():
"""Return sample Checkov JSON output as string"""
import json
SAMPLE_CLOUDFORMATION_CHECK = Record(
check_id="CKV_AWS_5",
check_name="Ensure CloudFormation stacks are not publicly accessible",
severity="critical",
file_path="template.yaml",
file_line_range=[1, 20],
resource="AWS::CloudFormation::Stack",
evaluations=[],
check_class="cloudformation",
check_result=CheckResult.PASSED,
code_block=[],
file_abs_path="template.yaml",
)
SAMPLE_CLOUDFORMATION_CHECK.guideline = "https://docs.bridgecrew.io/docs/cfn_1-cloudformation-stacks-are-not-publicly-accessible"
return json.dumps(SAMPLE_CHECKOV_OUTPUT)
# Sample findings for different frameworks
SAMPLE_KUBERNETES_FINDING = Report(check_type="kubernetes")
SAMPLE_CLOUDFORMATION_FINDING = Report(check_type="cloudformation")
# Additional fixtures for different test scenarios
SAMPLE_CHECK_WITHOUT_GUIDELINE = Record(
check_id="CKV_AWS_6",
check_name="Test check without guideline",
severity="low",
file_path="test.tf",
file_line_range=[1, 2],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.FAILED,
code_block=[],
file_abs_path="test.tf",
)
# Note: No guideline attribute set
def get_empty_checkov_output():
"""Return empty Checkov output as string"""
return "[]"
SAMPLE_MEDIUM_SEVERITY_CHECK = Record(
check_id="CKV_AWS_7",
check_name="Ensure S3 bucket has proper access controls",
severity="MEDIUM",
file_path="test.tf",
file_line_range=[7, 8],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.FAILED,
code_block=[],
file_abs_path="test.tf",
)
SAMPLE_MEDIUM_SEVERITY_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/s3_7-s3-bucket-has-proper-access-controls"
)
SAMPLE_CRITICAL_SEVERITY_CHECK = Record(
check_id="CKV_AWS_8",
check_name="Ensure S3 bucket has encryption at rest",
severity="CRITICAL",
file_path="test.tf",
file_line_range=[9, 10],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.FAILED,
code_block=[],
file_abs_path="test.tf",
)
SAMPLE_CRITICAL_SEVERITY_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/s3_8-s3-bucket-has-encryption-at-rest"
)
# Sample reports for different frameworks
SAMPLE_TERRAFORM_REPORT = Report(check_type="terraform")
SAMPLE_KUBERNETES_REPORT = Report(check_type="kubernetes")
SAMPLE_CLOUDFORMATION_REPORT = Report(check_type="cloudformation")
SAMPLE_DOCKERFILE_REPORT = Report(check_type="dockerfile")
SAMPLE_YAML_REPORT = Report(check_type="yaml")
# Sample checks for different frameworks
SAMPLE_DOCKERFILE_CHECK = Record(
check_id="CKV_DOCKER_1",
check_name="Ensure base image is not using latest tag",
severity="high",
file_path="Dockerfile",
file_line_range=[1, 1],
resource="Dockerfile",
evaluations=[],
check_class="dockerfile",
check_result=CheckResult.FAILED,
code_block=[],
file_abs_path="Dockerfile",
)
SAMPLE_DOCKERFILE_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/docker_1-base-image-not-using-latest-tag"
)
SAMPLE_YAML_CHECK = Record(
check_id="CKV_YAML_1",
check_name="Ensure YAML file has proper indentation",
severity="low",
file_path="config.yaml",
file_line_range=[1, 5],
resource="config.yaml",
evaluations=[],
check_class="yaml",
check_result=CheckResult.PASSED,
code_block=[],
file_abs_path="config.yaml",
)
SAMPLE_YAML_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/yaml_1-proper-indentation"
)
# Sample checks with different statuses for comprehensive testing
SAMPLE_ANOTHER_FAILED_CHECK = Record(
check_id="CKV_AWS_9",
check_name="Ensure S3 bucket has lifecycle policy",
severity="medium",
file_path="test.tf",
file_line_range=[11, 12],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.FAILED,
code_block=[],
file_abs_path="test.tf",
)
SAMPLE_ANOTHER_FAILED_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/s3_9-s3-bucket-has-lifecycle-policy"
)
SAMPLE_ANOTHER_PASSED_CHECK = Record(
check_id="CKV_AWS_10",
check_name="Ensure S3 bucket has proper tags",
severity="low",
file_path="test.tf",
file_line_range=[13, 14],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.PASSED,
code_block=[],
file_abs_path="test.tf",
)
SAMPLE_ANOTHER_PASSED_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/s3_10-s3-bucket-has-proper-tags"
)
SAMPLE_ANOTHER_SKIPPED_CHECK = Record(
check_id="CKV_AWS_11",
check_name="Ensure S3 bucket has cross-region replication",
severity="high",
file_path="test.tf",
file_line_range=[15, 16],
resource="aws_s3_bucket.test_bucket",
evaluations=[],
check_class="terraform",
check_result=CheckResult.SKIPPED,
code_block=[],
file_abs_path="test.tf",
)
SAMPLE_ANOTHER_SKIPPED_CHECK.guideline = (
"https://docs.bridgecrew.io/docs/s3_11-s3-bucket-has-cross-region-replication"
)
def get_invalid_checkov_output():
"""Return invalid JSON output as string"""
return "invalid json output"

View File

@@ -1,4 +1,4 @@
from unittest.mock import Mock, patch
from unittest.mock import MagicMock, patch
import pytest
@@ -6,24 +6,12 @@ from prowler.lib.check.models import CheckReportIAC
from prowler.providers.iac.iac_provider import IacProvider
from tests.providers.iac.iac_fixtures import (
DEFAULT_SCAN_PATH,
SAMPLE_ANOTHER_FAILED_CHECK,
SAMPLE_ANOTHER_PASSED_CHECK,
SAMPLE_ANOTHER_SKIPPED_CHECK,
SAMPLE_CHECK_WITHOUT_GUIDELINE,
SAMPLE_CLOUDFORMATION_CHECK,
SAMPLE_CRITICAL_SEVERITY_CHECK,
SAMPLE_DOCKERFILE_CHECK,
SAMPLE_DOCKERFILE_REPORT,
SAMPLE_FAILED_CHECK,
SAMPLE_FINDING,
SAMPLE_HIGH_SEVERITY_CHECK,
SAMPLE_KUBERNETES_CHECK,
SAMPLE_KUBERNETES_FINDING,
SAMPLE_MEDIUM_SEVERITY_CHECK,
SAMPLE_PASSED_CHECK,
SAMPLE_SKIPPED_CHECK,
SAMPLE_YAML_CHECK,
SAMPLE_YAML_REPORT,
get_empty_checkov_output,
get_invalid_checkov_output,
get_sample_checkov_json_output,
)
@@ -58,10 +46,10 @@ class TestIacProvider:
assert report.status == "FAIL"
assert report.check_metadata.Provider == "iac"
assert report.check_metadata.CheckID == SAMPLE_FAILED_CHECK.check_id
assert report.check_metadata.CheckTitle == SAMPLE_FAILED_CHECK.check_name
assert report.check_metadata.CheckID == SAMPLE_FAILED_CHECK["check_id"]
assert report.check_metadata.CheckTitle == SAMPLE_FAILED_CHECK["check_name"]
assert report.check_metadata.Severity == "low"
assert report.check_metadata.RelatedUrl == SAMPLE_FAILED_CHECK.guideline
assert report.check_metadata.RelatedUrl == SAMPLE_FAILED_CHECK["guideline"]
def test_iac_provider_process_check_passed(self):
"""Test processing a passed check"""
@@ -73,473 +61,72 @@ class TestIacProvider:
assert report.status == "PASS"
assert report.check_metadata.Provider == "iac"
assert report.check_metadata.CheckID == SAMPLE_PASSED_CHECK.check_id
assert report.check_metadata.CheckTitle == SAMPLE_PASSED_CHECK.check_name
assert report.check_metadata.CheckID == SAMPLE_PASSED_CHECK["check_id"]
assert report.check_metadata.CheckTitle == SAMPLE_PASSED_CHECK["check_name"]
assert report.check_metadata.Severity == "low"
assert report.check_metadata.RelatedUrl == SAMPLE_PASSED_CHECK.guideline
def test_iac_provider_process_check_skipped(self):
"""Test processing a skipped check"""
@patch("subprocess.run")
def test_iac_provider_run_scan_success(self, mock_subprocess):
"""Test successful IAC scan with Checkov"""
provider = IacProvider()
report = provider._process_check(SAMPLE_FINDING, SAMPLE_SKIPPED_CHECK, "MUTED")
assert isinstance(report, CheckReportIAC)
assert report.status == "MUTED"
assert report.muted is True
assert report.check_metadata.Provider == "iac"
assert report.check_metadata.CheckID == SAMPLE_SKIPPED_CHECK.check_id
assert report.check_metadata.CheckTitle == SAMPLE_SKIPPED_CHECK.check_name
assert report.check_metadata.Severity == "high"
assert report.check_metadata.RelatedUrl == SAMPLE_SKIPPED_CHECK.guideline
def test_iac_provider_process_check_high_severity(self):
"""Test processing a high severity check"""
provider = IacProvider()
report = provider._process_check(
SAMPLE_FINDING, SAMPLE_HIGH_SEVERITY_CHECK, "FAIL"
mock_subprocess.return_value = MagicMock(
stdout=get_sample_checkov_json_output(), stderr=""
)
assert isinstance(report, CheckReportIAC)
assert report.status == "FAIL"
assert report.check_metadata.Severity == "high"
reports = provider.run_scan("/test/directory")
def test_iac_provider_process_check_different_framework(self):
"""Test processing a check from a different framework (Kubernetes)"""
provider = IacProvider()
# Should have 2 failed checks + 1 passed check = 3 total reports
assert len(reports) == 3
report = provider._process_check(
SAMPLE_KUBERNETES_FINDING, SAMPLE_KUBERNETES_CHECK, "FAIL"
# Check that we have both failed and passed reports
failed_reports = [r for r in reports if r.status == "FAIL"]
passed_reports = [r for r in reports if r.status == "PASS"]
assert len(failed_reports) == 2
assert len(passed_reports) == 1
# Verify subprocess was called correctly
mock_subprocess.assert_called_once_with(
["checkov", "-d", "/test/directory", "-o", "json"],
capture_output=True,
text=True,
)
assert isinstance(report, CheckReportIAC)
assert report.status == "FAIL"
assert report.check_metadata.ServiceName == "kubernetes"
assert report.check_metadata.CheckID == SAMPLE_KUBERNETES_CHECK.check_id
def test_iac_provider_process_check_no_guideline(self):
"""Test processing a check without guideline URL"""
@patch("subprocess.run")
def test_iac_provider_run_scan_empty_output(self, mock_subprocess):
"""Test IAC scan with empty Checkov output"""
provider = IacProvider()
report = provider._process_check(
SAMPLE_FINDING, SAMPLE_CHECK_WITHOUT_GUIDELINE, "FAIL"
mock_subprocess.return_value = MagicMock(
stdout=get_empty_checkov_output(), stderr=""
)
assert isinstance(report, CheckReportIAC)
assert report.status == "FAIL"
assert report.check_metadata.RelatedUrl == ""
reports = provider.run_scan("/test/directory")
def test_iac_provider_process_check_medium_severity(self):
"""Test processing a medium severity check"""
assert len(reports) == 0
@patch("subprocess.run")
def test_iac_provider_run_scan_invalid_json(self, mock_subprocess):
"""Test IAC scan with invalid JSON output"""
provider = IacProvider()
report = provider._process_check(
SAMPLE_FINDING, SAMPLE_MEDIUM_SEVERITY_CHECK, "FAIL"
mock_subprocess.return_value = MagicMock(
stdout=get_invalid_checkov_output(), stderr=""
)
assert isinstance(report, CheckReportIAC)
assert report.status == "FAIL"
assert report.check_metadata.Severity == "medium"
with pytest.raises(SystemExit) as excinfo:
provider.run_scan("/test/directory")
def test_iac_provider_process_check_critical_severity(self):
"""Test processing a critical severity check"""
assert excinfo.value.code == 1
@patch("subprocess.run")
def test_iac_provider_run_scan_null_output(self, mock_subprocess):
"""Test IAC scan with null Checkov output"""
provider = IacProvider()
report = provider._process_check(
SAMPLE_FINDING, SAMPLE_CRITICAL_SEVERITY_CHECK, "FAIL"
)
mock_subprocess.return_value = MagicMock(stdout="null", stderr="")
assert isinstance(report, CheckReportIAC)
assert report.status == "FAIL"
assert report.check_metadata.Severity == "critical"
reports = provider.run_scan("/test/directory")
def test_iac_provider_process_check_dockerfile(self):
"""Test processing a Dockerfile check"""
provider = IacProvider()
report = provider._process_check(
SAMPLE_DOCKERFILE_REPORT, SAMPLE_DOCKERFILE_CHECK, "FAIL"
)
assert isinstance(report, CheckReportIAC)
assert report.status == "FAIL"
assert report.check_metadata.ServiceName == "dockerfile"
assert report.check_metadata.CheckID == SAMPLE_DOCKERFILE_CHECK.check_id
def test_iac_provider_process_check_yaml(self):
"""Test processing a YAML check"""
provider = IacProvider()
report = provider._process_check(SAMPLE_YAML_REPORT, SAMPLE_YAML_CHECK, "PASS")
assert isinstance(report, CheckReportIAC)
assert report.status == "PASS"
assert report.check_metadata.ServiceName == "yaml"
assert report.check_metadata.CheckID == SAMPLE_YAML_CHECK.check_id
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
def test_run_scan_success_with_failed_and_passed_checks(
self, mock_logger, mock_runner_filter, mock_runner_registry
):
"""Test successful run_scan with both failed and passed checks"""
# Setup mocks
mock_registry_instance = Mock()
mock_runner_registry.return_value = mock_registry_instance
# Create mock reports with failed and passed checks
mock_report = Mock()
mock_report.check_type = "terraform" # Set the check_type attribute
mock_report.failed_checks = [SAMPLE_FAILED_CHECK]
mock_report.passed_checks = [SAMPLE_PASSED_CHECK]
mock_report.skipped_checks = []
mock_registry_instance.run.return_value = [mock_report]
provider = IacProvider()
result = provider.run_scan("/test/directory", ["terraform"], [])
# Verify logger was called
mock_logger.info.assert_called_with("Running IaC scan on /test/directory...")
# Verify RunnerFilter was created with correct parameters
mock_runner_filter.assert_called_with(
framework=["terraform"], excluded_paths=[]
)
# Verify RunnerRegistry was created and run was called
mock_runner_registry.assert_called_once()
mock_registry_instance.run.assert_called_with(root_folder="/test/directory")
# Verify results
assert len(result) == 2
assert all(isinstance(report, CheckReportIAC) for report in result)
# Check that we have one FAIL and one PASS report
statuses = [report.status for report in result]
assert "FAIL" in statuses
assert "PASS" in statuses
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
def test_run_scan_with_skipped_checks(
self, mock_logger, mock_runner_filter, mock_runner_registry
):
"""Test run_scan with skipped checks (muted)"""
# Setup mocks
mock_registry_instance = Mock()
mock_runner_registry.return_value = mock_registry_instance
# Create mock report with skipped checks
mock_report = Mock()
mock_report.check_type = "terraform" # Set the check_type attribute
mock_report.failed_checks = []
mock_report.passed_checks = []
mock_report.skipped_checks = [SAMPLE_SKIPPED_CHECK]
mock_registry_instance.run.return_value = [mock_report]
provider = IacProvider()
result = provider.run_scan("/test/directory", ["all"], ["exclude/path"])
# Verify RunnerFilter was created with correct parameters
mock_runner_filter.assert_called_with(
framework=["all"], excluded_paths=["exclude/path"]
)
# Verify results
assert len(result) == 1
assert isinstance(result[0], CheckReportIAC)
assert result[0].status == "MUTED"
assert result[0].muted is True
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
def test_run_scan_empty_results(
self, mock_logger, mock_runner_filter, mock_runner_registry
):
"""Test run_scan with no findings"""
# Setup mocks
mock_registry_instance = Mock()
mock_runner_registry.return_value = mock_registry_instance
# Create mock report with no checks
mock_report = Mock()
mock_report.check_type = "terraform" # Set the check_type attribute
mock_report.failed_checks = []
mock_report.passed_checks = []
mock_report.skipped_checks = []
mock_registry_instance.run.return_value = [mock_report]
provider = IacProvider()
result = provider.run_scan("/test/directory", ["kubernetes"], [])
# Verify results
assert len(result) == 0
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
def test_run_scan_multiple_reports(
    self, mock_logger, mock_runner_filter, mock_runner_registry
):
    """Test run_scan with multiple reports from different frameworks"""
    # Setup mocks.  Note: @patch decorators apply bottom-up, so the
    # parameter order is logger, RunnerFilter, RunnerRegistry.
    mock_registry_instance = Mock()
    mock_runner_registry.return_value = mock_registry_instance
    # Create multiple mock reports, one per checked framework.
    mock_report1 = Mock()
    mock_report1.check_type = "terraform"  # Set the check_type attribute
    mock_report1.failed_checks = [SAMPLE_FAILED_CHECK]
    mock_report1.passed_checks = []
    mock_report1.skipped_checks = []
    mock_report2 = Mock()
    mock_report2.check_type = "kubernetes"  # Set the check_type attribute
    mock_report2.failed_checks = []
    mock_report2.passed_checks = [SAMPLE_PASSED_CHECK]
    mock_report2.skipped_checks = []
    mock_registry_instance.run.return_value = [mock_report1, mock_report2]
    provider = IacProvider()
    result = provider.run_scan("/test/directory", ["terraform", "kubernetes"], [])
    # Verify results: one finding per check, aggregated across reports.
    assert len(result) == 2
    assert all(isinstance(report, CheckReportIAC) for report in result)
    # Check that we have one FAIL and one PASS report (order not asserted).
    statuses = [report.status for report in result]
    assert "FAIL" in statuses
    assert "PASS" in statuses
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
@patch("prowler.providers.iac.iac_provider.sys")
def test_run_scan_exception_handling(
    self, mock_sys, mock_logger, mock_runner_filter, mock_runner_registry
):
    """Test run_scan exception handling"""
    # Setup mocks to raise an exception from the checkov runner registry.
    mock_registry_instance = Mock()
    mock_runner_registry.return_value = mock_registry_instance
    mock_registry_instance.run.side_effect = Exception("Test exception")
    # Configure sys.exit to raise SystemExit — sys is patched, so the real
    # sys.exit would otherwise be a no-op Mock and the test could fall through.
    mock_sys.exit.side_effect = SystemExit(1)
    provider = IacProvider()
    # The function should call sys.exit(1) when an exception occurs
    with pytest.raises(SystemExit) as exc_info:
        provider.run_scan("/test/directory", ["terraform"], [])
    assert exc_info.value.code == 1
    # Verify logger was called with error information; the critical message
    # should carry both the exception class name and its message text.
    mock_logger.critical.assert_called_once()
    critical_call_args = mock_logger.critical.call_args[0][0]
    assert "Exception" in critical_call_args
    assert "Test exception" in critical_call_args
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
def test_run_scan_with_different_frameworks(
    self, mock_logger, mock_runner_filter, mock_runner_registry
):
    """Test run_scan with different framework configurations"""
    registry = Mock()
    mock_runner_registry.return_value = registry

    # A single passing terraform check is enough to exercise the happy path.
    report = Mock()
    report.check_type = "terraform"
    report.failed_checks = []
    report.passed_checks = [SAMPLE_PASSED_CHECK]
    report.skipped_checks = []
    registry.run.return_value = [report]

    # Run against an explicit multi-framework selection.
    frameworks = ["terraform", "kubernetes", "cloudformation"]
    results = IacProvider().run_scan("/test/directory", frameworks, [])

    # The framework list must be forwarded verbatim to RunnerFilter.
    mock_runner_filter.assert_called_with(framework=frameworks, excluded_paths=[])
    assert len(results) == 1
    assert results[0].status == "PASS"
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
def test_run_scan_with_exclude_paths(
    self, mock_logger, mock_runner_filter, mock_runner_registry
):
    """Test run_scan with exclude paths"""
    # Setup mocks for the runner registry returned by the provider.
    mock_registry_instance = Mock()
    mock_runner_registry.return_value = mock_registry_instance
    mock_report = Mock()
    mock_report.check_type = "terraform"  # Set the check_type attribute
    mock_report.failed_checks = []
    mock_report.passed_checks = [SAMPLE_PASSED_CHECK]
    mock_report.skipped_checks = []
    mock_registry_instance.run.return_value = [mock_report]
    provider = IacProvider()
    # Test with exclude paths typical of real repositories.
    exclude_paths = ["node_modules", ".git", "vendor"]
    result = provider.run_scan("/test/directory", ["all"], exclude_paths)
    # Verify RunnerFilter was created with correct exclude paths
    # (the list must be passed through unchanged).
    mock_runner_filter.assert_called_with(
        framework=["all"], excluded_paths=exclude_paths
    )
    # Verify results
    assert len(result) == 1
    assert result[0].status == "PASS"
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
def test_run_scan_all_check_types(
    self, mock_logger, mock_runner_filter, mock_runner_registry
):
    """Test run_scan with all types of checks (failed, passed, skipped)"""
    # Setup mocks: one report mixing every checkov check bucket.
    mock_registry_instance = Mock()
    mock_runner_registry.return_value = mock_registry_instance
    mock_report = Mock()
    mock_report.check_type = "terraform"  # Set the check_type attribute
    mock_report.failed_checks = [SAMPLE_FAILED_CHECK, SAMPLE_HIGH_SEVERITY_CHECK]
    mock_report.passed_checks = [SAMPLE_PASSED_CHECK, SAMPLE_CLOUDFORMATION_CHECK]
    mock_report.skipped_checks = [SAMPLE_SKIPPED_CHECK]
    mock_registry_instance.run.return_value = [mock_report]
    provider = IacProvider()
    result = provider.run_scan("/test/directory", ["all"], [])
    # Verify results
    assert len(result) == 5  # 2 failed + 2 passed + 1 skipped
    # Check status distribution: skipped checks surface as MUTED findings.
    statuses = [report.status for report in result]
    assert statuses.count("FAIL") == 2
    assert statuses.count("PASS") == 2
    assert statuses.count("MUTED") == 1
    # Check that muted reports have muted=True
    muted_reports = [report for report in result if report.status == "MUTED"]
    assert all(report.muted for report in muted_reports)
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
def test_run_scan_no_reports_returned(
    self, mock_logger, mock_runner_filter, mock_runner_registry
):
    """Test run_scan when no reports are returned from registry"""
    # The registry produces no reports at all (not even empty ones).
    registry = Mock()
    registry.run.return_value = []
    mock_runner_registry.return_value = registry

    reports = IacProvider().run_scan("/test/directory", ["terraform"], [])

    # No reports from the registry means no findings from the provider.
    assert len(reports) == 0
@patch("prowler.providers.iac.iac_provider.RunnerRegistry")
@patch("prowler.providers.iac.iac_provider.RunnerFilter")
@patch("prowler.providers.iac.iac_provider.logger")
def test_run_scan_multiple_frameworks_with_different_checks(
    self, mock_logger, mock_runner_filter, mock_runner_registry
):
    """Test run_scan with multiple frameworks and different types of checks"""
    # Setup mocks
    mock_registry_instance = Mock()
    mock_runner_registry.return_value = mock_registry_instance
    # Create reports for different frameworks, each with a different
    # mix of failed/passed/skipped checks.
    terraform_report = Mock()
    terraform_report.check_type = "terraform"
    terraform_report.failed_checks = [
        SAMPLE_FAILED_CHECK,
        SAMPLE_ANOTHER_FAILED_CHECK,
    ]
    terraform_report.passed_checks = [SAMPLE_PASSED_CHECK]
    terraform_report.skipped_checks = []
    kubernetes_report = Mock()
    kubernetes_report.check_type = "kubernetes"
    kubernetes_report.failed_checks = [SAMPLE_KUBERNETES_CHECK]
    kubernetes_report.passed_checks = []
    kubernetes_report.skipped_checks = [SAMPLE_ANOTHER_SKIPPED_CHECK]
    cloudformation_report = Mock()
    cloudformation_report.check_type = "cloudformation"
    cloudformation_report.failed_checks = []
    cloudformation_report.passed_checks = [
        SAMPLE_CLOUDFORMATION_CHECK,
        SAMPLE_ANOTHER_PASSED_CHECK,
    ]
    cloudformation_report.skipped_checks = []
    mock_registry_instance.run.return_value = [
        terraform_report,
        kubernetes_report,
        cloudformation_report,
    ]
    provider = IacProvider()
    result = provider.run_scan(
        "/test/directory", ["terraform", "kubernetes", "cloudformation"], []
    )
    # Verify results: findings from every report are flattened into one list.
    assert (
        len(result) == 7
    )  # 2 failed + 1 passed (terraform) + 1 failed + 1 skipped (kubernetes) + 2 passed (cloudformation)
    # Check status distribution
    statuses = [report.status for report in result]
    assert statuses.count("FAIL") == 3
    assert statuses.count("PASS") == 3
    assert statuses.count("MUTED") == 1
def test_run_method_calls_run_scan(self):
    """Test that the run method calls run_scan with correct parameters
    and returns run_scan's result unchanged."""
    provider = IacProvider(
        scan_path="/custom/path", frameworks=["terraform"], exclude_path=["exclude"]
    )
    with patch.object(provider, "run_scan") as mock_run_scan:
        mock_run_scan.return_value = []
        # Bug fix: the return value was previously discarded, so the
        # trailing `assert len(reports) == 0` raised NameError on an
        # undefined `reports`.  Capture it before asserting.
        reports = provider.run()
        mock_run_scan.assert_called_once_with(
            "/custom/path", ["terraform"], ["exclude"]
        )
        assert len(reports) == 0

View File

@@ -29,14 +29,12 @@ All notable changes to the **Prowler UI** are documented in this file.
- Refactor credentials forms with reusable components and error handling [(#7988)](https://github.com/prowler-cloud/prowler/pull/7988)
- Updated the provider details section in Scan and Findings detail pages [(#7968)](https://github.com/prowler-cloud/prowler/pull/7968)
- Improve filter behaviour and relationships between filters in findings page [(#8046)](https://github.com/prowler-cloud/prowler/pull/8046)
- Set filters panel to be always open by default [(#8085)](https://github.com/prowler-cloud/prowler/pull/8085)
### 🐞 Fixed
- Sync between filter buttons and URL when filters change [(#7928)](https://github.com/prowler-cloud/prowler/pull/7928)
- Improve heatmap performance [(#7934)](https://github.com/prowler-cloud/prowler/pull/7934)
- SelectScanProvider warning fixed with empty alias [(#7998)](https://github.com/prowler-cloud/prowler/pull/7998)
- Prevent console warnings for accessibility and SVG [(#8019)](https://github.com/prowler-cloud/prowler/pull/8019)
---

View File

@@ -1,78 +0,0 @@
"use server";
import { apiBaseUrl, getAuthHeaders, parseStringify } from "@/lib";
// Server action: fetches one page of findings from the Prowler API.
// Applies JSON:API pagination (page[number]/page[size]), free-text search,
// sorting and arbitrary extra filter params; returns the parsed response
// body, or undefined when the request throws.
export const getLighthouseFindings = async ({
  page = 1,
  pageSize = 10,
  query = "",
  sort = "",
  filters = {},
}) => {
  const headers = await getAuthHeaders({ contentType: false });
  // For lighthouse usage, handle invalid page numbers by defaulting to 1
  const validPage = isNaN(Number(page)) || page < 1 ? 1 : page;
  const url = new URL(`${apiBaseUrl}/findings`);
  if (validPage) url.searchParams.append("page[number]", validPage.toString());
  if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
  if (query) url.searchParams.append("filter[search]", query);
  if (sort) url.searchParams.append("sort", sort);
  // Filter keys are appended verbatim — presumably already in
  // "filter[...]" form; verify against callers.
  Object.entries(filters).forEach(([key, value]) => {
    url.searchParams.append(key, String(value));
  });
  try {
    const findings = await fetch(url.toString(), {
      headers,
    });
    // NOTE(review): non-2xx responses are not checked here; the JSON error
    // payload (if any) is returned as-is.
    const data = await findings.json();
    const parsedData = parseStringify(data);
    return parsedData;
  } catch (error) {
    // eslint-disable-next-line no-console
    console.error("Error fetching lighthouse findings:", error);
    return undefined;
  }
};
// Server action: same contract as getLighthouseFindings but hits the
// /findings/latest endpoint. Returns the parsed response body, or
// undefined when the request throws.
export const getLighthouseLatestFindings = async ({
  page = 1,
  pageSize = 10,
  query = "",
  sort = "",
  filters = {},
}) => {
  const headers = await getAuthHeaders({ contentType: false });
  // Invalid or negative page numbers fall back to page 1.
  const validPage = isNaN(Number(page)) || page < 1 ? 1 : page;
  const url = new URL(`${apiBaseUrl}/findings/latest`);
  if (validPage) url.searchParams.append("page[number]", validPage.toString());
  if (pageSize) url.searchParams.append("page[size]", pageSize.toString());
  if (query) url.searchParams.append("filter[search]", query);
  if (sort) url.searchParams.append("sort", sort);
  // Extra filters are appended verbatim as query parameters.
  Object.entries(filters).forEach(([key, value]) => {
    url.searchParams.append(key, String(value));
  });
  try {
    const findings = await fetch(url.toString(), {
      headers,
    });
    const data = await findings.json();
    const parsedData = parseStringify(data);
    return parsedData;
  } catch (error) {
    // eslint-disable-next-line no-console
    console.error("Error fetching lighthouse latest findings:", error);
    return undefined;
  }
};

View File

@@ -1,55 +1,16 @@
import { getLighthouseConfig } from "@/actions/lighthouse/lighthouse";
import { Chat } from "@/components/lighthouse";
import { ContentLayout } from "@/components/ui";
import { CacheService } from "@/lib/lighthouse/cache";
import { suggestedActions } from "@/lib/lighthouse/suggested-actions";
interface LighthousePageProps {
searchParams: { cachedMessage?: string };
}
export default async function AIChatbot({ searchParams }: LighthousePageProps) {
export default async function AIChatbot() {
const config = await getLighthouseConfig();
const hasConfig = !!config;
const isActive = config?.attributes?.is_active ?? false;
// Fetch cached content if a cached message type is specified
let cachedContent = null;
if (searchParams.cachedMessage) {
const cached = await CacheService.getCachedMessage(
searchParams.cachedMessage,
);
cachedContent = cached.success ? cached.data : null;
}
// Pre-fetch all question answers and processing status
const isProcessing = await CacheService.isRecommendationProcessing();
const questionAnswers: Record<string, string> = {};
if (!isProcessing) {
for (const action of suggestedActions) {
if (action.questionRef) {
const cached = await CacheService.getCachedMessage(
`question_${action.questionRef}`,
);
if (cached.success && cached.data) {
questionAnswers[action.questionRef] = cached.data;
}
}
}
}
return (
<ContentLayout title="Lighthouse" icon="lucide:bot">
<Chat
hasConfig={hasConfig}
isActive={isActive}
cachedContent={cachedContent}
messageType={searchParams.cachedMessage}
isProcessing={isProcessing}
questionAnswers={questionAnswers}
/>
<ContentLayout title="Cloud Security Analyst" icon="lucide:bot">
<Chat hasConfig={hasConfig} isActive={isActive} />
</ContentLayout>
);
}

View File

@@ -9,7 +9,6 @@ import {
getProvidersOverview,
} from "@/actions/overview/overview";
import { FilterControls } from "@/components/filters";
import { LighthouseBanner } from "@/components/lighthouse";
import {
FindingsBySeverityChart,
FindingsByStatusChart,
@@ -144,6 +143,7 @@ const SSRDataNewFindingsTable = async () => {
sort,
filters: defaultFilters,
});
// Create dictionaries for resources, scans, and providers
const resourceDict = createDict("resources", findingsData);
const scanDict = createDict("scans", findingsData);
@@ -187,9 +187,6 @@ const SSRDataNewFindingsTable = async () => {
</div>
</div>
<Spacer y={4} />
<LighthouseBanner />
<DataTable
columns={ColumnNewFindingsToDate}
data={expandedResponse?.data || []}

View File

@@ -64,7 +64,9 @@ export const ComplianceHeader = ({
</div>
</>
)}
{allFilters.length > 0 && <DataTableFilterCustom filters={allFilters} />}
{allFilters.length > 0 && (
<DataTableFilterCustom filters={allFilters} defaultOpen={true} />
)}
<Spacer y={8} />
</>
);

View File

@@ -1002,9 +1002,9 @@ export const AzureIcon: React.FC<IconSvgProps> = ({
{...props}
>
<path
fillRule="evenodd"
fill-rule="evenodd"
d="m15.37 13.68l-4-12a1 1 0 0 0-1-.68H5.63a1 1 0 0 0-.95.68l-4.05 12a1 1 0 0 0 1 1.32h2.93a1 1 0 0 0 .94-.68l.61-1.78l3 2.27a1 1 0 0 0 .6.19h4.68a1 1 0 0 0 .98-1.32m-5.62.66a.32.32 0 0 1-.2-.07L3.9 10.08l-.09-.07h3l.08-.21l1-2.53l2.24 6.63a.34.34 0 0 1-.38.44m4.67 0H10.7a1 1 0 0 0 0-.66l-4.05-12h3.72a.34.34 0 0 1 .32.23l4.05 12a.34.34 0 0 1-.32.43"
clipRule="evenodd"
clip-rule="evenodd"
/>
</svg>
);

View File

@@ -1,91 +0,0 @@
import { Bot } from "lucide-react";
import Link from "next/link";
import { getLighthouseConfig } from "@/actions/lighthouse/lighthouse";
import { CacheService, initializeTenantCache } from "@/lib/lighthouse/cache";
interface BannerConfig {
message: string;
href: string;
gradient: string;
animate?: boolean;
}
// Renders a full-width clickable banner card with a bot icon and message.
// `gradient` supplies the Tailwind background classes; `animate` pulses the
// icon while background work is in progress.
const renderBanner = ({
  message,
  href,
  gradient,
  animate = false,
}: BannerConfig) => (
  <Link href={href} className="mb-4 block w-full">
    <div
      className={`w-full rounded-lg ${gradient} shadow-lg transition-all duration-200 hover:shadow-xl focus:outline-none focus:ring-2 focus:ring-opacity-50`}
    >
      <div className="p-6">
        <div className="flex items-center gap-4">
          <div className="flex h-12 w-12 items-center justify-center rounded-lg bg-white/20 backdrop-blur-sm">
            <Bot
              size={24}
              className={`text-white ${animate ? "animate-pulse" : ""}`}
            />
          </div>
          <div className="text-left">
            <p className="text-xl font-semibold text-white">{message}</p>
          </div>
        </div>
      </div>
    </div>
  </Link>
);
// Async server component deciding which (if any) Lighthouse banner to show:
//   1. no config            -> "enable Lighthouse" call-to-action
//   2. cached recommendation -> the recommendation text, linking to chat
//   3. processing in flight  -> animated "reviewing findings" banner
//   4. otherwise / on error  -> no banner (returns null)
export const LighthouseBanner = async () => {
  try {
    await initializeTenantCache();
    // Check if Lighthouse is configured
    const lighthouseConfig = await getLighthouseConfig();
    if (!lighthouseConfig?.attributes) {
      return renderBanner({
        message: "Enable Lighthouse to secure your cloud with AI insights",
        href: "/lighthouse/config",
        gradient:
          "bg-gradient-to-r from-green-500 to-blue-500 hover:from-green-600 hover:to-blue-600 focus:ring-green-500/50 dark:from-green-600 dark:to-blue-600 dark:hover:from-green-700 dark:hover:to-blue-700 dark:focus:ring-green-400/50",
      });
    }
    // Check if recommendation exists (non-empty after trimming)
    const cachedRecommendations = await CacheService.getRecommendations();
    if (
      cachedRecommendations.success &&
      cachedRecommendations.data &&
      cachedRecommendations.data.trim().length > 0
    ) {
      return renderBanner({
        message: cachedRecommendations.data,
        href: "/lighthouse?cachedMessage=recommendation",
        gradient:
          "bg-gradient-to-r from-blue-500 to-purple-600 hover:from-blue-600 hover:to-purple-700 focus:ring-blue-500/50 dark:from-blue-600 dark:to-purple-700 dark:hover:from-blue-700 dark:hover:to-purple-800 dark:focus:ring-blue-400/50",
      });
    }
    // Check if recommendation is being processed
    const isProcessing = await CacheService.isRecommendationProcessing();
    if (isProcessing) {
      return renderBanner({
        // NOTE(review): href is empty here — the banner is not navigable
        // while processing; confirm this is intentional.
        message: "Lighthouse is reviewing your findings for insights",
        href: "",
        gradient:
          "bg-gradient-to-r from-orange-500 to-yellow-500 hover:from-orange-600 hover:to-yellow-600 focus:ring-orange-500/50 dark:from-orange-600 dark:to-yellow-600 dark:hover:from-orange-700 dark:hover:to-yellow-700 dark:focus:ring-orange-400/50",
        animate: true,
      });
    }
    // Lighthouse configured but no recommendation and not processing - don't show banner
    return null;
  } catch (error) {
    // Banner is best-effort UI chrome: log and render nothing on failure.
    console.error("Error getting banner state:", error);
    return null;
  }
};

View File

@@ -2,64 +2,43 @@
import { useChat } from "@ai-sdk/react";
import Link from "next/link";
import { useCallback, useEffect, useRef, useState } from "react";
import { useEffect, useRef } from "react";
import { useForm } from "react-hook-form";
import { MemoizedMarkdown } from "@/components/lighthouse/memoized-markdown";
import { CustomButton, CustomTextarea } from "@/components/ui/custom";
import { Form } from "@/components/ui/form";
import {
SuggestedAction,
suggestedActions,
} from "@/lib/lighthouse/suggested-actions";
interface SuggestedAction {
title: string;
label: string;
action: string;
}
interface ChatProps {
hasConfig: boolean;
isActive: boolean;
cachedContent?: string | null;
messageType?: string;
isProcessing: boolean;
questionAnswers: Record<string, string>;
}
interface ChatFormData {
message: string;
}
export const Chat = ({
hasConfig,
isActive,
cachedContent,
messageType,
isProcessing,
questionAnswers,
}: ChatProps) => {
const {
messages,
handleSubmit,
handleInputChange,
append,
status,
setMessages,
} = useChat({
api: "/api/lighthouse/analyst",
credentials: "same-origin",
experimental_throttle: 100,
sendExtraMessageFields: true,
onFinish: () => {
// Handle chat completion
export const Chat = ({ hasConfig, isActive }: ChatProps) => {
const { messages, handleSubmit, handleInputChange, append, status } = useChat(
{
api: "/api/lighthouse/analyst",
credentials: "same-origin",
experimental_throttle: 100,
sendExtraMessageFields: true,
onFinish: () => {
// Handle chat completion
},
onError: (error) => {
console.error("Chat error:", error);
},
},
onError: (error) => {
console.error("Chat error:", error);
},
});
// State for cached response streaming simulation
const [isStreamingCached, setIsStreamingCached] = useState(false);
const [streamingMessageId, setStreamingMessageId] = useState<string | null>(
null,
);
const [currentStreamText, setCurrentStreamText] = useState("");
const form = useForm<ChatFormData>({
defaultValues: {
@@ -71,149 +50,6 @@ export const Chat = ({
const messagesContainerRef = useRef<HTMLDivElement | null>(null);
const latestUserMsgRef = useRef<HTMLDivElement | null>(null);
// Function to simulate streaming text
const simulateStreaming = useCallback(
async (text: string, messageId: string) => {
setIsStreamingCached(true);
setStreamingMessageId(messageId);
setCurrentStreamText("");
// Stream word by word with realistic delays
const words = text.split(" ");
let currentText = "";
for (let i = 0; i < words.length; i++) {
currentText += (i > 0 ? " " : "") + words[i];
setCurrentStreamText(currentText);
// Shorter delay between words for faster streaming
const delay = Math.random() * 80 + 40; // 40-120ms delay per word
await new Promise((resolve) => setTimeout(resolve, delay));
}
setIsStreamingCached(false);
setStreamingMessageId(null);
setCurrentStreamText("");
},
[],
);
// Function to handle cached response for suggested actions
const handleCachedResponse = useCallback(
async (action: SuggestedAction) => {
if (!action.questionRef) {
// No question ref, use normal flow
append({
role: "user",
content: action.action,
});
return;
}
try {
if (isProcessing) {
// Processing in progress, fallback to real-time LLM
append({
role: "user",
content: action.action,
});
return;
}
// Check if we have cached answer
const cachedAnswer = questionAnswers[action.questionRef];
if (cachedAnswer) {
// Cache hit - use cached content with streaming simulation
const userMessageId = `user-cached-${Date.now()}`;
const assistantMessageId = `assistant-cached-${Date.now()}`;
const userMessage = {
id: userMessageId,
role: "user" as const,
content: action.action,
};
const assistantMessage = {
id: assistantMessageId,
role: "assistant" as const,
content: "",
};
const updatedMessages = [...messages, userMessage, assistantMessage];
setMessages(updatedMessages);
// Start streaming simulation
setTimeout(() => {
simulateStreaming(cachedAnswer, assistantMessageId);
}, 300);
} else {
// Cache miss/expired/error - fallback to real-time LLM
append({
role: "user",
content: action.action,
});
}
} catch (error) {
console.error("Error handling cached response:", error);
// Fall back to normal API flow
append({
role: "user",
content: action.action,
});
}
},
[
messages,
setMessages,
append,
simulateStreaming,
isProcessing,
questionAnswers,
],
);
// Load cached message on mount if cachedContent is provided
useEffect(() => {
const loadCachedMessage = () => {
if (cachedContent && messages.length === 0) {
// Create different user questions based on message type
let userQuestion = "Tell me more about this";
if (messageType === "recommendation") {
userQuestion =
"Tell me more about the security issues Lighthouse found";
}
// Future: handle other message types
// else if (messageType === "question_1") {
// userQuestion = "Previously cached question here";
// }
// Create message IDs
const userMessageId = `user-cached-${messageType}-${Date.now()}`;
const assistantMessageId = `assistant-cached-${messageType}-${Date.now()}`;
// Add user message
const userMessage = {
id: userMessageId,
role: "user" as const,
content: userQuestion,
};
// Add assistant message with the cached content
const assistantMessage = {
id: assistantMessageId,
role: "assistant" as const,
content: cachedContent,
};
setMessages([userMessage, assistantMessage]);
}
};
loadCachedMessage();
}, [cachedContent, messageType, messages.length, setMessages]);
// Sync form value with chat input
useEffect(() => {
const syntheticEvent = {
@@ -250,19 +86,6 @@ export const Chat = ({
return () => document.removeEventListener("keydown", handleKeyDown);
}, [messageValue, onFormSubmit]);
// Update assistant message content during streaming simulation
useEffect(() => {
if (isStreamingCached && streamingMessageId && currentStreamText) {
setMessages((prevMessages) =>
prevMessages.map((msg) =>
msg.id === streamingMessageId
? { ...msg, content: currentStreamText }
: msg,
),
);
}
}, [currentStreamText, isStreamingCached, streamingMessageId, setMessages]);
useEffect(() => {
if (messagesContainerRef.current && latestUserMsgRef.current) {
const container = messagesContainerRef.current;
@@ -273,6 +96,30 @@ export const Chat = ({
}
}, [messages]);
const suggestedActions: SuggestedAction[] = [
{
title: "Are there any exposed S3",
label: "buckets in my AWS accounts?",
action: "List exposed S3 buckets in my AWS accounts",
},
{
title: "What is the risk of having",
label: "RDS databases unencrypted?",
action: "What is the risk of having RDS databases unencrypted?",
},
{
title: "What is the CIS 1.10 compliance status",
label: "of my Kubernetes cluster?",
action:
"What is the CIS 1.10 compliance status of my Kubernetes cluster?",
},
{
title: "List my highest privileged",
label: "AWS IAM users with full admin access?",
action: "List my highest privileged AWS IAM users with full admin access",
},
];
// Determine if chat should be disabled
const shouldDisableChat = !hasConfig || !isActive;
@@ -288,8 +135,8 @@ export const Chat = ({
</h3>
<p className="text-muted-foreground mb-4">
{!hasConfig
? "Please configure your OpenAI API key to use Lighthouse."
: "OpenAI API key is invalid. Please update your key to use Lighthouse."}
? "Please configure your OpenAI API key to use the Lighthouse Cloud Security Analyst."
: "OpenAI API key is invalid. Please update your key to use Lighthouse Cloud Security Analyst."}
</p>
<Link
href="/lighthouse/config"
@@ -311,7 +158,10 @@ export const Chat = ({
key={`suggested-action-${index}`}
ariaLabel={`Send message: ${action.action}`}
onPress={() => {
handleCachedResponse(action); // Use cached response handler
append({
role: "user",
content: action.action,
});
}}
className="hover:bg-muted flex h-auto w-full flex-col items-start justify-start rounded-xl border bg-gray-50 px-4 py-3.5 text-left font-sans text-sm dark:bg-gray-900"
>
@@ -361,12 +211,10 @@ export const Chat = ({
</div>
);
})}
{(status === "submitted" || isStreamingCached) && (
{status === "submitted" && (
<div className="flex justify-start">
<div className="bg-muted max-w-[80%] rounded-lg px-4 py-2">
<div className="animate-pulse">
{isStreamingCached ? "" : "Thinking..."}
</div>
<div className="animate-pulse">Thinking...</div>
</div>
</div>
)}
@@ -397,18 +245,10 @@ export const Chat = ({
ariaLabel={
status === "submitted" ? "Stop generation" : "Send message"
}
isDisabled={
status === "submitted" ||
isStreamingCached ||
!messageValue?.trim()
}
isDisabled={status === "submitted" || !messageValue?.trim()}
className="flex h-10 w-10 flex-shrink-0 items-center justify-center rounded-lg bg-primary p-2 text-primary-foreground hover:bg-primary/90 disabled:opacity-50 dark:bg-primary/90"
>
{status === "submitted" || isStreamingCached ? (
<span></span>
) : (
<span></span>
)}
{status === "submitted" ? <span></span> : <span></span>}
</CustomButton>
</div>
</form>

View File

@@ -1,3 +1,2 @@
export * from "./banner";
export * from "./chat";
export * from "./chatbot-config";

View File

@@ -5,9 +5,7 @@ import { ProwlerExtended } from "@/components/icons";
import {
Sheet,
SheetContent,
SheetDescription,
SheetHeader,
SheetTitle,
SheetTrigger,
} from "@/components/ui/sheet";
import { Menu } from "@/components/ui/sidebar/menu";
@@ -27,8 +25,6 @@ export function SheetMenu() {
side="left"
>
<SheetHeader>
<SheetTitle className="sr-only">Sidebar</SheetTitle>
<SheetDescription className="sr-only" />
<Button
className="flex items-center justify-center pb-2 pt-1"
variant="link"

View File

@@ -17,7 +17,7 @@ export interface DataTableFilterCustomProps {
export const DataTableFilterCustom = ({
filters,
defaultOpen = true,
defaultOpen = false,
showClearButton = false,
}: DataTableFilterCustomProps) => {
const { updateFilter } = useUrlFilters();

View File

@@ -1,434 +0,0 @@
import Valkey from "iovalkey";
import { auth } from "@/auth.config";
import {
generateBannerFromDetailed,
generateDetailedRecommendation,
generateQuestionAnswers,
} from "./recommendations";
import { suggestedActions } from "./suggested-actions";
import {
compareProcessedScanIds,
generateSecurityScanSummary,
getCompletedScansLast24h,
} from "./summary";
let valkeyClient: Valkey | null = null;
// Returns a process-wide memoized Valkey client, creating it on first use.
// lazyConnect defers the TCP connection until the first command is issued.
export async function getValkeyClient(): Promise<Valkey> {
  if (!valkeyClient) {
    valkeyClient = new Valkey({
      host: process.env.VALKEY_HOST,
      // Defaults to the standard Valkey/Redis port when VALKEY_PORT is unset.
      port: parseInt(process.env.VALKEY_PORT || "6379"),
      connectTimeout: 5000,
      lazyConnect: true,
    });
  }
  return valkeyClient;
}
export class CacheService {
// Resolves the current tenant id from the auth session; null when
// there is no authenticated session. All cache keys are tenant-scoped.
private static async getTenantId(): Promise<string | null> {
  const session = await auth();
  return session?.tenantId || null;
}
// Tries to take a tenant-scoped advisory lock via SET ... EX <ttl> NX:
// the write succeeds ("OK") only when the key does not already exist,
// and the TTL guarantees the lock self-expires if the holder dies.
// Returns false both when the lock is held and on any Valkey error.
private static async acquireProcessingLock(
  tenantId: string,
  lockKey: string,
  lockTtlSeconds: number = 300,
): Promise<boolean> {
  try {
    const client = await getValkeyClient();
    const fullLockKey = `_lighthouse:${tenantId}:lock:${lockKey}`;
    const result = await client.set(
      fullLockKey,
      Date.now().toString(), // lock value: acquisition timestamp (informational)
      "EX",
      lockTtlSeconds,
      "NX",
    );
    return result === "OK";
  } catch (error) {
    return false;
  }
}
// Releases a lock taken by acquireProcessingLock by deleting its key.
// Failures are swallowed: the EX TTL set on acquisition ensures the lock
// expires on its own even if this delete never lands.
private static async releaseProcessingLock(
  tenantId: string,
  lockKey: string,
): Promise<void> {
  try {
    const client = await getValkeyClient();
    const fullLockKey = `_lighthouse:${tenantId}:lock:${lockKey}`;
    await client.del([fullLockKey]);
  } catch (error) {
    // Silent failure
  }
}
// Reads the list of scan ids already processed for the current tenant.
// Stored as a single comma-joined string; returns [] when unauthenticated,
// when the key is missing/empty, or on any Valkey error.
static async getProcessedScanIds(): Promise<string[]> {
  const tenantId = await this.getTenantId();
  if (!tenantId) return [];
  try {
    const client = await getValkeyClient();
    const dataKey = `_lighthouse:${tenantId}:processed_scan_ids`;
    const result = await client.get(dataKey);
    if (!result) return [];
    const scanIdsString = result.toString();
    // Guard against an empty stored string: "".split(",") would be [""].
    return scanIdsString ? scanIdsString.split(",") : [];
  } catch (error) {
    return [];
  }
}
// Overwrites the tenant's processed-scan-id list (comma-joined, no TTL).
// Returns false when unauthenticated or on any Valkey error.
// Assumes scan ids never contain commas — TODO confirm.
static async setProcessedScanIds(scanIds: string[]): Promise<boolean> {
  const tenantId = await this.getTenantId();
  if (!tenantId) return false;
  try {
    const client = await getValkeyClient();
    const dataKey = `_lighthouse:${tenantId}:processed_scan_ids`;
    const scanIdsString = scanIds.join(",");
    await client.set(dataKey, scanIdsString);
    return true;
  } catch (error) {
    return false;
  }
}
// Generates and caches a security summary for the given scan ids under a
// tenant-scoped lock so only one caller does the work at a time.
// Returns { success: true } without data when another caller holds the
// lock (work is in flight elsewhere); { success: false } when
// unauthenticated or on unexpected errors.
static async processScansWithLock(scanIds: string[]): Promise<{
  success: boolean;
  data?: string;
}> {
  const tenantId = await this.getTenantId();
  if (!tenantId) return { success: false };
  const lockKey = "scan-processing";
  const lockTtlSeconds = 1200; // 20 minutes
  try {
    // Try to acquire processing lock
    const lockAcquired = await this.acquireProcessingLock(
      tenantId,
      lockKey,
      lockTtlSeconds,
    );
    if (!lockAcquired) {
      // Processing is happening in background, return success but no data
      return { success: true };
    }
    try {
      // Generate the scan summary for the provided scan IDs
      const scanSummary = await generateSecurityScanSummary(scanIds);
      // Only process if we have valid scan summary
      if (scanSummary) {
        // Cache the scan summary
        await this.set("scan-summary", scanSummary);
        // Mark scans as processed
        await this.setProcessedScanIds(scanIds);
        // Generate and cache recommendations asynchronously — deliberately
        // not awaited; failures are only logged.
        this.generateAndCacheRecommendations(scanSummary).catch((error) => {
          console.error(
            "Background recommendation generation failed:",
            error,
          );
        });
        return {
          success: true,
          data: scanSummary,
        };
      } else {
        // Even if no summary, mark scans as processed to avoid reprocessing
        await this.setProcessedScanIds(scanIds);
      }
      return { success: true };
    } finally {
      // Always release the lock, even if summary generation threw.
      // NOTE(review): releasing here does not wait for the un-awaited
      // recommendation task above — it runs past the lock by design.
      await this.releaseProcessingLock(tenantId, lockKey);
    }
  } catch (error) {
    console.error("Error processing scans with lock:", error);
    return { success: false };
  }
}
// Generic cache methods for future use
// Generic tenant-scoped cache read. Returns the stored string, or null
// when unauthenticated, on a cache miss, or on any Valkey error
// (miss and error are indistinguishable to the caller).
static async get(key: string): Promise<string | null> {
  const tenantId = await this.getTenantId();
  if (!tenantId) return null;
  try {
    const client = await getValkeyClient();
    const fullKey = `_lighthouse:${tenantId}:${key}`;
    const result = await client.get(fullKey);
    return result?.toString() || null;
  } catch (error) {
    return null;
  }
}
// Generic tenant-scoped cache write. With ttlSeconds the key expires
// (SET ... EX); without it the key persists until overwritten/deleted.
// Returns false when unauthenticated or on any Valkey error.
static async set(
  key: string,
  value: string,
  ttlSeconds?: number,
): Promise<boolean> {
  const tenantId = await this.getTenantId();
  if (!tenantId) return false;
  try {
    const client = await getValkeyClient();
    const fullKey = `_lighthouse:${tenantId}:${key}`;
    if (ttlSeconds) {
      await client.set(fullKey, value, "EX", ttlSeconds);
    } else {
      await client.set(fullKey, value);
    }
    return true;
  } catch (error) {
    return false;
  }
}
static async getRecommendations(): Promise<{
success: boolean;
data?: string;
}> {
const tenantId = await this.getTenantId();
if (!tenantId) return { success: false };
try {
const client = await getValkeyClient();
const dataKey = `_lighthouse:${tenantId}:recommendations`;
const cachedData = await client.get(dataKey);
if (cachedData) {
return {
success: true,
data: cachedData.toString(),
};
}
return { success: true, data: undefined };
} catch (error) {
return { success: false };
}
}
  /**
   * Generates the banner and detailed recommendations from a scan summary
   * and caches both for the tenant, guarded by a distributed lock with a
   * double-checked cache read so concurrent callers do not regenerate.
   *
   * Returns { success: true, data: banner } when a banner exists or was
   * generated, { success: true } when another worker holds the lock,
   * { success: true, data: "" } when generation produced nothing, and
   * { success: false } on tenant/cache errors.
   */
  static async generateAndCacheRecommendations(scanSummary: string): Promise<{
    success: boolean;
    data?: string;
  }> {
    const tenantId = await this.getTenantId();
    if (!tenantId) return { success: false };
    const lockKey = "recommendations-processing";
    const dataKey = `_lighthouse:${tenantId}:recommendations`;
    const detailedDataKey = `_lighthouse:${tenantId}:cached-messages:recommendation`;
    try {
      const client = await getValkeyClient();
      // Check if data already exists
      const existingData = await client.get(dataKey);
      if (existingData) {
        return {
          success: true,
          data: existingData.toString(),
        };
      }
      // Lock TTL 10 minutes
      const lockAcquired = await this.acquireProcessingLock(
        tenantId,
        lockKey,
        600,
      );
      if (!lockAcquired) {
        // Processing is happening in background, return success but no data
        return { success: true };
      }
      try {
        // Double-check after acquiring lock
        const doubleCheckData = await client.get(dataKey);
        if (doubleCheckData) {
          return {
            success: true,
            data: doubleCheckData.toString(),
          };
        }
        // Generate detailed recommendation first
        const detailedRecommendation =
          await generateDetailedRecommendation(scanSummary);
        if (!detailedRecommendation.trim()) {
          return { success: true, data: "" };
        }
        // Generate banner from detailed content
        const bannerRecommendation = await generateBannerFromDetailed(
          detailedRecommendation,
        );
        // Both must succeed - no point in detailed without banner
        if (!bannerRecommendation.trim()) {
          return { success: true, data: "" };
        }
        // Generate question answers
        // NOTE(review): this passes the module-level `suggestedActions` list,
        // not anything derived from `scanSummary` — confirm this is intended.
        const questionAnswers = await generateQuestionAnswers(suggestedActions);
        // Cache both versions
        // NOTE(review): banner/detailed entries are written without a TTL
        // while question answers expire after 24h — confirm the asymmetry.
        await client.set(dataKey, bannerRecommendation);
        await client.set(detailedDataKey, detailedRecommendation);
        // Cache question answers with 24h TTL
        for (const [questionRef, answer] of Object.entries(questionAnswers)) {
          const questionKey = `_lighthouse:${tenantId}:cached-messages:question_${questionRef}`;
          await client.set(questionKey, answer, "EX", 86400); // 24 hours
        }
        return {
          success: true,
          data: bannerRecommendation,
        };
      } finally {
        // Always release the lock, even when generation threw.
        await this.releaseProcessingLock(tenantId, lockKey);
      }
    } catch (error) {
      console.error("Error generating and caching recommendations:", error);
      return { success: false };
    }
  }
static async isRecommendationProcessing(): Promise<boolean> {
const tenantId = await this.getTenantId();
if (!tenantId) return false;
try {
const client = await getValkeyClient();
const lockKey = `_lighthouse:${tenantId}:lock:recommendations-processing`;
const result = await client.get(lockKey);
return result !== null;
} catch (error) {
return false;
}
}
// New method to get cached message by type
static async getCachedMessage(messageType: string): Promise<{
success: boolean;
data?: string;
}> {
const tenantId = await this.getTenantId();
if (!tenantId) return { success: false };
try {
const client = await getValkeyClient();
const dataKey = `_lighthouse:${tenantId}:cached-messages:${messageType}`;
const cachedData = await client.get(dataKey);
if (cachedData) {
return {
success: true,
data: cachedData.toString(),
};
}
return { success: true, data: undefined };
} catch (error) {
console.error(`Error getting cached message ${messageType}:`, error);
return { success: false };
}
}
// New method to set cached message by type
static async setCachedMessage(
messageType: string,
content: string,
): Promise<boolean> {
const tenantId = await this.getTenantId();
if (!tenantId) return false;
try {
const client = await getValkeyClient();
const dataKey = `_lighthouse:${tenantId}:cached-messages:${messageType}`;
await client.set(dataKey, content);
return true;
} catch (error) {
console.error(`Error caching message type ${messageType}:`, error);
return false;
}
}
}
/**
 * Entry point for warming the tenant's Lighthouse cache: finds scans
 * completed in the last 24h, serves the cached summary when nothing new
 * exists, and otherwise runs the locked processing pipeline.
 */
export async function initializeTenantCache(): Promise<{
  success: boolean;
  data?: string;
  scanSummary?: string;
}> {
  try {
    const currentScanIds = await getCompletedScansLast24h();
    // Package whatever summary is already cached for this tenant.
    const respondWithCachedSummary = async () => {
      const cached = await CacheService.get("scan-summary");
      return {
        success: true,
        data: cached || undefined,
        scanSummary: cached || undefined,
      };
    };
    // Nothing completed in the last 24h -> nothing new to process.
    if (currentScanIds.length === 0) {
      return await respondWithCachedSummary();
    }
    // This exact scan set was already processed -> serve cached data.
    const processedScanIds = await CacheService.getProcessedScanIds();
    if (compareProcessedScanIds(currentScanIds, processedScanIds)) {
      return await respondWithCachedSummary();
    }
    // New scans detected: run the full, lock-guarded processing path.
    const result = await CacheService.processScansWithLock(currentScanIds);
    return {
      success: result.success,
      data: result.data,
      scanSummary: result.data,
    };
  } catch (error) {
    console.error("Error initializing tenant cache:", error);
    return {
      success: false,
    };
  }
}

View File

@@ -127,8 +127,8 @@ You operate in an agent loop, iterating through these steps:
- Fetches information related to:
- All findings data across providers. Supports filtering by severity, status, etc.
- Unique metadata values from findings
- Available checks for a specific provider (aws, gcp, azure, kubernetes, etc)
- Details of a specific check including details about severity, risk, remediation, compliances that are associated with the check, etc
- Remediation for checks
- Check IDs supported by different provider types
### roles_agent

View File

@@ -1,199 +0,0 @@
import { ChatOpenAI } from "@langchain/openai";
import { getAIKey, getLighthouseConfig } from "@/actions/lighthouse/lighthouse";
import { type SuggestedAction } from "./suggested-actions";
import { initLighthouseWorkflow } from "./workflow";
/**
 * Asks the configured LLM for one focused, detailed security recommendation
 * derived from a scan summary (most critical finding, highest-leverage check
 * fix, or highest business impact).
 *
 * @param scanSummary - Markdown summary of failed findings to analyze.
 * @returns The detailed recommendation text, or "" when the AI key or
 *          Lighthouse config is unavailable or any error occurs (failures
 *          are logged, never thrown).
 */
export const generateDetailedRecommendation = async (
  scanSummary: string,
): Promise<string> => {
  try {
    const apiKey = await getAIKey();
    if (!apiKey) {
      return "";
    }
    const lighthouseConfig = await getLighthouseConfig();
    if (!lighthouseConfig?.attributes) {
      return "";
    }
    const config = lighthouseConfig.attributes;
    const businessContext = config.business_context || "";
    // Tenant-configured model/temperature with defaults; 1500 tokens caps
    // the size of the detailed analysis.
    const llm = new ChatOpenAI({
      model: config.model || "gpt-4o",
      temperature: config.temperature || 0,
      maxTokens: 1500,
      apiKey: apiKey,
    });
    let systemPrompt = `You are a cloud security analyst providing focused, actionable recommendations.
IMPORTANT: Focus on ONE of these high-impact opportunities:
1. The most CRITICAL finding that needs immediate attention
2. A pattern where fixing one check ID resolves many findings (e.g., "Fix aws_s3_bucket_public_access_block to resolve 15 findings")
3. The issue with highest business impact
Your response should be a comprehensive analysis of this ONE focus area including:
**Issue Description:**
- What exactly is the problem
- Why it's critical or high-impact
- How many findings it affects
**Affected Resources:**
- Specific resources, services, or configurations involved
- Number of affected resources
**Business Impact:**
- Security risks and potential consequences
- Compliance violations (mention specific frameworks if applicable)
- Operational impact
**Remediation Steps:**
- Clear, step-by-step instructions
- Specific commands or configuration changes where applicable
- Expected outcome after fix
Be specific with numbers (e.g., "affects 12 S3 buckets", "resolves 15 findings"). Focus on actionable guidance that will have the biggest security improvement.`;
    if (businessContext) {
      systemPrompt += `\n\nBUSINESS CONTEXT: ${businessContext}`;
    }
    systemPrompt += `\n\nSecurity Scan Summary:\n${scanSummary}`;
    const response = await llm.invoke([
      {
        role: "system",
        content: systemPrompt,
      },
    ]);
    return response.content.toString().trim();
  } catch (error) {
    console.error("Error generating detailed recommendation:", error);
    return "";
  }
};
/**
 * Condenses a detailed recommendation into a short (<= 80 char) banner line
 * mentioning "Lighthouse", using the tenant-configured LLM.
 *
 * @param detailedRecommendation - Output of generateDetailedRecommendation.
 * @returns The banner text, or "" when the AI key or Lighthouse config is
 *          unavailable or any error occurs (failures are logged, not thrown).
 */
export const generateBannerFromDetailed = async (
  detailedRecommendation: string,
): Promise<string> => {
  try {
    const apiKey = await getAIKey();
    if (!apiKey) {
      return "";
    }
    const lighthouseConfig = await getLighthouseConfig();
    if (!lighthouseConfig?.attributes) {
      return "";
    }
    const config = lighthouseConfig.attributes;
    // 100 tokens is plenty for an 80-character banner.
    const llm = new ChatOpenAI({
      model: config.model || "gpt-4o",
      temperature: config.temperature || 0,
      maxTokens: 100,
      apiKey: apiKey,
    });
    const systemPrompt = `Create a short, engaging banner message from this detailed security analysis.
REQUIREMENTS:
- Maximum 80 characters
- Include "Lighthouse" in the message
- Focus on the key insight or opportunity
- Make it clickable and business-focused
- Use action words like "detected", "found", "identified"
- Don't end with punctuation
EXAMPLES:
- Lighthouse found fixing 1 S3 check resolves 15 findings
- Lighthouse detected critical RDS encryption gaps
- Lighthouse identified 3 exposed databases needing attention
Based on this detailed analysis, create one engaging banner message:
${detailedRecommendation}`;
    const response = await llm.invoke([
      {
        role: "system",
        content: systemPrompt,
      },
    ]);
    return response.content.toString().trim();
  } catch (error) {
    console.error(
      "Error generating banner from detailed recommendation:",
      error,
    );
    return "";
  }
};
/**
 * Legacy one-shot helper kept for backward compatibility: produces the
 * banner message directly from a scan summary by chaining the detailed
 * recommendation and banner generation steps. Returns "" when no detailed
 * recommendation could be generated.
 */
export const generateRecommendation = async (
  scanSummary: string,
): Promise<string> => {
  const detailedText = await generateDetailedRecommendation(scanSummary);
  return detailedText ? generateBannerFromDetailed(detailedText) : "";
};
/**
 * Answers each suggested question through the Lighthouse agent workflow.
 * Returns a map of questionRef -> answer text; questions without a ref,
 * or whose workflow run fails, are simply omitted. Never throws.
 */
export const generateQuestionAnswers = async (
  questions: SuggestedAction[],
): Promise<Record<string, string>> => {
  const answers: Record<string, string> = {};
  try {
    const apiKey = await getAIKey();
    if (!apiKey) {
      return answers;
    }
    // One workflow instance is shared across all questions.
    const workflow = await initLighthouseWorkflow();
    for (const question of questions) {
      const ref = question.questionRef;
      if (!ref) continue;
      try {
        const result = await workflow.invoke({
          messages: [
            {
              role: "user",
              content: question.action,
            },
          ],
        });
        // The last message in the transcript carries the final answer.
        const finalMessage = result.messages[result.messages.length - 1];
        const content = finalMessage?.content;
        if (content) {
          answers[ref] = content.toString().trim();
        }
      } catch (error) {
        console.error(
          `Error generating answer for question ${ref}:`,
          error,
        );
      }
    }
  } catch (error) {
    console.error("Error generating question answers:", error);
  }
  return answers;
};

View File

@@ -1,33 +0,0 @@
// A canned question offered to the user; `action` is the prompt sent to the
// agent workflow and `questionRef` keys the pre-computed cached answer.
export interface SuggestedAction {
  // Leading fragment of the suggestion text
  title: string;
  // Trailing fragment of the suggestion text
  label: string;
  // Full prompt submitted to the Lighthouse workflow when selected
  action: string;
  // Optional stable key used to cache this question's generated answer
  questionRef?: string;
}
// Predefined questions surfaced in the Lighthouse UI; their answers are
// pre-generated and cached under `question_<questionRef>` keys.
export const suggestedActions: SuggestedAction[] = [
  {
    title: "Are there any exposed S3",
    label: "buckets in my AWS accounts?",
    action: "List exposed S3 buckets in my AWS accounts",
    questionRef: "1",
  },
  {
    title: "What is the risk of having",
    label: "RDS databases unencrypted?",
    action: "What is the risk of having RDS databases unencrypted?",
    questionRef: "2",
  },
  {
    title: "What is the CIS 1.10 compliance status",
    label: "of my Kubernetes cluster?",
    action: "What is the CIS 1.10 compliance status of my Kubernetes cluster?",
    questionRef: "3",
  },
  {
    title: "List my highest privileged",
    label: "AWS IAM users with full admin access?",
    action: "List my highest privileged AWS IAM users with full admin access",
    questionRef: "4",
  },
];

View File

@@ -1,305 +0,0 @@
import { getLighthouseCheckDetails } from "@/actions/lighthouse/checks";
import { getLighthouseFindings } from "@/actions/lighthouse/findings";
import { getScans } from "@/actions/scans/scans";
import { CheckDetails, FindingSummary } from "@/types/lighthouse/summary";
import { getNewFailedFindingsSummary } from "./tools/findings";
/**
 * Returns the IDs of scans that completed in the last 24 hours, newest
 * updated first (at most 50, the requested page size).
 *
 * Fix: the cutoff is now computed as exactly 24 * 60 * 60 * 1000 ms before
 * "now". The previous setHours(getHours() - 24) used local-calendar
 * arithmetic, which drifts by an hour across DST transitions.
 */
export const getCompletedScansLast24h = async (): Promise<string[]> => {
  const TWENTY_FOUR_HOURS_MS = 24 * 60 * 60 * 1000;
  const twentyFourHoursAgo = new Date(Date.now() - TWENTY_FOUR_HOURS_MS);
  const scansResponse = await getScans({
    page: 1,
    pageSize: 50,
    filters: {
      "fields[scans]": "completed_at",
      "filter[state]": "completed",
      "filter[started_at__gte]": twentyFourHoursAgo.toISOString(),
    },
    sort: "-updated_at",
  });
  if (!scansResponse?.data || scansResponse.data.length === 0) {
    return [];
  }
  return scansResponse.data.map((scan: any) => scan.id);
};
/**
 * Order-insensitive equality check between the current scan IDs and the
 * already-processed scan IDs. Returns true when both lists contain exactly
 * the same IDs (duplicates included). Neither input is mutated.
 */
export const compareProcessedScanIds = (
  currentScanIds: string[],
  processedScanIds: string[],
): boolean => {
  // Different sizes can never match; bail out before sorting.
  if (currentScanIds.length !== processedScanIds.length) {
    return false;
  }
  const left = [...currentScanIds].sort();
  const right = [...processedScanIds].sort();
  return left.every((id, index) => id === right[index]);
};
// Fetches up to `limit` failed (unmuted) findings across the given scans,
// API-sorted by severity, and maps each finding to its own FindingSummary.
// NOTE(review): entries are per-finding (count is always 1, one finding ID
// each), not aggregated per check — confirm downstream consumers expect that.
const getTopFailedFindingsSummary = async (
  scanIds: string[],
  limit: number = 10,
): Promise<FindingSummary[]> => {
  const response = await getLighthouseFindings({
    page: 1,
    pageSize: limit,
    sort: "severity",
    filters: {
      "fields[findings]": "check_id,severity",
      "filter[scan__in]": scanIds.join(","),
      "filter[status]": "FAIL",
      "filter[muted]": "false",
    },
  });
  if (!response?.data) {
    return [];
  }
  return response.data.map((finding: any) => ({
    checkId: finding.attributes.check_id,
    severity: finding.attributes.severity,
    count: 1,
    findingIds: [finding.id],
  }));
};
// Helper function to collect new failed findings across multiple scans
/**
 * Builds a map of scanId -> flattened FindingSummary list from each scan's
 * new/changed failed findings. Scans whose fetch fails are logged and
 * skipped; scans with no new findings are omitted from the result.
 */
const collectNewFailedFindings = async (
  scanIds: string[],
): Promise<Record<string, FindingSummary[]>> => {
  const findingsByScan: Record<string, FindingSummary[]> = {};
  for (const scanId of scanIds) {
    try {
      const newFailedFindingsSummary =
        await getNewFailedFindingsSummary(scanId);
      if (Object.keys(newFailedFindingsSummary).length > 0) {
        const scanFindings: FindingSummary[] = [];
        // Convert to FindingSummary format
        // (summary shape: severity -> checkId -> { count, finding_ids })
        Object.entries(newFailedFindingsSummary).forEach(
          ([severity, checks]) => {
            Object.entries(checks).forEach(([checkId, summary]) => {
              scanFindings.push({
                checkId,
                severity,
                count: summary.count,
                findingIds: summary.finding_ids,
              });
            });
          },
        );
        if (scanFindings.length > 0) {
          findingsByScan[scanId] = scanFindings;
        }
      }
    } catch (error) {
      console.error(
        `Error fetching new failed findings for scan ${scanId}:`,
        error,
      );
    }
  }
  return findingsByScan;
};
// Helper function to enrich findings with check details
/**
 * Fetches check details for every unique check ID referenced by the given
 * findings. A failed lookup is logged and replaced with a minimal fallback
 * entry so downstream formatting always finds a key.
 *
 * Improvement: the per-check lookups are independent, so they now run
 * concurrently via Promise.all instead of being awaited sequentially.
 */
const enrichFindingsWithCheckDetails = async (
  findings: FindingSummary[],
): Promise<Map<string, CheckDetails>> => {
  const uniqueCheckIds = Array.from(new Set(findings.map((f) => f.checkId)));
  const checkDetailsMap = new Map<string, CheckDetails>();
  await Promise.all(
    uniqueCheckIds.map(async (checkId) => {
      try {
        const checkDetails = await getLighthouseCheckDetails({ checkId });
        if (checkDetails) {
          checkDetailsMap.set(checkId, checkDetails);
        }
      } catch (error) {
        console.error(`Error fetching check details for ${checkId}:`, error);
        // Add a fallback check details object
        checkDetailsMap.set(checkId, {
          id: checkId,
          title: checkId,
          description: "",
          risk: "",
          remediation: {},
        });
      }
    }),
  );
  return checkDetailsMap;
};
// Helper function to sort findings by severity
/**
 * Returns the findings ordered from most to least severe
 * (critical -> high -> medium -> low -> informational).
 *
 * Fixes: the input array is no longer mutated (the original sorted in
 * place), and unknown severity strings now sort last instead of producing
 * NaN comparisons with unspecified ordering.
 */
const sortFindingsBySeverity = (
  findings: FindingSummary[],
): FindingSummary[] => {
  const severityOrder: Record<string, number> = {
    critical: 0,
    high: 1,
    medium: 2,
    low: 3,
    informational: 4,
  };
  // Sentinel rank pushes unrecognized severities to the end.
  const rank = (severity: string): number =>
    severity in severityOrder
      ? severityOrder[severity]
      : Number.MAX_SAFE_INTEGER;
  // Copy before sorting so callers' arrays are left untouched.
  return [...findings].sort((a, b) => rank(a.severity) - rank(b.severity));
};
// Helper function to build details for a single finding
/**
 * Renders one finding as a markdown fragment: title, severity, description,
 * finding count/IDs, and whichever remediation channels the check provides.
 *
 * Improvement: the five copy-pasted remediation branches (terraform, cli,
 * nativeiac, other, wui) are replaced by a single data-driven loop that
 * preserves the original channel order, labels, and output text.
 */
const buildSingleFindingDetails = (
  finding: FindingSummary,
  checkDetailsMap: Map<string, CheckDetails>,
): string => {
  const checkDetails = checkDetailsMap.get(finding.checkId);
  let detailsText = "";
  detailsText += `**Title:** ${checkDetails?.title || finding.checkId}\n`;
  detailsText += `**Severity:** ${finding.severity.toUpperCase()}\n`;
  detailsText += `**Check Summary:** ${checkDetails?.description || "Description not available"}\n`;
  detailsText += `**Number of failed findings:** ${finding.count}\n`;
  detailsText += `**Finding IDs:** ${finding.findingIds.join(", ")}\n`;
  detailsText += "**Remediation:**\n";
  const remediation = checkDetails?.remediation;
  // Channels rendered in a fixed order with their display labels.
  const channels = [
    { entry: remediation?.terraform, label: "Terraform" },
    { entry: remediation?.cli, label: "AWS CLI" },
    { entry: remediation?.nativeiac, label: "Native IAC" },
    { entry: remediation?.other, label: "Other" },
    { entry: remediation?.wui, label: "WUI" },
  ];
  let hasRemediation = false;
  for (const { entry, label } of channels) {
    if (!entry) continue;
    hasRemediation = true;
    detailsText += `- ${label}: ${entry.description}\n`;
    if (entry.reference) {
      detailsText += `  Reference: ${entry.reference}\n`;
    }
  }
  if (!hasRemediation) {
    detailsText += "- No specific remediation commands available\n";
  }
  detailsText += "\n";
  return detailsText;
};
// Generates a summary of failed findings for the provided scan IDs
// Returns an empty string if no failed findings in any scan or unexpected error
// Else it returns a string with the summary of the failed findings
/**
 * @param scanIds - Completed scan UUIDs to summarize.
 * @returns Markdown with a header, a per-scan "new failed findings" section
 *          (when present), and a cross-scan "top failed findings" section.
 */
export const generateSecurityScanSummary = async (
  scanIds: string[],
): Promise<string> => {
  try {
    // Collect new failed findings by scan
    const newFindingsByScan = await collectNewFailedFindings(scanIds);
    // Get top failed findings across all scans
    // (a failure here degrades gracefully to an empty top-findings list)
    let topFailedFindings: FindingSummary[] = [];
    try {
      topFailedFindings = await getTopFailedFindingsSummary(scanIds, 10);
    } catch (error) {
      console.error("Error fetching top failed findings:", error);
    }
    // Combine all findings for check details enrichment
    const newFindings = Object.values(newFindingsByScan).flat();
    const allFindings = [...newFindings, ...topFailedFindings];
    // If no findings at all, return empty string
    if (allFindings.length === 0) {
      return "";
    }
    // Enrich all findings with check details
    const checkDetailsMap = await enrichFindingsWithCheckDetails(allFindings);
    // Build the summary
    let summaryText = "";
    // Header
    if (scanIds.length === 1) {
      summaryText += `# Scan ID: ${scanIds[0]}\n\n`;
    } else {
      summaryText += `# Scans processed (${scanIds.length} scans from last 24h)\n`;
      summaryText += `**Scan IDs:** ${scanIds.join(", ")}\n\n`;
    }
    // New findings section (if any)
    if (newFindings.length > 0) {
      summaryText += "## New Failed Findings by Scan\n";
      summaryText += `${newFindings.length} new findings detected.\n\n`;
      Object.entries(newFindingsByScan).forEach(([scanId, scanFindings]) => {
        summaryText += `### Scan ID: ${scanId}\n`;
        const sortedScanFindings = sortFindingsBySeverity(scanFindings);
        for (const finding of sortedScanFindings) {
          summaryText += buildSingleFindingDetails(finding, checkDetailsMap);
        }
        summaryText += "\n";
      });
    }
    // Top findings section
    if (topFailedFindings.length > 0) {
      summaryText += "## Top Failed Findings Across All Scans\n";
      summaryText += `Showing top ${topFailedFindings.length} critical findings.\n\n`;
      const sortedTopFindings = sortFindingsBySeverity(topFailedFindings);
      for (const finding of sortedTopFindings) {
        summaryText += buildSingleFindingDetails(finding, checkDetailsMap);
      }
    }
    return summaryText;
  } catch (error) {
    console.error("Error generating security scan summary:", error);
    return "";
  }
};

View File

@@ -1,22 +1,11 @@
import { tool } from "@langchain/core/tools";
import { z } from "zod";
import { getMetadataInfo } from "@/actions/findings";
import {
getLighthouseFindings,
getLighthouseLatestFindings,
} from "@/actions/lighthouse/findings";
import { getFindings, getMetadataInfo } from "@/actions/findings";
import { getFindingsSchema, getMetadataInfoSchema } from "@/types/lighthouse";
export const getFindingsTool = tool(
async ({ page, pageSize, query, sort, filters }) => {
return await getLighthouseFindings({
page,
pageSize,
query,
sort,
filters,
});
return await getFindings({ page, pageSize, query, sort, filters });
},
{
name: "getFindings",
@@ -26,104 +15,6 @@ export const getFindingsTool = tool(
},
);
// LangChain tool wrapper: fetch findings from the latest scans of all
// providers, reusing the getFindings filtering schema.
export const getLatestFindingsTool = tool(
  async ({ page, pageSize, query, sort, filters }) => {
    return await getLighthouseLatestFindings({
      page,
      pageSize,
      query,
      sort,
      filters,
    });
  },
  {
    name: "getLatestFindings",
    description:
      "Retrieves a list of the latest findings from the latest scans of all providers with options for filtering by various criteria.",
    // getLatestFindings uses the same schema as getFindings
    schema: getFindingsSchema,
  },
);
// Function to get a summary of new and changed failed findings that appeared in a particular scan
/**
 * Pages through every new/changed failed (unmuted) finding of one scan and
 * groups them as severity -> check_id -> { count, finding_ids }.
 *
 * @param scanId - UUID of the scan to summarize.
 */
export const getNewFailedFindingsSummary = async (scanId: string) => {
  let allFindings: any[] = [];
  let currentPage = 1;
  let totalPages = 1;
  const pageSize = 100;
  // Fetch all pages; the total page count is read from the first response's
  // pagination metadata (totalPages stays 1 when metadata is absent).
  do {
    const response = await getLighthouseFindings({
      page: currentPage,
      pageSize: pageSize,
      sort: "severity",
      filters: {
        "fields[findings]": "check_id,severity",
        "filter[scan]": scanId,
        "filter[status]": "FAIL",
        "filter[muted]": "false",
        "filter[delta__in]": "new,changed",
      },
    });
    if (response?.data) {
      allFindings = allFindings.concat(response.data);
    }
    if (currentPage === 1 && response?.meta?.pagination) {
      totalPages = response.meta.pagination.pages;
    }
    currentPage++;
  } while (currentPage <= totalPages);
  const summary: Record<
    string,
    Record<string, { count: number; finding_ids: string[] }>
  > = {};
  allFindings.forEach((finding) => {
    const severity = finding.attributes.severity;
    const checkId = finding.attributes.check_id;
    const findingId = finding.id;
    // Initialize severity group if it doesn't exist
    if (!summary[severity]) {
      summary[severity] = {};
    }
    // Initialize check_id group if it doesn't exist
    if (!summary[severity][checkId]) {
      summary[severity][checkId] = {
        count: 0,
        finding_ids: [],
      };
    }
    // Add finding to the appropriate group
    summary[severity][checkId].count++;
    summary[severity][checkId].finding_ids.push(findingId);
  });
  return summary;
};
// LangChain tool wrapper around getNewFailedFindingsSummary.
export const getNewFailedFindingsSummaryTool = tool(
  async ({ scanId }) => {
    return await getNewFailedFindingsSummary(scanId);
  },
  {
    name: "getNewFailedFindingsSummary",
    description:
      "Fetches summary of new and changed failed findings that appeared in a particular scan. Summary includes count of findings by severity, check_id and finding_ids.",
    schema: z.object({
      scanId: z
        .string()
        .describe("The UUID of the scan to fetch failed findings summary for."),
    }),
  },
);
export const getMetadataInfoTool = tool(
async ({ query, sort, filters }) => {
return await getMetadataInfo({ query, sort, filters });

View File

@@ -24,9 +24,7 @@ import {
} from "@/lib/lighthouse/tools/compliances";
import {
getFindingsTool,
getLatestFindingsTool,
getMetadataInfoTool,
getNewFailedFindingsSummaryTool,
} from "@/lib/lighthouse/tools/findings";
import {
getFindingsBySeverityTool,
@@ -103,9 +101,7 @@ export async function initLighthouseWorkflow() {
llm: llm,
tools: [
getFindingsTool,
getLatestFindingsTool,
getMetadataInfoTool,
getNewFailedFindingsSummaryTool,
getProviderChecksTool,
getProviderCheckDetailsTool,
],

84
ui/package-lock.json generated
View File

@@ -38,7 +38,6 @@
"framer-motion": "^11.16.0",
"immer": "^10.1.1",
"intl-messageformat": "^10.5.0",
"iovalkey": "^0.3.3",
"jose": "^5.9.3",
"jwt-decode": "^4.0.0",
"lucide-react": "^0.471.0",
@@ -986,12 +985,6 @@
"@swc/helpers": "^0.5.0"
}
},
"node_modules/@iovalkey/commands": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/@iovalkey/commands/-/commands-0.1.0.tgz",
"integrity": "sha512-/B9W4qKSSITDii5nkBCHyPkIkAi+ealUtr1oqBJsLxjSRLka4pxun2VvMNSmcwgAMxgXtQfl0qRv7TE+udPJzg==",
"license": "MIT"
},
"node_modules/@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
@@ -9119,15 +9112,6 @@
"node": ">=6"
}
},
"node_modules/cluster-key-slot": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
"integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/cmdk": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/cmdk/-/cmdk-1.0.0.tgz",
@@ -10001,15 +9985,6 @@
"node": ">=0.4.0"
}
},
"node_modules/denque": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
"integrity": "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.10"
}
},
"node_modules/dequal": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
@@ -11927,26 +11902,6 @@
"tslib": "^2.4.0"
}
},
"node_modules/iovalkey": {
"version": "0.3.3",
"resolved": "https://registry.npmjs.org/iovalkey/-/iovalkey-0.3.3.tgz",
"integrity": "sha512-4rTJX6Q5wTYEvxboXi8DsEiUo+OvqJGtLYOSGm37KpdRXsG5XJjbVtYKGJpPSWP+QT7rWscA4vsrdmzbEbenpw==",
"license": "MIT",
"dependencies": {
"@iovalkey/commands": "^0.1.0",
"cluster-key-slot": "^1.1.0",
"debug": "^4.3.4",
"denque": "^2.1.0",
"lodash.defaults": "^4.2.0",
"lodash.isarguments": "^3.1.0",
"redis-errors": "^1.2.0",
"redis-parser": "^3.0.0",
"standard-as-callback": "^2.1.0"
},
"engines": {
"node": ">=18.12.0"
}
},
"node_modules/is-alphabetical": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz",
@@ -12895,12 +12850,6 @@
"resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
"integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow=="
},
"node_modules/lodash.defaults": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz",
"integrity": "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==",
"license": "MIT"
},
"node_modules/lodash.foreach": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.foreach/-/lodash.foreach-4.5.0.tgz",
@@ -12911,12 +12860,6 @@
"resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ=="
},
"node_modules/lodash.isarguments": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz",
"integrity": "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==",
"license": "MIT"
},
"node_modules/lodash.isplainobject": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
@@ -15366,27 +15309,6 @@
"resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz",
"integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg=="
},
"node_modules/redis-errors": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz",
"integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==",
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/redis-parser": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz",
"integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==",
"license": "MIT",
"dependencies": {
"redis-errors": "^1.0.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/reflect.getprototypeof": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz",
@@ -16088,12 +16010,6 @@
"url": "https://github.com/sponsors/wooorm"
}
},
"node_modules/standard-as-callback": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/standard-as-callback/-/standard-as-callback-2.1.0.tgz",
"integrity": "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==",
"license": "MIT"
},
"node_modules/stdin-discarder": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.1.0.tgz",

View File

@@ -30,7 +30,6 @@
"framer-motion": "^11.16.0",
"immer": "^10.1.1",
"intl-messageformat": "^10.5.0",
"iovalkey": "^0.3.3",
"jose": "^5.9.3",
"jwt-decode": "^4.0.0",
"lucide-react": "^0.471.0",

View File

@@ -10,5 +10,5 @@ export const checkSchema = z.object({
});
export const checkDetailsSchema = z.object({
checkId: z.string(),
id: z.string(),
});

View File

@@ -71,12 +71,21 @@ export const getFindingsSchema = z.object({
.optional()
.describe("Comma-separated list of UUID values"),
// Impact filters
// Impact and Severity filters
"filter[impact]": impactEnum.optional(),
"filter[impact__in]": z
.string()
.optional()
.describe("Comma-separated list of impact values"),
"filter[severity]": z
.enum(["critical", "high", "medium", "low", "informational"])
.optional(),
"filter[severity__in]": z
.string()
.optional()
.describe(
"Comma-separated list of severity values. Do not use it with severity filter.",
),
// Date filters
"filter[inserted_at]": z
@@ -96,9 +105,6 @@ export const getFindingsSchema = z.object({
.optional()
.describe("Date in format YYYY-MM-DD"),
// Muted filter
"filter[muted]": z.boolean().optional(),
// Provider filters
"filter[provider]": z.string().optional().describe("Provider UUID"),
"filter[provider__in]": z
@@ -170,17 +176,6 @@ export const getFindingsSchema = z.object({
.optional()
.describe("Comma-separated list of service values"),
// Severity filters
"filter[severity]": z
.enum(["critical", "high", "medium", "low", "informational"])
.optional(),
"filter[severity__in]": z
.string()
.optional()
.describe(
"Comma-separated list of severity values. Do not use it with severity filter.",
),
// Status filters
"filter[status]": statusEnum.optional(),
"filter[status__in]": z

View File

@@ -1,35 +0,0 @@
// Details for a single security check, including optional remediation
// guidance per delivery channel (each pairs a description with a reference
// link). Consumed by the scan-summary formatter.
export interface CheckDetails {
  id: string;
  title: string;
  description: string;
  risk: string;
  remediation: {
    // Command-line remediation steps
    cli?: {
      description: string;
      reference: string;
    };
    // Terraform-based remediation
    terraform?: {
      description: string;
      reference: string;
    };
    // Native infrastructure-as-code remediation
    nativeiac?: {
      description: string;
      reference: string;
    };
    // Remediation that fits no other channel
    other?: {
      description: string;
      reference: string;
    };
    // Web-UI (console) remediation steps
    wui?: {
      description: string;
      reference: string;
    };
  };
}
// Grouped view of failed findings belonging to one check at one severity.
export interface FindingSummary {
  // Check the findings belong to
  checkId: string;
  // One of: critical | high | medium | low | informational
  severity: string;
  // Number of findings grouped under this entry
  count: number;
  // IDs of the individual findings in the group
  findingIds: string[];
}