Mirror of https://github.com/prowler-cloud/prowler.git, synced 2026-01-25 02:08:11 +00:00

Compare commits: 5.14.1...PRWLR-6373 (13 commits)
95f5d6045e
d33703b437
e9c96b0b11
2bbf1e1017
db20c1e53a
44c6fbbf58
e2d4076c79
53e381a9fc
4d7fb46d12
d775b6971b
356ccdfc35
f3cf824950
019dec744b
@@ -8,6 +8,8 @@ All notable changes to the **Prowler API** are documented in this file.

### Added

- Added M365 as a new provider [(#7563)](https://github.com/prowler-cloud/prowler/pull/7563).
- Added a `compliance/` folder and ZIP export functionality for all compliance reports [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).
- Added a new API endpoint to fetch and download any specific compliance file by name [(#7653)](https://github.com/prowler-cloud/prowler/pull/7653).

---
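The new endpoint is a plain authenticated GET; a minimal sketch of exercising it with `requests`, where the base URL, scan UUID, and token are placeholder assumptions, not values from this changeset:

```python
import requests

# Placeholder assumptions: a locally running Prowler API, a real scan UUID,
# and a valid JWT access token.
BASE_URL = "http://localhost:8080/api/v1"
SCAN_ID = "00000000-0000-0000-0000-000000000000"
TOKEN = "<jwt-access-token>"

resp = requests.get(
    f"{BASE_URL}/scans/{SCAN_ID}/compliance/cis_1.4_aws",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
if resp.status_code == 200:
    # 200 carries the CSV itself as an attachment.
    with open("cis_1.4_aws.csv", "wb") as f:
        f.write(resp.content)
elif resp.status_code == 202:
    # 202 means the scan or its report task is still executing; poll the task URL.
    print("Still executing, task at:", resp.headers.get("Content-Location"))
else:
    print(resp.status_code, resp.text)
```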
api/poetry.lock (generated, 10 lines changed)
@@ -1469,14 +1469,14 @@ with-social = ["django-allauth[socialaccount] (>=64.0.0)"]

[[package]]
name = "django"
version = "5.1.7"
version = "5.1.8"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
optional = false
python-versions = ">=3.10"
groups = ["main", "dev"]
files = [
    {file = "Django-5.1.7-py3-none-any.whl", hash = "sha256:1323617cb624add820cb9611cdcc788312d250824f92ca6048fda8625514af2b"},
    {file = "Django-5.1.7.tar.gz", hash = "sha256:30de4ee43a98e5d3da36a9002f287ff400b43ca51791920bfb35f6917bfe041c"},
    {file = "Django-5.1.8-py3-none-any.whl", hash = "sha256:11b28fa4b00e59d0def004e9ee012fefbb1065a5beb39ee838983fd24493ad4f"},
    {file = "Django-5.1.8.tar.gz", hash = "sha256:42e92a1dd2810072bcc40a39a212b693f94406d0ba0749e68eb642f31dc770b4"},
]

[package.dependencies]
@@ -3657,7 +3657,7 @@ tzlocal = "5.3.1"

type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
reference = "master"
resolved_reference = "0edf19928269b6d42d1c463ab13b90d7c21a65e4"
resolved_reference = "9828824b737b8deda61f4a6646b54e0ad45033b9"

[[package]]
name = "psutil"
@@ -5483,4 +5483,4 @@ type = ["pytest-mypy"]

[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
content-hash = "f3ede96fb76c14d02da37c1132b41a14fa4045787807446fac2cd1750be193e0"
content-hash = "051924735a7069c8393fefc18fc2c310b196ea24ad41b8c984dc5852683d0407"
@@ -7,7 +7,7 @@ authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]

dependencies = [
    "celery[pytest] (>=5.4.0,<6.0.0)",
    "dj-rest-auth[with_social,jwt] (==7.0.1)",
    "django==5.1.7",
    "django==5.1.8",
    "django-allauth==65.4.1",
    "django-celery-beat (>=2.7.0,<3.0.0)",
    "django-celery-results (>=2.5.1,<3.0.0)",
@@ -4503,6 +4503,47 @@ paths:
            schema:
              $ref: '#/components/schemas/ScanUpdateResponse'
          description: ''
  /api/v1/scans/{id}/compliance/{name}:
    get:
      operationId: scan_compliance_download
      description: Download a specific compliance report (e.g., 'cis_1.4_aws') as
        a CSV file.
      summary: Retrieve compliance report as CSV
      parameters:
      - in: query
        name: fields[scan-reports]
        schema:
          type: array
          items:
            type: string
            enum:
            - id
            - name
        description: endpoint return only specific fields in the response on a per-type
          basis by including a fields[TYPE] query parameter.
        explode: false
      - in: path
        name: id
        schema:
          type: string
          format: uuid
        description: A UUID string identifying this scan.
        required: true
      - in: path
        name: name
        schema:
          type: string
        description: The compliance report name, like 'cis_1.4_aws'
        required: true
      tags:
      - Scan
      security:
      - jwtAuth: []
      responses:
        '200':
          description: CSV file containing the compliance report
        '404':
          description: Compliance report not found
  /api/v1/scans/{id}/report:
    get:
      operationId: scans_report_retrieve
@@ -31,6 +31,7 @@ from api.models import (
    UserRoleRelationship,
)
from api.rls import Tenant
from prowler.config.config import get_available_compliance_frameworks

TODAY = str(datetime.today().date())
@@ -2277,7 +2278,8 @@ class TestScanViewSet:
        scan.save()

        monkeypatch.setattr(
            "api.v1.views.env", type("env", (), {"str": lambda self, key: bucket})()
            "api.v1.views.env",
            type("env", (), {"str": lambda self, *args, **kwargs: "test-bucket"})(),
        )

        class FakeS3Client:
@@ -2346,6 +2348,165 @@ class TestScanViewSet:
        assert content_disposition.startswith('attachment; filename="')
        assert f'filename="{file_path.name}"' in content_disposition

    def test_compliance_invalid_framework(self, authenticated_client, scans_fixture):
        scan = scans_fixture[0]
        scan.state = StateChoices.COMPLETED
        scan.output_location = "dummy"
        scan.save()

        url = reverse("scan-compliance", kwargs={"pk": scan.id, "name": "invalid"})
        resp = authenticated_client.get(url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        assert resp.json()["errors"]["detail"] == "Compliance 'invalid' not found."

    def test_compliance_executing(
        self, authenticated_client, scans_fixture, monkeypatch
    ):
        scan = scans_fixture[0]
        scan.state = StateChoices.EXECUTING
        scan.save()
        task = Task.objects.create(tenant_id=scan.tenant_id)
        scan.task = task
        scan.save()
        dummy = {"id": str(task.id), "state": StateChoices.EXECUTING}

        monkeypatch.setattr(
            "api.v1.views.TaskSerializer",
            lambda *args, **kwargs: type("S", (), {"data": dummy}),
        )

        framework = get_available_compliance_frameworks(scan.provider.provider)[0]
        url = reverse("scan-compliance", kwargs={"pk": scan.id, "name": framework})
        resp = authenticated_client.get(url)
        assert resp.status_code == status.HTTP_202_ACCEPTED
        assert "Content-Location" in resp
        assert dummy["id"] in resp["Content-Location"]

    def test_compliance_no_output(self, authenticated_client, scans_fixture):
        scan = scans_fixture[0]
        scan.state = StateChoices.COMPLETED
        scan.output_location = ""
        scan.save()

        framework = get_available_compliance_frameworks(scan.provider.provider)[0]
        url = reverse("scan-compliance", kwargs={"pk": scan.id, "name": framework})
        resp = authenticated_client.get(url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        assert resp.json()["errors"]["detail"] == "The scan has no reports."

    def test_compliance_s3_no_credentials(
        self, authenticated_client, scans_fixture, monkeypatch
    ):
        scan = scans_fixture[0]
        bucket = "bucket"
        key = "file.zip"
        scan.output_location = f"s3://{bucket}/{key}"
        scan.state = StateChoices.COMPLETED
        scan.save()

        monkeypatch.setattr(
            "api.v1.views.get_s3_client",
            lambda: (_ for _ in ()).throw(NoCredentialsError()),
        )

        framework = get_available_compliance_frameworks(scan.provider.provider)[0]
        url = reverse("scan-compliance", kwargs={"pk": scan.id, "name": framework})
        resp = authenticated_client.get(url)
        assert resp.status_code == status.HTTP_403_FORBIDDEN
        assert resp.json()["errors"]["detail"] == "There is a problem with credentials."

    def test_compliance_s3_success(
        self, authenticated_client, scans_fixture, monkeypatch
    ):
        scan = scans_fixture[0]
        bucket = "bucket"
        prefix = "path/scan.zip"
        scan.output_location = f"s3://{bucket}/{prefix}"
        scan.state = StateChoices.COMPLETED
        scan.save()

        monkeypatch.setattr(
            "api.v1.views.env",
            type("env", (), {"str": lambda self, *args, **kwargs: "test-bucket"})(),
        )

        match_key = "path/compliance/mitre_attack_aws.csv"

        class FakeS3Client:
            def list_objects_v2(self, Bucket, Prefix):
                return {"Contents": [{"Key": match_key}]}

            def get_object(self, Bucket, Key):
                return {"Body": io.BytesIO(b"ignored")}

        monkeypatch.setattr("api.v1.views.get_s3_client", lambda: FakeS3Client())

        framework = match_key.split("/")[-1].split(".")[0]
        url = reverse("scan-compliance", kwargs={"pk": scan.id, "name": framework})
        resp = authenticated_client.get(url)
        assert resp.status_code == status.HTTP_200_OK
        cd = resp["Content-Disposition"]
        assert cd.startswith('attachment; filename="')
        assert cd.endswith('filename="mitre_attack_aws.csv"')

    def test_compliance_s3_not_found(
        self, authenticated_client, scans_fixture, monkeypatch
    ):
        scan = scans_fixture[0]
        bucket = "bucket"
        scan.output_location = f"s3://{bucket}/x/scan.zip"
        scan.state = StateChoices.COMPLETED
        scan.save()

        monkeypatch.setattr(
            "api.v1.views.env",
            type("env", (), {"str": lambda self, *args, **kwargs: "test-bucket"})(),
        )

        class FakeS3Client:
            def list_objects_v2(self, Bucket, Prefix):
                return {"Contents": []}

            def get_object(self, Bucket, Key):
                return {"Body": io.BytesIO(b"ignored")}

        monkeypatch.setattr("api.v1.views.get_s3_client", lambda: FakeS3Client())

        url = reverse("scan-compliance", kwargs={"pk": scan.id, "name": "cis_1.4_aws"})
        resp = authenticated_client.get(url)
        assert resp.status_code == status.HTTP_404_NOT_FOUND
        assert (
            resp.json()["errors"]["detail"]
            == "No compliance file found for name 'cis_1.4_aws'."
        )

    def test_compliance_local_file(
        self, authenticated_client, scans_fixture, tmp_path, monkeypatch
    ):
        scan = scans_fixture[0]
        scan.state = StateChoices.COMPLETED
        base = tmp_path / "reports"
        comp_dir = base / "compliance"
        comp_dir.mkdir(parents=True)
        fname = comp_dir / "scan_cis.csv"
        fname.write_bytes(b"ignored")

        scan.output_location = str(base / "scan.zip")
        scan.save()

        monkeypatch.setattr(
            glob,
            "glob",
            lambda p: [str(fname)] if p.endswith("*_cis_1.4_aws.csv") else [],
        )

        url = reverse("scan-compliance", kwargs={"pk": scan.id, "name": "cis_1.4_aws"})
        resp = authenticated_client.get(url)
        assert resp.status_code == status.HTTP_200_OK
        cd = resp["Content-Disposition"]
        assert cd.startswith('attachment; filename="')
        assert cd.endswith(f'filename="{fname.name}"')


@pytest.mark.django_db
class TestTaskViewSet:
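A side note on the stubbing trick these tests repeat: `type("env", (), {...})()` builds a throwaway object whose `str()` method swallows any arguments, which is all the view needs from the real environment reader. A standalone illustration:

```python
# type(name, bases, namespace) creates a class on the fly; calling it yields
# an instance whose .str() ignores its arguments and returns a constant.
env_stub = type("env", (), {"str": lambda self, *args, **kwargs: "test-bucket"})()

assert env_stub.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET") == "test-bucket"
assert env_stub.str("ANY_KEY", "with-default") == "test-bucket"
```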
@@ -960,6 +960,15 @@ class ScanReportSerializer(serializers.Serializer):
        fields = ["id"]


class ScanComplianceReportSerializer(serializers.Serializer):
    id = serializers.CharField(source="scan")
    name = serializers.CharField()

    class Meta:
        resource_name = "scan-reports"
        fields = ["id", "name"]


class ResourceTagSerializer(RLSSerializer):
    """
    Serializer for the ResourceTag model
@@ -1,7 +1,6 @@
import glob
import os

import sentry_sdk
from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
from botocore.exceptions import ClientError, NoCredentialsError, ParamValidationError
@@ -134,6 +133,7 @@ from api.v1.serializers import (
    RoleProviderGroupRelationshipSerializer,
    RoleSerializer,
    RoleUpdateSerializer,
    ScanComplianceReportSerializer,
    ScanCreateSerializer,
    ScanReportSerializer,
    ScanSerializer,
@@ -150,6 +150,7 @@ from api.v1.serializers import (
    UserSerializer,
    UserUpdateSerializer,
)
from prowler.config.config import get_available_compliance_frameworks

CACHE_DECORATOR = cache_control(
    max_age=django_settings.CACHE_MAX_AGE,
@@ -1150,6 +1151,28 @@ class ProviderViewSet(BaseRLSViewSet):
            404: OpenApiResponse(description="The scan has no reports"),
        },
    ),
    compliance=extend_schema(
        tags=["Scan"],
        summary="Retrieve compliance report as CSV",
        description="Download a specific compliance report (e.g., 'cis_1.4_aws') as a CSV file.",
        operation_id="scan_compliance_download",
        parameters=[
            OpenApiParameter(
                name="name",
                type=str,
                location=OpenApiParameter.PATH,
                required=True,
                description="The compliance report name, like 'cis_1.4_aws'",
            ),
        ],
        responses={
            200: OpenApiResponse(
                description="CSV file containing the compliance report"
            ),
            404: OpenApiResponse(description="Compliance report not found"),
        },
        request=None,
    ),
)
@method_decorator(CACHE_DECORATOR, name="list")
@method_decorator(CACHE_DECORATOR, name="retrieve")
@@ -1202,6 +1225,10 @@ class ScanViewSet(BaseRLSViewSet):
            if hasattr(self, "response_serializer_class"):
                return self.response_serializer_class
            return ScanReportSerializer
        elif self.action == "compliance":
            if hasattr(self, "response_serializer_class"):
                return self.response_serializer_class
            return ScanComplianceReportSerializer
        return super().get_serializer_class()

    def partial_update(self, request, *args, **kwargs):
@@ -1219,70 +1246,86 @@ class ScanViewSet(BaseRLSViewSet):
        )
        return Response(data=read_serializer.data, status=status.HTTP_200_OK)

    @action(detail=True, methods=["get"], url_name="report")
    def report(self, request, pk=None):
        scan_instance = self.get_object()
    def _get_task_status(self, scan_instance):
        """
        If the scan or its report-generation task is still executing,
        return an `HTTP 202 Accepted` response with the task payload and Content-Location.
        """
        task = None

        if scan_instance.state == StateChoices.EXECUTING:
            # If the scan is still running, return the task
            prowler_task = Task.objects.get(id=scan_instance.task.id)
            self.response_serializer_class = TaskSerializer
            output_serializer = self.get_serializer(prowler_task)
            return Response(
                data=output_serializer.data,
                status=status.HTTP_202_ACCEPTED,
                headers={
                    "Content-Location": reverse(
                        "task-detail", kwargs={"pk": output_serializer.data["id"]}
                    )
                },
            )

        try:
            output_celery_task = Task.objects.get(
                task_runner_task__task_name="scan-report",
                task_runner_task__task_args__contains=pk,
            )
            self.response_serializer_class = TaskSerializer
            output_serializer = self.get_serializer(output_celery_task)
            if output_serializer.data["state"] == StateChoices.EXECUTING:
                # If the task is still running, return the task
                return Response(
                    data=output_serializer.data,
                    status=status.HTTP_202_ACCEPTED,
                    headers={
                        "Content-Location": reverse(
                            "task-detail", kwargs={"pk": output_serializer.data["id"]}
                        )
                    },
                )
        except Task.DoesNotExist:
            # If the task does not exist, it means that the task is removed from the database
            pass

        output_location = scan_instance.output_location
        if not output_location:
            return Response(
                {"detail": "The scan has no reports."},
                status=status.HTTP_404_NOT_FOUND,
            )

        if scan_instance.output_location.startswith("s3://"):
        if scan_instance.state == StateChoices.EXECUTING and scan_instance.task:
            task = scan_instance.task
        else:
            try:
                s3_client = get_s3_client()
                task = Task.objects.get(
                    task_runner_task__task_name="scan-report",
                    task_runner_task__task_args__contains=str(scan_instance.id),
                )
            except Task.DoesNotExist:
                return None

        self.response_serializer_class = TaskSerializer
        serializer = self.get_serializer(task)

        if serializer.data.get("state") != StateChoices.EXECUTING:
            return None

        return Response(
            data=serializer.data,
            status=status.HTTP_202_ACCEPTED,
            headers={
                "Content-Location": reverse(
                    "task-detail", kwargs={"pk": serializer.data["id"]}
                )
            },
        )
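`_get_task_status` centralizes the 202 contract both download actions share: while the scan or its `scan-report` task is executing, the caller gets `202 Accepted` plus a `Content-Location` header pointing at the task. A client-side polling sketch under that contract (the helper name and usage are illustrative, not part of the diff):

```python
import time

import requests


def fetch_when_ready(url: str, headers: dict, interval: float = 5.0) -> bytes:
    """Poll an endpoint that answers 202 + Content-Location until it serves the file."""
    while True:
        resp = requests.get(url, headers=headers)
        if resp.status_code == 202:
            # The body is the serialized task; the header says where to watch it.
            print("Report not ready, task at:", resp.headers.get("Content-Location"))
            time.sleep(interval)
            continue
        resp.raise_for_status()
        return resp.content
```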
    def _load_file(self, path_pattern, s3=False, bucket=None, list_objects=False):
        """
        Load binary content and filename.
        If s3=True and list_objects=False: treat path_pattern as exact key.
        If s3=True and list_objects=True: list by prefix, then pick first matching key.
        Else: treat path_pattern as glob pattern on local FS.
        Returns (content, filename) or Response on error.
        """
        if s3:
            try:
                client = get_s3_client()
            except (ClientError, NoCredentialsError, ParamValidationError):
                return Response(
                    {"detail": "There is a problem with credentials."},
                    status=status.HTTP_403_FORBIDDEN,
                )

            bucket_name = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET")
            key = output_location[len(f"s3://{bucket_name}/") :]
            if list_objects:
                # list keys under the prefix, then match by suffix
                prefix = os.path.dirname(path_pattern)
                suffix = os.path.basename(path_pattern)
                try:
                    resp = client.list_objects_v2(Bucket=bucket, Prefix=prefix)
                except ClientError:
                    return Response(
                        {"detail": "Failed to list compliance files in S3."},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    )
                contents = resp.get("Contents", [])
                keys = [obj["Key"] for obj in contents if obj["Key"].endswith(suffix)]
                if not keys:
                    return Response(
                        {
                            "detail": f"No compliance file found for name '{os.path.splitext(suffix)[0]}'."
                        },
                        status=status.HTTP_404_NOT_FOUND,
                    )
                # path_pattern here is a prefix; the compliance action builds the correct suffix beforehand
                key = keys[0]
            else:
                # path_pattern is an exact key
                key = path_pattern
            try:
                s3_object = s3_client.get_object(Bucket=bucket_name, Key=key)
                s3_obj = client.get_object(Bucket=bucket, Key=key)
            except ClientError as e:
                error_code = e.response.get("Error", {}).get("Code")
                if error_code == "NoSuchKey":
                code = e.response.get("Error", {}).get("Code")
                if code == "NoSuchKey":
                    return Response(
                        {"detail": "The scan has no reports."},
                        status=status.HTTP_404_NOT_FOUND,
@@ -1291,28 +1334,97 @@ class ScanViewSet(BaseRLSViewSet):
                {"detail": "There is a problem with credentials."},
                status=status.HTTP_403_FORBIDDEN,
            )
            file_content = s3_object["Body"].read()
            filename = os.path.basename(output_location.split("/")[-1])
            content = s3_obj["Body"].read()
            filename = os.path.basename(key)
        else:
            zip_files = glob.glob(output_location)
            try:
                file_path = zip_files[0]
            except IndexError as e:
                sentry_sdk.capture_exception(e)
            files = glob.glob(path_pattern)
            if not files:
                return Response(
                    {"detail": "The scan has no reports."},
                    status=status.HTTP_404_NOT_FOUND,
                )
            with open(file_path, "rb") as f:
                file_content = f.read()
            filename = os.path.basename(file_path)
            filepath = files[0]
            with open(filepath, "rb") as f:
                content = f.read()
            filename = os.path.basename(filepath)

        response = HttpResponse(
            file_content, content_type="application/x-zip-compressed"
        )
        return content, filename

    def _serve_file(self, content, filename, content_type):
        response = HttpResponse(content, content_type=content_type)
        response["Content-Disposition"] = f'attachment; filename="{filename}"'

        return response
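In S3 mode with `list_objects=True`, `_load_file` lists keys under the report prefix and keeps the first one whose name ends with the requested compliance file. A standalone sketch of just that selection step, assuming a `list_objects_v2`-shaped response dict:

```python
import os


def pick_compliance_key(resp: dict, path_pattern: str) -> str | None:
    """Return the first listed key matching the compliance file suffix, if any."""
    suffix = os.path.basename(path_pattern)  # e.g. "mitre_attack_aws.csv"
    keys = [
        obj["Key"]
        for obj in resp.get("Contents", [])
        if obj["Key"].endswith(suffix)
    ]
    return keys[0] if keys else None


resp = {"Contents": [{"Key": "tenant/scan/compliance/mitre_attack_aws.csv"}]}
assert pick_compliance_key(resp, "tenant/scan/compliance/mitre_attack_aws.csv")
assert pick_compliance_key(resp, "tenant/scan/compliance/cis_1.4_aws.csv") is None
```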
    @action(detail=True, methods=["get"], url_name="report")
    def report(self, request, pk=None):
        scan = self.get_object()
        # Check for executing tasks
        running_resp = self._get_task_status(scan)
        if running_resp:
            return running_resp

        if not scan.output_location:
            return Response(
                {"detail": "The scan has no reports."}, status=status.HTTP_404_NOT_FOUND
            )

        if scan.output_location.startswith("s3://"):
            bucket = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET", "")
            key_prefix = scan.output_location.removeprefix(f"s3://{bucket}/")
            loader = self._load_file(
                key_prefix, s3=True, bucket=bucket, list_objects=False
            )
        else:
            loader = self._load_file(scan.output_location, s3=False)

        if isinstance(loader, Response):
            return loader

        content, filename = loader
        return self._serve_file(content, filename, "application/x-zip-compressed")

    @action(
        detail=True,
        methods=["get"],
        url_path="compliance/(?P<name>[^/]+)",
        url_name="compliance",
    )
    def compliance(self, request, pk=None, name=None):
        scan = self.get_object()
        if name not in get_available_compliance_frameworks(scan.provider.provider):
            return Response(
                {"detail": f"Compliance '{name}' not found."},
                status=status.HTTP_404_NOT_FOUND,
            )

        running_resp = self._get_task_status(scan)
        if running_resp:
            return running_resp

        if not scan.output_location:
            return Response(
                {"detail": "The scan has no reports."}, status=status.HTTP_404_NOT_FOUND
            )

        if scan.output_location.startswith("s3://"):
            bucket = env.str("DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET", "")
            key_prefix = scan.output_location.removeprefix(f"s3://{bucket}/")
            prefix = os.path.join(
                os.path.dirname(key_prefix), "compliance", f"{name}.csv"
            )
            loader = self._load_file(prefix, s3=True, bucket=bucket, list_objects=True)
        else:
            base = os.path.dirname(scan.output_location)
            pattern = os.path.join(base, "compliance", f"*_{name}.csv")
            loader = self._load_file(pattern, s3=False)

        if isinstance(loader, Response):
            return loader

        content, filename = loader
        return self._serve_file(content, filename, "text/csv")

    def create(self, request, *args, **kwargs):
        input_serializer = self.get_serializer(data=request.data)
        input_serializer.is_valid(raise_exception=True)
@@ -13,6 +13,39 @@ from prowler.config.config import (
    json_ocsf_file_suffix,
    output_file_timestamp,
)
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
    AWSWellArchitected,
)
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
from prowler.lib.outputs.compliance.cis.cis_m365 import M365CIS
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.ens.ens_azure import AzureENS
from prowler.lib.outputs.compliance.ens.ens_gcp import GCPENS
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_azure import AzureISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_gcp import GCPISO27001
from prowler.lib.outputs.compliance.iso27001.iso27001_kubernetes import (
    KubernetesISO27001,
)
from prowler.lib.outputs.compliance.iso27001.iso27001_nhn import NHNISO27001
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
    AzureMitreAttack,
)
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_gcp import GCPMitreAttack
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_aws import (
    ProwlerThreatScoreAWS,
)
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_azure import (
    ProwlerThreatScoreAzure,
)
from prowler.lib.outputs.compliance.prowler_threatscore.prowler_threatscore_gcp import (
    ProwlerThreatScoreGCP,
)
from prowler.lib.outputs.csv.csv import CSV
from prowler.lib.outputs.html.html import HTML
from prowler.lib.outputs.ocsf.ocsf import OCSF
@@ -20,6 +53,46 @@ from prowler.lib.outputs.ocsf.ocsf import OCSF

logger = get_task_logger(__name__)


COMPLIANCE_CLASS_MAP = {
    "aws": [
        (lambda name: name.startswith("cis_"), AWSCIS),
        (lambda name: name == "mitre_attack_aws", AWSMitreAttack),
        (lambda name: name.startswith("ens_"), AWSENS),
        (
            lambda name: name.startswith("aws_well_architected_framework"),
            AWSWellArchitected,
        ),
        (lambda name: name.startswith("iso27001_"), AWSISO27001),
        (lambda name: name.startswith("kisa"), AWSKISAISMSP),
        (lambda name: name == "prowler_threatscore_aws", ProwlerThreatScoreAWS),
    ],
    "azure": [
        (lambda name: name.startswith("cis_"), AzureCIS),
        (lambda name: name == "mitre_attack_azure", AzureMitreAttack),
        (lambda name: name.startswith("ens_"), AzureENS),
        (lambda name: name.startswith("iso27001_"), AzureISO27001),
        (lambda name: name == "prowler_threatscore_azure", ProwlerThreatScoreAzure),
    ],
    "gcp": [
        (lambda name: name.startswith("cis_"), GCPCIS),
        (lambda name: name == "mitre_attack_gcp", GCPMitreAttack),
        (lambda name: name.startswith("ens_"), GCPENS),
        (lambda name: name.startswith("iso27001_"), GCPISO27001),
        (lambda name: name == "prowler_threatscore_gcp", ProwlerThreatScoreGCP),
    ],
    "kubernetes": [
        (lambda name: name.startswith("cis_"), KubernetesCIS),
        (lambda name: name.startswith("iso27001_"), KubernetesISO27001),
    ],
    "m365": [
        (lambda name: name.startswith("cis_"), M365CIS),
    ],
    "nhn": [
        (lambda name: name.startswith("iso27001_"), NHNISO27001),
    ],
}
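Each provider maps to an ordered list of (predicate, writer class) pairs; the first predicate that matches the framework name wins, and anything unmatched falls back to `GenericCompliance` (see the `next(...)` lookup in `generate_outputs` below). A self-contained sketch of that resolution with stand-in classes:

```python
# Stand-in classes; the real ones live under prowler.lib.outputs.compliance.*
class GenericCompliance: ...
class AWSCIS: ...
class AWSMitreAttack: ...

COMPLIANCE_CLASS_MAP = {
    "aws": [
        (lambda name: name.startswith("cis_"), AWSCIS),
        (lambda name: name == "mitre_attack_aws", AWSMitreAttack),
    ],
}

def resolve(provider_type: str, name: str) -> type:
    # First matching predicate wins; unmatched names get the generic writer.
    return next(
        (cls for cond, cls in COMPLIANCE_CLASS_MAP.get(provider_type, []) if cond(name)),
        GenericCompliance,
    )

assert resolve("aws", "cis_1.4_aws") is AWSCIS
assert resolve("aws", "soc2_aws") is GenericCompliance
assert resolve("unknown", "anything") is GenericCompliance
```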
# Predefined mapping for output formats and their configurations
OUTPUT_FORMATS_MAPPING = {
    "csv": {
@@ -43,13 +116,17 @@ def _compress_output_files(output_directory: str) -> str:
        str: The full path to the newly created ZIP archive.
    """
    zip_path = f"{output_directory}.zip"
    parent_dir = os.path.dirname(output_directory)
    zip_path_abs = os.path.abspath(zip_path)

    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
        for suffix in [config["suffix"] for config in OUTPUT_FORMATS_MAPPING.values()]:
            zipf.write(
                f"{output_directory}{suffix}",
                f"output/{output_directory.split('/')[-1]}{suffix}",
            )
        for foldername, _, filenames in os.walk(parent_dir):
            for filename in filenames:
                file_path = os.path.join(foldername, filename)
                if os.path.abspath(file_path) == zip_path_abs:
                    continue
                arcname = os.path.relpath(file_path, start=parent_dir)
                zipf.write(file_path, arcname)

    return zip_path
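The compressor no longer enumerates known output suffixes; it walks the scan's parent directory, so the new `compliance/` folder rides along automatically, and only the archive being written is skipped. A runnable distillation of the new walk, under standalone names:

```python
import os
import zipfile


def compress_directory(output_directory: str) -> str:
    """Zip everything under output_directory's parent, skipping the archive itself."""
    zip_path = f"{output_directory}.zip"
    parent_dir = os.path.dirname(output_directory)
    zip_path_abs = os.path.abspath(zip_path)

    with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf:
        for foldername, _, filenames in os.walk(parent_dir):
            for filename in filenames:
                file_path = os.path.join(foldername, filename)
                if os.path.abspath(file_path) == zip_path_abs:
                    continue  # never embed the zip inside itself
                # Relative archive names make the ZIP layout mirror the folder.
                zipf.write(file_path, os.path.relpath(file_path, start=parent_dir))
    return zip_path
```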
@@ -102,25 +179,38 @@ def _upload_to_s3(tenant_id: str, zip_path: str, scan_id: str) -> str:
    Raises:
        botocore.exceptions.ClientError: If the upload attempt to S3 fails for any reason.
    """
    if not base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET:
        return
    bucket = base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET
    if not bucket:
        return None

    try:
        s3 = get_s3_client()
        s3_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"

        # Upload the ZIP file (outputs) to the S3 bucket
        zip_key = f"{tenant_id}/{scan_id}/{os.path.basename(zip_path)}"
        s3.upload_file(
            Filename=zip_path,
            Bucket=base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET,
            Key=s3_key,
            Bucket=bucket,
            Key=zip_key,
        )
        return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{s3_key}"

        # Upload the compliance directory to the S3 bucket
        compliance_dir = os.path.join(os.path.dirname(zip_path), "compliance")
        for filename in os.listdir(compliance_dir):
            local_path = os.path.join(compliance_dir, filename)
            if not os.path.isfile(local_path):
                continue
            file_key = f"{tenant_id}/{scan_id}/compliance/{filename}"
            s3.upload_file(Filename=local_path, Bucket=bucket, Key=file_key)

        return f"s3://{base.DJANGO_OUTPUT_S3_AWS_OUTPUT_BUCKET}/{zip_key}"
    except (ClientError, NoCredentialsError, ParamValidationError, ValueError) as e:
        logger.error(f"S3 upload failed: {str(e)}")
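Alongside the ZIP, every file in the local `compliance/` directory is now uploaded under `<tenant>/<scan>/compliance/` so the API can serve individual CSVs without unpacking the archive. A boto3 sketch of that second loop, assuming credentials from the environment and a placeholder bucket:

```python
import os

import boto3


def upload_compliance_dir(zip_path: str, bucket: str, tenant_id: str, scan_id: str) -> None:
    """Mirror the sibling compliance/ directory of zip_path into S3."""
    s3 = boto3.client("s3")  # assumes default credential resolution
    compliance_dir = os.path.join(os.path.dirname(zip_path), "compliance")
    for filename in os.listdir(compliance_dir):
        local_path = os.path.join(compliance_dir, filename)
        if not os.path.isfile(local_path):
            continue  # skip anything that is not a regular file
        key = f"{tenant_id}/{scan_id}/compliance/{filename}"
        s3.upload_file(Filename=local_path, Bucket=bucket, Key=key)
```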
def _generate_output_directory(
    output_directory, prowler_provider: object, tenant_id: str, scan_id: str
) -> str:
) -> tuple[str, str]:
    """
    Generate a file system path for the output directory of a prowler scan.

@@ -145,7 +235,8 @@ def _generate_output_directory(
    Example:
        >>> _generate_output_directory("/tmp", "aws", "tenant-1234", "scan-5678")
        '/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56'
        '/tmp/tenant-1234/aws/scan-5678/prowler-output-2023-02-15T12:34:56',
        '/tmp/tenant-1234/aws/scan-5678/compliance/prowler-output-2023-02-15T12:34:56'
    """
    path = (
        f"{output_directory}/{tenant_id}/{scan_id}/prowler-output-"
@@ -153,4 +244,10 @@ def _generate_output_directory(
    )
    os.makedirs("/".join(path.split("/")[:-1]), exist_ok=True)

    return path
    compliance_path = (
        f"{output_directory}/{tenant_id}/{scan_id}/compliance/prowler-output-"
        f"{prowler_provider}-{output_file_timestamp}"
    )
    os.makedirs("/".join(compliance_path.split("/")[:-1]), exist_ok=True)

    return path, compliance_path
@@ -10,6 +10,7 @@ from django_celery_beat.models import PeriodicTask
from tasks.jobs.connection import check_provider_connection
from tasks.jobs.deletion import delete_provider, delete_tenant
from tasks.jobs.export import (
    COMPLIANCE_CLASS_MAP,
    OUTPUT_FORMATS_MAPPING,
    _compress_output_files,
    _generate_output_directory,
@@ -23,6 +24,9 @@ from api.decorators import set_tenant
from api.models import Finding, Provider, Scan, ScanSummary, StateChoices
from api.utils import initialize_prowler_provider
from api.v1.serializers import ScanTaskSerializer
from prowler.config.config import get_available_compliance_frameworks
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.finding import Finding as FindingOutput

logger = get_task_logger(__name__)
@@ -251,84 +255,107 @@ def generate_outputs(scan_id: str, provider_id: str, tenant_id: str):
        logger.info(f"No findings found for scan {scan_id}")
        return {"upload": False}

    # Initialize the prowler provider
    prowler_provider = initialize_prowler_provider(Provider.objects.get(id=provider_id))
    provider_obj = Provider.objects.get(id=provider_id)
    prowler_provider = initialize_prowler_provider(provider_obj)
    provider_uid = provider_obj.uid
    provider_type = provider_obj.provider

    # Get the provider UID
    provider_uid = Provider.objects.get(id=provider_id).uid

    # Generate and ensure the output directory exists
    output_directory = _generate_output_directory(
    frameworks_bulk = Compliance.get_bulk(provider_type)
    frameworks_avail = get_available_compliance_frameworks(provider_type)
    out_dir, comp_dir = _generate_output_directory(
        DJANGO_TMP_OUTPUT_DIRECTORY, provider_uid, tenant_id, scan_id
    )

    # Define auxiliary variables
    def get_writer(writer_map, name, factory, is_last):
        """
        Return existing writer_map[name] or create via factory().
        In both cases set `.close_file = is_last`.
        """
        initialization = False
        if name not in writer_map:
            writer_map[name] = factory()
            initialization = True
        w = writer_map[name]
        w.close_file = is_last

        return w, initialization

    output_writers = {}
    compliance_writers = {}

    scan_summary = FindingOutput._transform_findings_stats(
        ScanSummary.objects.filter(scan_id=scan_id)
    )

    # Retrieve findings queryset
    findings_qs = Finding.all_objects.filter(scan_id=scan_id).order_by("uid")
    qs = Finding.all_objects.filter(scan_id=scan_id).order_by("uid").iterator()
    for batch, is_last in batched(qs, DJANGO_FINDINGS_BATCH_SIZE):
        fos = [FindingOutput.transform_api_finding(f, prowler_provider) for f in batch]

    # Process findings in batches
    for batch, is_last_batch in batched(
        findings_qs.iterator(), DJANGO_FINDINGS_BATCH_SIZE
    ):
        finding_outputs = [
            FindingOutput.transform_api_finding(finding, prowler_provider)
            for finding in batch
        ]

        # Generate output files
        for mode, config in OUTPUT_FORMATS_MAPPING.items():
            kwargs = dict(config.get("kwargs", {}))
        # Outputs
        for mode, cfg in OUTPUT_FORMATS_MAPPING.items():
            cls = cfg["class"]
            suffix = cfg["suffix"]
            extra = cfg.get("kwargs", {}).copy()
            if mode == "html":
                kwargs["provider"] = prowler_provider
                kwargs["stats"] = scan_summary
                extra.update(provider=prowler_provider, stats=scan_summary)

            writer_class = config["class"]
            if writer_class in output_writers:
                writer = output_writers[writer_class]
                writer.transform(finding_outputs)
                writer.close_file = is_last_batch
            else:
                writer = writer_class(
                    findings=finding_outputs,
                    file_path=output_directory,
                    file_extension=config["suffix"],
            writer, initialization = get_writer(
                output_writers,
                cls,
                lambda cls=cls, fos=fos, suffix=suffix: cls(
                    findings=fos,
                    file_path=out_dir,
                    file_extension=suffix,
                    from_cli=False,
                )
                writer.close_file = is_last_batch
                output_writers[writer_class] = writer
                ),
                is_last,
            )
            if not initialization:
                writer.transform(fos)
            writer.batch_write_data_to_file(**extra)
            writer._data.clear()

            # Write the current batch using the writer
            writer.batch_write_data_to_file(**kwargs)
        # Compliance CSVs
        for name in frameworks_avail:
            compliance_obj = frameworks_bulk[name]
            klass = next(
                (
                    c
                    for cond, c in COMPLIANCE_CLASS_MAP.get(provider_type, [])
                    if cond(name)
                ),
                GenericCompliance,
            )
            filename = f"{comp_dir}_{name}.csv"

            # TODO: Refactor the output classes to avoid this manual reset
            writer._data = []
            writer, initialization = get_writer(
                compliance_writers,
                name,
                lambda klass=klass, fos=fos: klass(
                    findings=fos,
                    compliance=compliance_obj,
                    file_path=filename,
                    from_cli=False,
                ),
                is_last,
            )
            if not initialization:
                writer.transform(fos, compliance_obj, name)
            writer.batch_write_data_to_file()
            writer._data.clear()

    # Compress output files
    output_directory = _compress_output_files(output_directory)
    compressed = _compress_output_files(out_dir)
    upload_uri = _upload_to_s3(tenant_id, compressed, scan_id)

    # Save to configured storage
    uploaded = _upload_to_s3(tenant_id, output_directory, scan_id)

    if uploaded:
        # Remove the local files after upload
    if upload_uri:
        try:
            rmtree(Path(output_directory).parent, ignore_errors=True)
        except FileNotFoundError as e:
            rmtree(Path(compressed).parent, ignore_errors=True)
        except Exception as e:
            logger.error(f"Error deleting output files: {e}")

        output_directory = uploaded
        uploaded = True
        final_location, did_upload = upload_uri, True
    else:
        uploaded = False
        final_location, did_upload = compressed, False

    # Update the scan instance with the output path
    Scan.all_objects.filter(id=scan_id).update(output_location=output_directory)

    logger.info(f"Scan output files generated, output location: {output_directory}")

    return {"upload": uploaded}
    Scan.all_objects.filter(id=scan_id).update(output_location=final_location)
    logger.info(f"Scan outputs at {final_location}")
    return {"upload": did_upload}
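The rewritten loop leans on a `batched` helper that, judging by how `is_last` feeds `close_file`, yields `(batch, is_last)` pairs rather than bare batches. A minimal sketch of such a helper — an assumption inferred from its use here, not code taken from the diff:

```python
from itertools import islice
from typing import Iterable, Iterator


def batched(iterable: Iterable, n: int) -> Iterator[tuple[list, bool]]:
    """Yield (batch, is_last) pairs of up to n items, peeking one batch ahead
    so the final batch can be flagged for writers to close their files."""
    it = iter(iterable)
    batch = list(islice(it, n))
    while batch:
        next_batch = list(islice(it, n))
        yield batch, not next_batch
        batch = next_batch


assert list(batched(range(5), 2)) == [([0, 1], False), ([2, 3], False), ([4], True)]
```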
@@ -31,14 +31,21 @@ class ComplianceOutput(Output):
        compliance: Compliance,
        file_path: str = None,
        file_extension: str = "",
        from_cli: bool = True,
    ) -> None:
        # TODO: This class needs to be refactored to use the Output class init, methods and properties
        self._data = []
        self.close_file = False
        self.file_path = file_path
        self.file_descriptor = None
        # This parameter is to avoid refactoring more code, the CLI does not write in batches, the API does
        self._from_cli = from_cli

        if not file_extension and file_path:
            self._file_extension = "".join(Path(file_path).suffixes)
        if file_extension:
            self._file_extension = file_extension
            self.file_path = f"{file_path}{self.file_extension}"

        if findings:
            # Get the compliance name of the model
@@ -49,7 +56,7 @@ class ComplianceOutput(Output):
            )
            self.transform(findings, compliance, compliance_name)
            if not self._file_descriptor and file_path:
                self.create_file_descriptor(file_path)
                self.create_file_descriptor(self.file_path)

    def batch_write_data_to_file(self) -> None:
        """
@@ -69,12 +76,14 @@ class ComplianceOutput(Output):
                    fieldnames=[field.upper() for field in self._data[0].dict().keys()],
                    delimiter=";",
                )
                csv_writer.writeheader()
                if self._file_descriptor.tell() == 0:
                    csv_writer.writeheader()
                for finding in self._data:
                    csv_writer.writerow(
                        {k.upper(): v for k, v in finding.dict().items()}
                    )
                self._file_descriptor.close()
                if self.close_file or self._from_cli:
                    self._file_descriptor.close()
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
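With batched writes, the same descriptor is flushed several times, so the CSV header must only go out on the first flush; checking `tell() == 0` does exactly that. A standalone sketch of the pattern:

```python
import csv
import io


def flush_batch(fd, rows: list[dict]) -> None:
    """Append rows to fd, emitting the header only while the file is empty."""
    writer = csv.DictWriter(fd, fieldnames=list(rows[0].keys()), delimiter=";")
    if fd.tell() == 0:
        writer.writeheader()
    writer.writerows(rows)


buf = io.StringIO()
flush_batch(buf, [{"REQUIREMENT": "1.1", "STATUS": "PASS"}])
flush_batch(buf, [{"REQUIREMENT": "1.2", "STATUS": "FAIL"}])
assert buf.getvalue().count("REQUIREMENT;STATUS") == 1  # header written once
```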
@@ -257,3 +257,42 @@ export const getExportsZip = async (scanId: string) => {
    };
  }
};

export const getComplianceCsv = async (
  scanId: string,
  complianceId: string,
) => {
  const headers = await getAuthHeaders({ contentType: false });

  const url = new URL(
    `${apiBaseUrl}/scans/${scanId}/compliance/${complianceId}`,
  );

  try {
    const response = await fetch(url.toString(), {
      headers,
    });

    if (!response.ok) {
      const errorData = await response.json();
      throw new Error(
        errorData?.errors?.[0]?.detail || "Failed to fetch compliance report",
      );
    }

    // Get the blob data as an array buffer
    const arrayBuffer = await response.arrayBuffer();
    // Convert to base64
    const base64 = Buffer.from(arrayBuffer).toString("base64");

    return {
      success: true,
      data: base64,
      filename: `scan-${scanId}-compliance-${complianceId}.csv`,
    };
  } catch (error) {
    return {
      error: getErrorMessage(error),
    };
  }
};
@@ -159,6 +159,7 @@ const SSRComplianceGrid = async ({
          framework,
          version,
          requirements_status: { passed, total },
          compliance_id,
        } = attributes;

        return (
@@ -170,6 +171,8 @@ const SSRComplianceGrid = async ({
            totalRequirements={total}
            prevPassingRequirements={passed}
            prevTotalRequirements={total}
            scanId={scanId}
            complianceId={compliance_id}
          />
        );
      })}
@@ -1,7 +1,13 @@
"use client";

import { Card, CardBody, Progress } from "@nextui-org/react";
import Image from "next/image";
import { useSearchParams } from "next/navigation";
import React from "react";

import { DownloadIconButton, toast } from "@/components/ui";
import { downloadComplianceCsv } from "@/lib/helper";

import { getComplianceIcon } from "../icons";

interface ComplianceCardProps {
@@ -11,6 +17,8 @@ interface ComplianceCardProps {
  totalRequirements: number;
  prevPassingRequirements: number;
  prevTotalRequirements: number;
  scanId: string;
  complianceId: string;
}

export const ComplianceCard: React.FC<ComplianceCardProps> = ({
@@ -18,7 +26,12 @@ export const ComplianceCard: React.FC<ComplianceCardProps> = ({
  version,
  passingRequirements,
  totalRequirements,
  scanId,
  complianceId,
}) => {
  const searchParams = useSearchParams();
  const hasRegionFilter = searchParams.has("filter[region__in]");

  const formatTitle = (title: string) => {
    return title.split("-").join(" ");
  };
@@ -27,6 +40,8 @@ export const ComplianceCard: React.FC<ComplianceCardProps> = ({
    (passingRequirements / totalRequirements) * 100,
  );

  // Calculates the percentage change in passing requirements compared to the previous scan.
  //
  // const prevRatingPercentage = Math.floor(
  //   (prevPassingRequirements / prevTotalRequirements) * 100,
  // );
@@ -79,13 +94,22 @@ export const ComplianceCard: React.FC<ComplianceCardProps> = ({
          }}
          color={getRatingColor(ratingPercentage)}
        />
        <div className="mt-2 flex justify-between">
        <div className="mt-2 flex items-center justify-between">
          <small>
            <span className="mr-1 text-xs font-semibold">
              {passingRequirements} / {totalRequirements}
            </span>
            Passing Requirements
          </small>

          <DownloadIconButton
            paramId={complianceId}
            onDownload={() =>
              downloadComplianceCsv(scanId, complianceId, toast)
            }
            textTooltip="Download compliance CSV report"
            isDisabled={hasRegionFilter}
          />
          {/* <small>{getScanChange()}</small> */}
        </div>
      </div>
@@ -2,12 +2,10 @@

import { Tooltip } from "@nextui-org/react";
import { ColumnDef } from "@tanstack/react-table";
import { DownloadIcon } from "lucide-react";
import { useSearchParams } from "next/navigation";

import { InfoIcon } from "@/components/icons";
import { toast } from "@/components/ui";
import { CustomButton } from "@/components/ui/custom";
import { DownloadIconButton, toast } from "@/components/ui";
import { DateWithTime, EntityInfoShort } from "@/components/ui/entities";
import { TriggerSheet } from "@/components/ui/sheet";
import { DataTableColumnHeader, StatusBadge } from "@/components/ui/table";
@@ -136,19 +134,11 @@ export const ColumnGetScans: ColumnDef<ScanProps>[] = [
      const scanState = row.original.attributes?.state;

      return (
        <div className="flex w-14 items-center justify-center">
          <CustomButton
            variant="ghost"
            isDisabled={scanState !== "completed"}
            onPress={() => downloadScanZip(scanId, toast)}
            className="p-0 text-default-500 hover:text-primary disabled:opacity-30"
            isIconOnly
            ariaLabel="Download .zip"
            size="sm"
          >
            <DownloadIcon size={16} />
          </CustomButton>
        </div>
        <DownloadIconButton
          paramId={scanId}
          onDownload={() => downloadScanZip(scanId, toast)}
          isDisabled={scanState !== "completed"}
        />
      );
    },
  },
@@ -0,0 +1,40 @@
"use client";

import { Tooltip } from "@nextui-org/react";
import { DownloadIcon } from "lucide-react";

import { CustomButton } from "../custom/custom-button";

interface DownloadIconButtonProps {
  paramId: string;
  onDownload: (paramId: string) => void;
  ariaLabel?: string;
  isDisabled?: boolean;
  textTooltip?: string;
}

export const DownloadIconButton = ({
  paramId,
  onDownload,
  ariaLabel = "Download report",
  isDisabled = false,
  textTooltip = "Download report",
}: DownloadIconButtonProps) => {
  return (
    <div className="flex items-center justify-end">
      <Tooltip content={textTooltip} className="text-xs">
        <CustomButton
          variant="ghost"
          isDisabled={isDisabled}
          onPress={() => onDownload(paramId)}
          className="p-0 text-default-500 hover:text-primary disabled:opacity-30"
          isIconOnly
          ariaLabel={ariaLabel}
          size="sm"
        >
          <DownloadIcon size={16} />
        </CustomButton>
      </Tooltip>
    </div>
  );
};
@@ -4,6 +4,7 @@ export * from "./alert-dialog/AlertDialog";
export * from "./chart/Chart";
export * from "./content-layout/content-layout";
export * from "./dialog/dialog";
export * from "./download-icon-button/download-icon-button";
export * from "./dropdown/Dropdown";
export * from "./headers/navigation-header";
export * from "./label/Label";
@@ -1,4 +1,4 @@
import { getExportsZip } from "@/actions/scans";
import { getComplianceCsv, getExportsZip } from "@/actions/scans";
import { getTask } from "@/actions/task";
import { auth } from "@/auth.config";
import { useToast } from "@/components/ui";
@@ -91,6 +91,43 @@ export const downloadScanZip = async (
  }
};

export const downloadComplianceCsv = async (
  scanId: string,
  complianceId: string,
  toast: ReturnType<typeof useToast>["toast"],
) => {
  const result = await getComplianceCsv(scanId, complianceId);

  if (result?.success && result?.data) {
    const binaryString = window.atob(result.data);
    const bytes = new Uint8Array(binaryString.length);
    for (let i = 0; i < binaryString.length; i++) {
      bytes[i] = binaryString.charCodeAt(i);
    }
    const blob = new Blob([bytes], { type: "text/csv" });

    const url = window.URL.createObjectURL(blob);
    const a = document.createElement("a");
    a.href = url;
    a.download = result.filename;
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);
    window.URL.revokeObjectURL(url);

    toast({
      title: "Download Complete",
      description: "The compliance report has been downloaded successfully.",
    });
  } else if (result?.error) {
    toast({
      variant: "destructive",
      title: "Download Failed",
      description: result.error,
    });
  }
};

export const isGoogleOAuthEnabled =
  !!process.env.SOCIAL_GOOGLE_OAUTH_CLIENT_ID &&
  !!process.env.SOCIAL_GOOGLE_OAUTH_CLIENT_SECRET;