Compare commits

..

19 Commits

Author SHA1 Message Date
Prowler Bot
23aded92a3 chore(api): Update CHANGELOG (#7327)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-03-20 15:24:16 +05:45
Prowler Bot
6e56d3862d fix(scan_id): Read the ID from the Scan object (#7326)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-03-20 15:22:57 +05:45
Prowler Bot
d95fccd163 fix(gcp): make provider id mandatory in test_connection (#7315)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-03-19 20:38:37 +05:45
Prowler Bot
7ddf860a55 fix: add a handled response in case local files are missing (#7227)
Co-authored-by: Adrián Jesús Peña Rodríguez <adrianjpr@gmail.com>
2025-03-19 11:58:25 +01:00
Prowler Bot
3f41c75a45 fix(route53): solve false positive in route53_public_hosted_zones_cloudwatch_logging_enabled (#7293)
Co-authored-by: Daniel Barranquero <74871504+danibarranqueroo@users.noreply.github.com>
2025-03-19 13:39:56 +05:45
Prowler Bot
04b6dbf639 fix(microsoft365): typo Microsoft365NotTenantIdButClientIdAndClienSecretError (#7258)
Co-authored-by: Hugo Pereira Brito <101209179+HugoPBrito@users.noreply.github.com>
2025-03-19 13:38:08 +05:45
Prowler Bot
ff4d16deb5 fix(scan): add compliance info inside finding (#7247)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-03-19 13:37:16 +05:45
Prowler Bot
562921cd5e fix(test-connection): Handle provider without secret (#7290)
Co-authored-by: Pepe Fagoaga <pepe@prowler.com>
2025-03-19 13:36:38 +05:45
Prowler Bot
8f061e4fed fix(exports): change the way to remove the local export files after s3 upload (#7224)
Co-authored-by: Adrián Jesús Peña Rodríguez <adrianjpr@gmail.com>
2025-03-17 17:30:57 +05:45
Prowler Bot
3fb86d754a fix(cloudwatch): handle None metric alarms (#7207)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-03-12 16:18:44 +01:00
Prowler Bot
7874707310 chore(sentry): ignore new exceptions in Sentry (#7189)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-03-12 11:35:32 +01:00
Prowler Bot
1c934e37c7 chore(sentry): ignore expected errors in GCP API (#7186)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-03-11 17:27:07 +01:00
Prowler Bot
8459cff16d fix(ens): remove and change duplicated ids (#7180)
Co-authored-by: Pedro Martín <pedromarting3@gmail.com>
2025-03-11 12:46:31 +01:00
Prowler Bot
57ae096395 fix(azure): correct check title for SQL Server Unrestricted (#7160)
Co-authored-by: Gary Mclean <gary.mclean@krrv.io>
2025-03-07 19:22:35 +01:00
Prowler Bot
200185de25 fix(metadata): match type with check results (#7155)
Co-authored-by: Andoni Alonso <14891798+andoniaf@users.noreply.github.com>
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-03-07 18:16:32 +01:00
Prowler Bot
f8447b0f79 fix(metadata): typo in ec2_securitygroup_allow_wide_open_public_ipv4 (#7158)
Co-authored-by: ryan-stavella <71134114+ryan-stavella@users.noreply.github.com>
2025-03-07 16:32:36 +01:00
Prowler Bot
19289bbe20 fix(aws): ecs_task_definitions_no_environment_secrets.metadata.json (#7153)
Co-authored-by: Kay Agahd <kagahd@users.noreply.github.com>
2025-03-07 15:27:59 +01:00
César Arroba
b5b371fa0c chore: increase release to 5.4.1 (#7144)
Co-authored-by: Sergio Garcia <hello@mistercloudsec.com>
2025-03-07 14:20:28 +01:00
Prowler Bot
939a623cec fix: tweaks for compliance cards (#7148)
Co-authored-by: Pablo Lara <larabjj@gmail.com>
2025-03-07 11:40:55 +01:00
41 changed files with 480 additions and 524 deletions

View File

@@ -2,6 +2,16 @@
All notable changes to the **Prowler API** are documented in this file.
---
## [v1.5.1] (Prowler v5.4.1)
### Fixed
- Added a handled response in case local files are missing [(#7183)](https://github.com/prowler-cloud/prowler/pull/7183).
- Fixed a race condition when deleting export files after the S3 upload [(#7172)](https://github.com/prowler-cloud/prowler/pull/7172).
- Handled exception when a provider has no secret in test connection [(#7283)](https://github.com/prowler-cloud/prowler/pull/7283).
---
## [v1.5.0] (Prowler v5.4.0)

3
api/poetry.lock generated
View File

@@ -3535,6 +3535,7 @@ files = [
{file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"},
{file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"},
@@ -3543,6 +3544,8 @@ files = [
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"},
{file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"},
{file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"},

View File

@@ -8,7 +8,7 @@ description = "Prowler's API (Django/DRF)"
license = "Apache-2.0"
name = "prowler-api"
package-mode = false
version = "1.5.0"
version = "1.5.1"
[tool.poetry.dependencies]
celery = {extras = ["pytest"], version = "^5.4.0"}

View File

@@ -1,7 +1,7 @@
openapi: 3.0.3
info:
title: Prowler API
version: 1.5.0
version: 1.5.1
description: |-
Prowler API specification.

View File

@@ -1,25 +1,24 @@
from datetime import datetime, timedelta, timezone
from unittest.mock import patch, MagicMock
from unittest.mock import MagicMock, patch
import pytest
from rest_framework.exceptions import NotFound, ValidationError
from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Invitation, Provider
from api.utils import (
get_prowler_provider_kwargs,
initialize_prowler_provider,
merge_dicts,
prowler_provider_connection_test,
return_prowler_provider,
validate_invitation,
)
from prowler.providers.aws.aws_provider import AwsProvider
from prowler.providers.azure.azure_provider import AzureProvider
from prowler.providers.gcp.gcp_provider import GcpProvider
from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider
from rest_framework.exceptions import ValidationError, NotFound
from api.db_router import MainRouter
from api.exceptions import InvitationTokenExpiredException
from api.models import Invitation
from api.models import Provider
from api.utils import (
merge_dicts,
return_prowler_provider,
initialize_prowler_provider,
prowler_provider_connection_test,
get_prowler_provider_kwargs,
)
from api.utils import validate_invitation
class TestMergeDicts:
@@ -144,6 +143,18 @@ class TestProwlerProviderConnectionTest:
key="value", provider_id="1234567890", raise_on_exception=False
)
@pytest.mark.django_db
@patch("api.utils.return_prowler_provider")
def test_prowler_provider_connection_test_without_secret(
self, mock_return_prowler_provider, providers_fixture
):
mock_return_prowler_provider.return_value = MagicMock()
connection = prowler_provider_connection_test(providers_fixture[0])
assert connection.is_connected is False
assert isinstance(connection.error, Provider.secret.RelatedObjectDoesNotExist)
assert str(connection.error) == "Provider has no secret."
class TestGetProwlerProviderKwargs:
@pytest.mark.parametrize(

View File

@@ -2284,6 +2284,25 @@ class TestScanViewSet:
assert f'filename="{expected_filename}"' in content_disposition
assert response.content == b"s3 zip content"
def test_report_s3_success_no_local_files(
self, authenticated_client, scans_fixture, monkeypatch
):
"""
When output_location is a local path and glob.glob returns an empty list,
the view should return HTTP 404 with detail "The scan has no reports."
"""
scan = scans_fixture[0]
scan.output_location = "/tmp/nonexistent_report_pattern.zip"
scan.state = StateChoices.COMPLETED
scan.save()
monkeypatch.setattr("api.v1.views.glob.glob", lambda pattern: [])
url = reverse("scan-report", kwargs={"pk": scan.id})
response = authenticated_client.get(url)
assert response.status_code == 404
assert response.json()["errors"]["detail"] == "The scan has no reports."
def test_report_local_file(
self, authenticated_client, scans_fixture, tmp_path, monkeypatch
):

View File

@@ -130,7 +130,10 @@ def prowler_provider_connection_test(provider: Provider) -> Connection:
Connection: A connection object representing the result of the connection test for the specified provider.
"""
prowler_provider = return_prowler_provider(provider)
prowler_provider_kwargs = provider.secret.secret
try:
prowler_provider_kwargs = provider.secret.secret
except Provider.secret.RelatedObjectDoesNotExist as secret_error:
return Connection(is_connected=False, error=secret_error)
return prowler_provider.test_connection(
**prowler_provider_kwargs, provider_id=provider.uid, raise_on_exception=False
)

View File

@@ -1,6 +1,7 @@
import glob
import os
import sentry_sdk
from allauth.socialaccount.providers.github.views import GitHubOAuth2Adapter
from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
from botocore.exceptions import ClientError, NoCredentialsError, ParamValidationError
@@ -239,7 +240,7 @@ class SchemaView(SpectacularAPIView):
def get(self, request, *args, **kwargs):
spectacular_settings.TITLE = "Prowler API"
spectacular_settings.VERSION = "1.5.0"
spectacular_settings.VERSION = "1.5.1"
spectacular_settings.DESCRIPTION = (
"Prowler API specification.\n\nThis file is auto-generated."
)
@@ -1280,7 +1281,14 @@ class ScanViewSet(BaseRLSViewSet):
filename = os.path.basename(output_location.split("/")[-1])
else:
zip_files = glob.glob(output_location)
file_path = zip_files[0]
try:
file_path = zip_files[0]
except IndexError as e:
sentry_sdk.capture_exception(e)
return Response(
{"detail": "The scan has no reports."},
status=status.HTTP_404_NOT_FOUND,
)
with open(file_path, "rb") as f:
file_content = f.read()
filename = os.path.basename(file_path)

View File

@@ -2,6 +2,8 @@ import sentry_sdk
from config.env import env
IGNORED_EXCEPTIONS = [
# Provider is not connected due to credentials errors
"is not connected",
# Authentication Errors from AWS
"InvalidToken",
"AccessDeniedException",
@@ -11,15 +13,47 @@ IGNORED_EXCEPTIONS = [
"AuthFailure",
"InvalidClientTokenId",
"AccessDenied",
# Shodan Check
"No Shodan API Key",
# For now we don't want to log the RequestLimitExceeded errors
"RequestLimitExceeded",
"No Shodan API Key", # Shodan Check
"RequestLimitExceeded", # For now we don't want to log the RequestLimitExceeded errors
"ThrottlingException",
"Rate exceeded",
# The following comes from urllib3
# eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
"Pool is closed",
"SubscriptionRequiredException",
"UnknownOperationException",
"OptInRequired",
"ReadTimeout",
"LimitExceeded",
"ConnectTimeoutError",
"ExpiredToken",
"IncompleteSignature",
"RegionDisabledException",
"TooManyRequestsException",
"SignatureDoesNotMatch",
"InvalidParameterValueException",
"InvalidInputException",
"ValidationException",
"AWSSecretAccessKeyInvalidError",
"InvalidAction",
"Pool is closed", # The following comes from urllib3: eu-west-1 -- HTTPClientError[126]: An HTTP Client raised an unhandled exception: AWSHTTPSConnectionPool(host='hostname.s3.eu-west-1.amazonaws.com', port=443): Pool is closed.
# Authentication Errors from GCP
"ClientAuthenticationError",
"AuthorizationFailed",
"Reauthentication is needed",
"Permission denied to get service",
"API has not been used in project",
"HttpError 404 when requesting",
"GCPNoAccesibleProjectsError",
# Authentication Errors from Azure
"ClientAuthenticationError",
"AuthorizationFailed",
"Subscription Not Registered",
"AzureNotValidClientIdError",
"AzureNotValidClientSecretError",
"AzureNotValidTenantIdError",
"AzureTenantIdAndClientSecretNotBelongingToClientIdError",
"AzureTenantIdAndClientIdNotBelongingToClientSecretError",
"AzureClientIdAndClientSecretNotBelongingToTenantIdError",
"AzureHTTPResponseError",
"Error with credentials provided",
]

View File

@@ -1,3 +1,4 @@
from pathlib import Path
from shutil import rmtree
from celery import chain, shared_task
@@ -264,10 +265,14 @@ def generate_outputs(scan_id: str, provider_id: str, tenant_id: str):
uploaded = _upload_to_s3(tenant_id, output_directory, scan_id)
if uploaded:
# Remove the local files after upload
try:
rmtree(Path(output_directory).parent, ignore_errors=True)
except FileNotFoundError as e:
logger.error(f"Error deleting output files: {e}")
output_directory = uploaded
uploaded = True
# Remove the local files after upload
rmtree(DJANGO_TMP_OUTPUT_DIRECTORY, ignore_errors=True)
else:
uploaded = False

319
poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -2932,7 +2932,7 @@
]
},
{
"Id": "op.pl.2.aws.warch.1",
"Id": "op.pl.2.r1.aws.warch.1",
"Description": "Sistema de gestión",
"Attributes": [
{
@@ -2956,7 +2956,7 @@
"Checks": []
},
{
"Id": "op.pl.2.aws.warch.1",
"Id": "op.pl.2.r2.aws.warch.1",
"Description": "Sistema de gestión de la seguridad con mejora continua",
"Attributes": [
{
@@ -2980,7 +2980,7 @@
"Checks": []
},
{
"Id": "op.pl.2.aws.warch.1",
"Id": "op.pl.2.r3.aws.warch.1",
"Description": "Validación de datos",
"Attributes": [
{
@@ -4304,32 +4304,6 @@
],
"Checks": []
},
{
"Id": "op.mon.3.aws.cwl.1",
"Description": "Vigilancia",
"Attributes": [
{
"IdGrupoControl": "op.mon.3",
"Marco": "operacional",
"Categoria": "monitorización del sistema",
"DescripcionControl": "Deberá asegurarse que todos los servicios que se utilicen en la arquitectura de la aplicación desplegada en AWS estén generando logs",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automatico",
"Dependencias": []
}
],
"Checks": [
"cloudtrail_cloudwatch_logging_enabled"
]
},
{
"Id": "mp.com.2.aws.vpn.1",
"Description": "Protección de la confidencialidad",

View File

@@ -12,7 +12,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "5.4.0"
prowler_version = "5.4.1"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"

View File

@@ -1,4 +1,5 @@
import datetime
from types import SimpleNamespace
from typing import Generator
from prowler.lib.check.check import (
@@ -22,7 +23,7 @@ from prowler.lib.scan.exceptions.exceptions import (
ScanInvalidSeverityError,
ScanInvalidStatusError,
)
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.models import Audit_Metadata, ProviderOutputOptions
from prowler.providers.common.provider import Provider
@@ -38,6 +39,8 @@ class Scan:
_progress: float = 0.0
_duration: int = 0
_status: list[str] = None
_bulk_checks_metadata: dict[str, CheckMetadata]
_bulk_compliance_frameworks: dict
def __init__(
self,
@@ -88,18 +91,18 @@ class Scan:
raise ScanInvalidStatusError(f"Invalid status provided: {s}.")
# Load bulk compliance frameworks
bulk_compliance_frameworks = Compliance.get_bulk(provider.type)
self._bulk_compliance_frameworks = Compliance.get_bulk(provider.type)
# Get bulk checks metadata for the provider
bulk_checks_metadata = CheckMetadata.get_bulk(provider.type)
self._bulk_checks_metadata = CheckMetadata.get_bulk(provider.type)
# Complete checks metadata with the compliance framework specification
bulk_checks_metadata = update_checks_metadata_with_compliance(
bulk_compliance_frameworks, bulk_checks_metadata
self._bulk_checks_metadata = update_checks_metadata_with_compliance(
self._bulk_compliance_frameworks, self._bulk_checks_metadata
)
# Create a list of valid categories
valid_categories = set()
for check, metadata in bulk_checks_metadata.items():
for check, metadata in self._bulk_checks_metadata.items():
for category in metadata.Categories:
if category not in valid_categories:
valid_categories.add(category)
@@ -107,7 +110,7 @@ class Scan:
# Validate checks
if checks:
for check in checks:
if check not in bulk_checks_metadata.keys():
if check not in self._bulk_checks_metadata.keys():
raise ScanInvalidCheckError(f"Invalid check provided: {check}.")
# Validate services
@@ -121,7 +124,7 @@ class Scan:
# Validate compliances
if compliances:
for compliance in compliances:
if compliance not in bulk_compliance_frameworks.keys():
if compliance not in self._bulk_compliance_frameworks.keys():
raise ScanInvalidComplianceFrameworkError(
f"Invalid compliance provided: {compliance}."
)
@@ -147,8 +150,8 @@ class Scan:
# Load checks to execute
self._checks_to_execute = sorted(
load_checks_to_execute(
bulk_checks_metadata=bulk_checks_metadata,
bulk_compliance_frameworks=bulk_compliance_frameworks,
bulk_checks_metadata=self._bulk_checks_metadata,
bulk_compliance_frameworks=self._bulk_compliance_frameworks,
check_list=checks,
service_list=services,
compliance_frameworks=compliances,
@@ -215,9 +218,17 @@ class Scan:
def duration(self) -> int:
return self._duration
@property
def bulk_checks_metadata(self) -> dict[str, CheckMetadata]:
return self._bulk_checks_metadata
@property
def bulk_compliance_frameworks(self) -> dict[str, CheckMetadata]:
return self._bulk_compliance_frameworks
def scan(
self,
custom_checks_metadata: dict = {},
custom_checks_metadata: dict = None,
) -> Generator[tuple[float, list[Finding]], None, None]:
"""
Executes the scan by iterating over the checks to execute and executing each check.
@@ -234,6 +245,14 @@ class Scan:
Exception: If any other error occurs during the execution of a check.
"""
try:
# Using SimpleNamespace to create a mocked object
arguments = SimpleNamespace()
output_options = ProviderOutputOptions(
arguments=arguments,
bulk_checks_metadata=self.bulk_checks_metadata,
)
checks_to_execute = self.checks_to_execute
# Initialize the Audit Metadata
# TODO: this should be done in the provider class
@@ -301,7 +320,9 @@ class Scan:
try:
findings.append(
Finding.generate_output(
self._provider, finding, output_options=None
self.provider,
finding,
output_options=output_options,
)
)
except Exception:

View File

@@ -7,9 +7,9 @@
],
"ServiceName": "account",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:access-recorder:region:account-id:recorder/resource-id",
"ResourceIdTemplate": "arn:partition:service:region:account-id",
"Severity": "medium",
"ResourceType": "Other",
"ResourceType": "AwsAccount",
"Description": "Maintain current contact details.",
"Risk": "Ensure contact email and telephone details for AWS accounts are current and map to more than one individual in your organization. An AWS account supports a number of contact details, and AWS will use these to contact the account owner if activity judged to be in breach of Acceptable Use Policy. If an AWS account is observed to be behaving in a prohibited or suspicious manner, AWS will attempt to contact the account owner by email and phone using the contact details listed. If this is unsuccessful and the account behavior needs urgent mitigation, proactive measures may be taken, including throttling of traffic between the account exhibiting suspicious behavior and the AWS API endpoints and the Internet. This will result in impaired service to and from the account in question.",
"RelatedUrl": "",

View File

@@ -7,9 +7,9 @@
],
"ServiceName": "account",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:access-recorder:region:account-id:recorder/resource-id",
"ResourceIdTemplate": "arn:partition:service:region:account-id",
"Severity": "medium",
"ResourceType": "Other",
"ResourceType": "AwsAccount",
"Description": "Maintain different contact details to security, billing and operations.",
"Risk": "Ensure contact email and telephone details for AWS accounts are current and map to more than one individual in your organization. An AWS account supports a number of contact details, and AWS will use these to contact the account owner if activity judged to be in breach of Acceptable Use Policy. If an AWS account is observed to be behaving in a prohibited or suspicious manner, AWS will attempt to contact the account owner by email and phone using the contact details listed. If this is unsuccessful and the account behavior needs urgent mitigation, proactive measures may be taken, including throttling of traffic between the account exhibiting suspicious behavior and the AWS API endpoints and the Internet. This will result in impaired service to and from the account in question.",
"RelatedUrl": "https://docs.aws.amazon.com/accounts/latest/reference/manage-acct-update-contact.html",

View File

@@ -7,9 +7,9 @@
],
"ServiceName": "account",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:access-recorder:region:account-id:recorder/resource-id",
"ResourceIdTemplate": "arn:partition:service:region:account-id",
"Severity": "medium",
"ResourceType": "Other",
"ResourceType": "AwsAccount",
"Description": "Ensure security contact information is registered.",
"Risk": "AWS provides customers with the option of specifying the contact information for accounts security team. It is recommended that this information be provided. Specifying security-specific contact information will help ensure that security advisories sent by AWS reach the team in your organization that is best equipped to respond to them.",
"RelatedUrl": "",

View File

@@ -7,9 +7,9 @@
],
"ServiceName": "account",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:access-recorder:region:account-id:recorder/resource-id",
"ResourceIdTemplate": "arn:partition:service:region:account-id",
"Severity": "medium",
"ResourceType": "Other",
"ResourceType": "AwsAccount",
"Description": "Ensure security questions are registered in the AWS account.",
"Risk": "The AWS support portal allows account owners to establish security questions that can be used to authenticate individuals calling AWS customer service for support. It is recommended that security questions be established. When creating a new AWS account a default super user is automatically created. This account is referred to as the root account. It is recommended that the use of this account be limited and highly controlled. During events in which the root password is no longer accessible or the MFA token associated with root is lost/destroyed it is possible through authentication using secret questions and associated answers to recover root login access.",
"RelatedUrl": "",

View File

@@ -7,12 +7,15 @@ from prowler.providers.aws.services.cloudwatch.cloudwatch_client import (
class cloudwatch_alarm_actions_alarm_state_configured(Check):
def execute(self):
findings = []
for metric_alarm in cloudwatch_client.metric_alarms:
report = Check_Report_AWS(metadata=self.metadata(), resource=metric_alarm)
report.status = "PASS"
report.status_extended = f"CloudWatch metric alarm {metric_alarm.name} has actions configured for the ALARM state."
if not metric_alarm.alarm_actions:
report.status = "FAIL"
report.status_extended = f"CloudWatch metric alarm {metric_alarm.name} does not have actions configured for the ALARM state."
findings.append(report)
if cloudwatch_client.metric_alarms is not None:
for metric_alarm in cloudwatch_client.metric_alarms:
report = Check_Report_AWS(
metadata=self.metadata(), resource=metric_alarm
)
report.status = "PASS"
report.status_extended = f"CloudWatch metric alarm {metric_alarm.name} has actions configured for the ALARM state."
if not metric_alarm.alarm_actions:
report.status = "FAIL"
report.status_extended = f"CloudWatch metric alarm {metric_alarm.name} does not have actions configured for the ALARM state."
findings.append(report)
return findings

View File

@@ -7,14 +7,17 @@ from prowler.providers.aws.services.cloudwatch.cloudwatch_client import (
class cloudwatch_alarm_actions_enabled(Check):
def execute(self):
findings = []
for metric_alarm in cloudwatch_client.metric_alarms:
report = Check_Report_AWS(metadata=self.metadata(), resource=metric_alarm)
report.status = "PASS"
report.status_extended = (
f"CloudWatch metric alarm {metric_alarm.name} has actions enabled."
)
if not metric_alarm.actions_enabled:
report.status = "FAIL"
report.status_extended = f"CloudWatch metric alarm {metric_alarm.name} does not have actions enabled."
findings.append(report)
if cloudwatch_client.metric_alarms is not None:
for metric_alarm in cloudwatch_client.metric_alarms:
report = Check_Report_AWS(
metadata=self.metadata(), resource=metric_alarm
)
report.status = "PASS"
report.status_extended = (
f"CloudWatch metric alarm {metric_alarm.name} has actions enabled."
)
if not metric_alarm.actions_enabled:
report.status = "FAIL"
report.status_extended = f"CloudWatch metric alarm {metric_alarm.name} does not have actions enabled."
findings.append(report)
return findings

View File

@@ -9,7 +9,7 @@
"SubServiceName": "snapshot",
"ResourceIdTemplate": "arn:partition:service:region:account-id",
"Severity": "high",
"ResourceType": "Other",
"ResourceType": "AwsAccount",
"Description": "EBS snapshots can be shared with other AWS accounts or made public. By default, EBS snapshots are private and only the AWS account that created the snapshot can access it. If an EBS snapshot is shared with another AWS account or made public, the data in the snapshot can be accessed by the other account or by anyone on the internet. Ensure that public access to EBS snapshots is disabled.",
"Risk": "If public access to EBS snapshots is enabled, the data in the snapshot can be accessed by anyone on the internet.",
"RelatedUrl": "https://docs.aws.amazon.com/ebs/latest/userguide/block-public-access-snapshots-work.html#block-public-access-snapshots-enable",

View File

@@ -9,7 +9,7 @@
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id",
"Severity": "high",
"ResourceType": "AwsEc2Instance",
"ResourceType": "AwsAccount",
"Description": "Ensure Instance Metadata Service Version 2 (IMDSv2) is enforced for EC2 instances at the account level to protect against SSRF vulnerabilities.",
"Risk": "EC2 instances that use IMDSv1 are vulnerable to SSRF attacks.",
"RelatedUrl": "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/configuring-IMDS-new-instances.html#set-imdsv2-account-defaults",

View File

@@ -9,7 +9,7 @@
"SubServiceName": "instance",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "critical",
"ResourceType": "AwsEc2SecurityGroup",
"ResourceType": "AwsEc2Instance",
"Description": "Ensure no EC2 instances allow ingress from the internet to TCP port 11211 (Memcached).",
"Risk": "Memcached is an open-source, high-performance, distributed memory object caching system. It is often used to speed up dynamic database-driven websites by caching data and objects in RAM to reduce the number of times an external data source must be read. Memcached is designed to be used in trusted environments and should not be exposed to the internet. If Memcached is exposed to the internet, it can be exploited by attackers to perform distributed denial-of-service (DDoS) attacks, data exfiltration, and other malicious activities.",
"RelatedUrl": "",

View File

@@ -10,7 +10,7 @@
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "high",
"ResourceType": "AwsEc2SecurityGroup",
"Description": "Ensure no security groups allow ingress and egress from ide-open IP address with a mask between 0 and 24.",
"Description": "Ensure no security groups allow ingress and egress from wide-open IP address with a mask between 0 and 24.",
"Risk": "If Security groups are not properly configured the attack surface is increased.",
"RelatedUrl": "",
"Remediation": {

View File

@@ -12,7 +12,7 @@
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "critical",
"ResourceType": "AwsEcsTaskDefinition",
"Description": "Check if secrets exists in ECS task definitions environment variables. If a secret is detected, the line number shown in the finding matches with the environment variable \"Name\" attribute starting to count at the \"environment\" key from the ECS Task Definition in JSON format.",
"Description": "Check if secrets exists in ECS task definitions environment variables.",
"Risk": "The use of a hard-coded password increases the possibility of password guessing. If hard-coded passwords are used, it is possible that malicious users gain access through the account in question.",
"RelatedUrl": "",
"Remediation": {

View File

@@ -7,9 +7,9 @@
],
"ServiceName": "rds",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:rds:region:account-id:db-cluster",
"ResourceIdTemplate": "arn:aws:rds:region:account-id:account",
"Severity": "low",
"ResourceType": "AwsRdsDbCluster",
"ResourceType": "AwsAccount",
"Description": "Ensure that Amazon RDS event notification subscriptions are enabled for database cluster events, particularly maintenance and failure.",
"Risk": "Without event subscriptions for critical events, such as maintenance and failures, you may not be aware of issues affecting your RDS clusters, leading to downtime or security vulnerabilities.",
"RelatedUrl": "https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Events.html",

View File

@@ -5,9 +5,9 @@
"CheckType": [],
"ServiceName": "rds",
"SubServiceName": "",
"ResourceIdTemplate": "arn:aws:rds:region:account-id:db-instance",
"ResourceIdTemplate": "arn:aws:rds:region:account-id:account",
"Severity": "low",
"ResourceType": "AwsRdsEventSubscription",
"ResourceType": "AwsAccount",
"Description": "Ensure that Amazon RDS event notification subscriptions are enabled for database parameter groups events.",
"Risk": "Amazon RDS event subscriptions for database parameter groups are designed to provide incident notification of events that may affect the security, availability, and reliability of the RDS database instances associated with these parameter groups.",
"RelatedUrl": "https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Events.html",

View File

@@ -86,13 +86,14 @@ class Route53(AWSService):
)
for page in list_query_logging_configs_paginator.paginate():
for logging_config in page["QueryLoggingConfigs"]:
self.hosted_zones[hosted_zone.id].logging_config = (
LoggingConfig(
cloudwatch_log_group_arn=logging_config[
"CloudWatchLogsLogGroupArn"
]
if logging_config["HostedZoneId"] == hosted_zone.id:
self.hosted_zones[hosted_zone.id].logging_config = (
LoggingConfig(
cloudwatch_log_group_arn=logging_config[
"CloudWatchLogsLogGroupArn"
]
)
)
)
except Exception as error:
logger.error(

View File

@@ -1,7 +1,7 @@
{
"Provider": "azure",
"CheckID": "sqlserver_unrestricted_inbound_access",
"CheckTitle": "Ensure that there are no firewall rules allowing traffic from 0.0.0.0-255.255.255.255",
"CheckTitle": "Ensure no Azure SQL Databases allow ingress from 0.0.0.0/0 (ANY IP)",
"CheckType": [],
"ServiceName": "sqlserver",
"SubServiceName": "",

View File

@@ -28,34 +28,34 @@ class ProviderOutputOptions:
unix_timestamp: bool
def __init__(self, arguments, bulk_checks_metadata):
self.status = arguments.status
self.output_modes = arguments.output_formats
self.output_directory = arguments.output_directory
self.verbose = arguments.verbose
self.status = getattr(arguments, "status", None)
self.output_modes = getattr(arguments, "output_formats", None)
self.output_directory = getattr(arguments, "output_directory", None)
self.verbose = getattr(arguments, "verbose", None)
self.bulk_checks_metadata = bulk_checks_metadata
self.only_logs = arguments.only_logs
self.unix_timestamp = arguments.unix_timestamp
self.shodan_api_key = arguments.shodan
self.only_logs = getattr(arguments, "only_logs", None)
self.unix_timestamp = getattr(arguments, "unix_timestamp", None)
self.shodan_api_key = getattr(arguments, "shodan", None)
self.fixer = getattr(arguments, "fixer", None)
# Shodan API Key
if arguments.shodan:
if self.shodan_api_key:
# TODO: revisit this logic
provider = Provider.get_global_provider()
updated_audit_config = Provider.update_provider_config(
provider.audit_config, "shodan_api_key", arguments.shodan
provider.audit_config, "shodan_api_key", self.shodan_api_key
)
if updated_audit_config:
provider._audit_config = updated_audit_config
# Check output directory, if it is not created -> create it
if arguments.output_directory and not self.fixer:
if not isdir(arguments.output_directory):
if arguments.output_formats:
makedirs(arguments.output_directory, exist_ok=True)
if not isdir(arguments.output_directory + "/compliance"):
if arguments.output_formats:
makedirs(arguments.output_directory + "/compliance", exist_ok=True)
if self.output_directory and not self.fixer:
if not isdir(self.output_directory):
if self.output_modes:
makedirs(self.output_directory, exist_ok=True)
if not isdir(self.output_directory + "/compliance"):
if self.output_modes:
makedirs(self.output_directory + "/compliance", exist_ok=True)
@dataclass

View File

@@ -2,7 +2,6 @@ import json
import os
import re
import sys
from typing import Optional
from colorama import Fore, Style
from google.auth import default, impersonated_credentials, load_credentials_from_dict
@@ -426,7 +425,7 @@ class GcpProvider(Provider):
credentials_file: str = None,
service_account: str = None,
raise_on_exception: bool = True,
provider_id: Optional[str] = None,
provider_id: str = None,
client_id: str = None,
client_secret: str = None,
refresh_token: str = None,
@@ -483,6 +482,11 @@ class GcpProvider(Provider):
... )
"""
try:
if not provider_id:
logger.error("Provider ID is required.")
raise GCPInvalidProviderIdError(
file=__file__, message="Provider ID is required."
)
# Set the GCP credentials using the provided client_id, client_secret and refresh_token from ADC
gcp_credentials = None
if any([client_id, client_secret, refresh_token]):
@@ -495,6 +499,10 @@ class GcpProvider(Provider):
gcp_credentials=gcp_credentials,
service_account_key=service_account_key,
)
if not project_id:
project_id = provider_id
if provider_id and project_id != provider_id:
# Logic to check if the provider ID matches the project ID
GcpProvider.validate_project_id(

View File

@@ -90,7 +90,7 @@ class Microsoft365BaseException(ProwlerException):
"message": "Microsoft365 tenant ID error: browser authentication flag (--browser-auth) not found",
"remediation": "To use browser authentication, ensure the tenant ID is properly set.",
},
(6021, "Microsoft365NotTenantIdButClientIdAndClienSecretError"): {
(6021, "Microsoft365NotTenantIdButClientIdAndClientSecretError"): {
"message": "Tenant Id is required for Microsoft365 static credentials. Make sure you are using the correct credentials.",
"remediation": "Check the Microsoft365 Tenant ID and ensure it is properly set up.",
},
@@ -270,7 +270,7 @@ class Microsoft365BrowserAuthNoFlagError(Microsoft365CredentialsError):
)
class Microsoft365NotTenantIdButClientIdAndClienSecretError(
class Microsoft365NotTenantIdButClientIdAndClientSecretError(
Microsoft365CredentialsError
):
def __init__(self, file=None, original_exception=None, message=None):

View File

@@ -40,7 +40,7 @@ from prowler.providers.microsoft365.exceptions.exceptions import (
Microsoft365InteractiveBrowserCredentialError,
Microsoft365InvalidProviderIdError,
Microsoft365NoAuthenticationMethodError,
Microsoft365NotTenantIdButClientIdAndClienSecretError,
Microsoft365NotTenantIdButClientIdAndClientSecretError,
Microsoft365NotValidClientIdError,
Microsoft365NotValidClientSecretError,
Microsoft365NotValidTenantIdError,
@@ -281,7 +281,7 @@ class Microsoft365Provider(Provider):
)
else:
if not tenant_id:
raise Microsoft365NotTenantIdButClientIdAndClienSecretError(
raise Microsoft365NotTenantIdButClientIdAndClientSecretError(
file=os.path.basename(__file__),
message="Tenant Id is required for Microsoft365 static credentials. Make sure you are using the correct credentials.",
)

View File

@@ -23,7 +23,7 @@ packages = [
{include = "dashboard"}
]
readme = "README.md"
version = "5.4.0"
version = "5.4.1"
[tool.poetry.dependencies]
alive-progress = "3.2.0"

View File

@@ -18,14 +18,16 @@ class Test_cloudwatch_alarm_actions_alarm_state_configured:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_client",
new=CloudWatch(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_client",
new=CloudWatch(aws_provider),
),
):
from prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured import (
cloudwatch_alarm_actions_alarm_state_configured,
)
@@ -35,6 +37,38 @@ class Test_cloudwatch_alarm_actions_alarm_state_configured:
assert len(result) == 0
@mock_aws
def test_none_cloudwatch_alarms(self):
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION_US_EAST_1)
cloudwatch_client.metric_alarms = []
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
)
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_client",
new=CloudWatch(aws_provider),
) as cloudwatch_client_mock,
):
from prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured import (
cloudwatch_alarm_actions_alarm_state_configured,
)
cloudwatch_client_mock.metric_alarms = None
check = cloudwatch_alarm_actions_alarm_state_configured()
result = check.execute()
assert len(result) == 0
@mock_aws
def test_cloudwatch_alarms_actions_configured(self):
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION_US_EAST_1)
@@ -53,14 +87,16 @@ class Test_cloudwatch_alarm_actions_alarm_state_configured:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_client",
new=CloudWatch(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_client",
new=CloudWatch(aws_provider),
),
):
from prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured import (
cloudwatch_alarm_actions_alarm_state_configured,
)
@@ -100,14 +136,16 @@ class Test_cloudwatch_alarm_actions_alarm_state_configured:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_client",
new=CloudWatch(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_client",
new=CloudWatch(aws_provider),
),
):
from prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_alarm_state_configured.cloudwatch_alarm_actions_alarm_state_configured import (
cloudwatch_alarm_actions_alarm_state_configured,
)

View File

@@ -18,14 +18,16 @@ class Test_cloudwatch_alarm_actions_enabled:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client",
new=CloudWatch(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client",
new=CloudWatch(aws_provider),
),
):
from prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled import (
cloudwatch_alarm_actions_enabled,
)
@@ -35,6 +37,37 @@ class Test_cloudwatch_alarm_actions_enabled:
assert len(result) == 0
def test_none_cloudwatch_alarms(self):
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION_US_EAST_1)
cloudwatch_client.metric_alarms = []
from prowler.providers.aws.services.cloudwatch.cloudwatch_service import (
CloudWatch,
)
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client",
new=CloudWatch(aws_provider),
) as cloudwatch_client_mock,
):
from prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled import (
cloudwatch_alarm_actions_enabled,
)
cloudwatch_client_mock.metric_alarms = None
check = cloudwatch_alarm_actions_enabled()
result = check.execute()
assert len(result) == 0
@mock_aws
def test_cloudwatch_alarms_actions_enabled(self):
cloudwatch_client = client("cloudwatch", region_name=AWS_REGION_US_EAST_1)
@@ -53,14 +86,16 @@ class Test_cloudwatch_alarm_actions_enabled:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client",
new=CloudWatch(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client",
new=CloudWatch(aws_provider),
),
):
from prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled import (
cloudwatch_alarm_actions_enabled,
)
@@ -100,14 +135,16 @@ class Test_cloudwatch_alarm_actions_enabled:
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
), mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client",
new=CloudWatch(aws_provider),
with (
mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=aws_provider,
),
mock.patch(
"prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled.cloudwatch_client",
new=CloudWatch(aws_provider),
),
):
from prowler.providers.aws.services.cloudwatch.cloudwatch_alarm_actions_enabled.cloudwatch_alarm_actions_enabled import (
cloudwatch_alarm_actions_enabled,
)

View File

@@ -108,6 +108,65 @@ class Test_route53_public_hosted_zones_cloudwatch_logging_enabled:
== f"Route53 Public Hosted Zone {hosted_zone_id} has query logging disabled."
)
def test_two_hosted_zone_public_one_logging_enabled_other_disabled(self):
route53 = mock.MagicMock
hosted_zone_name = "test-domain.com"
hosted_zone_id = "ABCDEF12345678"
log_group_name = "test-log-group"
log_group_arn = f"rn:aws:logs:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:log-group:{log_group_name}"
hosted_zone_name_disabled = "test-domain-disabled.com"
hosted_zone_id_disabled = "ABCDEF123456789"
route53.hosted_zones = {
hosted_zone_name: HostedZone(
name=hosted_zone_name,
arn=f"arn:aws:route53:::{hosted_zone_id}",
id=hosted_zone_id,
private_zone=False,
region=AWS_REGION_US_EAST_1,
logging_config=LoggingConfig(cloudwatch_log_group_arn=log_group_arn),
),
hosted_zone_name_disabled: HostedZone(
name=hosted_zone_name_disabled,
arn=f"arn:aws:route53:::{hosted_zone_id_disabled}",
id=hosted_zone_id_disabled,
private_zone=False,
region=AWS_REGION_US_EAST_1,
),
}
with mock.patch(
"prowler.providers.aws.services.route53.route53_service.Route53",
new=route53,
), mock.patch(
"prowler.providers.aws.services.route53.route53_public_hosted_zones_cloudwatch_logging_enabled.route53_public_hosted_zones_cloudwatch_logging_enabled.route53_client",
new=route53,
):
# Test Check
from prowler.providers.aws.services.route53.route53_public_hosted_zones_cloudwatch_logging_enabled.route53_public_hosted_zones_cloudwatch_logging_enabled import (
route53_public_hosted_zones_cloudwatch_logging_enabled,
)
check = route53_public_hosted_zones_cloudwatch_logging_enabled()
result = check.execute()
assert len(result) == 2
assert result[0].resource_id == hosted_zone_id
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"Route53 Public Hosted Zone {hosted_zone_id} has query logging enabled in Log Group {log_group_arn}."
)
assert result[1].resource_id == hosted_zone_id_disabled
assert result[1].region == AWS_REGION_US_EAST_1
assert result[1].status == "FAIL"
assert (
result[1].status_extended
== f"Route53 Public Hosted Zone {hosted_zone_id_disabled} has query logging disabled."
)
def test_hosted_zone__private(self):
route53 = mock.MagicMock
hosted_zone_name = "test-domain.com"

View File

@@ -810,6 +810,7 @@ class TestGCPProvider:
):
with pytest.raises(Exception) as e:
GcpProvider.test_connection(
provider_id="test-provider-id",
client_id="test-client-id",
client_secret="test-client-secret",
refresh_token="test-refresh-token",
@@ -817,6 +818,20 @@ class TestGCPProvider:
assert e.type == GCPTestConnectionError
assert "Test exception" in e.value.args[0]
def test_test_connection_with_exception_no_project_id(self):
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
side_effect=GCPInvalidProviderIdError("Test exception"),
):
with pytest.raises(GCPInvalidProviderIdError) as e:
GcpProvider.test_connection(
client_id="test-client-id",
client_secret="test-client-secret",
refresh_token="test-refresh-token",
)
assert e.type == GCPInvalidProviderIdError
assert "[3008] Provider ID is required." in e.value.args[0]
def test_test_connection_with_exception_service_account_key(self):
with patch(
"prowler.providers.gcp.gcp_provider.GcpProvider.setup_session",
@@ -824,6 +839,7 @@ class TestGCPProvider:
):
with pytest.raises(Exception) as e:
GcpProvider.test_connection(
provider_id="test-provider-id",
service_account_key={"test": "key"},
)
assert e.type == GCPTestConnectionError

View File

@@ -1,60 +0,0 @@
# Prowler UI Changelog
All notable changes to the **Prowler UI** are documented in this file.
---
### [v1.5.0] (Prowler v5.5.0 - UNRELEASED)
---
### [v1.4.0] (Prowler v5.4.0)
#### 🚀 Added
- Added `exports` feature: Users can now download artifacts via a new button. [(#7006)](https://github.com/prowler-cloud/prowler/pull/7006)
- New sidebar with nested menus and integrated mobile navigation. [(#7018)](https://github.com/prowler-cloud/prowler/pull/7018)
- Added animation for scan execution progress—it now updates automatically. [(#6972)](https://github.com/prowler-cloud/prowler/pull/6972)
- Add `status_extended` attribute to finding details. [(#6997)](https://github.com/prowler-cloud/prowler/pull/6997)
- Add `Prowler version` to the sidebar. [(#7086)](https://github.com/prowler-cloud/prowler/pull/7086)
#### 🔄 Changed
- New compliance dropdown. [(#7118)](https://github.com/prowler-cloud/prowler/pull/7118).
#### 🐞 Fixes
- Revalidate the page when a role is deleted. [(#6976)](https://github.com/prowler-cloud/prowler/pull/6976)
- Allows removing group visibility when creating a role. [(#7088)](https://github.com/prowler-cloud/prowler/pull/7088)
- Displays correct error messages when deleting a user. [(#7089)](https://github.com/prowler-cloud/prowler/pull/7089)
- Updated label: *"Select a scan job"* → *"Select a cloud provider"*. [(#7107)](https://github.com/prowler-cloud/prowler/pull/7107)
- Display uid if alias is missing when creating a group. [(#7137)](https://github.com/prowler-cloud/prowler/pull/7137)
---
### [v1.3.0] (Prowler v5.3.0)
#### 🚀 Added
- Findings endpoints now require at least one date filter [(#6864)](https://github.com/prowler-cloud/prowler/pull/6864).
#### 🔄 Changed
- Scans now appear immediately after launch. [(#6791)](https://github.com/prowler-cloud/prowler/pull/6791).
- Improved sign-in and sign-up forms. [(#6813)](https://github.com/prowler-cloud/prowler/pull/6813).
---
### [v1.2.0] (Prowler v5.2.0)
#### 🚀 Added
- `First seen` field included in finding details. [(#6575)](https://github.com/prowler-cloud/prowler/pull/6575)
#### 🔄 Changed
- Completely redesigned finding details layout. [(#6575)](https://github.com/prowler-cloud/prowler/pull/6575)
- Completely redesigned scan details layout. [(#6665)](https://github.com/prowler-cloud/prowler/pull/6665)
- Simplified provider setup: reduced from 4 to 3 steps. Successful connection now triggers an animation before redirecting to `/scans`. [(#6665)](https://github.com/prowler-cloud/prowler/pull/6665)
---

View File

@@ -62,22 +62,26 @@ export const ComplianceCard: React.FC<ComplianceCardProps> = ({
className="h-10 w-10 min-w-10 rounded-md border-1 border-gray-300 bg-white object-contain p-1"
/>
<div className="flex w-full flex-col">
<h4 className="text-md font-bold leading-5 3xl:text-lg">
<h4 className="mb-1 text-small font-bold leading-5">
{formatTitle(title)}
{version ? ` - ${version}` : ""}
</h4>
<Progress
label="Your Rating:"
label="Score:"
size="sm"
aria-label="Your Rating"
aria-label="Compliance score"
value={ratingPercentage}
showValueLabel={true}
className="mt-2 font-semibold"
classNames={{
track: "drop-shadow-sm border border-default",
label: "tracking-wider font-medium text-default-600 text-xs",
value: "text-foreground/60 -mb-2",
}}
color={getRatingColor(ratingPercentage)}
/>
<div className="mt-2 flex justify-between">
<small>
<span className="mr-1 font-semibold">
<span className="mr-1 text-xs font-semibold">
{passingRequirements} / {totalRequirements}
</span>
Passing Requirements

View File

@@ -89,7 +89,7 @@ export const ScanDetail = ({
<InfoField label="Scan ID" variant="simple">
<Snippet className="bg-gray-50 py-1 dark:bg-slate-800" hideSymbol>
{renderValue(taskDetails?.attributes.task_args.scan_id)}
{scanDetails.id}
</Snippet>
</InfoField>