mirror of
https://github.com/prowler-cloud/prowler.git
synced 2026-05-16 01:02:38 +00:00
Compare commits
24 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| d9865d2816 | |||
| cb66b3289b | |||
| 5a51a6f76f | |||
| 6ce9e144d0 | |||
| e5b8215819 | |||
| 946f49df44 | |||
| e4d3a51b9e | |||
| 3ff810405a | |||
| 0f30b4fe79 | |||
| ad3f0d7d92 | |||
| 834a7d3b69 | |||
| 24a50c6ac2 | |||
| ec8afd773f | |||
| a09be4c0ba | |||
| 4b62fdcf53 | |||
| bf0013dae3 | |||
| c82cd5288c | |||
| ad31a6b3f5 | |||
| 20c7c9f8de | |||
| 0cfe41e452 | |||
| 1b254feadc | |||
| 15954d8a01 | |||
| ff122c9779 | |||
| a012397e55 |
@@ -102,8 +102,15 @@ jobs:
|
||||
run: |
|
||||
poetry run vulture --exclude "contrib,api,ui" --min-confidence 100 .
|
||||
|
||||
- name: Dockerfile - Check if Dockerfile has changed
|
||||
id: dockerfile-changed-files
|
||||
uses: tj-actions/changed-files@ed68ef82c095e0d48ec87eccea555d944a631a4c # v46.0.5
|
||||
with:
|
||||
files: |
|
||||
Dockerfile
|
||||
|
||||
- name: Hadolint
|
||||
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
|
||||
if: steps.dockerfile-changed-files.outputs.any_changed == 'true'
|
||||
run: |
|
||||
/tmp/hadolint Dockerfile --ignore=DL3013
|
||||
|
||||
|
||||
@@ -136,6 +136,14 @@ If your workstation's architecture is incompatible, you can resolve this by:
|
||||
|
||||
> Once configured, access the Prowler App at http://localhost:3000. Sign up using your email and password to get started.
|
||||
|
||||
### Common Issues with Docker Pull Installation
|
||||
|
||||
> [!Note]
|
||||
If you want to use AWS role assumption (e.g., with the "Connect assuming IAM Role" option), you may need to mount your local `.aws` directory into the container as a volume (e.g., `- "${HOME}/.aws:/home/prowler/.aws:ro"`). There are several ways to configure credentials for Docker containers. See the [Troubleshooting](./docs/troubleshooting.md) section for more details and examples.
|
||||
|
||||
You can find more information in the [Troubleshooting](./docs/troubleshooting.md) section.
|
||||
|
||||
|
||||
### From GitHub
|
||||
|
||||
**Requirements**
|
||||
|
||||
+4
-1
@@ -15,9 +15,12 @@ All notable changes to the **Prowler API** are documented in this file.
|
||||
|
||||
### Fixed
|
||||
- Search filter for findings and resources [(#8112)](https://github.com/prowler-cloud/prowler/pull/8112)
|
||||
- RBAC is now applied to `GET /overviews/providers` [(#8277)](https://github.com/prowler-cloud/prowler/pull/8277)
|
||||
|
||||
### Changed
|
||||
- `POST /schedules/daily` returns a `409 CONFLICT` if already created [(#8258)](https://github.com/prowler-cloud/prowler/pull/8258)
|
||||
|
||||
### Security
|
||||
|
||||
- Enhanced password validation to enforce 12+ character passwords with special characters, uppercase, lowercase, and numbers [(#8225)](https://github.com/prowler-cloud/prowler/pull/8225)
|
||||
|
||||
---
|
||||
|
||||
@@ -78,3 +78,21 @@ def custom_exception_handler(exc, context):
|
||||
message_item["message"] for message_item in exc.detail["messages"]
|
||||
]
|
||||
return exception_handler(exc, context)
|
||||
|
||||
|
||||
class ConflictException(APIException):
|
||||
status_code = status.HTTP_409_CONFLICT
|
||||
default_detail = "A conflict occurred. The resource already exists."
|
||||
default_code = "conflict"
|
||||
|
||||
def __init__(self, detail=None, code=None, pointer=None):
|
||||
error_detail = {
|
||||
"detail": detail or self.default_detail,
|
||||
"status": self.status_code,
|
||||
"code": self.default_code,
|
||||
}
|
||||
|
||||
if pointer:
|
||||
error_detail["source"] = {"pointer": pointer}
|
||||
|
||||
super().__init__(detail=[error_detail])
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from unittest.mock import ANY, Mock, patch
|
||||
|
||||
import pytest
|
||||
from conftest import TODAY
|
||||
from django.urls import reverse
|
||||
from rest_framework import status
|
||||
|
||||
@@ -409,3 +410,87 @@ class TestLimitedVisibility:
|
||||
assert (
|
||||
response.json()["data"]["relationships"]["providers"]["meta"]["count"] == 1
|
||||
)
|
||||
|
||||
def test_overviews_providers(
|
||||
self,
|
||||
authenticated_client_rbac_limited,
|
||||
scan_summaries_fixture,
|
||||
providers_fixture,
|
||||
):
|
||||
# By default, the associated provider is the one which has the overview data
|
||||
response = authenticated_client_rbac_limited.get(reverse("overview-providers"))
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) > 0
|
||||
|
||||
# Changing the provider visibility, no data should be returned
|
||||
# Only the associated provider to that group is changed
|
||||
new_provider = providers_fixture[1]
|
||||
ProviderGroupMembership.objects.all().update(provider=new_provider)
|
||||
|
||||
response = authenticated_client_rbac_limited.get(reverse("overview-providers"))
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) == 0
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"endpoint_name",
|
||||
[
|
||||
"findings",
|
||||
"findings_severity",
|
||||
],
|
||||
)
|
||||
def test_overviews_findings(
|
||||
self,
|
||||
endpoint_name,
|
||||
authenticated_client_rbac_limited,
|
||||
scan_summaries_fixture,
|
||||
providers_fixture,
|
||||
):
|
||||
# By default, the associated provider is the one which has the overview data
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse(f"overview-{endpoint_name}")
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
values = response.json()["data"]["attributes"].values()
|
||||
assert any(value > 0 for value in values)
|
||||
|
||||
# Changing the provider visibility, no data should be returned
|
||||
# Only the associated provider to that group is changed
|
||||
new_provider = providers_fixture[1]
|
||||
ProviderGroupMembership.objects.all().update(provider=new_provider)
|
||||
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse(f"overview-{endpoint_name}")
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
data = response.json()["data"]["attributes"].values()
|
||||
assert all(value == 0 for value in data)
|
||||
|
||||
def test_overviews_services(
|
||||
self,
|
||||
authenticated_client_rbac_limited,
|
||||
scan_summaries_fixture,
|
||||
providers_fixture,
|
||||
):
|
||||
# By default, the associated provider is the one which has the overview data
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse("overview-services"), {"filter[inserted_at]": TODAY}
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) > 0
|
||||
|
||||
# Changing the provider visibility, no data should be returned
|
||||
# Only the associated provider to that group is changed
|
||||
new_provider = providers_fixture[1]
|
||||
ProviderGroupMembership.objects.all().update(provider=new_provider)
|
||||
|
||||
response = authenticated_client_rbac_limited.get(
|
||||
reverse("overview-services"), {"filter[inserted_at]": TODAY}
|
||||
)
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert len(response.json()["data"]) == 0
|
||||
|
||||
@@ -14,7 +14,13 @@ import jwt
|
||||
import pytest
|
||||
from allauth.socialaccount.models import SocialAccount, SocialApp
|
||||
from botocore.exceptions import ClientError, NoCredentialsError
|
||||
from conftest import API_JSON_CONTENT_TYPE, TEST_PASSWORD, TEST_USER
|
||||
from conftest import (
|
||||
API_JSON_CONTENT_TYPE,
|
||||
TEST_PASSWORD,
|
||||
TEST_USER,
|
||||
TODAY,
|
||||
today_after_n_days,
|
||||
)
|
||||
from django.conf import settings
|
||||
from django.http import JsonResponse
|
||||
from django.test import RequestFactory
|
||||
@@ -47,14 +53,6 @@ from api.models import (
|
||||
from api.rls import Tenant
|
||||
from api.v1.views import ComplianceOverviewViewSet, TenantFinishACSView
|
||||
|
||||
TODAY = str(datetime.today().date())
|
||||
|
||||
|
||||
def today_after_n_days(n_days: int) -> str:
|
||||
return datetime.strftime(
|
||||
datetime.today().date() + timedelta(days=n_days), "%Y-%m-%d"
|
||||
)
|
||||
|
||||
|
||||
class TestViewSet:
|
||||
def test_security_headers(self, client):
|
||||
@@ -5496,6 +5494,30 @@ class TestScheduleViewSet:
|
||||
)
|
||||
assert response.status_code == status.HTTP_404_NOT_FOUND
|
||||
|
||||
@patch("api.v1.views.Task.objects.get")
|
||||
def test_schedule_daily_already_scheduled(
|
||||
self,
|
||||
mock_task_get,
|
||||
authenticated_client,
|
||||
providers_fixture,
|
||||
tasks_fixture,
|
||||
):
|
||||
provider, *_ = providers_fixture
|
||||
prowler_task = tasks_fixture[0]
|
||||
mock_task_get.return_value = prowler_task
|
||||
json_payload = {
|
||||
"provider_id": str(provider.id),
|
||||
}
|
||||
response = authenticated_client.post(
|
||||
reverse("schedule-daily"), data=json_payload, format="json"
|
||||
)
|
||||
assert response.status_code == status.HTTP_202_ACCEPTED
|
||||
|
||||
response = authenticated_client.post(
|
||||
reverse("schedule-daily"), data=json_payload, format="json"
|
||||
)
|
||||
assert response.status_code == status.HTTP_409_CONFLICT
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestIntegrationViewSet:
|
||||
|
||||
@@ -94,7 +94,6 @@ from api.filters import (
|
||||
UserFilter,
|
||||
)
|
||||
from api.models import (
|
||||
ComplianceOverview,
|
||||
ComplianceRequirementOverview,
|
||||
Finding,
|
||||
Integration,
|
||||
@@ -3469,7 +3468,7 @@ class ComplianceOverviewViewSet(BaseRLSViewSet, TaskManagementMixin):
|
||||
)
|
||||
@method_decorator(CACHE_DECORATOR, name="list")
|
||||
class OverviewViewSet(BaseRLSViewSet):
|
||||
queryset = ComplianceOverview.objects.all()
|
||||
queryset = ScanSummary.objects.all()
|
||||
http_method_names = ["get"]
|
||||
ordering = ["-inserted_at"]
|
||||
# RBAC required permissions (implicit -> MANAGE_PROVIDERS enable unlimited visibility or check the visibility of
|
||||
@@ -3480,19 +3479,10 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
role = get_role(self.request.user)
|
||||
providers = get_providers(role)
|
||||
|
||||
def _get_filtered_queryset(model):
|
||||
if role.unlimited_visibility:
|
||||
return model.all_objects.filter(tenant_id=self.request.tenant_id)
|
||||
return model.all_objects.filter(
|
||||
tenant_id=self.request.tenant_id, scan__provider__in=providers
|
||||
)
|
||||
if not role.unlimited_visibility:
|
||||
self.allowed_providers = providers
|
||||
|
||||
if self.action == "providers":
|
||||
return _get_filtered_queryset(Finding)
|
||||
elif self.action in ("findings", "findings_severity", "services"):
|
||||
return _get_filtered_queryset(ScanSummary)
|
||||
else:
|
||||
return super().get_queryset()
|
||||
return ScanSummary.all_objects.filter(tenant_id=self.request.tenant_id)
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.action == "providers":
|
||||
@@ -3525,18 +3515,24 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
@action(detail=False, methods=["get"], url_name="providers")
|
||||
def providers(self, request):
|
||||
tenant_id = self.request.tenant_id
|
||||
queryset = self.get_queryset()
|
||||
provider_filter = (
|
||||
{"provider__in": self.allowed_providers}
|
||||
if hasattr(self, "allowed_providers")
|
||||
else {}
|
||||
)
|
||||
|
||||
latest_scan_ids = (
|
||||
Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
|
||||
Scan.all_objects.filter(
|
||||
tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
|
||||
)
|
||||
.order_by("provider_id", "-inserted_at")
|
||||
.distinct("provider_id")
|
||||
.values_list("id", flat=True)
|
||||
)
|
||||
|
||||
findings_aggregated = (
|
||||
ScanSummary.all_objects.filter(
|
||||
tenant_id=tenant_id, scan_id__in=latest_scan_ids
|
||||
)
|
||||
queryset.filter(scan_id__in=latest_scan_ids)
|
||||
.values(
|
||||
"scan__provider_id",
|
||||
provider=F("scan__provider__provider"),
|
||||
@@ -3572,7 +3568,7 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
)
|
||||
|
||||
return Response(
|
||||
OverviewProviderSerializer(overview, many=True).data,
|
||||
self.get_serializer(overview, many=True).data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
@@ -3581,9 +3577,16 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
tenant_id = self.request.tenant_id
|
||||
queryset = self.get_queryset()
|
||||
filtered_queryset = self.filter_queryset(queryset)
|
||||
provider_filter = (
|
||||
{"provider__in": self.allowed_providers}
|
||||
if hasattr(self, "allowed_providers")
|
||||
else {}
|
||||
)
|
||||
|
||||
latest_scan_ids = (
|
||||
Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
|
||||
Scan.all_objects.filter(
|
||||
tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
|
||||
)
|
||||
.order_by("provider_id", "-inserted_at")
|
||||
.distinct("provider_id")
|
||||
.values_list("id", flat=True)
|
||||
@@ -3620,9 +3623,16 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
tenant_id = self.request.tenant_id
|
||||
queryset = self.get_queryset()
|
||||
filtered_queryset = self.filter_queryset(queryset)
|
||||
provider_filter = (
|
||||
{"provider__in": self.allowed_providers}
|
||||
if hasattr(self, "allowed_providers")
|
||||
else {}
|
||||
)
|
||||
|
||||
latest_scan_ids = (
|
||||
Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
|
||||
Scan.all_objects.filter(
|
||||
tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
|
||||
)
|
||||
.order_by("provider_id", "-inserted_at")
|
||||
.distinct("provider_id")
|
||||
.values_list("id", flat=True)
|
||||
@@ -3642,7 +3652,7 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
for item in severity_counts:
|
||||
severity_data[item["severity"]] = item["count"]
|
||||
|
||||
serializer = OverviewSeveritySerializer(severity_data)
|
||||
serializer = self.get_serializer(severity_data)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
@action(detail=False, methods=["get"], url_name="services")
|
||||
@@ -3650,9 +3660,16 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
tenant_id = self.request.tenant_id
|
||||
queryset = self.get_queryset()
|
||||
filtered_queryset = self.filter_queryset(queryset)
|
||||
provider_filter = (
|
||||
{"provider__in": self.allowed_providers}
|
||||
if hasattr(self, "allowed_providers")
|
||||
else {}
|
||||
)
|
||||
|
||||
latest_scan_ids = (
|
||||
Scan.all_objects.filter(tenant_id=tenant_id, state=StateChoices.COMPLETED)
|
||||
Scan.all_objects.filter(
|
||||
tenant_id=tenant_id, state=StateChoices.COMPLETED, **provider_filter
|
||||
)
|
||||
.order_by("provider_id", "-inserted_at")
|
||||
.distinct("provider_id")
|
||||
.values_list("id", flat=True)
|
||||
@@ -3670,7 +3687,7 @@ class OverviewViewSet(BaseRLSViewSet):
|
||||
.order_by("service")
|
||||
)
|
||||
|
||||
serializer = OverviewServiceSerializer(services_data, many=True)
|
||||
serializer = self.get_serializer(services_data, many=True)
|
||||
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@@ -46,12 +46,19 @@ from api.v1.serializers import TokenSerializer
|
||||
from prowler.lib.check.models import Severity
|
||||
from prowler.lib.outputs.finding import Status
|
||||
|
||||
TODAY = str(datetime.today().date())
|
||||
API_JSON_CONTENT_TYPE = "application/vnd.api+json"
|
||||
NO_TENANT_HTTP_STATUS = status.HTTP_401_UNAUTHORIZED
|
||||
TEST_USER = "dev@prowler.com"
|
||||
TEST_PASSWORD = "testing_psswd"
|
||||
|
||||
|
||||
def today_after_n_days(n_days: int) -> str:
|
||||
return datetime.strftime(
|
||||
datetime.today().date() + timedelta(days=n_days), "%Y-%m-%d"
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def enforce_test_user_db_connection(django_db_setup, django_db_blocker):
|
||||
"""Ensure tests use the test user for database connections."""
|
||||
|
||||
@@ -2,10 +2,10 @@ import json
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from django_celery_beat.models import IntervalSchedule, PeriodicTask
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from tasks.tasks import perform_scheduled_scan_task
|
||||
|
||||
from api.db_utils import rls_transaction
|
||||
from api.exceptions import ConflictException
|
||||
from api.models import Provider, Scan, StateChoices
|
||||
|
||||
|
||||
@@ -24,15 +24,9 @@ def schedule_provider_scan(provider_instance: Provider):
|
||||
if PeriodicTask.objects.filter(
|
||||
interval=schedule, name=task_name, task="scan-perform-scheduled"
|
||||
).exists():
|
||||
raise ValidationError(
|
||||
[
|
||||
{
|
||||
"detail": "There is already a scheduled scan for this provider.",
|
||||
"status": 400,
|
||||
"source": {"pointer": "/data/attributes/provider_id"},
|
||||
"code": "invalid",
|
||||
}
|
||||
]
|
||||
raise ConflictException(
|
||||
detail="There is already a scheduled scan for this provider.",
|
||||
pointer="/data/attributes/provider_id",
|
||||
)
|
||||
|
||||
with rls_transaction(tenant_id):
|
||||
|
||||
@@ -3,9 +3,9 @@ from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from django_celery_beat.models import IntervalSchedule, PeriodicTask
|
||||
from rest_framework_json_api.serializers import ValidationError
|
||||
from tasks.beat import schedule_provider_scan
|
||||
|
||||
from api.exceptions import ConflictException
|
||||
from api.models import Scan
|
||||
|
||||
|
||||
@@ -48,8 +48,8 @@ class TestScheduleProviderScan:
|
||||
with patch("tasks.tasks.perform_scheduled_scan_task.apply_async"):
|
||||
schedule_provider_scan(provider_instance)
|
||||
|
||||
# Now, try scheduling again, should raise ValidationError
|
||||
with pytest.raises(ValidationError) as exc_info:
|
||||
# Now, try scheduling again, should raise ConflictException
|
||||
with pytest.raises(ConflictException) as exc_info:
|
||||
schedule_provider_scan(provider_instance)
|
||||
|
||||
assert "There is already a scheduled scan for this provider." in str(
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# Standard library imports
|
||||
import csv
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
@@ -20,7 +19,6 @@ from dash.dependencies import Input, Output
|
||||
# Config import
|
||||
from dashboard.config import (
|
||||
critical_color,
|
||||
encoding_format,
|
||||
fail_color,
|
||||
folder_path_overview,
|
||||
high_color,
|
||||
@@ -46,6 +44,7 @@ from dashboard.lib.dropdowns import (
|
||||
create_table_row_dropdown,
|
||||
)
|
||||
from dashboard.lib.layouts import create_layout_overview
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
# Suppress warnings
|
||||
warnings.filterwarnings("ignore")
|
||||
@@ -55,11 +54,13 @@ warnings.filterwarnings("ignore")
|
||||
csv_files = []
|
||||
|
||||
for file in glob.glob(os.path.join(folder_path_overview, "*.csv")):
|
||||
with open(file, "r", newline="", encoding=encoding_format) as csvfile:
|
||||
reader = csv.reader(csvfile)
|
||||
num_rows = sum(1 for row in reader)
|
||||
try:
|
||||
df = pd.read_csv(file, sep=";")
|
||||
num_rows = len(df)
|
||||
if num_rows > 1:
|
||||
csv_files.append(file)
|
||||
except Exception:
|
||||
logger.error(f"Error reading file {file}")
|
||||
|
||||
|
||||
# Import logos providers
|
||||
@@ -191,7 +192,13 @@ else:
|
||||
data.rename(columns={"RESOURCE_ID": "RESOURCE_UID"}, inplace=True)
|
||||
|
||||
# Remove dupplicates on the finding_uid colummn but keep the last one taking into account the timestamp
|
||||
data = data.sort_values("TIMESTAMP").drop_duplicates("FINDING_UID", keep="last")
|
||||
data["DATE"] = data["TIMESTAMP"].dt.date
|
||||
data = (
|
||||
data.sort_values("TIMESTAMP")
|
||||
.groupby(["DATE", "FINDING_UID"], as_index=False)
|
||||
.last()
|
||||
)
|
||||
data["TIMESTAMP"] = pd.to_datetime(data["TIMESTAMP"])
|
||||
|
||||
data["ASSESSMENT_TIME"] = data["TIMESTAMP"].dt.strftime("%Y-%m-%d")
|
||||
data_valid = pd.DataFrame()
|
||||
|
||||
@@ -491,11 +491,15 @@ The provided credentials must have the appropriate permissions to perform all th
|
||||
|
||||
## Infrastructure as Code (IaC)
|
||||
|
||||
Prowler's Infrastructure as Code (IaC) provider enables you to scan local infrastructure code for security and compliance issues using [Checkov](https://www.checkov.io/). This provider supports a wide range of IaC frameworks and requires no cloud authentication.
|
||||
Prowler's Infrastructure as Code (IaC) provider enables you to scan local or remote infrastructure code for security and compliance issues using [Checkov](https://www.checkov.io/). This provider supports a wide range of IaC frameworks and requires no cloud authentication for local scans.
|
||||
|
||||
### Authentication
|
||||
|
||||
The IaC provider does not require any authentication or credentials since it scans local files directly. This makes it ideal for CI/CD pipelines and local development environments.
|
||||
- For local scans, no authentication is required.
|
||||
- For remote repository scans, authentication can be provided via:
|
||||
- [**GitHub Username and Personal Access Token (PAT)**](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-personal-access-token-classic)
|
||||
- [**GitHub OAuth App Token**](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-fine-grained-personal-access-token)
|
||||
- [**Git URL**](https://git-scm.com/docs/git-clone#_git_urls)
|
||||
|
||||
### Supported Frameworks
|
||||
|
||||
@@ -515,27 +519,3 @@ The IaC provider leverages Checkov to support multiple frameworks, including:
|
||||
- Kustomize
|
||||
- OpenAPI
|
||||
- SAST, SCA (Software Composition Analysis)
|
||||
|
||||
### Usage
|
||||
|
||||
To run Prowler with the IaC provider, use the `iac` flag. You can specify the directory to scan, frameworks to include, and paths to exclude.
|
||||
|
||||
#### Basic Example
|
||||
|
||||
```console
|
||||
prowler iac --scan-path ./my-iac-directory
|
||||
```
|
||||
|
||||
#### Specify Frameworks
|
||||
|
||||
Scan only Terraform and Kubernetes files:
|
||||
|
||||
```console
|
||||
prowler iac --scan-path ./my-iac-directory --frameworks terraform kubernetes
|
||||
```
|
||||
|
||||
#### Exclude Paths
|
||||
|
||||
```console
|
||||
prowler iac --scan-path ./my-iac-directory --exclude-path ./my-iac-directory/test,./my-iac-directory/examples
|
||||
```
|
||||
|
||||
+16
-3
@@ -614,12 +614,23 @@ prowler github --github-app-id app_id --github-app-key app_key
|
||||
|
||||
#### Infrastructure as Code (IaC)
|
||||
|
||||
Prowler's Infrastructure as Code (IaC) provider enables you to scan local infrastructure code for security and compliance issues using [Checkov](https://www.checkov.io/). This provider supports a wide range of IaC frameworks, allowing you to assess your code before deployment.
|
||||
Prowler's Infrastructure as Code (IaC) provider enables you to scan local or remote infrastructure code for security and compliance issues using [Checkov](https://www.checkov.io/). This provider supports a wide range of IaC frameworks, allowing you to assess your code before deployment.
|
||||
|
||||
```console
|
||||
# Scan a directory for IaC files
|
||||
prowler iac --scan-path ./my-iac-directory
|
||||
|
||||
# Scan a remote GitHub repository (public or private)
|
||||
prowler iac --scan-repository-url https://github.com/user/repo.git
|
||||
|
||||
# Authenticate to a private repo with GitHub username and PAT
|
||||
prowler iac --scan-repository-url https://github.com/user/repo.git \
|
||||
--github-username <username> --personal-access-token <token>
|
||||
|
||||
# Authenticate to a private repo with OAuth App Token
|
||||
prowler iac --scan-repository-url https://github.com/user/repo.git \
|
||||
--oauth-app-token <oauth_token>
|
||||
|
||||
# Specify frameworks to scan (default: all)
|
||||
prowler iac --scan-path ./my-iac-directory --frameworks terraform kubernetes
|
||||
|
||||
@@ -628,8 +639,10 @@ prowler iac --scan-path ./my-iac-directory --exclude-path ./my-iac-directory/tes
|
||||
```
|
||||
|
||||
???+ note
|
||||
- The IaC provider does not require cloud authentication
|
||||
- It is ideal for CI/CD pipelines and local development environments
|
||||
- `--scan-path` and `--scan-repository-url` are mutually exclusive; only one can be specified at a time.
|
||||
- For remote repository scans, authentication can be provided via CLI flags or environment variables (`GITHUB_OAUTH_APP_TOKEN`, `GITHUB_USERNAME`, `GITHUB_PERSONAL_ACCESS_TOKEN`). CLI flags take precedence.
|
||||
- The IaC provider does not require cloud authentication for local scans.
|
||||
- It is ideal for CI/CD pipelines and local development environments.
|
||||
- For more details on supported frameworks and rules, see the [Checkov documentation](https://www.checkov.io/1.Welcome/Quick%20Start.html)
|
||||
|
||||
See more details about IaC scanning in the [IaC Tutorial](tutorials/iac/getting-started-iac.md) section.
|
||||
|
||||
@@ -12,3 +12,34 @@
|
||||
|
||||
|
||||
See section [Logging](./tutorials/logging.md) for further information or [contact us](./contact.md).
|
||||
|
||||
## Common Issues with Docker Compose Installation
|
||||
|
||||
- **Problem adding AWS Provider using "Connect assuming IAM Role" in Docker (see [GitHub Issue #7745](https://github.com/prowler-cloud/prowler/issues/7745))**:
|
||||
|
||||
When running Prowler App via Docker, you may encounter errors such as `Provider not set`, `AWS assume role error - Unable to locate credentials`, or `Provider has no secret` when trying to add an AWS Provider using the "Connect assuming IAM Role" option. This typically happens because the container does not have access to the necessary AWS credentials or profiles.
|
||||
|
||||
**Workaround:**
|
||||
|
||||
- Ensure your AWS credentials and configuration are available to the Docker container. You can do this by mounting your local `.aws` directory into the container. For example, in your `docker-compose.yaml`, add the following volume to the relevant services:
|
||||
|
||||
```yaml
|
||||
volumes:
|
||||
- "${HOME}/.aws:/home/prowler/.aws:ro"
|
||||
```
|
||||
This should be added to the `api`, `worker`, and `worker-beat` services.
|
||||
|
||||
- Create or update your `~/.aws/config` and `~/.aws/credentials` files with the appropriate profiles and roles. For example:
|
||||
|
||||
```ini
|
||||
[profile prowler-profile]
|
||||
role_arn = arn:aws:iam::<account-id>:role/ProwlerScan
|
||||
source_profile = default
|
||||
```
|
||||
And set the environment variable in your `.env` file:
|
||||
|
||||
```env
|
||||
AWS_PROFILE=prowler-profile
|
||||
```
|
||||
|
||||
- If you are scanning multiple AWS accounts, you may need to add multiple profiles to your AWS config. Note that this workaround is mainly for local testing; for production or multi-account setups, follow the [CloudFormation Template guide](https://github.com/prowler-cloud/prowler/issues/7745) and ensure the correct IAM roles and permissions are set up in each account.
|
||||
|
||||
@@ -0,0 +1,209 @@
|
||||
# Getting Started with GitHub Authentication
|
||||
|
||||
This guide explains how to set up authentication with GitHub for Prowler. The documentation covers credential retrieval processes for each supported authentication method.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- GitHub account
|
||||
- Token creation permissions (organization-level access requires admin permissions)
|
||||
|
||||
## Authentication Methods
|
||||
|
||||
### 1. Personal Access Token (PAT)
|
||||
|
||||
Personal Access Tokens provide the simplest GitHub authentication method and support individual user authentication or testing scenarios.
|
||||
|
||||
#### How to Create a Personal Access Token
|
||||
|
||||
1. **Navigate to GitHub Settings**
|
||||
- Open [GitHub](https://github.com) and sign in
|
||||
- Click the profile picture in the top right corner
|
||||
- Select "Settings" from the dropdown menu
|
||||
|
||||
2. **Access Developer Settings**
|
||||
- Scroll down the left sidebar
|
||||
- Click "Developer settings"
|
||||
|
||||
3. **Generate New Token**
|
||||
- Click "Personal access tokens"
|
||||
- Select "Tokens (classic)"
|
||||
- Click "Generate new token"
|
||||
|
||||
4. **Configure Token Permissions**
|
||||
To enable Prowler functionality, configure the following scopes:
|
||||
- `repo`: Full control of private repositories
|
||||
- `read:org`: Read organization and team membership
|
||||
- `read:user`: Read user profile data
|
||||
- `read:discussion`: Read discussions
|
||||
- `read:enterprise`: Read enterprise data (if applicable)
|
||||
|
||||
5. **Copy and Store the Token**
|
||||
- Copy the generated token immediately (GitHub displays tokens only once)
|
||||
- Store tokens securely using environment variables
|
||||
|
||||
#### How to Use Personal Access Tokens
|
||||
|
||||
Choose one of the following methods:
|
||||
|
||||
**Command-line flag:**
|
||||
|
||||
```console
|
||||
prowler github --personal-access-token your_token_here
|
||||
```
|
||||
|
||||
**Environment variable:**
|
||||
|
||||
```console
|
||||
export GITHUB_PERSONAL_ACCESS_TOKEN="your_token_here"
|
||||
prowler github
|
||||
```
|
||||
|
||||
### 2. OAuth App Token
|
||||
|
||||
OAuth Apps enable applications to act on behalf of users with explicit consent.
|
||||
|
||||
#### How to Create an OAuth App
|
||||
|
||||
1. **Navigate to Developer Settings**
|
||||
- Open GitHub Settings → Developer settings
|
||||
- Click "OAuth Apps"
|
||||
|
||||
2. **Register New Application**
|
||||
- Click "New OAuth App"
|
||||
- Complete the required fields:
|
||||
- **Application name**: Descriptive application name
|
||||
- **Homepage URL**: Application homepage
|
||||
- **Authorization callback URL**: User redirection URL after authorization
|
||||
|
||||
3. **Obtain Authorization Code**
|
||||
- Request authorization code (replace `{app_id}` with the application ID):
|
||||
```
|
||||
https://github.com/login/oauth/authorize?client_id={app_id}
|
||||
```
|
||||
|
||||
4. **Exchange Code for Token**
|
||||
- Exchange authorization code for access token (replace `{app_id}`, `{secret}`, and `{code}`):
|
||||
```
|
||||
https://github.com/login/oauth/access_token?code={code}&client_id={app_id}&client_secret={secret}
|
||||
```
|
||||
|
||||
#### How to Use OAuth Tokens
|
||||
|
||||
Choose one of the following methods:
|
||||
|
||||
**Command-line flag:**
|
||||
|
||||
```console
|
||||
prowler github --oauth-app-token your_oauth_token
|
||||
```
|
||||
|
||||
**Environment variable:**
|
||||
|
||||
```console
|
||||
export GITHUB_OAUTH_APP_TOKEN="your_oauth_token"
|
||||
prowler github
|
||||
```
|
||||
|
||||
### 3. GitHub App Credentials
|
||||
|
||||
GitHub Apps provide the recommended integration method for accessing multiple repositories or organizations.
|
||||
|
||||
#### How to Create a GitHub App
|
||||
|
||||
1. **Navigate to Developer Settings**
|
||||
- Open GitHub Settings → Developer settings
|
||||
- Click "GitHub Apps"
|
||||
|
||||
2. **Create New GitHub App**
|
||||
- Click "New GitHub App"
|
||||
- Complete the required fields:
|
||||
- **GitHub App name**: Unique application name
|
||||
- **Homepage URL**: Application homepage
|
||||
- **Webhook URL**: Webhook payload URL (optional)
|
||||
- **Permissions**: Application permission requirements
|
||||
|
||||
3. **Configure Permissions**
|
||||
To enable Prowler functionality, configure these permissions:
|
||||
- **Repository permissions**:
|
||||
- Contents (Read)
|
||||
- Metadata (Read)
|
||||
- Pull requests (Read)
|
||||
- **Organization permissions**:
|
||||
- Members (Read)
|
||||
- Administration (Read)
|
||||
- **Account permissions**:
|
||||
- Email addresses (Read)
|
||||
|
||||
4. **Generate Private Key**
|
||||
- Scroll to the "Private keys" section after app creation
|
||||
- Click "Generate a private key"
|
||||
- Download the `.pem` file and store securely
|
||||
|
||||
5. **Record App ID**
|
||||
- Locate the App ID at the top of the GitHub App settings page
|
||||
|
||||
#### How to Install the GitHub App
|
||||
|
||||
1. **Install Application**
|
||||
- Navigate to GitHub App settings
|
||||
- Click "Install App" in the left sidebar
|
||||
- Select the target account/organization
|
||||
- Choose specific repositories or select "All repositories"
|
||||
|
||||
#### How to Use GitHub App Credentials
|
||||
|
||||
Choose one of the following methods:
|
||||
|
||||
**Command-line flags:**
|
||||
|
||||
```console
|
||||
prowler github --github-app-id your_app_id --github-app-key /path/to/private-key.pem
|
||||
```
|
||||
|
||||
**Environment variables:**
|
||||
|
||||
```console
|
||||
export GITHUB_APP_ID="your_app_id"
|
||||
export GITHUB_APP_KEY="private-key-content"
|
||||
prowler github
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Security Considerations
|
||||
|
||||
Implement the following security measures:
|
||||
|
||||
- **Secure Credential Storage**: Store credentials using environment variables instead of hardcoding tokens
|
||||
- **Secrets Management**: Use dedicated secrets management systems in production environments
|
||||
- **Regular Token Rotation**: Rotate tokens and keys regularly
|
||||
- **Least Privilege Principle**: Grant only minimum required permissions
|
||||
- **Permission Auditing**: Review and audit permissions regularly
|
||||
- **Token Expiration**: Set appropriate expiration times for tokens
|
||||
- **Usage Monitoring**: Monitor token usage and revoke unused tokens
|
||||
|
||||
### Authentication Method Selection
|
||||
|
||||
Choose the appropriate method based on use case:
|
||||
|
||||
- **Personal Access Token**: Individual use, testing, or simple automation
|
||||
- **OAuth App Token**: Applications requiring user consent and delegation
|
||||
- **GitHub App**: Production integrations, especially for organizations
|
||||
|
||||
## Troubleshooting Common Issues
|
||||
|
||||
### Insufficient Permissions
|
||||
- Verify token/app has necessary scopes/permissions
|
||||
- Check organization restrictions on third-party applications
|
||||
|
||||
### Token Expiration
|
||||
- Confirm token has not expired
|
||||
- Verify fine-grained tokens have correct resource access
|
||||
|
||||
### Rate Limiting
|
||||
- GitHub implements API call rate limits
|
||||
- Consider GitHub Apps for higher rate limits
|
||||
|
||||
### Organization Settings
|
||||
- Some organizations restrict third-party applications
|
||||
- Contact organization administrator if access is denied
|
||||
@@ -1,6 +1,6 @@
|
||||
# Getting Started with the IaC Provider
|
||||
|
||||
Prowler's Infrastructure as Code (IaC) provider enables you to scan local infrastructure code for security and compliance issues using [Checkov](https://www.checkov.io/). This provider supports a wide range of IaC frameworks, allowing you to assess your code before deployment.
|
||||
Prowler's Infrastructure as Code (IaC) provider enables you to scan local or remote infrastructure code for security and compliance issues using [Checkov](https://www.checkov.io/). This provider supports a wide range of IaC frameworks, allowing you to assess your code before deployment.
|
||||
|
||||
## Supported Frameworks
|
||||
|
||||
@@ -23,21 +23,50 @@ The IaC provider leverages Checkov to support multiple frameworks, including:
|
||||
|
||||
## How It Works
|
||||
|
||||
- The IaC provider scans your local directory (or a specified path) for supported IaC files.
|
||||
- No cloud credentials or authentication are required.
|
||||
- The IaC provider scans your local directory (or a specified path) for supported IaC files, or scans a remote repository.
|
||||
- No cloud credentials or authentication are required for local scans.
|
||||
- For remote repository scans, authentication can be provided via [git URL](https://git-scm.com/docs/git-clone#_git_urls), CLI flags or environment variables.
|
||||
- Mutelist logic is handled by Checkov, not Prowler.
|
||||
- Results are output in the same formats as other Prowler providers (CSV, JSON, HTML, etc.).
|
||||
|
||||
## Usage
|
||||
|
||||
To run Prowler with the IaC provider, use the `iac` argument. You can specify the directory to scan, frameworks to include, and paths to exclude.
|
||||
To run Prowler with the IaC provider, use the `iac` argument. You can specify the directory or repository to scan, frameworks to include, and paths to exclude.
|
||||
|
||||
### Basic Example
|
||||
### Scan a Local Directory (default)
|
||||
|
||||
```sh
|
||||
prowler iac --scan-path ./my-iac-directory
|
||||
```
|
||||
|
||||
### Scan a Remote GitHub Repository
|
||||
|
||||
```sh
|
||||
prowler iac --scan-repository-url https://github.com/user/repo.git
|
||||
```
|
||||
|
||||
#### Authentication for Remote Private Repositories
|
||||
|
||||
You can provide authentication for private repositories using one of the following methods:
|
||||
|
||||
- **GitHub Username and Personal Access Token (PAT):**
|
||||
```sh
|
||||
prowler iac --scan-repository-url https://github.com/user/repo.git \
|
||||
--github-username <username> --personal-access-token <token>
|
||||
```
|
||||
- **GitHub OAuth App Token:**
|
||||
```sh
|
||||
prowler iac --scan-repository-url https://github.com/user/repo.git \
|
||||
--oauth-app-token <oauth_token>
|
||||
```
|
||||
- If not provided via CLI, the following environment variables will be used (in order of precedence):
|
||||
- `GITHUB_OAUTH_APP_TOKEN`
|
||||
- `GITHUB_USERNAME` and `GITHUB_PERSONAL_ACCESS_TOKEN`
|
||||
- If neither CLI flags nor environment variables are set, the scan will attempt to clone without authentication or using the credentials provided in the [git URL](https://git-scm.com/docs/git-clone#_git_urls).
|
||||
|
||||
#### Mutually Exclusive Flags
|
||||
- `--scan-path` and `--scan-repository-url` are mutually exclusive. Only one can be specified at a time.
|
||||
|
||||
### Specify Frameworks
|
||||
|
||||
Scan only Terraform and Kubernetes files:
|
||||
@@ -62,6 +91,8 @@ prowler iac --scan-path ./iac --output-formats csv json html
|
||||
|
||||
## Notes
|
||||
|
||||
- The IaC provider does not require cloud authentication.
|
||||
- The IaC provider does not require cloud authentication for local scans.
|
||||
- For remote repository scans, authentication is optional for public repositories but required for private ones.
|
||||
- CLI flags override environment variables for authentication.
|
||||
- It is ideal for CI/CD pipelines and local development environments.
|
||||
- For more details on supported frameworks and rules, see the [Checkov documentation](https://www.checkov.io/1.Welcome/Quick%20Start.html).
|
||||
|
||||
@@ -109,6 +109,7 @@ nav:
|
||||
- Use of PowerShell: tutorials/microsoft365/use-of-powershell.md
|
||||
- GitHub:
|
||||
- Authentication: tutorials/github/authentication.md
|
||||
- Getting Started: tutorials/github/getting-started-github.md
|
||||
- IaC:
|
||||
- Getting Started: tutorials/iac/getting-started-iac.md
|
||||
- Developer Guide:
|
||||
|
||||
Generated
+142
-93
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "about-time"
|
||||
@@ -41,103 +41,103 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.12.13"
|
||||
version = "3.12.14"
|
||||
description = "Async http client/server framework (asyncio)"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5421af8f22a98f640261ee48aae3a37f0c41371e99412d55eaf2f8a46d5dad29"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fcda86f6cb318ba36ed8f1396a6a4a3fd8f856f84d426584392083d10da4de0"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4cd71c9fb92aceb5a23c4c39d8ecc80389c178eba9feab77f19274843eb9412d"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34ebf1aca12845066c963016655dac897651e1544f22a34c9b461ac3b4b1d3aa"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:893a4639694c5b7edd4bdd8141be296042b6806e27cc1d794e585c43010cc294"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:663d8ee3ffb3494502ebcccb49078faddbb84c1d870f9c1dd5a29e85d1f747ce"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0f8f6a85a0006ae2709aa4ce05749ba2cdcb4b43d6c21a16c8517c16593aabe"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1582745eb63df267c92d8b61ca655a0ce62105ef62542c00a74590f306be8cb5"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d59227776ee2aa64226f7e086638baa645f4b044f2947dbf85c76ab11dcba073"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06b07c418bde1c8e737d8fa67741072bd3f5b0fb66cf8c0655172188c17e5fa6"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:9445c1842680efac0f81d272fd8db7163acfcc2b1436e3f420f4c9a9c5a50795"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:09c4767af0b0b98c724f5d47f2bf33395c8986995b0a9dab0575ca81a554a8c0"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f3854fbde7a465318ad8d3fc5bef8f059e6d0a87e71a0d3360bb56c0bf87b18a"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2332b4c361c05ecd381edb99e2a33733f3db906739a83a483974b3df70a51b40"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1561db63fa1b658cd94325d303933553ea7d89ae09ff21cc3bcd41b8521fbbb6"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-win32.whl", hash = "sha256:a0be857f0b35177ba09d7c472825d1b711d11c6d0e8a2052804e3b93166de1ad"},
|
||||
{file = "aiohttp-3.12.13-cp310-cp310-win_amd64.whl", hash = "sha256:fcc30ad4fb5cb41a33953292d45f54ef4066746d625992aeac33b8c681173178"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c229b1437aa2576b99384e4be668af1db84b31a45305d02f61f5497cfa6f60c"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04076d8c63471e51e3689c93940775dc3d12d855c0c80d18ac5a1c68f0904358"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:55683615813ce3601640cfaa1041174dc956d28ba0511c8cbd75273eb0587014"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:921bc91e602d7506d37643e77819cb0b840d4ebb5f8d6408423af3d3bf79a7b7"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e72d17fe0974ddeae8ed86db297e23dba39c7ac36d84acdbb53df2e18505a013"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0653d15587909a52e024a261943cf1c5bdc69acb71f411b0dd5966d065a51a47"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a77b48997c66722c65e157c06c74332cdf9c7ad00494b85ec43f324e5c5a9b9a"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6946bae55fd36cfb8e4092c921075cde029c71c7cb571d72f1079d1e4e013bc"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f95db8c8b219bcf294a53742c7bda49b80ceb9d577c8e7aa075612b7f39ffb7"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:03d5eb3cfb4949ab4c74822fb3326cd9655c2b9fe22e4257e2100d44215b2e2b"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6383dd0ffa15515283c26cbf41ac8e6705aab54b4cbb77bdb8935a713a89bee9"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6548a411bc8219b45ba2577716493aa63b12803d1e5dc70508c539d0db8dbf5a"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:81b0fcbfe59a4ca41dc8f635c2a4a71e63f75168cc91026c61be665945739e2d"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a83797a0174e7995e5edce9dcecc517c642eb43bc3cba296d4512edf346eee2"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a5734d8469a5633a4e9ffdf9983ff7cdb512524645c7a3d4bc8a3de45b935ac3"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-win32.whl", hash = "sha256:fef8d50dfa482925bb6b4c208b40d8e9fa54cecba923dc65b825a72eed9a5dbd"},
|
||||
{file = "aiohttp-3.12.13-cp311-cp311-win_amd64.whl", hash = "sha256:9a27da9c3b5ed9d04c36ad2df65b38a96a37e9cfba6f1381b842d05d98e6afe9"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0aa580cf80558557285b49452151b9c69f2fa3ad94c5c9e76e684719a8791b73"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b103a7e414b57e6939cc4dece8e282cfb22043efd0c7298044f6594cf83ab347"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:78f64e748e9e741d2eccff9597d09fb3cd962210e5b5716047cbb646dc8fe06f"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29c955989bf4c696d2ededc6b0ccb85a73623ae6e112439398935362bacfaaf6"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d640191016763fab76072c87d8854a19e8e65d7a6fcfcbf017926bdbbb30a7e5"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dc507481266b410dede95dd9f26c8d6f5a14315372cc48a6e43eac652237d9b"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8a94daa873465d518db073bd95d75f14302e0208a08e8c942b2f3f1c07288a75"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f52420cde4ce0bb9425a375d95577fe082cb5721ecb61da3049b55189e4e6"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f7df1f620ec40f1a7fbcb99ea17d7326ea6996715e78f71a1c9a021e31b96b8"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3062d4ad53b36e17796dce1c0d6da0ad27a015c321e663657ba1cc7659cfc710"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:8605e22d2a86b8e51ffb5253d9045ea73683d92d47c0b1438e11a359bdb94462"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:54fbbe6beafc2820de71ece2198458a711e224e116efefa01b7969f3e2b3ddae"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:050bd277dfc3768b606fd4eae79dd58ceda67d8b0b3c565656a89ae34525d15e"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2637a60910b58f50f22379b6797466c3aa6ae28a6ab6404e09175ce4955b4e6a"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e986067357550d1aaa21cfe9897fa19e680110551518a5a7cf44e6c5638cb8b5"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-win32.whl", hash = "sha256:ac941a80aeea2aaae2875c9500861a3ba356f9ff17b9cb2dbfb5cbf91baaf5bf"},
|
||||
{file = "aiohttp-3.12.13-cp312-cp312-win_amd64.whl", hash = "sha256:671f41e6146a749b6c81cb7fd07f5a8356d46febdaaaf07b0e774ff04830461e"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d4a18e61f271127465bdb0e8ff36e8f02ac4a32a80d8927aa52371e93cd87938"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:532542cb48691179455fab429cdb0d558b5e5290b033b87478f2aa6af5d20ace"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d7eea18b52f23c050ae9db5d01f3d264ab08f09e7356d6f68e3f3ac2de9dfabb"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad7c8e5c25f2a26842a7c239de3f7b6bfb92304593ef997c04ac49fb703ff4d7"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6af355b483e3fe9d7336d84539fef460120c2f6e50e06c658fe2907c69262d6b"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a95cf9f097498f35c88e3609f55bb47b28a5ef67f6888f4390b3d73e2bac6177"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8ed8c38a1c584fe99a475a8f60eefc0b682ea413a84c6ce769bb19a7ff1c5ef"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0b9170d5d800126b5bc89d3053a2363406d6e327afb6afaeda2d19ee8bb103"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:372feeace612ef8eb41f05ae014a92121a512bd5067db8f25101dd88a8db11da"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a946d3702f7965d81f7af7ea8fb03bb33fe53d311df48a46eeca17e9e0beed2d"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a0c4725fae86555bbb1d4082129e21de7264f4ab14baf735278c974785cd2041"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b28ea2f708234f0a5c44eb6c7d9eb63a148ce3252ba0140d050b091b6e842d1"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d4f5becd2a5791829f79608c6f3dc745388162376f310eb9c142c985f9441cc1"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:60f2ce6b944e97649051d5f5cc0f439360690b73909230e107fd45a359d3e911"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:69fc1909857401b67bf599c793f2183fbc4804717388b0b888f27f9929aa41f3"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-win32.whl", hash = "sha256:7d7e68787a2046b0e44ba5587aa723ce05d711e3a3665b6b7545328ac8e3c0dd"},
|
||||
{file = "aiohttp-3.12.13-cp313-cp313-win_amd64.whl", hash = "sha256:5a178390ca90419bfd41419a809688c368e63c86bd725e1186dd97f6b89c2706"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:36f6c973e003dc9b0bb4e8492a643641ea8ef0e97ff7aaa5c0f53d68839357b4"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6cbfc73179bd67c229eb171e2e3745d2afd5c711ccd1e40a68b90427f282eab1"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e8b27b2d414f7e3205aa23bb4a692e935ef877e3a71f40d1884f6e04fd7fa74"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eabded0c2b2ef56243289112c48556c395d70150ce4220d9008e6b4b3dd15690"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:003038e83f1a3ff97409999995ec02fe3008a1d675478949643281141f54751d"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b6f46613031dbc92bdcaad9c4c22c7209236ec501f9c0c5f5f0b6a689bf50f3"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c332c6bb04650d59fb94ed96491f43812549a3ba6e7a16a218e612f99f04145e"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fea41a2c931fb582cb15dc86a3037329e7b941df52b487a9f8b5aa960153cbd"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:846104f45d18fb390efd9b422b27d8f3cf8853f1218c537f36e71a385758c896"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d6c85ac7dd350f8da2520bac8205ce99df4435b399fa7f4dc4a70407073e390"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5a1ecce0ed281bec7da8550da052a6b89552db14d0a0a45554156f085a912f48"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5304d74867028cca8f64f1cc1215eb365388033c5a691ea7aa6b0dc47412f495"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:64d1f24ee95a2d1e094a4cd7a9b7d34d08db1bbcb8aa9fb717046b0a884ac294"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:119c79922a7001ca6a9e253228eb39b793ea994fd2eccb79481c64b5f9d2a055"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bb18f00396d22e2f10cd8825d671d9f9a3ba968d708a559c02a627536b36d91c"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-win32.whl", hash = "sha256:0022de47ef63fd06b065d430ac79c6b0bd24cdae7feaf0e8c6bac23b805a23a8"},
|
||||
{file = "aiohttp-3.12.13-cp39-cp39-win_amd64.whl", hash = "sha256:29e08111ccf81b2734ae03f1ad1cb03b9615e7d8f616764f22f71209c094f122"},
|
||||
{file = "aiohttp-3.12.13.tar.gz", hash = "sha256:47e2da578528264a12e4e3dd8dd72a7289e5f812758fe086473fab037a10fcce"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-win32.whl", hash = "sha256:ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c"},
|
||||
{file = "aiohttp-3.12.14-cp310-cp310-win_amd64.whl", hash = "sha256:cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-win32.whl", hash = "sha256:a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b"},
|
||||
{file = "aiohttp-3.12.14-cp311-cp311-win_amd64.whl", hash = "sha256:0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-win32.whl", hash = "sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729"},
|
||||
{file = "aiohttp-3.12.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-win32.whl", hash = "sha256:a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5"},
|
||||
{file = "aiohttp-3.12.14-cp313-cp313-win_amd64.whl", hash = "sha256:3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b8cc6b05e94d837bcd71c6531e2344e1ff0fb87abe4ad78a9261d67ef5d83eae"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1dcb015ac6a3b8facd3677597edd5ff39d11d937456702f0bb2b762e390a21b"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3779ed96105cd70ee5e85ca4f457adbce3d9ff33ec3d0ebcdf6c5727f26b21b3"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:717a0680729b4ebd7569c1dcd718c46b09b360745fd8eb12317abc74b14d14d0"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b5dd3a2ef7c7e968dbbac8f5574ebeac4d2b813b247e8cec28174a2ba3627170"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4710f77598c0092239bc12c1fcc278a444e16c7032d91babf5abbf7166463f7b"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f3e9f75ae842a6c22a195d4a127263dbf87cbab729829e0bd7857fb1672400b2"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9c8d55d6802086edd188e3a7d85a77787e50d56ce3eb4757a3205fa4657922"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79b29053ff3ad307880d94562cca80693c62062a098a5776ea8ef5ef4b28d140"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:23e1332fff36bebd3183db0c7a547a1da9d3b4091509f6d818e098855f2f27d3"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a564188ce831fd110ea76bcc97085dd6c625b427db3f1dbb14ca4baa1447dcbc"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a7a1b4302f70bb3ec40ca86de82def532c97a80db49cac6a6700af0de41af5ee"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:1b07ccef62950a2519f9bfc1e5b294de5dd84329f444ca0b329605ea787a3de5"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:938bd3ca6259e7e48b38d84f753d548bd863e0c222ed6ee6ace3fd6752768a84"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8bc784302b6b9f163b54c4e93d7a6f09563bd01ff2b841b29ed3ac126e5040bf"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-win32.whl", hash = "sha256:a3416f95961dd7d5393ecff99e3f41dc990fb72eda86c11f2a60308ac6dcd7a0"},
|
||||
{file = "aiohttp-3.12.14-cp39-cp39-win_amd64.whl", hash = "sha256:196858b8820d7f60578f8b47e5669b3195c21d8ab261e39b1d705346458f445f"},
|
||||
{file = "aiohttp-3.12.14.tar.gz", hash = "sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aiohappyeyeballs = ">=2.5.0"
|
||||
aiosignal = ">=1.1.2"
|
||||
aiosignal = ">=1.4.0"
|
||||
async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""}
|
||||
attrs = ">=17.3.0"
|
||||
frozenlist = ">=1.1.1"
|
||||
@@ -166,18 +166,19 @@ docs = ["sphinx (==7.3.7)", "sphinx-mdinclude (==0.6.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "aiosignal"
|
||||
version = "1.3.2"
|
||||
version = "1.4.0"
|
||||
description = "aiosignal: a list of registered asynchronous callbacks"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"},
|
||||
{file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"},
|
||||
{file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"},
|
||||
{file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
frozenlist = ">=1.1.0"
|
||||
typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""}
|
||||
|
||||
[[package]]
|
||||
name = "alive-progress"
|
||||
@@ -1939,6 +1940,54 @@ files = [
|
||||
{file = "dpath-2.1.3.tar.gz", hash = "sha256:d1a7a0e6427d0a4156c792c82caf1f0109603f68ace792e36ca4596fd2cb8d9d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dulwich"
|
||||
version = "0.23.0"
|
||||
description = "Python Git Library"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "dulwich-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c13b0d5a9009cde23ecb8cb201df6e23e2a7a82c5e2d6ba6443fbb322c9befc6"},
|
||||
{file = "dulwich-0.23.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:a68faf8612bf93de1285048d6ad13160f0fb3c5596a86e694e78f4e212886fa5"},
|
||||
{file = "dulwich-0.23.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d971566826f16ec67c70641c1fbdb337323aa5b533799bc5a4641f4750e73b36"},
|
||||
{file = "dulwich-0.23.0-cp310-cp310-win32.whl", hash = "sha256:27d970adf539806dfc4fe3e4c9e8dc6ebf0318977a56e24d22f13413535a51ba"},
|
||||
{file = "dulwich-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:025178533e884ffdb0d9d8db4b8870745d438cbfecb782fd1b56c3b6438e86cf"},
|
||||
{file = "dulwich-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d68498fdda13ab00791b483daab3bcfe9f9721c037aa458695e6ad81640c57cc"},
|
||||
{file = "dulwich-0.23.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:cb7bb930b12471a1cfcea4b3d25a671dc0ad32573f0ad25684684298959a1527"},
|
||||
{file = "dulwich-0.23.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a2abbce32fd2bc7902bcc5f69b10bf22576810de21651baaa864b78fd7aec261"},
|
||||
{file = "dulwich-0.23.0-cp311-cp311-win32.whl", hash = "sha256:9e3151f10ce2a9ff91bca64c74345217f53bdd947dc958032343822009832f7a"},
|
||||
{file = "dulwich-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:3ae9f1d9dc92d4e9a3f89ba2c55221f7b6442c5dd93b3f6f539a3c9eb3f37bdd"},
|
||||
{file = "dulwich-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52cdef66a7994d29528ca79ca59452518bbba3fd56a9c61c61f6c467c1c7956e"},
|
||||
{file = "dulwich-0.23.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d473888a6ab9ed5d4a4c3f053cbe5b77f72d54b6efdf5688fed76094316e571e"},
|
||||
{file = "dulwich-0.23.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:19fcf20224c641a61c774da92f098fbaae9938c7e17a52841e64092adf7e78f9"},
|
||||
{file = "dulwich-0.23.0-cp312-cp312-win32.whl", hash = "sha256:7fc8b76b704ef35cd001e993e3aa4e1d666a2064bf467c07c560f12b2959dcaf"},
|
||||
{file = "dulwich-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:cb0566b888b578325350b4d67c61a0de35d417e9877560e3a6df88cae4576a59"},
|
||||
{file = "dulwich-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624e2223c8b705b3a217f9c8d3bfed3a573093be0b0ba033c46cba8411fb9630"},
|
||||
{file = "dulwich-0.23.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:b4eaf326d15bb3fc5316c777b0312f0fe02f6f82a4368cd971d0ce2167b7ec34"},
|
||||
{file = "dulwich-0.23.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:d754afaf7c133a015c75cc2be11703138b4be932e0eeeb2c70add56083f31109"},
|
||||
{file = "dulwich-0.23.0-cp313-cp313-win32.whl", hash = "sha256:ac53ec438bde3c1f479782c34240479b36cd47230d091979137b7ecc12c0242e"},
|
||||
{file = "dulwich-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:50d3b4ba45671fb8b7d2afbd02c10b4edbc3290a1f92260e64098b409e9ca35c"},
|
||||
{file = "dulwich-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8e18ea3fa49f10932077f39c0b960b5045870c550c3d7c74f3cfaac09457cd6"},
|
||||
{file = "dulwich-0.23.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3e6df0eb8cca21f210e3ddce2ccb64482646893dbec2fee9f3411d037595bf7b"},
|
||||
{file = "dulwich-0.23.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:90c0064d7df8e7fe83d3a03c7d60b9e07a92698b18442f926199b2c3f0bf34d4"},
|
||||
{file = "dulwich-0.23.0-cp39-cp39-win32.whl", hash = "sha256:84eef513aba501cbc1f223863f3b4b351fe732d3fb590cab9bdf5d33eb1a1248"},
|
||||
{file = "dulwich-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:dce943da48217c26e15790fd6df62d27a7f1d067102780351ebf2635fc0ba482"},
|
||||
{file = "dulwich-0.23.0-py3-none-any.whl", hash = "sha256:d8da6694ca332bb48775e35ee2215aa4673821164a91b83062f699c69f7cd135"},
|
||||
{file = "dulwich-0.23.0.tar.gz", hash = "sha256:0aa6c2489dd5e978b27e9b75983b7331a66c999f0efc54ebe37cab808ed322ae"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
urllib3 = ">=1.25"
|
||||
|
||||
[package.extras]
|
||||
dev = ["dissolve (>=0.1.1)", "mypy (==1.16.0)", "ruff (==0.11.13)"]
|
||||
fastimport = ["fastimport"]
|
||||
https = ["urllib3 (>=1.24.1)"]
|
||||
merge = ["merge3"]
|
||||
paramiko = ["paramiko"]
|
||||
pgp = ["gpg"]
|
||||
|
||||
[[package]]
|
||||
name = "durationpy"
|
||||
version = "0.10"
|
||||
@@ -6623,4 +6672,4 @@ type = ["pytest-mypy"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">3.9.1,<3.13"
|
||||
content-hash = "4b0eee5566caf8e9d1e2e6fe8ac37733b29dd4275c2d65ac5291fa3acd514d9e"
|
||||
content-hash = "7a3f5d9a2b06322b3c4b65d1010116f84ea5e725693e51316ffeb23d4ed09c96"
|
||||
|
||||
@@ -11,12 +11,25 @@ All notable changes to the **Prowler SDK** are documented in this file.
|
||||
- `vm_linux_enforce_ssh_authentication` check for Azure provider [(#8149)](https://github.com/prowler-cloud/prowler/pull/8149)
|
||||
- `vm_ensure_using_approved_images` check for Azure provider [(#8168)](https://github.com/prowler-cloud/prowler/pull/8168)
|
||||
- `vm_scaleset_associated_load_balancer` check for Azure provider [(#8181)](https://github.com/prowler-cloud/prowler/pull/8181)
|
||||
- Support for remote repository scanning in IaC provider [(#8193)](https://github.com/prowler-cloud/prowler/pull/8193)
|
||||
- Add `test_connection` method to GitHub provider [(#8248)](https://github.com/prowler-cloud/prowler/pull/8248)
|
||||
|
||||
### Changed
|
||||
- Refactor the Azure Defender get security contact configuration method to use the API REST endpoint instead of the SDK [(#8241)](https://github.com/prowler-cloud/prowler/pull/8241)
|
||||
|
||||
### Fixed
|
||||
- Title & description wording for `iam_user_accesskey_unused` check for AWS provider [(#8233)](https://github.com/prowler-cloud/prowler/pull/8233)
|
||||
- Add GitHub provider to lateral panel in documentation and change -h environment variable output [(#8246)](https://github.com/prowler-cloud/prowler/pull/8246)
|
||||
- Ensure `is_service_role` only returns `True` for service roles [(#8274)](https://github.com/prowler-cloud/prowler/pull/8274)
|
||||
- Update DynamoDB check metadata to fix broken link [(#8273)](https://github.com/prowler-cloud/prowler/pull/8273)
|
||||
- Show correct count of findings in Dashboard Security Posture page [(#8270)](https://github.com/prowler-cloud/prowler/pull/8270)
|
||||
|
||||
---
|
||||
|
||||
## [v5.8.2] (Prowler UNRELEASED)
|
||||
|
||||
### Fixed
|
||||
- Fix error in Dashboard Overview page when reading CSV files [(#8257)](https://github.com/prowler-cloud/prowler/pull/8257)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -102,6 +102,7 @@ from prowler.providers.github.models import GithubOutputOptions
|
||||
from prowler.providers.iac.models import IACOutputOptions
|
||||
from prowler.providers.kubernetes.models import KubernetesOutputOptions
|
||||
from prowler.providers.m365.models import M365OutputOptions
|
||||
from prowler.providers.mongodbatlas.models import MongoDBAtlasOutputOptions
|
||||
from prowler.providers.nhn.models import NHNOutputOptions
|
||||
|
||||
|
||||
@@ -300,6 +301,10 @@ def prowler():
|
||||
output_options = M365OutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
elif provider == "mongodbatlas":
|
||||
output_options = MongoDBAtlasOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
)
|
||||
elif provider == "nhn":
|
||||
output_options = NHNOutputOptions(
|
||||
args, bulk_checks_metadata, global_provider.identity
|
||||
|
||||
@@ -525,3 +525,8 @@ m365:
|
||||
github:
|
||||
# github.repository_inactive_not_archived --> CIS recommends 180 days (6 months)
|
||||
inactive_not_archived_days_threshold: 180
|
||||
|
||||
# MongoDB Atlas Configuration
|
||||
mongodbatlas:
|
||||
# mongodbatlas.organizations_service_account_secrets_expiration --> Maximum hours for service account secrets validity
|
||||
max_service_account_secret_validity_hours: 8
|
||||
|
||||
@@ -666,6 +666,31 @@ class CheckReportNHN(Check_Report):
|
||||
self.location = getattr(resource, "location", "kr1")
|
||||
|
||||
|
||||
@dataclass
|
||||
class CheckReportMongoDBAtlas(Check_Report):
|
||||
"""Contains the MongoDB Atlas Check's finding information."""
|
||||
|
||||
resource_name: str
|
||||
resource_id: str
|
||||
project_id: str
|
||||
location: str
|
||||
|
||||
def __init__(self, metadata: Dict, resource: Any) -> None:
|
||||
"""Initialize the MongoDB Atlas Check's finding information.
|
||||
|
||||
Args:
|
||||
metadata: The metadata of the check.
|
||||
resource: Basic information about the resource. Defaults to None.
|
||||
"""
|
||||
super().__init__(metadata, resource)
|
||||
self.resource_name = getattr(
|
||||
resource, "name", getattr(resource, "resource_name", "")
|
||||
)
|
||||
self.resource_id = getattr(resource, "id", getattr(resource, "resource_id", ""))
|
||||
self.project_id = getattr(resource, "project_id", "")
|
||||
self.location = getattr(resource, "location", self.project_id)
|
||||
|
||||
|
||||
# Testing Pending
|
||||
def load_check_metadata(metadata_file: str) -> CheckMetadata:
|
||||
"""
|
||||
|
||||
@@ -26,10 +26,10 @@ class ProwlerArgumentParser:
|
||||
self.parser = argparse.ArgumentParser(
|
||||
prog="prowler",
|
||||
formatter_class=RawTextHelpFormatter,
|
||||
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,nhn,dashboard,iac} ...",
|
||||
usage="prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,nhn,mongodbatlas,dashboard,iac} ...",
|
||||
epilog="""
|
||||
Available Cloud Providers:
|
||||
{aws,azure,gcp,kubernetes,m365,github,iac,nhn}
|
||||
{aws,azure,gcp,kubernetes,m365,github,iac,nhn,mongodbatlas}
|
||||
aws AWS Provider
|
||||
azure Azure Provider
|
||||
gcp GCP Provider
|
||||
@@ -38,6 +38,7 @@ Available Cloud Providers:
|
||||
github GitHub Provider
|
||||
iac IaC Provider (Preview)
|
||||
nhn NHN Provider (Unofficial)
|
||||
mongodbatlas MongoDB Atlas Provider
|
||||
|
||||
Available components:
|
||||
dashboard Local dashboard
|
||||
|
||||
@@ -267,6 +267,18 @@ class Finding(BaseModel):
|
||||
output_data["resource_uid"] = check_output.resource_id
|
||||
output_data["region"] = check_output.location
|
||||
|
||||
elif provider.type == "mongodbatlas":
|
||||
output_data["auth_method"] = "api_key"
|
||||
output_data["account_uid"] = get_nested_attribute(
|
||||
provider, "identity.user_id"
|
||||
)
|
||||
output_data["account_name"] = get_nested_attribute(
|
||||
provider, "identity.username"
|
||||
)
|
||||
output_data["resource_name"] = check_output.resource_name
|
||||
output_data["resource_uid"] = check_output.resource_id
|
||||
output_data["region"] = check_output.location
|
||||
|
||||
elif provider.type == "nhn":
|
||||
output_data["auth_method"] = (
|
||||
f"passwordCredentials: username={get_nested_attribute(provider, '_identity.username')}, "
|
||||
@@ -283,16 +295,14 @@ class Finding(BaseModel):
|
||||
output_data["region"] = check_output.location
|
||||
|
||||
elif provider.type == "iac":
|
||||
output_data["auth_method"] = "local" # Until we support remote repos
|
||||
output_data["auth_method"] = provider.auth_method
|
||||
output_data["account_uid"] = "iac"
|
||||
output_data["account_name"] = "iac"
|
||||
output_data["resource_name"] = check_output.resource_name
|
||||
output_data["resource_uid"] = check_output.resource_name
|
||||
output_data["region"] = check_output.resource_path
|
||||
output_data["resource_line_range"] = check_output.resource_line_range
|
||||
output_data["framework"] = (
|
||||
check_output.check_metadata.ServiceName
|
||||
) # TODO: can we get the framework from the check_output?
|
||||
output_data["framework"] = check_output.check_metadata.ServiceName
|
||||
|
||||
# check_output Unique ID
|
||||
# TODO: move this to a function
|
||||
|
||||
@@ -689,6 +689,51 @@ class HTML(Output):
|
||||
)
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def get_mongodbatlas_assessment_summary(provider: Provider) -> str:
|
||||
"""
|
||||
get_mongodbatlas_assessment_summary gets the HTML assessment summary for the provider
|
||||
|
||||
Args:
|
||||
provider (Provider): the provider object
|
||||
|
||||
Returns:
|
||||
str: the HTML assessment summary
|
||||
"""
|
||||
try:
|
||||
return f"""
|
||||
<div class="col-md-2">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
MongoDB Atlas Assessment Summary
|
||||
</div>
|
||||
<ul class="list-group
|
||||
list-group-flush">
|
||||
<li class="list-group-item">
|
||||
<b>MongoDB Atlas user:</b> {provider.identity.username}
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<div class="card">
|
||||
<div class="card-header">
|
||||
MongoDB Atlas Credentials
|
||||
</div>
|
||||
<ul class="list-group
|
||||
list-group-flush">
|
||||
<li class="list-group-item">
|
||||
<b>MongoDB Atlas authentication method:</b> API Key
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>"""
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def get_iac_assessment_summary(provider: Provider) -> str:
|
||||
"""
|
||||
@@ -710,7 +755,7 @@ class HTML(Output):
|
||||
<ul class="list-group
|
||||
list-group-flush">
|
||||
<li class="list-group-item">
|
||||
<b>IAC path:</b> {provider.scan_path}
|
||||
{"<b>IAC repository URL:</b> " + provider.scan_repository_url if provider.scan_repository_url else "<b>IAC path:</b> " + provider.scan_path}
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
@@ -723,7 +768,7 @@ class HTML(Output):
|
||||
<ul class="list-group
|
||||
list-group-flush">
|
||||
<li class="list-group-item">
|
||||
<b>IAC authentication method:</b> local
|
||||
<b>IAC authentication method:</b> {provider.auth_method}
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
@@ -20,6 +20,8 @@ def stdout_report(finding, color, verbose, status, fix):
|
||||
details = finding.owner
|
||||
if finding.check_metadata.Provider == "m365":
|
||||
details = finding.location
|
||||
if finding.check_metadata.Provider == "mongodbatlas":
|
||||
details = finding.location
|
||||
if finding.check_metadata.Provider == "nhn":
|
||||
details = finding.location
|
||||
|
||||
|
||||
@@ -51,12 +51,19 @@ def display_summary_table(
|
||||
elif provider.type == "m365":
|
||||
entity_type = "Tenant Domain"
|
||||
audited_entities = provider.identity.tenant_domain
|
||||
elif provider.type == "mongodbatlas":
|
||||
entity_type = "User"
|
||||
audited_entities = provider.identity.username
|
||||
elif provider.type == "nhn":
|
||||
entity_type = "Tenant Domain"
|
||||
audited_entities = provider.identity.tenant_domain
|
||||
elif provider.type == "iac":
|
||||
entity_type = "Directory"
|
||||
audited_entities = provider.scan_path
|
||||
if provider.scan_repository_url:
|
||||
entity_type = "Repository"
|
||||
audited_entities = provider.scan_repository_url
|
||||
else:
|
||||
entity_type = "Directory"
|
||||
audited_entities = provider.scan_path
|
||||
|
||||
# Check if there are findings and that they are not all MANUAL
|
||||
if findings and not all(finding.status == "MANUAL" for finding in findings):
|
||||
|
||||
+2
-2
@@ -12,7 +12,7 @@
|
||||
"ResourceType": "AwsDynamoDbTable",
|
||||
"Description": "Check if DynamoDB table has encryption at rest enabled using CMK KMS.",
|
||||
"Risk": "All user data stored in Amazon DynamoDB is fully encrypted at rest. This functionality helps reduce the operational burden and complexity involved in protecting sensitive data.",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/amazondynamodbdb/latest/developerguide/EncryptionAtRest.html",
|
||||
"RelatedUrl": "https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/EncryptionAtRest.html",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
@@ -22,7 +22,7 @@
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Specify an encryption key when you create a new table or switch the encryption keys on an existing table by using the AWS Management Console.",
|
||||
"Url": "https://docs.aws.amazon.com/amazondynamodbdb/latest/developerguide/EncryptionAtRest.html"
|
||||
"Url": "https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/EncryptionAtRest.html"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
|
||||
@@ -13,38 +13,28 @@ from prowler.providers.aws.lib.service.service import AWSService
|
||||
|
||||
def is_service_role(role):
|
||||
try:
|
||||
if "Statement" in role["AssumeRolePolicyDocument"]:
|
||||
if isinstance(role["AssumeRolePolicyDocument"]["Statement"], list):
|
||||
for statement in role["AssumeRolePolicyDocument"]["Statement"]:
|
||||
if (
|
||||
statement["Effect"] == "Allow"
|
||||
and (
|
||||
"sts:AssumeRole" in statement["Action"]
|
||||
or "sts:*" in statement["Action"]
|
||||
or "*" in statement["Action"]
|
||||
)
|
||||
# This is what defines a service role
|
||||
and "Service" in statement["Principal"]
|
||||
):
|
||||
return True
|
||||
else:
|
||||
statement = role["AssumeRolePolicyDocument"]["Statement"]
|
||||
if (
|
||||
statement["Effect"] == "Allow"
|
||||
and (
|
||||
"sts:AssumeRole" in statement["Action"]
|
||||
or "sts:*" in statement["Action"]
|
||||
or "*" in statement["Action"]
|
||||
)
|
||||
# This is what defines a service role
|
||||
and "Service" in statement["Principal"]
|
||||
):
|
||||
return True
|
||||
statements = role.get("AssumeRolePolicyDocument", {}).get("Statement", [])
|
||||
if not isinstance(statements, list):
|
||||
statements = [statements]
|
||||
|
||||
for statement in statements:
|
||||
if statement.get("Effect") != "Allow" or not any(
|
||||
action in statement.get("Action", [])
|
||||
for action in ("sts:AssumeRole", "sts:*", "*")
|
||||
):
|
||||
return False
|
||||
|
||||
principal = statement.get("Principal", {})
|
||||
if set(principal.keys()) != {"Service"}:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
return False
|
||||
return False
|
||||
|
||||
|
||||
class IAM(AWSService):
|
||||
|
||||
+2
-2
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"Provider": "aws",
|
||||
"CheckID": "iam_user_accesskey_unused",
|
||||
"CheckTitle": "Ensure User Access Keys unused are disabled",
|
||||
"CheckTitle": "Ensure unused User Access Keys are disabled",
|
||||
"CheckType": [
|
||||
"Software and Configuration Checks"
|
||||
],
|
||||
@@ -10,7 +10,7 @@
|
||||
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "AwsIamUser",
|
||||
"Description": "Ensure User Access Keys unused are disabled",
|
||||
"Description": "Ensure unused User Access Keys are disabled",
|
||||
"Risk": "To increase the security of your AWS account, remove IAM user credentials (that is, passwords and access keys) that are not needed. For example, when users leave your organization or no longer need AWS access.",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
|
||||
+9
-15
@@ -1,5 +1,3 @@
|
||||
import re
|
||||
|
||||
from prowler.lib.check.models import Check, Check_Report_Azure
|
||||
from prowler.providers.azure.services.defender.defender_client import defender_client
|
||||
|
||||
@@ -10,21 +8,17 @@ class defender_additional_email_configured_with_a_security_contact(Check):
|
||||
|
||||
for (
|
||||
subscription_name,
|
||||
security_contacts,
|
||||
) in defender_client.security_contacts.items():
|
||||
for contact in security_contacts.values():
|
||||
report = Check_Report_Azure(metadata=self.metadata(), resource=contact)
|
||||
report.status = "PASS"
|
||||
security_contact_configurations,
|
||||
) in defender_client.security_contact_configurations.items():
|
||||
for contact_configuration in security_contact_configurations.values():
|
||||
report = Check_Report_Azure(
|
||||
metadata=self.metadata(), resource=contact_configuration
|
||||
)
|
||||
report.subscription = subscription_name
|
||||
report.status_extended = f"There is another correct email configured for subscription {subscription_name}."
|
||||
|
||||
emails = contact.emails.split(";")
|
||||
|
||||
for email in emails:
|
||||
if re.fullmatch(
|
||||
r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,}\b", email
|
||||
):
|
||||
break
|
||||
if len(contact_configuration.emails) > 0:
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"There is another correct email configured for subscription {subscription_name}."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"There is not another correct email configured for subscription {subscription_name}."
|
||||
|
||||
+9
-7
@@ -8,20 +8,22 @@ class defender_ensure_notify_alerts_severity_is_high(Check):
|
||||
|
||||
for (
|
||||
subscription_name,
|
||||
security_contacts,
|
||||
) in defender_client.security_contacts.items():
|
||||
for contact in security_contacts.values():
|
||||
report = Check_Report_Azure(metadata=self.metadata(), resource=contact)
|
||||
security_contact_configurations,
|
||||
) in defender_client.security_contact_configurations.items():
|
||||
for contact_configuration in security_contact_configurations.values():
|
||||
report = Check_Report_Azure(
|
||||
metadata=self.metadata(), resource=contact_configuration
|
||||
)
|
||||
report.subscription = subscription_name
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Notifications are not enabled for alerts with a minimum severity of high or lower in subscription {subscription_name}."
|
||||
|
||||
if (
|
||||
contact.alert_notifications_minimal_severity != "Critical"
|
||||
and contact.alert_notifications_minimal_severity != ""
|
||||
contact_configuration.alert_minimal_severity
|
||||
and contact_configuration.alert_minimal_severity != "Critical"
|
||||
):
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Notifications are enabled for alerts with a minimum severity of high or lower ({contact.alert_notifications_minimal_severity}) in subscription {subscription_name}."
|
||||
report.status_extended = f"Notifications are enabled for alerts with a minimum severity of high or lower ({contact_configuration.alert_minimal_severity}) in subscription {subscription_name}."
|
||||
|
||||
findings.append(report)
|
||||
|
||||
|
||||
+11
-10
@@ -8,19 +8,20 @@ class defender_ensure_notify_emails_to_owners(Check):
|
||||
|
||||
for (
|
||||
subscription_name,
|
||||
security_contacts,
|
||||
) in defender_client.security_contacts.items():
|
||||
for contact in security_contacts.values():
|
||||
report = Check_Report_Azure(metadata=self.metadata(), resource=contact)
|
||||
report.subscription = subscription_name
|
||||
report.status = "PASS"
|
||||
report.status_extended = (
|
||||
f"The Owner role is notified for subscription {subscription_name}."
|
||||
security_contact_configurations,
|
||||
) in defender_client.security_contact_configurations.items():
|
||||
for contact_configuration in security_contact_configurations.values():
|
||||
report = Check_Report_Azure(
|
||||
metadata=self.metadata(), resource=contact_configuration
|
||||
)
|
||||
report.subscription = subscription_name
|
||||
if (
|
||||
contact.notified_roles_state != "On"
|
||||
or "Owner" not in contact.notified_roles
|
||||
contact_configuration.notifications_by_role.state
|
||||
and "Owner" in contact_configuration.notifications_by_role.roles
|
||||
):
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"The Owner role is notified for subscription {subscription_name}."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"The Owner role is not notified for subscription {subscription_name}."
|
||||
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
from datetime import timedelta
|
||||
from typing import Dict
|
||||
from typing import Dict, Optional
|
||||
|
||||
from azure.core.exceptions import (
|
||||
ClientAuthenticationError,
|
||||
HttpResponseError,
|
||||
ResourceNotFoundError,
|
||||
)
|
||||
import requests
|
||||
from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError
|
||||
from azure.mgmt.security import SecurityCenter
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
@@ -22,7 +19,11 @@ class Defender(AzureService):
|
||||
self.auto_provisioning_settings = self._get_auto_provisioning_settings()
|
||||
self.assessments = self._get_assessments()
|
||||
self.settings = self._get_settings()
|
||||
self.security_contacts = self._get_security_contacts()
|
||||
self.security_contact_configurations = self._get_security_contacts(
|
||||
token=provider.session.get_token(
|
||||
"https://management.azure.com/.default"
|
||||
).token
|
||||
)
|
||||
self.iot_security_solutions = self._get_iot_security_solutions()
|
||||
|
||||
def _get_pricings(self):
|
||||
@@ -149,48 +150,70 @@ class Defender(AzureService):
|
||||
)
|
||||
return settings
|
||||
|
||||
def _get_security_contacts(self):
|
||||
def _get_security_contacts(self, token: str) -> dict[str, dict]:
|
||||
"""
|
||||
Get all security contacts configuration for all subscriptions.
|
||||
|
||||
Args:
|
||||
token: The authentication token to make the request.
|
||||
|
||||
Returns:
|
||||
A dictionary of security contacts for all subscriptions.
|
||||
"""
|
||||
logger.info("Defender - Getting security contacts...")
|
||||
security_contacts = {}
|
||||
for subscription_name, client in self.clients.items():
|
||||
for subscription_name, subscription_id in self.subscriptions.items():
|
||||
try:
|
||||
security_contacts.update({subscription_name: {}})
|
||||
# TODO: List all security contacts. For now, the list method is not working.
|
||||
security_contact_default = client.security_contacts.get("default")
|
||||
security_contacts[subscription_name].update(
|
||||
{
|
||||
security_contact_default.name: SecurityContacts(
|
||||
resource_id=security_contact_default.id,
|
||||
name=getattr(security_contact_default, "name", "default")
|
||||
or "default",
|
||||
emails=security_contact_default.emails,
|
||||
phone=security_contact_default.phone,
|
||||
alert_notifications_minimal_severity=security_contact_default.alert_notifications.minimal_severity,
|
||||
alert_notifications_state=security_contact_default.alert_notifications.state,
|
||||
notified_roles=security_contact_default.notifications_by_role.roles,
|
||||
notified_roles_state=security_contact_default.notifications_by_role.state,
|
||||
)
|
||||
}
|
||||
)
|
||||
except HttpResponseError as error:
|
||||
if error.status_code == 404:
|
||||
security_contacts[subscription_name].update(
|
||||
{
|
||||
"default": SecurityContacts(
|
||||
resource_id=f"/subscriptions/{self.subscriptions[subscription_name]}/providers/Microsoft.Security/securityContacts/default",
|
||||
name="default",
|
||||
emails="",
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="",
|
||||
alert_notifications_state="",
|
||||
notified_roles=[""],
|
||||
notified_roles_state="",
|
||||
)
|
||||
}
|
||||
url = f"https://management.azure.com/subscriptions/{subscription_id}/providers/Microsoft.Security/securityContacts?api-version=2023-12-01-preview"
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
response = requests.get(url, headers=headers)
|
||||
response.raise_for_status()
|
||||
contact_configurations = response.json().get("value", [])
|
||||
security_contacts[subscription_name] = {}
|
||||
for contact_configuration in contact_configurations:
|
||||
props = contact_configuration.get("properties", {})
|
||||
|
||||
# Map notificationsByRole.state from "On"/"Off" to boolean
|
||||
notifications_by_role_state = props.get(
|
||||
"notificationsByRole", {}
|
||||
).get("state", "Off")
|
||||
notifications_by_role_state_bool = (
|
||||
notifications_by_role_state.lower() == "on"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"Subscription name: {subscription_name} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
notifications_by_role_roles = props.get(
|
||||
"notificationsByRole", {}
|
||||
).get("roles", [])
|
||||
|
||||
# Extract minimalRiskLevel and minimalSeverity from notificationsSources
|
||||
attack_path_minimal_risk_level = None
|
||||
alert_minimal_severity = None
|
||||
for source in props.get("notificationsSources", []):
|
||||
if source.get("sourceType") == "AttackPath":
|
||||
value = source.get("minimalRiskLevel")
|
||||
if value is not None:
|
||||
attack_path_minimal_risk_level = value
|
||||
elif source.get("sourceType") == "Alert":
|
||||
value = source.get("minimalSeverity")
|
||||
if value is not None:
|
||||
alert_minimal_severity = value
|
||||
|
||||
security_contacts[subscription_name][
|
||||
contact_configuration.get("name", "default")
|
||||
] = SecurityContactConfiguration(
|
||||
id=contact_configuration.get("id", ""),
|
||||
name=contact_configuration.get("name", "default"),
|
||||
enabled=props.get("isEnabled", False),
|
||||
emails=props.get("emails", "").split(";"),
|
||||
phone=props.get("phone", ""),
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=notifications_by_role_state_bool,
|
||||
roles=notifications_by_role_roles,
|
||||
),
|
||||
attack_path_minimal_risk_level=attack_path_minimal_risk_level,
|
||||
alert_minimal_severity=alert_minimal_severity,
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
@@ -252,15 +275,42 @@ class Setting(BaseModel):
|
||||
enabled: bool
|
||||
|
||||
|
||||
class SecurityContacts(BaseModel):
|
||||
resource_id: str
|
||||
class NotificationsByRole(BaseModel):
|
||||
"""
|
||||
Defines whether to send email notifications from Microsoft Defender for Cloud to persons with specific RBAC roles on the subscription.
|
||||
|
||||
Attributes:
|
||||
state: Whether notifications by role are enabled.
|
||||
roles: List of Azure roles (e.g., 'Owner', 'Admin') to be notified.
|
||||
"""
|
||||
|
||||
state: bool
|
||||
roles: list[str]
|
||||
|
||||
|
||||
class SecurityContactConfiguration(BaseModel):
|
||||
"""
|
||||
Represents the configuration of an Azure Security Center security contact.
|
||||
|
||||
Attributes:
|
||||
id: The unique resource ID of the security contact.
|
||||
name: The name of the security contact (usually 'default').
|
||||
enabled: Whether the security contact is enabled. If enabled, the security contact will receive notifications, otherwise it will not.
|
||||
emails: List of email addresses to notify.
|
||||
phone: Contact phone number.
|
||||
notifications_by_role: Defines whether to send email notifications from Microsoft Defender for Cloud to persons with specific RBAC roles on the subscription.
|
||||
attack_path_minimal_risk_level: Minimal risk level for Attack Path notifications (e.g., 'Critical').
|
||||
alert_minimal_severity: Minimal severity for Alert notifications (e.g., 'Medium').
|
||||
"""
|
||||
|
||||
id: str
|
||||
name: str
|
||||
emails: str
|
||||
phone: str
|
||||
alert_notifications_minimal_severity: str
|
||||
alert_notifications_state: str
|
||||
notified_roles: list[str]
|
||||
notified_roles_state: str
|
||||
enabled: bool
|
||||
emails: list[str]
|
||||
phone: Optional[str] = None
|
||||
notifications_by_role: NotificationsByRole
|
||||
attack_path_minimal_risk_level: Optional[str] = None
|
||||
alert_minimal_severity: Optional[str] = None
|
||||
|
||||
|
||||
class IoTSecuritySolution(BaseModel):
|
||||
|
||||
@@ -246,10 +246,24 @@ class Provider(ABC):
|
||||
elif "iac" in provider_class_name.lower():
|
||||
provider_class(
|
||||
scan_path=arguments.scan_path,
|
||||
scan_repository_url=arguments.scan_repository_url,
|
||||
frameworks=arguments.frameworks,
|
||||
exclude_path=arguments.exclude_path,
|
||||
config_path=arguments.config_file,
|
||||
fixer_config=fixer_config,
|
||||
github_username=arguments.github_username,
|
||||
personal_access_token=arguments.personal_access_token,
|
||||
oauth_app_token=arguments.oauth_app_token,
|
||||
)
|
||||
elif "mongodbatlas" in provider_class_name.lower():
|
||||
provider_class(
|
||||
atlas_public_key=arguments.atlas_public_key,
|
||||
atlas_private_key=arguments.atlas_private_key,
|
||||
atlas_organization_id=arguments.atlas_organization_id,
|
||||
atlas_project_id=arguments.atlas_project_id,
|
||||
config_path=arguments.config_file,
|
||||
mutelist_path=arguments.mutelist_file,
|
||||
fixer_config=fixer_config,
|
||||
)
|
||||
|
||||
except TypeError as error:
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import json
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from os import environ
|
||||
from typing import List
|
||||
|
||||
from alive_progress import alive_bar
|
||||
from checkov.ansible.runner import Runner as AnsibleRunner
|
||||
from checkov.argo_workflows.runner import Runner as ArgoWorkflowsRunner
|
||||
from checkov.arm.runner import Runner as ArmRunner
|
||||
@@ -35,6 +39,7 @@ from checkov.terraform.runner import Runner as TerraformRunner
|
||||
from checkov.terraform_json.runner import TerraformJsonRunner
|
||||
from checkov.yaml_doc.runner import Runner as YamlDocRunner
|
||||
from colorama import Fore, Style
|
||||
from dulwich import porcelain
|
||||
|
||||
from prowler.config.config import (
|
||||
default_config_file_path,
|
||||
@@ -54,21 +59,56 @@ class IacProvider(Provider):
|
||||
def __init__(
|
||||
self,
|
||||
scan_path: str = ".",
|
||||
scan_repository_url: str = None,
|
||||
frameworks: list[str] = ["all"],
|
||||
exclude_path: list[str] = [],
|
||||
config_path: str = None,
|
||||
config_content: dict = None,
|
||||
fixer_config: dict = {},
|
||||
github_username: str = None,
|
||||
personal_access_token: str = None,
|
||||
oauth_app_token: str = None,
|
||||
):
|
||||
logger.info("Instantiating IAC Provider...")
|
||||
|
||||
self.scan_path = scan_path
|
||||
self.scan_repository_url = scan_repository_url
|
||||
self.frameworks = frameworks
|
||||
self.exclude_path = exclude_path
|
||||
self.region = "global"
|
||||
self.audited_account = "local-iac"
|
||||
self._session = None
|
||||
self._identity = "prowler"
|
||||
self._auth_method = "No auth"
|
||||
|
||||
if scan_repository_url:
|
||||
oauth_app_token = oauth_app_token or environ.get("GITHUB_OAUTH_APP_TOKEN")
|
||||
github_username = github_username or environ.get("GITHUB_USERNAME")
|
||||
personal_access_token = personal_access_token or environ.get(
|
||||
"GITHUB_PERSONAL_ACCESS_TOKEN"
|
||||
)
|
||||
|
||||
if oauth_app_token:
|
||||
self.oauth_app_token = oauth_app_token
|
||||
self.github_username = None
|
||||
self.personal_access_token = None
|
||||
self._auth_method = "OAuth App Token"
|
||||
logger.info("Using OAuth App Token for GitHub authentication")
|
||||
elif github_username and personal_access_token:
|
||||
self.github_username = github_username
|
||||
self.personal_access_token = personal_access_token
|
||||
self.oauth_app_token = None
|
||||
self._auth_method = "Personal Access Token"
|
||||
logger.info(
|
||||
"Using GitHub username and personal access token for authentication"
|
||||
)
|
||||
else:
|
||||
self.github_username = None
|
||||
self.personal_access_token = None
|
||||
self.oauth_app_token = None
|
||||
logger.debug(
|
||||
"No GitHub authentication method provided; proceeding without authentication."
|
||||
)
|
||||
|
||||
# Audit Config
|
||||
if config_content:
|
||||
@@ -97,6 +137,10 @@ class IacProvider(Provider):
|
||||
|
||||
Provider.set_global_provider(self)
|
||||
|
||||
@property
|
||||
def auth_method(self):
|
||||
return self._auth_method
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
return self._type
|
||||
@@ -183,8 +227,72 @@ class IacProvider(Provider):
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
def _clone_repository(
|
||||
self,
|
||||
repository_url: str,
|
||||
github_username: str = None,
|
||||
personal_access_token: str = None,
|
||||
oauth_app_token: str = None,
|
||||
) -> str:
|
||||
"""
|
||||
Clone a git repository to a temporary directory, supporting GitHub authentication.
|
||||
"""
|
||||
try:
|
||||
if github_username and personal_access_token:
|
||||
repository_url = repository_url.replace(
|
||||
"https://github.com/",
|
||||
f"https://{github_username}:{personal_access_token}@github.com/",
|
||||
)
|
||||
elif oauth_app_token:
|
||||
repository_url = repository_url.replace(
|
||||
"https://github.com/",
|
||||
f"https://oauth2:{oauth_app_token}@github.com/",
|
||||
)
|
||||
|
||||
temporary_directory = tempfile.mkdtemp()
|
||||
logger.info(
|
||||
f"Cloning repository {repository_url} into {temporary_directory}..."
|
||||
)
|
||||
with alive_bar(
|
||||
ctrl_c=False,
|
||||
bar="blocks",
|
||||
spinner="classic",
|
||||
stats=False,
|
||||
enrich_print=False,
|
||||
) as bar:
|
||||
try:
|
||||
bar.title = f"-> Cloning {repository_url}..."
|
||||
porcelain.clone(repository_url, temporary_directory, depth=1)
|
||||
bar.title = "-> Repository cloned successfully!"
|
||||
except Exception as clone_error:
|
||||
bar.title = "-> Cloning failed!"
|
||||
raise clone_error
|
||||
return temporary_directory
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}"
|
||||
)
|
||||
|
||||
def run(self) -> List[CheckReportIAC]:
|
||||
return self.run_scan(self.scan_path, self.frameworks, self.exclude_path)
|
||||
temp_dir = None
|
||||
if self.scan_repository_url:
|
||||
scan_dir = temp_dir = self._clone_repository(
|
||||
self.scan_repository_url,
|
||||
getattr(self, "github_username", None),
|
||||
getattr(self, "personal_access_token", None),
|
||||
getattr(self, "oauth_app_token", None),
|
||||
)
|
||||
else:
|
||||
scan_dir = self.scan_path
|
||||
|
||||
try:
|
||||
reports = self.run_scan(scan_dir, self.frameworks, self.exclude_path)
|
||||
finally:
|
||||
if temp_dir:
|
||||
logger.info(f"Removing temporary directory {temp_dir}...")
|
||||
shutil.rmtree(temp_dir)
|
||||
|
||||
return reports
|
||||
|
||||
def run_scan(
|
||||
self, directory: str, frameworks: list[str], exclude_path: list[str]
|
||||
@@ -249,15 +357,32 @@ class IacProvider(Provider):
|
||||
sys.exit(1)
|
||||
|
||||
def print_credentials(self):
|
||||
report_lines = [
|
||||
f"Directory: {Fore.YELLOW}{self.scan_path}{Style.RESET_ALL}",
|
||||
]
|
||||
if self.scan_repository_url:
|
||||
report_title = (
|
||||
f"{Style.BRIGHT}Scanning remote IaC repository:{Style.RESET_ALL}"
|
||||
)
|
||||
report_lines = [
|
||||
f"Repository: {Fore.YELLOW}{self.scan_repository_url}{Style.RESET_ALL}",
|
||||
]
|
||||
else:
|
||||
report_title = (
|
||||
f"{Style.BRIGHT}Scanning local IaC directory:{Style.RESET_ALL}"
|
||||
)
|
||||
report_lines = [
|
||||
f"Directory: {Fore.YELLOW}{self.scan_path}{Style.RESET_ALL}",
|
||||
]
|
||||
|
||||
if self.exclude_path:
|
||||
report_lines.append(
|
||||
f"Excluded paths: {Fore.YELLOW}{', '.join(self.exclude_path)}{Style.RESET_ALL}"
|
||||
)
|
||||
|
||||
report_lines.append(
|
||||
f"Frameworks: {Fore.YELLOW}{', '.join(self.frameworks)}{Style.RESET_ALL}"
|
||||
)
|
||||
report_title = f"{Style.BRIGHT}Scanning local IaC directory:{Style.RESET_ALL}"
|
||||
|
||||
report_lines.append(
|
||||
f"Authentication method: {Fore.YELLOW}{self.auth_method}{Style.RESET_ALL}"
|
||||
)
|
||||
|
||||
print_boxes(report_lines, report_title)
|
||||
|
||||
@@ -44,8 +44,17 @@ def init_parser(self):
|
||||
"-P",
|
||||
dest="scan_path",
|
||||
default=".",
|
||||
help="Path to the folder containing your infrastructure-as-code files. Default: current directory",
|
||||
help="Path to the folder containing your infrastructure-as-code files. Default: current directory. Mutually exclusive with --scan-repository-url.",
|
||||
)
|
||||
|
||||
iac_scan_subparser.add_argument(
|
||||
"--scan-repository-url",
|
||||
"-R",
|
||||
dest="scan_repository_url",
|
||||
default=None,
|
||||
help="URL to the repository containing your infrastructure-as-code files. Mutually exclusive with --scan-path.",
|
||||
)
|
||||
|
||||
iac_scan_subparser.add_argument(
|
||||
"--frameworks",
|
||||
"-f",
|
||||
@@ -63,3 +72,38 @@ def init_parser(self):
|
||||
default=[],
|
||||
help="Comma-separated list of paths to exclude from the scan. Default: none",
|
||||
)
|
||||
|
||||
iac_scan_subparser.add_argument(
|
||||
"--github-username",
|
||||
dest="github_username",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help="GitHub username for authenticated repository cloning (used with --personal-access-token). If not provided, will use GITHUB_USERNAME env var.",
|
||||
)
|
||||
iac_scan_subparser.add_argument(
|
||||
"--personal-access-token",
|
||||
dest="personal_access_token",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help="GitHub personal access token for authenticated repository cloning (used with --github-username). If not provided, will use GITHUB_PERSONAL_ACCESS_TOKEN env var.",
|
||||
)
|
||||
iac_scan_subparser.add_argument(
|
||||
"--oauth-app-token",
|
||||
dest="oauth_app_token",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help="GitHub OAuth app token for authenticated repository cloning. If not provided, will use GITHUB_OAUTH_APP_TOKEN env var.",
|
||||
)
|
||||
|
||||
|
||||
def validate_arguments(arguments):
|
||||
scan_path = getattr(arguments, "scan_path", None)
|
||||
scan_repository_url = getattr(arguments, "scan_repository_url", None)
|
||||
if scan_path and scan_repository_url:
|
||||
# If scan_path is set to default ("."), allow scan_repository_url
|
||||
if scan_path != ".":
|
||||
return (
|
||||
False,
|
||||
"--scan-path (-P) and --scan-repository-url (-R) are mutually exclusive. Please specify only one.",
|
||||
)
|
||||
return (True, "")
|
||||
|
||||
@@ -0,0 +1,205 @@
|
||||
# MongoDB Atlas Provider for Prowler
|
||||
|
||||
The MongoDB Atlas provider enables Prowler to perform security assessments of MongoDB Atlas cloud database deployments.
|
||||
|
||||
## Features
|
||||
|
||||
- **Authentication**: Supports MongoDB Atlas API key authentication
|
||||
- **Services**: Projects and Clusters services
|
||||
- **Checks**: Network access security and encryption at rest validation
|
||||
- **Pagination**: Handles large numbers of resources efficiently
|
||||
- **Error Handling**: Comprehensive error handling and retry logic
|
||||
|
||||
## Authentication
|
||||
|
||||
The MongoDB Atlas provider uses HTTP Digest Authentication with API key pairs consisting of a public key and private key.
|
||||
|
||||
### Authentication Methods
|
||||
|
||||
1. **Command-line arguments**:
|
||||
```bash
|
||||
prowler mongodbatlas --atlas-public-key <public_key> --atlas-private-key <private_key>
|
||||
```
|
||||
|
||||
2. **Environment variables**:
|
||||
```bash
|
||||
export ATLAS_PUBLIC_KEY=<public_key>
|
||||
export ATLAS_PRIVATE_KEY=<private_key>
|
||||
prowler mongodbatlas
|
||||
```
|
||||
|
||||
### Creating API Keys
|
||||
|
||||
1. Log into MongoDB Atlas
|
||||
2. Navigate to Access Manager
|
||||
3. Select "API Keys" tab
|
||||
4. Click "Create API Key"
|
||||
5. Set permissions (Project permissions recommended)
|
||||
6. Note the public key and private key
|
||||
|
||||
## Configuration Options
|
||||
|
||||
- `--atlas-organization-id`: Filter results to specific organization
|
||||
- `--atlas-project-id`: Filter results to specific project
|
||||
|
||||
## Services
|
||||
|
||||
### Projects Service
|
||||
|
||||
Manages MongoDB Atlas projects (groups) and their configurations:
|
||||
|
||||
- Lists all projects or filters by organization/project ID
|
||||
- Retrieves network access lists
|
||||
- Counts clusters per project
|
||||
- Fetches project settings
|
||||
|
||||
### Clusters Service
|
||||
|
||||
Manages MongoDB Atlas clusters:
|
||||
|
||||
- Lists all clusters across projects
|
||||
- Retrieves cluster configuration details
|
||||
- Checks encryption settings
|
||||
- Validates backup configurations
|
||||
|
||||
## Security Checks
|
||||
|
||||
### Network Access List Security
|
||||
|
||||
**Check**: `projects_network_access_list_exposed_to_internet`
|
||||
|
||||
Ensures that MongoDB Atlas projects don't have network access entries that allow unrestricted access from the internet.
|
||||
|
||||
- **Severity**: High
|
||||
- **Fails if**:
|
||||
- Network access list contains `0.0.0.0/0` or `::/0`
|
||||
- IP addresses like `0.0.0.0` or `::`
|
||||
- No network access entries are configured
|
||||
|
||||
### Encryption at Rest
|
||||
|
||||
**Check**: `clusters_encryption_at_rest_enabled`
|
||||
|
||||
Verifies that MongoDB Atlas clusters have encryption at rest enabled to protect data stored on disk.
|
||||
|
||||
- **Severity**: High
|
||||
- **Fails if**:
|
||||
- Encryption at rest is explicitly disabled (`NONE`)
|
||||
- No encryption provider is configured
|
||||
- Unsupported encryption provider is used
|
||||
- **Passes if**:
|
||||
- Valid encryption provider (AWS, AZURE, GCP)
|
||||
- EBS volume encryption is enabled
|
||||
- Cluster is paused (skipped)
|
||||
|
||||
## Usage Examples
|
||||
|
||||
### Basic Usage
|
||||
|
||||
```bash
|
||||
# Scan all projects and clusters
|
||||
prowler mongodbatlas --atlas-public-key <key> --atlas-private-key <secret>
|
||||
|
||||
# Scan specific organization
|
||||
prowler mongodbatlas --atlas-organization-id <org_id>
|
||||
|
||||
# Scan specific project
|
||||
prowler mongodbatlas --atlas-project-id <project_id>
|
||||
```
|
||||
|
||||
### With Filters
|
||||
|
||||
```bash
|
||||
# Run only network access checks
|
||||
prowler mongodbatlas --checks projects_network_access_list_exposed_to_internet
|
||||
|
||||
# Run only encryption checks
|
||||
prowler mongodbatlas --checks clusters_encryption_at_rest_enabled
|
||||
|
||||
# Run checks for specific service
|
||||
prowler mongodbatlas --services projects
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The provider includes comprehensive error handling:
|
||||
|
||||
- **Rate Limiting**: Automatic retry with exponential backoff
|
||||
- **Authentication Errors**: Clear error messages for invalid credentials
|
||||
- **API Errors**: Detailed error reporting for API failures
|
||||
- **Network Errors**: Retry logic for transient network issues
|
||||
|
||||
## Configuration
|
||||
|
||||
### API Settings
|
||||
|
||||
- **Base URL**: `https://cloud.mongodb.com/api/atlas/v2`
|
||||
- **API Version**: `2025-01-01`
|
||||
- **Default Timeout**: 30 seconds
|
||||
- **Default Page Size**: 100 items
|
||||
- **Max Retries**: 3 attempts
|
||||
|
||||
### Rate Limiting
|
||||
|
||||
The provider respects MongoDB Atlas API rate limits:
|
||||
|
||||
- Automatic retry on 429 (Too Many Requests)
|
||||
- Exponential backoff starting at 1 second
|
||||
- Maximum of 3 retry attempts
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **Authentication Failures**:
|
||||
- Verify API key permissions
|
||||
- Check if API key is enabled
|
||||
- Ensure IP address is in access list
|
||||
|
||||
2. **No Resources Found**:
|
||||
- Check organization/project ID filters
|
||||
- Verify API key has access to resources
|
||||
- Ensure resources exist in MongoDB Atlas
|
||||
|
||||
3. **Rate Limit Errors**:
|
||||
- Reduce concurrent requests
|
||||
- Increase retry delays
|
||||
- Contact MongoDB Atlas support for rate limit increases
|
||||
|
||||
### Debug Mode
|
||||
|
||||
Enable debug logging to troubleshoot issues:
|
||||
|
||||
```bash
|
||||
prowler mongodbatlas --log-level DEBUG
|
||||
```
|
||||
|
||||
## Contributing
|
||||
|
||||
When contributing to the MongoDB Atlas provider:
|
||||
|
||||
1. Follow existing code patterns
|
||||
2. Add comprehensive tests for new checks
|
||||
3. Update documentation for new features
|
||||
4. Ensure error handling is consistent
|
||||
5. Test with various MongoDB Atlas configurations
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- Store API keys securely (use environment variables)
|
||||
- Limit API key permissions to required resources
|
||||
- Regularly rotate API keys
|
||||
- Monitor API key usage in MongoDB Atlas
|
||||
- Use network access lists to restrict API access
|
||||
|
||||
## Support
|
||||
|
||||
For issues specific to the MongoDB Atlas provider, please refer to:
|
||||
|
||||
- MongoDB Atlas API Documentation
|
||||
- Prowler GitHub Issues
|
||||
- MongoDB Atlas Support (for API-related issues)
|
||||
|
||||
## License
|
||||
|
||||
This provider is part of Prowler and follows the same license terms.
|
||||
@@ -0,0 +1,2 @@
|
||||
# Supported encryption providers
|
||||
ATLAS_ENCRYPTION_PROVIDERS = ["AWS", "AZURE", "GCP", "NONE"]
|
||||
@@ -0,0 +1,118 @@
|
||||
from prowler.exceptions.exceptions import ProwlerException
|
||||
|
||||
|
||||
# Exceptions codes from 8000 to 8999 are reserved for MongoDB Atlas exceptions
|
||||
class MongoDBAtlasBaseException(ProwlerException):
|
||||
"""Base class for MongoDB Atlas Errors."""
|
||||
|
||||
MONGODBATLAS_ERROR_CODES = {
|
||||
(8000, "MongoDBAtlasCredentialsError"): {
|
||||
"message": "MongoDB Atlas credentials not found or invalid",
|
||||
"remediation": "Check the MongoDB Atlas API credentials and ensure they are properly set.",
|
||||
},
|
||||
(8001, "MongoDBAtlasAuthenticationError"): {
|
||||
"message": "MongoDB Atlas authentication failed",
|
||||
"remediation": "Check the MongoDB Atlas API credentials and ensure they are valid.",
|
||||
},
|
||||
(8002, "MongoDBAtlasSessionError"): {
|
||||
"message": "MongoDB Atlas session setup failed",
|
||||
"remediation": "Check the session setup and ensure it is properly configured.",
|
||||
},
|
||||
(8003, "MongoDBAtlasIdentityError"): {
|
||||
"message": "MongoDB Atlas identity setup failed",
|
||||
"remediation": "Check credentials and ensure they are properly set up for MongoDB Atlas.",
|
||||
},
|
||||
(8004, "MongoDBAtlasAPIError"): {
|
||||
"message": "MongoDB Atlas API call failed",
|
||||
"remediation": "Check the API request and ensure it is properly formatted.",
|
||||
},
|
||||
(8005, "MongoDBAtlasRateLimitError"): {
|
||||
"message": "MongoDB Atlas API rate limit exceeded",
|
||||
"remediation": "Reduce the number of API requests or wait before making more requests.",
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, code, file=None, original_exception=None, message=None):
|
||||
provider = "MongoDB Atlas"
|
||||
error_info = self.MONGODBATLAS_ERROR_CODES.get((code, self.__class__.__name__))
|
||||
if message:
|
||||
error_info["message"] = message
|
||||
super().__init__(
|
||||
code=code,
|
||||
source=provider,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
error_info=error_info,
|
||||
)
|
||||
|
||||
|
||||
class MongoDBAtlasCredentialsError(MongoDBAtlasBaseException):
|
||||
"""Exception for MongoDB Atlas credentials errors"""
|
||||
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
code=8000,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
message=message,
|
||||
)
|
||||
|
||||
|
||||
class MongoDBAtlasAuthenticationError(MongoDBAtlasBaseException):
|
||||
"""Exception for MongoDB Atlas authentication errors"""
|
||||
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
code=8001,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
message=message,
|
||||
)
|
||||
|
||||
|
||||
class MongoDBAtlasSessionError(MongoDBAtlasBaseException):
|
||||
"""Exception for MongoDB Atlas session setup errors"""
|
||||
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
code=8002,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
message=message,
|
||||
)
|
||||
|
||||
|
||||
class MongoDBAtlasIdentityError(MongoDBAtlasBaseException):
|
||||
"""Exception for MongoDB Atlas identity setup errors"""
|
||||
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
code=8003,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
message=message,
|
||||
)
|
||||
|
||||
|
||||
class MongoDBAtlasAPIError(MongoDBAtlasBaseException):
|
||||
"""Exception for MongoDB Atlas API errors"""
|
||||
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
code=8004,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
message=message,
|
||||
)
|
||||
|
||||
|
||||
class MongoDBAtlasRateLimitError(MongoDBAtlasBaseException):
|
||||
"""Exception for MongoDB Atlas rate limit errors"""
|
||||
|
||||
def __init__(self, file=None, original_exception=None, message=None):
|
||||
super().__init__(
|
||||
code=8005,
|
||||
file=file,
|
||||
original_exception=original_exception,
|
||||
message=message,
|
||||
)
|
||||
@@ -0,0 +1,53 @@
|
||||
def init_parser(self):
|
||||
"""Initialize the MongoDB Atlas Provider CLI parser"""
|
||||
mongodbatlas_parser = self.subparsers.add_parser(
|
||||
"mongodbatlas",
|
||||
parents=[self.common_providers_parser],
|
||||
help="MongoDB Atlas Provider",
|
||||
)
|
||||
|
||||
mongodbatlas_auth_subparser = mongodbatlas_parser.add_argument_group(
|
||||
"Authentication Modes"
|
||||
)
|
||||
|
||||
mongodbatlas_auth_subparser.add_argument(
|
||||
"--atlas-public-key",
|
||||
nargs="?",
|
||||
help="MongoDB Atlas API public key",
|
||||
default=None,
|
||||
metavar="ATLAS_PUBLIC_KEY",
|
||||
)
|
||||
|
||||
mongodbatlas_auth_subparser.add_argument(
|
||||
"--atlas-private-key",
|
||||
nargs="?",
|
||||
help="MongoDB Atlas API private key",
|
||||
default=None,
|
||||
metavar="ATLAS_PRIVATE_KEY",
|
||||
)
|
||||
|
||||
mongodbatlas_filters_subparser = mongodbatlas_parser.add_argument_group(
|
||||
"Optional Filters"
|
||||
)
|
||||
|
||||
mongodbatlas_filters_subparser.add_argument(
|
||||
"--atlas-organization-id",
|
||||
nargs="?",
|
||||
help="MongoDB Atlas Organization ID to filter scans to a specific organization",
|
||||
default=None,
|
||||
metavar="ATLAS_ORGANIZATION_ID",
|
||||
)
|
||||
|
||||
mongodbatlas_filters_subparser.add_argument(
|
||||
"--atlas-project-id",
|
||||
nargs="?",
|
||||
help="MongoDB Atlas Project ID to filter scans to a specific project",
|
||||
default=None,
|
||||
metavar="ATLAS_PROJECT_ID",
|
||||
)
|
||||
|
||||
|
||||
def validate_arguments(arguments):
|
||||
"""Validate MongoDB Atlas provider arguments"""
|
||||
# No specific validation needed for MongoDB Atlas arguments currently
|
||||
return (True, "")
|
||||
@@ -0,0 +1,30 @@
|
||||
from prowler.lib.check.models import CheckReportMongoDBAtlas
|
||||
from prowler.lib.mutelist.mutelist import Mutelist
|
||||
from prowler.lib.outputs.utils import unroll_dict, unroll_tags
|
||||
|
||||
|
||||
class MongoDBAtlasMutelist(Mutelist):
|
||||
"""MongoDB Atlas Mutelist class"""
|
||||
|
||||
def is_finding_muted(
|
||||
self,
|
||||
finding: CheckReportMongoDBAtlas,
|
||||
account_name: str,
|
||||
) -> bool:
|
||||
"""
|
||||
Check if a finding is muted in the MongoDB Atlas mutelist.
|
||||
|
||||
Args:
|
||||
finding: The CheckReportMongoDBAtlas finding
|
||||
account_name: The account/project name
|
||||
|
||||
Returns:
|
||||
bool: True if the finding is muted, False otherwise
|
||||
"""
|
||||
return self.is_muted(
|
||||
account_name,
|
||||
finding.check_metadata.CheckID,
|
||||
"*", # TODO: Study regions in MongoDB Atlas
|
||||
finding.resource_name,
|
||||
unroll_dict(unroll_tags(finding.resource_tags)),
|
||||
)
|
||||
@@ -0,0 +1,172 @@
|
||||
import time
|
||||
from threading import current_thread
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import requests
|
||||
from requests.auth import HTTPDigestAuth
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.mongodbatlas.exceptions.exceptions import (
|
||||
MongoDBAtlasAPIError,
|
||||
MongoDBAtlasRateLimitError,
|
||||
)
|
||||
|
||||
|
||||
class MongoDBAtlasService:
|
||||
"""Base class for MongoDB Atlas services"""
|
||||
|
||||
def __init__(self, service_name: str, provider):
|
||||
self.service_name = service_name
|
||||
self.provider = provider
|
||||
self.session = provider.session
|
||||
self.base_url = provider.session.base_url
|
||||
self.audit_config = provider.audit_config
|
||||
self.auth = HTTPDigestAuth(
|
||||
provider.session.public_key, provider.session.private_key
|
||||
)
|
||||
self.headers = {
|
||||
"Accept": "application/vnd.atlas.2025-01-01+json",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
def _make_request(
|
||||
self,
|
||||
method: str,
|
||||
endpoint: str,
|
||||
params: Optional[Dict] = None,
|
||||
data: Optional[Dict] = None,
|
||||
max_retries: int = 3,
|
||||
retry_delay: int = 1,
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Make HTTP request to MongoDB Atlas API with retry logic
|
||||
|
||||
Args:
|
||||
method: HTTP method (GET, POST, PUT, DELETE)
|
||||
endpoint: API endpoint (without base URL)
|
||||
params: Query parameters
|
||||
data: Request body data
|
||||
max_retries: Maximum number of retries
|
||||
retry_delay: Delay between retries in seconds
|
||||
|
||||
Returns:
|
||||
dict: Response JSON data
|
||||
|
||||
Raises:
|
||||
MongoDBAtlasAPIError: If the API request fails
|
||||
MongoDBAtlasRateLimitError: If rate limit is exceeded
|
||||
"""
|
||||
url = f"{self.base_url}/{endpoint.lstrip('/')}"
|
||||
|
||||
for attempt in range(max_retries + 1):
|
||||
try:
|
||||
response = requests.request(
|
||||
method=method,
|
||||
url=url,
|
||||
auth=self.auth,
|
||||
headers=self.headers,
|
||||
params=params,
|
||||
json=data,
|
||||
timeout=30,
|
||||
)
|
||||
|
||||
if response.status_code == 429:
|
||||
if attempt < max_retries:
|
||||
logger.warning(
|
||||
f"Rate limit exceeded for {url}, retrying in {retry_delay} seconds..."
|
||||
)
|
||||
time.sleep(retry_delay)
|
||||
retry_delay *= 2
|
||||
continue
|
||||
else:
|
||||
raise MongoDBAtlasRateLimitError(
|
||||
message=f"Rate limit exceeded for {url} after {max_retries} retries"
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
if attempt < max_retries:
|
||||
logger.warning(
|
||||
f"Request failed for {url}, retrying in {retry_delay} seconds: {str(e)}"
|
||||
)
|
||||
time.sleep(retry_delay)
|
||||
retry_delay *= 2
|
||||
continue
|
||||
else:
|
||||
logger.error(
|
||||
f"Request failed for {url} after {max_retries} retries: {str(e)}"
|
||||
)
|
||||
raise MongoDBAtlasAPIError(
|
||||
original_exception=e,
|
||||
message=f"Failed to make request to {url}: {str(e)}",
|
||||
)
|
||||
|
||||
def _paginate_request(
|
||||
self,
|
||||
endpoint: str,
|
||||
params: Optional[Dict] = None,
|
||||
page_size: int = 100,
|
||||
max_pages: Optional[int] = None,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Make paginated requests to MongoDB Atlas API
|
||||
|
||||
Args:
|
||||
endpoint: API endpoint
|
||||
params: Query parameters
|
||||
page_size: Number of items per page
|
||||
max_pages: Maximum number of pages to fetch
|
||||
|
||||
Returns:
|
||||
list: List of all items from all pages
|
||||
"""
|
||||
if params is None:
|
||||
params = {}
|
||||
|
||||
params.update({"pageNum": 1, "itemsPerPage": page_size})
|
||||
|
||||
all_items = []
|
||||
page_num = 1
|
||||
|
||||
while True:
|
||||
params["pageNum"] = page_num
|
||||
|
||||
try:
|
||||
response = self._make_request("GET", endpoint, params=params)
|
||||
|
||||
if "results" in response:
|
||||
items = response["results"]
|
||||
all_items.extend(items)
|
||||
|
||||
total_count = response.get("totalCount", 0)
|
||||
|
||||
if len(items) < page_size or len(all_items) >= total_count:
|
||||
break
|
||||
|
||||
if max_pages and page_num >= max_pages:
|
||||
logger.warning(
|
||||
f"Reached maximum pages limit ({max_pages}) for {endpoint}"
|
||||
)
|
||||
break
|
||||
|
||||
page_num += 1
|
||||
else:
|
||||
break
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error during pagination for {endpoint} at page {page_num}: {str(e)}"
|
||||
)
|
||||
break
|
||||
|
||||
logger.info(
|
||||
f"Retrieved {len(all_items)} items from {endpoint} across {page_num} pages"
|
||||
)
|
||||
|
||||
return all_items
|
||||
|
||||
def _get_thread_info(self) -> str:
|
||||
"""Get thread information for logging"""
|
||||
return f"[{current_thread().name}]"
|
||||
@@ -0,0 +1,76 @@
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
from prowler.config.config import output_file_timestamp
|
||||
from prowler.providers.common.models import ProviderOutputOptions
|
||||
|
||||
|
||||
class MongoDBAtlasSession(BaseModel):
|
||||
"""MongoDB Atlas session model"""
|
||||
|
||||
public_key: str
|
||||
private_key: str
|
||||
base_url: str = "https://cloud.mongodb.com/api/atlas/v2"
|
||||
|
||||
|
||||
class MongoDBAtlasIdentityInfo(BaseModel):
|
||||
"""MongoDB Atlas identity information model"""
|
||||
|
||||
user_id: str
|
||||
username: str
|
||||
roles: Optional[List[str]] = []
|
||||
|
||||
|
||||
class MongoDBAtlasOutputOptions(ProviderOutputOptions):
|
||||
"""MongoDB Atlas output options"""
|
||||
|
||||
def __init__(self, arguments, bulk_checks_metadata, identity):
|
||||
super().__init__(arguments, bulk_checks_metadata)
|
||||
|
||||
if (
|
||||
not hasattr(arguments, "output_filename")
|
||||
or arguments.output_filename is None
|
||||
):
|
||||
self.output_filename = (
|
||||
f"prowler-output-{identity.username}-{output_file_timestamp}"
|
||||
)
|
||||
else:
|
||||
self.output_filename = arguments.output_filename
|
||||
|
||||
|
||||
class MongoDBAtlasProject(BaseModel):
|
||||
"""MongoDB Atlas project model"""
|
||||
|
||||
id: str
|
||||
name: str
|
||||
org_id: str
|
||||
created: str
|
||||
cluster_count: int
|
||||
project_settings: Optional[dict] = {}
|
||||
|
||||
|
||||
class MongoDBAtlasCluster(BaseModel):
    """MongoDB Atlas cluster model.

    Lightweight representation of a cluster as returned by the Atlas API.
    """

    # Cluster identifier
    id: str
    # Cluster name (unique within a project)
    name: str
    # Identifier of the owning project (group)
    project_id: str
    # MongoDB server version running on the cluster
    mongo_db_version: str
    # Cluster topology type (e.g. replica set vs sharded) as reported by the API
    cluster_type: str
    # Lifecycle state name as reported by the API
    state_name: str
    # Encryption-at-rest provider; None when the API reports no provider
    encryption_at_rest_provider: Optional[str] = None
    # Whether cloud backups are enabled
    backup_enabled: bool = False
    # Raw BI Connector configuration payload
    bi_connector: Optional[dict] = {}
    # Raw provider settings payload (cloud-provider-specific options)
    provider_settings: Optional[dict] = {}
    # Raw replication specs payloads
    replication_specs: Optional[List[dict]] = []
|
||||
|
||||
|
||||
class MongoDBAtlasNetworkAccessEntry(BaseModel):
    """MongoDB Atlas network access entry model.

    One entry of a project's IP access list; exactly one of the address
    fields is expected to be set per entry — TODO confirm against the
    Atlas access-list API.
    """

    # CIDR block granted access, if the entry is CIDR-based
    cidr_block: Optional[str] = None
    # Single IP address granted access, if the entry is address-based
    ip_address: Optional[str] = None
    # AWS security group reference, if the entry is SG-based
    aws_security_group: Optional[str] = None
    # Free-form comment attached to the entry
    comment: Optional[str] = None
    # Expiry timestamp after which the entry is removed (string form)
    delete_after_date: Optional[str] = None
|
||||
@@ -0,0 +1,319 @@
|
||||
import os
|
||||
from os import environ
|
||||
|
||||
from colorama import Fore, Style
|
||||
|
||||
from prowler.config.config import (
|
||||
default_config_file_path,
|
||||
get_default_mute_file_path,
|
||||
load_and_validate_config_file,
|
||||
)
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.mutelist.mutelist import Mutelist
|
||||
from prowler.lib.utils.utils import print_boxes
|
||||
from prowler.providers.common.models import Audit_Metadata, Connection
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.mongodbatlas.exceptions.exceptions import (
|
||||
MongoDBAtlasAuthenticationError,
|
||||
MongoDBAtlasCredentialsError,
|
||||
MongoDBAtlasIdentityError,
|
||||
MongoDBAtlasSessionError,
|
||||
)
|
||||
from prowler.providers.mongodbatlas.lib.mutelist.mutelist import MongoDBAtlasMutelist
|
||||
from prowler.providers.mongodbatlas.models import (
|
||||
MongoDBAtlasIdentityInfo,
|
||||
MongoDBAtlasSession,
|
||||
)
|
||||
|
||||
|
||||
class MongodbatlasProvider(Provider):
    """
    MongoDB Atlas Provider class

    This class is responsible for setting up the MongoDB Atlas provider,
    including the session, identity, audit configuration, and mutelist.
    """

    _type: str = "mongodbatlas"
    _session: MongoDBAtlasSession
    _identity: MongoDBAtlasIdentityInfo
    _audit_config: dict
    _mutelist: Mutelist
    audit_metadata: Audit_Metadata

    def __init__(
        self,
        # Authentication credentials
        atlas_public_key: str = "",
        atlas_private_key: str = "",
        # Provider configuration
        config_path: str = None,
        config_content: dict = None,
        fixer_config: dict = {},
        mutelist_path: str = None,
        mutelist_content: dict = None,
        # Optional filters
        atlas_organization_id: str = None,
        atlas_project_id: str = None,
    ):
        """
        MongoDB Atlas Provider constructor

        Args:
            atlas_public_key: MongoDB Atlas API public key
            atlas_private_key: MongoDB Atlas API private key
            config_path: Path to the audit configuration file
            config_content: Audit configuration content
            fixer_config: Fixer configuration content
            mutelist_path: Path to the mutelist file
            mutelist_content: Mutelist content
            atlas_organization_id: Organization ID to filter
            atlas_project_id: Project ID to filter
        """
        logger.info("Instantiating MongoDB Atlas Provider...")

        # Session setup validates that a credential pair exists (raising
        # MongoDBAtlasCredentialsError otherwise) before anything else runs.
        self._session = MongodbatlasProvider.setup_session(
            atlas_public_key,
            atlas_private_key,
        )

        # Identity setup performs a live API call, so constructing the
        # provider also verifies that the credentials actually work.
        self._identity = MongodbatlasProvider.setup_identity(self._session)

        # Store filter options
        self._organization_id = atlas_organization_id
        self._project_id = atlas_project_id

        # Audit Config: explicit content wins over a file path; fall back
        # to the default config file path when neither is supplied.
        if config_content:
            self._audit_config = config_content
        else:
            if not config_path:
                config_path = default_config_file_path
            self._audit_config = load_and_validate_config_file(self._type, config_path)

        # Fixer Config
        self._fixer_config = fixer_config

        # Mutelist: same precedence as the audit config
        # (content > explicit path > default path).
        if mutelist_content:
            self._mutelist = MongoDBAtlasMutelist(
                mutelist_content=mutelist_content,
            )
        else:
            if not mutelist_path:
                mutelist_path = get_default_mute_file_path(self.type)
            self._mutelist = MongoDBAtlasMutelist(
                mutelist_path=mutelist_path,
            )

        # Register this instance as the process-wide provider singleton.
        Provider.set_global_provider(self)

    @property
    def type(self):
        """Returns the type of the MongoDB Atlas provider"""
        return self._type

    @property
    def session(self):
        """Returns the session object for the MongoDB Atlas provider"""
        return self._session

    @property
    def identity(self):
        """Returns the identity information for the MongoDB Atlas provider"""
        return self._identity

    @property
    def audit_config(self):
        """Returns the audit configuration for the MongoDB Atlas provider"""
        return self._audit_config

    @property
    def fixer_config(self):
        """Returns the fixer configuration for the MongoDB Atlas provider"""
        return self._fixer_config

    @property
    def mutelist(self) -> MongoDBAtlasMutelist:
        """Returns the mutelist for the MongoDB Atlas provider"""
        return self._mutelist

    @property
    def organization_id(self):
        """Returns the organization ID filter"""
        return self._organization_id

    @property
    def project_id(self):
        """Returns the project ID filter"""
        return self._project_id

    @staticmethod
    def setup_session(
        atlas_public_key: str = None,
        atlas_private_key: str = None,
    ) -> MongoDBAtlasSession:
        """
        Setup MongoDB Atlas session with authentication credentials

        Args:
            atlas_public_key: MongoDB Atlas API public key
            atlas_private_key: MongoDB Atlas API private key

        Returns:
            MongoDBAtlasSession: Authenticated session for API requests

        Raises:
            MongoDBAtlasCredentialsError: If credentials are missing
            MongoDBAtlasSessionError: If session setup fails
        """
        try:
            public_key = atlas_public_key
            private_key = atlas_private_key

            # Check environment variables if not provided
            if not public_key:
                public_key = environ.get("ATLAS_PUBLIC_KEY", "")
            if not private_key:
                private_key = environ.get("ATLAS_PRIVATE_KEY", "")

            if not public_key or not private_key:
                raise MongoDBAtlasCredentialsError(
                    file=os.path.basename(__file__),
                    message="MongoDB Atlas API credentials not found. Please provide --atlas-public-key and --atlas-private-key or set ATLAS_PUBLIC_KEY and ATLAS_PRIVATE_KEY environment variables.",
                )

            session = MongoDBAtlasSession(
                public_key=public_key,
                private_key=private_key,
            )

            return session

        # Re-raise the credentials error untouched so callers can tell
        # "missing credentials" apart from generic session failures.
        except MongoDBAtlasCredentialsError:
            raise
        except Exception as error:
            logger.critical(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
            )
            raise MongoDBAtlasSessionError(
                original_exception=error,
            )

    @staticmethod
    def setup_identity(session: MongoDBAtlasSession) -> MongoDBAtlasIdentityInfo:
        """
        Setup MongoDB Atlas identity information

        Args:
            session: MongoDB Atlas session

        Returns:
            MongoDBAtlasIdentityInfo: Identity information

        Raises:
            MongoDBAtlasAuthenticationError: If authentication fails
            MongoDBAtlasIdentityError: If identity setup fails
        """
        try:
            # NOTE(review): function-scope import — presumably to keep the
            # requests dependency off the module import path; confirm.
            import requests
            from requests.auth import HTTPDigestAuth

            # Test authentication by getting organizations
            auth = HTTPDigestAuth(session.public_key, session.private_key)
            headers = {
                "Accept": "application/vnd.atlas.2023-01-01+json",
                "Content-Type": "application/json",
            }

            response = requests.get(
                f"{session.base_url}/orgs",
                auth=auth,
                headers=headers,
                timeout=30,
            )

            # 401 is mapped to a dedicated authentication error instead of
            # the generic HTTPError that raise_for_status would produce.
            if response.status_code == 401:
                raise MongoDBAtlasAuthenticationError(
                    file=os.path.basename(__file__),
                    message="MongoDB Atlas authentication failed. Please check your API credentials.",
                )

            response.raise_for_status()
            # Body is decoded only to validate it parses; the value is unused.
            response.json()

            # Since we can't get user profile from API, we'll use the API key identifier as user info
            # The organizations response confirms the API key works
            identity = MongoDBAtlasIdentityInfo(
                user_id=session.public_key,  # Use public key as identifier
                username=f"api-key-{session.public_key[:8]}",  # Create a username from public key
                roles=["API_KEY"],  # Indicate this is an API key authentication
            )

            return identity

        except MongoDBAtlasAuthenticationError:
            raise
        except Exception as error:
            logger.critical(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
            )
            raise MongoDBAtlasIdentityError(
                original_exception=error,
            )

    def print_credentials(self):
        """Print the MongoDB Atlas credentials"""
        report_lines = [
            f"MongoDB Atlas User ID: {Fore.YELLOW}{self.identity.user_id}{Style.RESET_ALL}",
        ]

        # Filters are only shown when set, keeping the box minimal.
        if self.organization_id:
            report_lines.append(
                f"Organization ID Filter: {Fore.YELLOW}{self.organization_id}{Style.RESET_ALL}"
            )

        if self.project_id:
            report_lines.append(
                f"Project ID Filter: {Fore.YELLOW}{self.project_id}{Style.RESET_ALL}"
            )

        report_title = (
            f"{Style.BRIGHT}Using the MongoDB Atlas credentials below:{Style.RESET_ALL}"
        )
        print_boxes(report_lines, report_title)

    @staticmethod
    def test_connection(
        atlas_public_key: str = "",
        atlas_private_key: str = "",
        raise_on_exception: bool = True,
    ) -> Connection:
        """
        Test connection to MongoDB Atlas

        Args:
            atlas_public_key: MongoDB Atlas API public key
            atlas_private_key: MongoDB Atlas API private key
            raise_on_exception: Whether to raise exceptions

        Returns:
            Connection: Connection status
        """
        try:
            session = MongodbatlasProvider.setup_session(
                atlas_public_key=atlas_public_key,
                atlas_private_key=atlas_private_key,
            )

            # Identity setup performs the live API call that proves the
            # credentials work; its return value is not needed here.
            MongodbatlasProvider.setup_identity(session)

            return Connection(is_connected=True)

        except Exception as error:
            logger.critical(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            if raise_on_exception:
                raise error
            return Connection(error=error)
|
||||
+34
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "clusters_authentication_enabled",
|
||||
"CheckTitle": "Ensure MongoDB Atlas clusters have authentication enabled",
|
||||
"CheckType": [
|
||||
"Authentication"
|
||||
],
|
||||
"ServiceName": "clusters",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "arn:mongodbatlas:cluster:{project_id}:{cluster_name}",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Cluster",
|
||||
"Description": "Ensure MongoDB Atlas clusters have authentication enabled to prevent unauthorized access",
|
||||
"Risk": "Without authentication enabled, MongoDB Atlas clusters may be vulnerable to unauthorized access, potentially exposing sensitive data or allowing malicious actions",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.mongodb.com/docs/atlas/security/config-db-auth/",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable authentication for MongoDB Atlas clusters by setting authEnabled to true in the cluster configuration.",
|
||||
"Url": "https://www.mongodb.com/docs/atlas/security/config-db-auth/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"authentication"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that MongoDB Atlas clusters have authentication enabled (authEnabled=true) to prevent unauthorized access to the database."
|
||||
}
|
||||
+45
@@ -0,0 +1,45 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.services.clusters.clusters_client import (
|
||||
clusters_client,
|
||||
)
|
||||
|
||||
|
||||
class clusters_authentication_enabled(Check):
    """Check if MongoDB Atlas clusters have authentication enabled

    This class verifies that MongoDB Atlas clusters have authentication
    enabled to prevent unauthorized access to the database.
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas cluster authentication enabled check

        Iterates over all clusters and checks if they have authentication
        enabled (authEnabled=true).

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each cluster
        """
        reports = []

        for cluster in clusters_client.clusters.values():
            finding = CheckReportMongoDBAtlas(
                metadata=self.metadata(), resource=cluster
            )
            subject = f"Cluster {cluster.name} in project {cluster.project_name} "

            # One finding per cluster: PASS iff authEnabled is set.
            if cluster.auth_enabled:
                finding.status = "PASS"
                finding.status_extended = subject + "has authentication enabled."
            else:
                finding.status = "FAIL"
                finding.status_extended = (
                    subject + "does not have authentication enabled."
                )

            reports.append(finding)

        return reports
|
||||
+34
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "clusters_backup_enabled",
|
||||
"CheckTitle": "Ensure MongoDB Atlas clusters have backup enabled",
|
||||
"CheckType": [
|
||||
"Backup"
|
||||
],
|
||||
"ServiceName": "clusters",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "arn:mongodbatlas:cluster:{project_id}:{cluster_name}",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Cluster",
|
||||
"Description": "Ensure MongoDB Atlas clusters have backup enabled to protect against data loss",
|
||||
"Risk": "Without backup enabled, MongoDB Atlas clusters are vulnerable to data loss in case of failures, corruption, or accidental deletion",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable backup for MongoDB Atlas clusters by setting backupEnabled to true in the cluster configuration.",
|
||||
"Url": "https://www.mongodb.com/docs/atlas/backup-restore-cluster/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"backup"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that MongoDB Atlas clusters have backup enabled (backupEnabled=true) to ensure data protection and recovery capabilities."
|
||||
}
|
||||
+45
@@ -0,0 +1,45 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.services.clusters.clusters_client import (
|
||||
clusters_client,
|
||||
)
|
||||
|
||||
|
||||
class clusters_backup_enabled(Check):
    """Check if MongoDB Atlas clusters have backup enabled

    This class verifies that MongoDB Atlas clusters have backup enabled
    to protect against data loss.
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas cluster backup enabled check

        Iterates over all clusters and checks if they have backup
        enabled (backupEnabled=true).

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each cluster
        """
        reports = []

        for cluster in clusters_client.clusters.values():
            finding = CheckReportMongoDBAtlas(
                metadata=self.metadata(), resource=cluster
            )
            subject = f"Cluster {cluster.name} in project {cluster.project_name} "

            # One finding per cluster: PASS iff backups are enabled.
            if cluster.backup_enabled:
                finding.status = "PASS"
                finding.status_extended = subject + "has backup enabled."
            else:
                finding.status = "FAIL"
                finding.status_extended = subject + "does not have backup enabled."

            reports.append(finding)

        return reports
|
||||
@@ -0,0 +1,4 @@
|
||||
from prowler.providers.common.provider import Provider
from prowler.providers.mongodbatlas.services.clusters.clusters_service import Clusters

# Module-level singleton: instantiating Clusters here runs cluster discovery
# against the globally registered provider the first time this module is
# imported, so importing this module has network side effects.
clusters_client = Clusters(Provider.get_global_provider())
|
||||
+32
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "clusters_encryption_at_rest_enabled",
|
||||
"CheckTitle": "Ensure MongoDB Atlas clusters have encryption at rest enabled",
|
||||
"CheckType": [],
|
||||
"ServiceName": "clusters",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "mongodbatlas:cluster-id:cluster-name",
|
||||
"Severity": "high",
|
||||
"ResourceType": "MongoDBAtlasCluster",
|
||||
"Description": "Ensure that MongoDB Atlas clusters have encryption at rest enabled to protect data stored on disk. Encryption at rest provides an additional layer of security by encrypting data before it's written to storage, protecting against unauthorized access to the underlying storage media.",
|
||||
"Risk": "If encryption at rest is not enabled on MongoDB Atlas clusters, sensitive data stored in the database is vulnerable to unauthorized access if the underlying storage is compromised. This could lead to data breaches, compliance violations, and exposure of sensitive information.",
|
||||
"RelatedUrl": "https://www.mongodb.com/docs/atlas/security-kms-encryption/",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable encryption at rest for your MongoDB Atlas clusters. This can be configured when creating a new cluster or by modifying an existing cluster's settings. Choose an appropriate encryption provider (AWS KMS, Azure Key Vault, or Google Cloud KMS) based on your cloud provider and security requirements.",
|
||||
"Url": "https://www.mongodb.com/docs/atlas/security-kms-encryption/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that MongoDB Atlas clusters have encryption at rest enabled through either the MongoDB Atlas encryption provider or cloud provider-specific encryption (such as AWS EBS encryption). Paused clusters are skipped as they are not actively serving data."
|
||||
}
|
||||
+71
@@ -0,0 +1,71 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.config import ATLAS_ENCRYPTION_PROVIDERS
|
||||
from prowler.providers.mongodbatlas.services.clusters.clusters_client import (
|
||||
clusters_client,
|
||||
)
|
||||
|
||||
|
||||
class clusters_encryption_at_rest_enabled(Check):
    """Check if MongoDB Atlas clusters have encryption at rest enabled

    This class verifies that MongoDB Atlas clusters have encryption at rest
    enabled to protect data stored on disk.
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas cluster encryption at rest check

        Iterates over all clusters and checks if they have encryption at rest
        enabled with a supported encryption provider.

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each cluster
        """
        reports = []

        for cluster in clusters_client.clusters.values():
            finding = CheckReportMongoDBAtlas(
                metadata=self.metadata(), resource=cluster
            )
            subject = f"Cluster {cluster.name} in project {cluster.project_name} "
            provider_name = cluster.encryption_at_rest_provider

            if provider_name:
                # An explicit provider is set: it must be one we recognize,
                # and "NONE" means encryption was deliberately turned off.
                if provider_name not in ATLAS_ENCRYPTION_PROVIDERS:
                    status = "FAIL"
                    detail = (
                        f"has an unsupported encryption provider: {provider_name}."
                    )
                elif provider_name == "NONE":
                    status = "FAIL"
                    detail = "has encryption at rest explicitly disabled."
                else:
                    status = "PASS"
                    detail = f"has encryption at rest enabled with provider: {provider_name}."
            else:
                # Check provider settings for EBS encryption (AWS specific)
                settings = cluster.provider_settings or {}
                if settings.get("encryptEBSVolume", False):
                    status = "PASS"
                    detail = "has EBS volume encryption enabled."
                else:
                    status = "FAIL"
                    detail = "does not have encryption at rest enabled."

            finding.status = status
            finding.status_extended = subject + detail
            reports.append(finding)

        return reports
|
||||
@@ -0,0 +1,202 @@
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.mongodbatlas.lib.service.service import MongoDBAtlasService
|
||||
|
||||
|
||||
class Cluster(BaseModel):
    """MongoDB Atlas Cluster model.

    Normalized cluster representation built by the Clusters service from the
    raw Atlas API payload, enriched with the owning project's id and name.
    """

    # Cluster identifier
    id: str
    # Cluster name (unique within a project)
    name: str
    # Identifier of the owning project (group)
    project_id: str
    # Name of the owning project, denormalized for report messages
    project_name: str
    # MongoDB server version running on the cluster
    mongo_db_version: str
    # Cluster topology type as reported by the API
    cluster_type: str
    # Lifecycle state name as reported by the API
    state_name: str
    # Encryption-at-rest provider; derived in the service, may fall back to
    # the cloud provider name when EBS volume encryption is enabled
    encryption_at_rest_provider: Optional[str] = None
    # True when backups or point-in-time restore are enabled (see service)
    backup_enabled: bool = False
    # Whether database authentication is enabled (authEnabled)
    auth_enabled: bool = False
    # Whether TLS is enabled for client connections (sslEnabled)
    ssl_enabled: bool = False
    # Raw provider settings payload (cloud-provider-specific options)
    provider_settings: Optional[dict] = {}
    # Raw replication specs payloads
    replication_specs: Optional[List[dict]] = []
    # Provisioned disk size in GB, if reported
    disk_size_gb: Optional[float] = None
    # Number of shards, if reported
    num_shards: Optional[int] = None
    # Replication factor, if reported
    replication_factor: Optional[int] = None
    # Raw auto-scaling configuration payload
    auto_scaling: Optional[dict] = {}
    # MongoDB major version, if reported
    mongo_db_major_version: Optional[str] = None
    # True when the cluster is paused
    paused: bool = False
    # True when point-in-time restore is enabled
    pit_enabled: bool = False
    # Raw connection strings payload
    connection_strings: Optional[dict] = {}
    # Resource tags attached to the cluster
    tags: Optional[List[dict]] = []
|
||||
|
||||
|
||||
class Clusters(MongoDBAtlasService):
    """MongoDB Atlas Clusters service.

    Discovers every cluster across the projects already discovered by the
    projects service and exposes them as a dict keyed by
    "{project_id}:{cluster_name}".
    """

    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        # Discovery runs at construction time (one API call chain per project).
        self.clusters = self._list_clusters()

    def _list_clusters(self) -> Dict[str, Cluster]:
        """
        List all MongoDB Atlas clusters across all projects

        Returns:
            Dict[str, Cluster]: Dictionary of clusters indexed by cluster name
        """
        logger.info("Clusters - Listing MongoDB Atlas clusters...")
        clusters = {}

        try:
            # Function-scope import — presumably to avoid a circular import
            # between the clusters and projects service modules; confirm.
            from prowler.providers.mongodbatlas.services.projects.projects_client import (
                projects_client,
            )

            for project in projects_client.projects.values():
                project_clusters = self._get_project_clusters(project.id, project.name)
                clusters.update(project_clusters)

        # Discovery is best-effort: failures are logged and an empty/partial
        # dict is returned rather than aborting the whole scan.
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        logger.info(f"Found {len(clusters)} MongoDB Atlas clusters")
        return clusters

    def _get_project_clusters(
        self, project_id: str, project_name: str
    ) -> Dict[str, Cluster]:
        """
        Get all clusters for a specific project

        Args:
            project_id: Project ID
            project_name: Project name

        Returns:
            Dict[str, Cluster]: Dictionary of clusters in the project
        """
        project_clusters = {}

        try:
            # _paginate_request walks every page of the clusters endpoint.
            clusters_data = self._paginate_request(f"/groups/{project_id}/clusters")

            for cluster_data in clusters_data:
                cluster = self._process_cluster(cluster_data, project_id, project_name)
                # Use a unique key combining project_id and cluster_name
                cluster_key = f"{project_id}:{cluster.name}"
                project_clusters[cluster_key] = cluster

        # Per-project failures are logged and skipped so one broken project
        # does not hide clusters from the others.
        except Exception as error:
            logger.error(f"Error getting clusters for project {project_id}: {error}")

        return project_clusters

    def _process_cluster(
        self, cluster_data: dict, project_id: str, project_name: str
    ) -> Cluster:
        """
        Process a single cluster and fetch additional details

        Args:
            cluster_data: Raw cluster data from API
            project_id: Project ID
            project_name: Project name

        Returns:
            Cluster: Processed cluster object
        """
        cluster_name = cluster_data.get("name", "")

        encryption_provider = self._get_encryption_at_rest_provider(cluster_data)

        backup_enabled = self._get_backup_enabled(cluster_data)

        provider_settings = cluster_data.get("providerSettings", {})

        replication_specs = cluster_data.get("replicationSpecs", [])

        auto_scaling = cluster_data.get("autoScaling", {})

        connection_strings = cluster_data.get("connectionStrings", {})

        tags = cluster_data.get("tags", [])

        # Every field is defaulted so a sparse API payload still produces a
        # valid Cluster instead of a validation error.
        return Cluster(
            id=cluster_data.get("id", ""),
            name=cluster_name,
            project_id=project_id,
            project_name=project_name,
            mongo_db_version=cluster_data.get("mongoDBVersion", ""),
            cluster_type=cluster_data.get("clusterType", ""),
            state_name=cluster_data.get("stateName", ""),
            encryption_at_rest_provider=encryption_provider,
            backup_enabled=backup_enabled,
            auth_enabled=cluster_data.get("authEnabled", False),
            ssl_enabled=cluster_data.get("sslEnabled", False),
            provider_settings=provider_settings,
            replication_specs=replication_specs,
            disk_size_gb=cluster_data.get("diskSizeGB"),
            num_shards=cluster_data.get("numShards"),
            replication_factor=cluster_data.get("replicationFactor"),
            auto_scaling=auto_scaling,
            mongo_db_major_version=cluster_data.get("mongoDBMajorVersion"),
            paused=cluster_data.get("paused", False),
            pit_enabled=cluster_data.get("pitEnabled", False),
            connection_strings=connection_strings,
            tags=tags,
        )

    def _get_encryption_at_rest_provider(self, cluster_data: dict) -> Optional[str]:
        """
        Get encryption at rest provider from cluster data

        Args:
            cluster_data: Cluster data from API

        Returns:
            Optional[str]: Encryption provider or None
        """
        try:
            encryption_at_rest = cluster_data.get("encryptionAtRestProvider")

            # Prefer the explicit provider field when present.
            if encryption_at_rest:
                return encryption_at_rest

            # Fall back to AWS EBS volume encryption advertised in the
            # provider settings; default the provider name to "AWS".
            provider_settings = cluster_data.get("providerSettings", {})
            encrypt_ebs_volume = provider_settings.get("encryptEBSVolume", False)

            if encrypt_ebs_volume:
                return provider_settings.get("providerName", "AWS")

            return None

        except Exception as error:
            logger.error(f"Error getting encryption provider for cluster: {error}")
            return None

    def _get_backup_enabled(self, cluster_data: dict) -> bool:
        """
        Get backup enabled status from cluster data

        Args:
            cluster_data: Cluster data from API

        Returns:
            bool: True if backup is enabled, False otherwise
        """
        try:
            backup_enabled = cluster_data.get("backupEnabled", False)

            # Also check for point-in-time enabled as an indicator of backup
            pit_enabled = cluster_data.get("pitEnabled", False)

            return backup_enabled or pit_enabled

        except Exception as error:
            logger.error(f"Error getting backup status for cluster: {error}")
            return False
|
||||
+34
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "clusters_tls_enabled",
|
||||
"CheckTitle": "Ensure MongoDB Atlas clusters have TLS authentication required",
|
||||
"CheckType": [
|
||||
"Encryption"
|
||||
],
|
||||
"ServiceName": "clusters",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "arn:mongodbatlas:cluster:{project_id}:{cluster_name}",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Cluster",
|
||||
"Description": "Ensure MongoDB Atlas clusters have TLS authentication required to secure data in transit",
|
||||
"Risk": "Without TLS enabled, MongoDB Atlas clusters are vulnerable to man-in-the-middle attacks and data interception during transmission",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable TLS for MongoDB Atlas clusters by setting sslEnabled to true in the cluster configuration.",
|
||||
"Url": "https://www.mongodb.com/docs/atlas/setup-cluster-security/#encryption-in-transit"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"encryption"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that MongoDB Atlas clusters have TLS enabled (sslEnabled=true) to ensure secure data transmission."
|
||||
}
|
||||
+45
@@ -0,0 +1,45 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.services.clusters.clusters_client import (
|
||||
clusters_client,
|
||||
)
|
||||
|
||||
|
||||
class clusters_tls_enabled(Check):
    """Check if MongoDB Atlas clusters have TLS authentication required

    This class verifies that MongoDB Atlas clusters have TLS authentication
    required to secure data in transit.
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas cluster TLS enabled check

        Iterates over all clusters and checks if they have TLS
        enabled (sslEnabled=true).

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each cluster
        """
        reports = []

        for cluster in clusters_client.clusters.values():
            finding = CheckReportMongoDBAtlas(
                metadata=self.metadata(), resource=cluster
            )
            subject = f"Cluster {cluster.name} in project {cluster.project_name} "

            # One finding per cluster: PASS iff sslEnabled is set.
            if cluster.ssl_enabled:
                finding.status = "PASS"
                finding.status_extended = subject + "has TLS authentication enabled."
            else:
                finding.status = "FAIL"
                finding.status_extended = (
                    subject + "does not have TLS authentication enabled."
                )

            reports.append(finding)

        return reports
|
||||
+34
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "organizations_api_access_list_required",
|
||||
"CheckTitle": "Ensure organization requires API access list",
|
||||
"CheckType": [
|
||||
"Access Control"
|
||||
],
|
||||
"ServiceName": "organizations",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "arn:mongodbatlas:organization:{org_id}",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Organization",
|
||||
"Description": "Ensure organization requires API operations to originate from an IP Address added to the API access list",
|
||||
"Risk": "Without API access list requirement, API operations can originate from any IP address, increasing the risk of unauthorized access",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable API access list requirement for the organization by setting apiAccessListRequired to true in the organization settings.",
|
||||
"Url": "https://www.mongodb.com/docs/atlas/security/ip-access-list/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"iam"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that the organization requires API operations to originate from an IP Address added to the API access list (apiAccessListRequired=true)."
|
||||
}
|
||||
+51
@@ -0,0 +1,51 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.services.organizations.organizations_client import (
|
||||
organizations_client,
|
||||
)
|
||||
|
||||
|
||||
class organizations_api_access_list_required(Check):
    """Check if organization requires API access list

    This class verifies that MongoDB Atlas organizations require API operations
    to originate from an IP Address added to the API access list
    (apiAccessListRequired=true in the organization settings).
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas organization API access list required check

        Iterates over all organizations and checks if they require API operations
        to originate from an IP Address added to the API access list.

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each organization
        """
        findings = []

        for organization in organizations_client.organizations.values():
            report = CheckReportMongoDBAtlas(
                metadata=self.metadata(), resource=organization
            )

            # The Organization model declares settings as Optional[dict]; guard
            # against None so a failed settings fetch reads as "not required"
            # instead of raising AttributeError.
            settings = organization.settings or {}
            api_access_list_required = settings.get("apiAccessListRequired", False)

            if api_access_list_required:
                report.status = "PASS"
                report.status_extended = (
                    f"Organization {organization.name} requires API operations "
                    f"to originate from an IP Address added to the API access list."
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Organization {organization.name} does not require API operations "
                    f"to originate from an IP Address added to the API access list."
                )

            findings.append(report)

        return findings
||||
@@ -0,0 +1,6 @@
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.mongodbatlas.services.organizations.organizations_service import (
|
||||
Organizations,
|
||||
)
|
||||
|
||||
# Module-level singleton shared by all organizations checks; built from the
# provider configured for the current scan.
organizations_client = Organizations(Provider.get_global_provider())
|
||||
+34
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "organizations_mfa_required",
|
||||
"CheckTitle": "Ensure organization requires MFA",
|
||||
"CheckType": [
|
||||
"Authentication"
|
||||
],
|
||||
"ServiceName": "organizations",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "arn:mongodbatlas:organization:{org_id}",
|
||||
"Severity": "high",
|
||||
"ResourceType": "Organization",
|
||||
"Description": "Ensure organization requires users to set up Multi-Factor Authentication (MFA) before accessing the organization",
|
||||
"Risk": "Without MFA requirement, user accounts are vulnerable to credential-based attacks and unauthorized access",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "https://www.mongodb.com/docs/atlas/security-multi-factor-authentication/",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable MFA requirement for the organization by setting multiFactorAuthRequired to true in the organization settings.",
|
||||
"Url": "https://www.mongodb.com/docs/atlas/security-multi-factor-authentication/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"iam"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that the organization requires users to set up Multi-Factor Authentication (MFA) before accessing the organization (multiFactorAuthRequired=true)."
|
||||
}
|
||||
+49
@@ -0,0 +1,49 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.services.organizations.organizations_client import (
|
||||
organizations_client,
|
||||
)
|
||||
|
||||
|
||||
class organizations_mfa_required(Check):
    """Check if organization requires MFA

    This class verifies that MongoDB Atlas organizations require users
    to set up Multi-Factor Authentication (MFA) before accessing the
    organization (multiFactorAuthRequired=true in the organization settings).
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas organization MFA required check

        Iterates over all organizations and checks if they require users
        to set up Multi-Factor Authentication (MFA) before accessing the
        organization.

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each organization
        """
        findings = []

        for organization in organizations_client.organizations.values():
            report = CheckReportMongoDBAtlas(
                metadata=self.metadata(), resource=organization
            )

            # settings is Optional[dict] on the model; treat None (failed
            # settings fetch) the same as an empty settings document.
            settings = organization.settings or {}
            mfa_required = settings.get("multiFactorAuthRequired", False)

            if mfa_required:
                report.status = "PASS"
                report.status_extended = (
                    f"Organization {organization.name} requires users to set up "
                    f"Multi-Factor Authentication (MFA) before accessing the organization."
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Organization {organization.name} does not require users to set up "
                    f"Multi-Factor Authentication (MFA) before accessing the organization."
                )

            findings.append(report)

        return findings
||||
+34
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "organizations_security_contact_defined",
|
||||
"CheckTitle": "Ensure organization has a Security Contact defined",
|
||||
"CheckType": [
|
||||
"Security Contact"
|
||||
],
|
||||
"ServiceName": "organizations",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "arn:mongodbatlas:organization:{org_id}",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Organization",
|
||||
"Description": "Ensure organization has a security contact defined to receive security-related notifications",
|
||||
"Risk": "Without a security contact, the organization may not receive important security notifications and alerts",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Set a security contact email address in the organization settings to receive security-related notifications.",
|
||||
"Url": "https://www.mongodb.com/docs/atlas/tutorial/manage-organization-settings/#add-security-contact-information"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"security-contacts"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that the organization has a security contact defined (securityContact field) to receive security-related notifications."
|
||||
}
|
||||
+49
@@ -0,0 +1,49 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.services.organizations.organizations_client import (
|
||||
organizations_client,
|
||||
)
|
||||
|
||||
|
||||
class organizations_security_contact_defined(Check):
    """Check if organization has a Security Contact defined

    This class verifies that MongoDB Atlas organizations have a security contact
    defined (securityContact field in the organization settings) to receive
    security-related notifications.
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas organization security contact defined check

        Iterates over all organizations and checks if they have a security contact
        defined to receive security-related notifications.

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each organization
        """
        findings = []

        for organization in organizations_client.organizations.values():
            report = CheckReportMongoDBAtlas(
                metadata=self.metadata(), resource=organization
            )

            # settings is Optional[dict] on the model; a None value (failed
            # settings fetch) must not raise, it simply means "no contact".
            security_contact = (organization.settings or {}).get("securityContact")

            if security_contact:
                report.status = "PASS"
                report.status_extended = (
                    f"Organization {organization.name} has a security contact defined: "
                    f"{security_contact}"
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Organization {organization.name} does not have a security contact "
                    f"defined to receive security-related notifications."
                )

            findings.append(report)

        return findings
||||
@@ -0,0 +1,94 @@
|
||||
from typing import Dict, Optional
|
||||
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.mongodbatlas.lib.service.service import MongoDBAtlasService
|
||||
|
||||
|
||||
class Organization(BaseModel):
    """Representation of a MongoDB Atlas organization.

    Populated from the Atlas Admin API /orgs endpoints.
    """

    # Unique Atlas organization identifier.
    id: str
    # Human-readable organization name.
    name: str
    # Raw settings document from /orgs/{id}/settings (e.g. MFA and API
    # access list flags); empty dict when the fetch failed.
    settings: Optional[dict] = {}
|
||||
|
||||
class Organizations(MongoDBAtlasService):
    """Service wrapper that inventories MongoDB Atlas organizations via the Admin API."""

    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        # Eagerly build the inventory so checks can iterate it directly.
        self.organizations = self._list_organizations()

    def _list_organizations(self) -> Dict[str, Organization]:
        """Discover every organization visible to the credentials in use.

        Honors the provider's organization_id filter when it is set.

        Returns:
            Dict[str, Organization]: Organizations keyed by their Atlas ID.
        """
        logger.info("Organizations - Listing MongoDB Atlas organizations...")
        organizations = {}

        try:
            if self.provider.organization_id:
                # A single organization was requested; fetch it directly.
                raw = self._make_request(
                    "GET", f"/orgs/{self.provider.organization_id}"
                )
                organizations[raw["id"]] = self._process_organization(raw)
            else:
                # Walk every page of the /orgs listing.
                for raw in self._paginate_request("/orgs"):
                    organizations[raw["id"]] = self._process_organization(raw)
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        logger.info(f"Found {len(organizations)} MongoDB Atlas organizations")
        return organizations

    def _process_organization(self, org_data: dict) -> Organization:
        """Build an Organization model enriched with its settings document.

        Args:
            org_data: Raw organization document returned by the API.

        Returns:
            Organization: Processed organization object.
        """
        org_id = org_data["id"]
        return Organization(
            id=org_id,
            name=org_data.get("name", ""),
            settings=self._get_organization_settings(org_id),
        )

    def _get_organization_settings(self, org_id: str) -> dict:
        """Return the organization settings document ({} on API failure).

        Args:
            org_id: Organization ID.

        Returns:
            dict: Organization settings.
        """
        try:
            return self._make_request("GET", f"/orgs/{org_id}/settings")
        except Exception as error:
            logger.error(
                f"Error getting organization settings for organization {org_id}: {error}"
            )
            return {}
||||
+34
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "organizations_service_account_secrets_expiration",
|
||||
"CheckTitle": "Ensure organization has maximum period expiration for Admin API Service Account Secrets",
|
||||
"CheckType": [
|
||||
"Secrets Management"
|
||||
],
|
||||
"ServiceName": "organizations",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "arn:mongodbatlas:organization:{org_id}",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Organization",
|
||||
"Description": "Ensure organization has a maximum period before expiry for new Atlas Admin API Service Account secrets",
|
||||
"Risk": "Without proper expiration limits, service account secrets may remain valid for extended periods, increasing security risks",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Set maxServiceAccountSecretValidityInHours to 8 hours or less in the organization settings to ensure service account secrets expire regularly.",
|
||||
"Url": "https://www.mongodb.com/docs/api/doc/atlas-admin-api-v2/2025-03-12/operation/operation-getorganizationsettings#operation-getorganizationsettings-200-body-application-vnd-atlas-2023-01-01-json-maxserviceaccountsecretvalidityinhours"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"secrets-management"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that the organization has a maximum period expiration for Admin API Service Account secrets set to 8 hours or less (configurable)."
|
||||
}
|
||||
+64
@@ -0,0 +1,64 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.services.organizations.organizations_client import (
|
||||
organizations_client,
|
||||
)
|
||||
|
||||
|
||||
class organizations_service_account_secrets_expiration(Check):
    """Check if organization has maximum period expiration for Admin API Service Account Secrets

    This class verifies that MongoDB Atlas organizations have a maximum period
    before expiry for new Atlas Admin API Service Account secrets
    (maxServiceAccountSecretValidityInHours in the organization settings).
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas organization service account secrets expiration check

        Iterates over all organizations and checks if they have a maximum period
        expiration for Admin API Service Account secrets at or below the
        configured threshold (default: 8 hours).

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each organization
        """
        findings = []

        # Get configurable threshold from audit config, default to 8 hours
        max_hours_threshold = organizations_client.audit_config.get(
            "max_service_account_secret_validity_hours", 8
        )

        for organization in organizations_client.organizations.values():
            report = CheckReportMongoDBAtlas(
                metadata=self.metadata(), resource=organization
            )

            # settings is Optional[dict] on the model; guard against None so a
            # failed settings fetch reads as "no expiration configured".
            max_validity_hours = (organization.settings or {}).get(
                "maxServiceAccountSecretValidityInHours"
            )

            if max_validity_hours is None:
                report.status = "FAIL"
                report.status_extended = (
                    f"Organization {organization.name} does not have a maximum period "
                    f"expiration configured for Admin API Service Account secrets."
                )
            elif max_validity_hours <= max_hours_threshold:
                report.status = "PASS"
                report.status_extended = (
                    f"Organization {organization.name} has a maximum period expiration "
                    f"of {max_validity_hours} hours for Admin API Service Account secrets, "
                    f"which is within the recommended threshold of {max_hours_threshold} hours."
                )
            else:
                report.status = "FAIL"
                report.status_extended = (
                    f"Organization {organization.name} has a maximum period expiration "
                    f"of {max_validity_hours} hours for Admin API Service Account secrets, "
                    f"which exceeds the recommended threshold of {max_hours_threshold} hours."
                )

            findings.append(report)

        return findings
||||
+34
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "projects_auditing_enabled",
|
||||
"CheckTitle": "Ensure database auditing is enabled",
|
||||
"CheckType": [
|
||||
"Auditing"
|
||||
],
|
||||
"ServiceName": "projects",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "arn:mongodbatlas:project:{project_id}",
|
||||
"Severity": "medium",
|
||||
"ResourceType": "Project",
|
||||
"Description": "Ensure database auditing is enabled to track database operations and security events",
|
||||
"Risk": "Without auditing enabled, security events and database operations are not logged, making it difficult to detect unauthorized access or troubleshoot issues",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Enable database auditing for the MongoDB Atlas project by configuring audit filters and destinations.",
|
||||
"Url": "https://www.mongodb.com/docs/atlas/database-auditing/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"logging"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that database auditing is enabled by checking the audit configuration for the project."
|
||||
}
|
||||
+53
@@ -0,0 +1,53 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.services.projects.projects_client import (
|
||||
projects_client,
|
||||
)
|
||||
|
||||
|
||||
class projects_auditing_enabled(Check):
    """Check if database auditing is enabled for MongoDB Atlas projects

    This class verifies that MongoDB Atlas projects have database auditing
    enabled to track database operations and security events.
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas project auditing enabled check

        Iterates over all projects and checks if they have database auditing
        enabled by examining the audit configuration.

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each project
        """
        findings = []

        for project in projects_client.projects.values():
            report = CheckReportMongoDBAtlas(metadata=self.metadata(), resource=project)
            audit_config = project.audit_config

            if not audit_config:
                # Missing/empty audit config means we cannot confirm auditing.
                report.status = "FAIL"
                report.status_extended = f"Project {project.name} does not have audit configuration available."
            elif audit_config.get("enabled", False):
                report.status = "PASS"
                report.status_extended = (
                    f"Project {project.name} has database auditing enabled."
                )
                audit_filter = audit_config.get("auditFilter")
                if audit_filter:
                    report.status_extended += (
                        f" Audit filter configured: {audit_filter}"
                    )
            else:
                report.status = "FAIL"
                report.status_extended = f"Project {project.name} does not have database auditing enabled."

            findings.append(report)

        return findings
||||
@@ -0,0 +1,4 @@
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.mongodbatlas.services.projects.projects_service import Projects
|
||||
|
||||
# Module-level singleton shared by all projects checks; built from the
# provider configured for the current scan.
projects_client = Projects(Provider.get_global_provider())
|
||||
+32
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"Provider": "mongodbatlas",
|
||||
"CheckID": "projects_network_access_list_exposed_to_internet",
|
||||
"CheckTitle": "Ensure MongoDB Atlas project network access list is not exposed to the internet",
|
||||
"CheckType": [],
|
||||
"ServiceName": "projects",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "mongodbatlas:project-id:project-name",
|
||||
"Severity": "high",
|
||||
"ResourceType": "MongoDBAtlasProject",
|
||||
"Description": "Ensure that MongoDB Atlas projects have properly configured network access lists that don't allow unrestricted access from anywhere on the internet. Network access lists should be configured to allow access only from specific IP addresses, CIDR blocks, or AWS security groups to minimize the attack surface.",
|
||||
"Risk": "If a MongoDB Atlas project has network access entries that allow unrestricted access (0.0.0.0/0 or ::/0), it exposes the database to potential attacks from anywhere on the internet. This significantly increases the risk of unauthorized access, data breaches, and malicious activities.",
|
||||
"RelatedUrl": "https://docs.atlas.mongodb.com/security/ip-access-list/",
|
||||
"Remediation": {
|
||||
"Code": {
|
||||
"CLI": "",
|
||||
"NativeIaC": "",
|
||||
"Other": "",
|
||||
"Terraform": ""
|
||||
},
|
||||
"Recommendation": {
|
||||
"Text": "Configure network access lists to allow access only from specific IP addresses, CIDR blocks, or AWS security groups. Remove any entries that allow unrestricted access (0.0.0.0/0 or ::/0) and replace them with more restrictive rules based on your application's requirements.",
|
||||
"Url": "https://docs.atlas.mongodb.com/security/ip-access-list/"
|
||||
}
|
||||
},
|
||||
"Categories": [
|
||||
"network-security"
|
||||
],
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "This check verifies that MongoDB Atlas projects don't have network access entries that allow unrestricted access from the internet. Projects without any network access entries are also flagged as they may default to allowing unrestricted access."
|
||||
}
|
||||
+61
@@ -0,0 +1,61 @@
|
||||
from typing import List
|
||||
|
||||
from prowler.lib.check.models import Check, CheckReportMongoDBAtlas
|
||||
from prowler.providers.mongodbatlas.services.projects.projects_client import (
|
||||
projects_client,
|
||||
)
|
||||
|
||||
|
||||
class projects_network_access_list_exposed_to_internet(Check):
    """Check if MongoDB Atlas project network access list is not open to the world

    This class verifies that MongoDB Atlas projects don't have network access
    entries that allow unrestricted access from the internet (0.0.0.0/0 or ::/0).
    """

    def execute(self) -> List[CheckReportMongoDBAtlas]:
        """Execute the MongoDB Atlas project network access list check

        Iterates over all projects and checks if their network access lists
        contain entries that allow unrestricted access from anywhere.

        Returns:
            List[CheckReportMongoDBAtlas]: A list of reports for each project
        """
        findings = []

        for project in projects_client.projects.values():
            report = CheckReportMongoDBAtlas(metadata=self.metadata(), resource=project)
            entries = project.network_access_entries

            if not entries:
                # An empty access list is flagged: the project may default to
                # allowing unrestricted access.
                report.status = "FAIL"
                report.status_extended = (
                    f"Project {project.name} has no network access list entries configured, "
                    f"which may allow unrestricted access."
                )
                findings.append(report)
                continue

            exposed = []
            for entry in entries:
                # A None value is never a member of these tuples, so missing
                # fields are skipped naturally.
                if entry.cidr_block in ("0.0.0.0/0", "::/0"):
                    exposed.append(f"CIDR: {entry.cidr_block}")
                if entry.ip_address in ("0.0.0.0", "::"):
                    exposed.append(f"IP: {entry.ip_address}")

            if exposed:
                report.status = "FAIL"
                report.status_extended = (
                    f"Project {project.name} has network access entries open to the world: "
                    f"{', '.join(exposed)}. This allows unrestricted access from anywhere on the internet."
                )
            else:
                report.status = "PASS"
                report.status_extended = (
                    f"Project {project.name} has properly configured network access list "
                    f"with {len(project.network_access_entries)} restricted entries."
                )

            findings.append(report)

        return findings
||||
@@ -0,0 +1,191 @@
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from pydantic.v1 import BaseModel
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.mongodbatlas.lib.service.service import MongoDBAtlasService
|
||||
from prowler.providers.mongodbatlas.models import MongoDBAtlasNetworkAccessEntry
|
||||
|
||||
|
||||
class Project(BaseModel):
    """Representation of a MongoDB Atlas project (API "group").

    Populated from the Atlas Admin API /groups endpoints plus several
    per-project lookups (access list, settings, audit log config).
    """

    # Unique Atlas project identifier.
    id: str
    # Human-readable project name.
    name: str
    # Identifier of the organization owning this project.
    org_id: str
    # Creation timestamp as reported by the API.
    created: str
    # Number of clusters discovered in the project.
    cluster_count: int
    # IP access list entries from /groups/{id}/accessList.
    network_access_entries: List[MongoDBAtlasNetworkAccessEntry] = []
    # Raw settings document from /groups/{id}/settings; {} on fetch failure.
    project_settings: Optional[dict] = {}
    # Raw auditing configuration from /groups/{id}/auditLog; {} on fetch failure.
    audit_config: Optional[dict] = {}
||||
|
||||
|
||||
class Projects(MongoDBAtlasService):
    """Service wrapper that inventories MongoDB Atlas projects via the Admin API."""

    def __init__(self, provider):
        super().__init__(__class__.__name__, provider)
        # Eagerly build the inventory so checks can iterate it directly.
        self.projects = self._list_projects()

    def _list_projects(self) -> Dict[str, Project]:
        """Discover every project visible to the credentials in use.

        Honors the provider's project_id and organization_id filters when set.

        Returns:
            Dict[str, Project]: Projects keyed by their Atlas project ID.
        """
        logger.info("Projects - Listing MongoDB Atlas projects...")
        projects = {}

        try:
            if self.provider.project_id:
                # A single project was requested; skip pagination entirely.
                raw = self._make_request(
                    "GET", f"/groups/{self.provider.project_id}"
                )
                projects[raw["id"]] = self._process_project(raw)
            else:
                org_filter = self.provider.organization_id
                for raw in self._paginate_request("/groups"):
                    # Drop projects outside the requested organization, if any.
                    if org_filter and raw.get("orgId") != org_filter:
                        continue
                    projects[raw["id"]] = self._process_project(raw)
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

        logger.info(f"Found {len(projects)} MongoDB Atlas projects")
        return projects

    def _process_project(self, project_data: dict) -> Project:
        """Build a Project model, enriching the raw payload with extra lookups.

        Args:
            project_data: Raw project document returned by the /groups endpoint.

        Returns:
            Project: Processed project object.
        """
        project_id = project_data["id"]
        return Project(
            id=project_id,
            name=project_data.get("name", ""),
            org_id=project_data.get("orgId", ""),
            created=project_data.get("created", ""),
            cluster_count=self._get_cluster_count(project_id),
            network_access_entries=self._get_network_access_entries(project_id),
            project_settings=self._get_project_settings(project_id),
            audit_config=self._get_audit_config(project_id),
        )

    def _get_cluster_count(self, project_id: str) -> int:
        """Return how many clusters the project contains (0 on API failure).

        Args:
            project_id: Project ID.

        Returns:
            int: Number of clusters in the project.
        """
        try:
            return len(self._paginate_request(f"/groups/{project_id}/clusters"))
        except Exception as error:
            logger.error(
                f"Error getting cluster count for project {project_id}: {error}"
            )
            return 0

    def _get_network_access_entries(
        self, project_id: str
    ) -> List[MongoDBAtlasNetworkAccessEntry]:
        """Return the project's IP access list entries ([] on API failure).

        Args:
            project_id: Project ID.

        Returns:
            List[MongoDBAtlasNetworkAccessEntry]: List of network access entries.
        """
        try:
            return [
                MongoDBAtlasNetworkAccessEntry(
                    cidr_block=raw.get("cidrBlock"),
                    ip_address=raw.get("ipAddress"),
                    aws_security_group=raw.get("awsSecurityGroup"),
                    comment=raw.get("comment"),
                    delete_after_date=raw.get("deleteAfterDate"),
                )
                for raw in self._paginate_request(f"/groups/{project_id}/accessList")
            ]
        except Exception as error:
            logger.error(
                f"Error getting network access entries for project {project_id}: {error}"
            )
            return []

    def _get_project_settings(self, project_id: str) -> dict:
        """Return the project settings document ({} on API failure).

        Args:
            project_id: Project ID.

        Returns:
            dict: Project settings.
        """
        try:
            return self._make_request("GET", f"/groups/{project_id}/settings")
        except Exception as error:
            logger.error(
                f"Error getting project settings for project {project_id}: {error}"
            )
            return {}

    def _get_audit_config(self, project_id: str) -> dict:
        """Return the project's audit log configuration ({} on API failure).

        Args:
            project_id: Project ID.

        Returns:
            dict: Audit configuration.
        """
        try:
            return self._make_request("GET", f"/groups/{project_id}/auditLog")
        except Exception as error:
            logger.error(
                f"Error getting audit configuration for project {project_id}: {error}"
            )
            return {}
||||
@@ -43,6 +43,7 @@ dependencies = [
|
||||
"dash==3.1.1",
|
||||
"dash-bootstrap-components==2.0.3",
|
||||
"detect-secrets==1.5.0",
|
||||
"dulwich==0.23.0",
|
||||
"google-api-python-client==2.163.0",
|
||||
"google-auth-httplib2>=0.1,<0.3",
|
||||
"jsonschema==4.23.0",
|
||||
|
||||
@@ -17,7 +17,7 @@ prowler_command = "prowler"
|
||||
|
||||
# capsys
|
||||
# https://docs.pytest.org/en/7.1.x/how-to/capture-stdout-stderr.html
|
||||
prowler_default_usage_error = "usage: prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,nhn,dashboard,iac} ..."
|
||||
prowler_default_usage_error = "usage: prowler [-h] [--version] {aws,azure,gcp,kubernetes,m365,github,nhn,mongodbatlas,dashboard,iac} ..."
|
||||
|
||||
|
||||
def mock_get_available_providers():
|
||||
|
||||
@@ -506,6 +506,57 @@ class TestFinding:
|
||||
assert finding_output.metadata.Notes == "mock_notes"
|
||||
assert finding_output.metadata.Compliance == []
|
||||
|
||||
def test_generate_output_iac_remote(self):
|
||||
# Mock provider
|
||||
provider = MagicMock()
|
||||
provider.type = "iac"
|
||||
provider.scan_repository_url = "https://github.com/user/repo"
|
||||
provider.auth_method = "No auth"
|
||||
|
||||
# Mock check result
|
||||
check_output = MagicMock()
|
||||
check_output.file_path = "/path/to/iac/file.tf"
|
||||
check_output.resource_name = "aws_s3_bucket.example"
|
||||
check_output.resource_path = "/path/to/iac/file.tf"
|
||||
check_output.file_line_range = [1, 5]
|
||||
check_output.resource = {
|
||||
"resource": "aws_s3_bucket.example",
|
||||
"value": {},
|
||||
}
|
||||
check_output.resource_details = "test_resource_details"
|
||||
check_output.status = Status.PASS
|
||||
check_output.status_extended = "mock_status_extended"
|
||||
check_output.muted = False
|
||||
check_output.check_metadata = mock_check_metadata(provider="iac")
|
||||
check_output.compliance = {}
|
||||
|
||||
# Mock output options
|
||||
output_options = MagicMock()
|
||||
output_options.unix_timestamp = False
|
||||
|
||||
# Generate the finding
|
||||
finding_output = Finding.generate_output(provider, check_output, output_options)
|
||||
|
||||
# Finding
|
||||
assert isinstance(finding_output, Finding)
|
||||
assert finding_output.auth_method == "No auth"
|
||||
assert finding_output.resource_name == "aws_s3_bucket.example"
|
||||
assert finding_output.resource_uid == "aws_s3_bucket.example"
|
||||
assert finding_output.region == "/path/to/iac/file.tf"
|
||||
assert finding_output.status == Status.PASS
|
||||
assert finding_output.status_extended == "mock_status_extended"
|
||||
assert finding_output.muted is False
|
||||
|
||||
# Metadata
|
||||
assert finding_output.metadata.Provider == "iac"
|
||||
assert finding_output.metadata.CheckID == "mock_check_id"
|
||||
assert finding_output.metadata.CheckTitle == "mock_check_title"
|
||||
assert finding_output.metadata.CheckType == []
|
||||
assert finding_output.metadata.CheckAliases == []
|
||||
assert finding_output.metadata.ServiceName == "mock_service_name"
|
||||
assert finding_output.metadata.SubServiceName == ""
|
||||
assert finding_output.metadata.ResourceIdTemplate == ""
|
||||
|
||||
def assert_keys_lowercase(self, d):
|
||||
for k, v in d.items():
|
||||
assert k.islower()
|
||||
|
||||
@@ -286,6 +286,22 @@ class Test_IAM_Service:
|
||||
}
|
||||
],
|
||||
}
|
||||
# Hybrid role - assumable by both service and AWS account
|
||||
hybrid_policy_document = {
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Principal": {"Service": "cloudformation.amazonaws.com"},
|
||||
"Action": "sts:AssumeRole",
|
||||
},
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Principal": {"AWS": "arn:aws:iam::123456789012:root"},
|
||||
"Action": "sts:AssumeRole",
|
||||
},
|
||||
],
|
||||
}
|
||||
service_role = iam_client.create_role(
|
||||
RoleName="test-1",
|
||||
AssumeRolePolicyDocument=dumps(service_policy_document),
|
||||
@@ -300,6 +316,13 @@ class Test_IAM_Service:
|
||||
{"Key": "test", "Value": "test"},
|
||||
],
|
||||
)["Role"]
|
||||
hybrid_role = iam_client.create_role(
|
||||
RoleName="test-3",
|
||||
AssumeRolePolicyDocument=dumps(hybrid_policy_document),
|
||||
Tags=[
|
||||
{"Key": "test", "Value": "test"},
|
||||
],
|
||||
)["Role"]
|
||||
|
||||
# IAM client for this test class
|
||||
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
|
||||
@@ -314,6 +337,8 @@ class Test_IAM_Service:
|
||||
]
|
||||
assert is_service_role(service_role)
|
||||
assert not is_service_role(role)
|
||||
# Hybrid role should return False even though it has a service principal
|
||||
assert not is_service_role(hybrid_role)
|
||||
|
||||
# Test IAM Get Groups
|
||||
@mock_aws
|
||||
|
||||
+29
-246
@@ -1,7 +1,10 @@
|
||||
from unittest import mock
|
||||
from uuid import uuid4
|
||||
|
||||
from prowler.providers.azure.services.defender.defender_service import SecurityContacts
|
||||
from prowler.providers.azure.services.defender.defender_service import (
|
||||
NotificationsByRole,
|
||||
SecurityContactConfiguration,
|
||||
)
|
||||
from tests.providers.azure.azure_fixtures import (
|
||||
AZURE_SUBSCRIPTION_ID,
|
||||
set_mocked_azure_provider,
|
||||
@@ -10,8 +13,8 @@ from tests.providers.azure.azure_fixtures import (
|
||||
|
||||
class Test_defender_additional_email_configured_with_a_security_contact:
|
||||
def test_defender_no_subscriptions(self):
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {}
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -33,18 +36,20 @@ class Test_defender_additional_email_configured_with_a_security_contact:
|
||||
|
||||
def test_defender_no_additional_emails(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
resource_id: SecurityContactConfiguration(
|
||||
id=resource_id,
|
||||
name="default",
|
||||
emails="",
|
||||
enabled=True,
|
||||
emails=[],
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Contributor"],
|
||||
notified_roles_state="On",
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=True, roles=["Contributor"]
|
||||
),
|
||||
alert_minimal_severity=None,
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -75,108 +80,22 @@ class Test_defender_additional_email_configured_with_a_security_contact:
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
def test_defender_additional_email_bad_format(self):
|
||||
def test_defender_additional_email_configured(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
resource_id: SecurityContactConfiguration(
|
||||
id=resource_id,
|
||||
name="default",
|
||||
emails="bad_email",
|
||||
enabled=True,
|
||||
emails=["test@test.com"],
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Contributor"],
|
||||
notified_roles_state="On",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact.defender_client",
|
||||
new=defender_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact import (
|
||||
defender_additional_email_configured_with_a_security_contact,
|
||||
)
|
||||
|
||||
check = defender_additional_email_configured_with_a_security_contact()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"There is not another correct email configured for subscription {AZURE_SUBSCRIPTION_ID}."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
def test_defender_additional_email_bad_separator(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
name="default",
|
||||
emails="test@test.es, test@test.email.com",
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Contributor"],
|
||||
notified_roles_state="On",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact.defender_client",
|
||||
new=defender_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact import (
|
||||
defender_additional_email_configured_with_a_security_contact,
|
||||
)
|
||||
|
||||
check = defender_additional_email_configured_with_a_security_contact()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"There is not another correct email configured for subscription {AZURE_SUBSCRIPTION_ID}."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
def test_defender_additional_email_good_format(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
name="default",
|
||||
emails="test@test.com",
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Contributor"],
|
||||
notified_roles_state="On",
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=True, roles=["Contributor"]
|
||||
),
|
||||
alert_minimal_severity=None,
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -206,139 +125,3 @@ class Test_defender_additional_email_configured_with_a_security_contact:
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
def test_defender_additional_email_good_format_multiple_subdomains(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
name="default",
|
||||
emails="test@test.mail.es; bad_mail",
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Contributor"],
|
||||
notified_roles_state="On",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact.defender_client",
|
||||
new=defender_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact import (
|
||||
defender_additional_email_configured_with_a_security_contact,
|
||||
)
|
||||
|
||||
check = defender_additional_email_configured_with_a_security_contact()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"There is another correct email configured for subscription {AZURE_SUBSCRIPTION_ID}."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
def test_defender_default_security_contact_not_found(self):
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default": SecurityContacts(
|
||||
resource_id=f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default",
|
||||
name="default",
|
||||
emails="",
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="",
|
||||
alert_notifications_state="",
|
||||
notified_roles=[""],
|
||||
notified_roles_state="",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact.defender_client",
|
||||
new=defender_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact import (
|
||||
defender_additional_email_configured_with_a_security_contact,
|
||||
)
|
||||
|
||||
check = defender_additional_email_configured_with_a_security_contact()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"There is not another correct email configured for subscription {AZURE_SUBSCRIPTION_ID}."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert (
|
||||
result[0].resource_id
|
||||
== f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default"
|
||||
)
|
||||
|
||||
def test_defender_default_security_contact_not_found_empty_name(self):
|
||||
resource_id = f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default"
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
name="",
|
||||
emails="",
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="",
|
||||
alert_notifications_state="",
|
||||
notified_roles=[""],
|
||||
notified_roles_state="",
|
||||
)
|
||||
}
|
||||
}
|
||||
contact = defender_client.security_contacts[AZURE_SUBSCRIPTION_ID][resource_id]
|
||||
contact.name = getattr(contact, "name", "default") or "default"
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact.defender_client",
|
||||
new=defender_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.defender.defender_additional_email_configured_with_a_security_contact.defender_additional_email_configured_with_a_security_contact import (
|
||||
defender_additional_email_configured_with_a_security_contact,
|
||||
)
|
||||
|
||||
check = defender_additional_email_configured_with_a_security_contact()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"There is not another correct email configured for subscription {AZURE_SUBSCRIPTION_ID}."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
+48
-83
@@ -1,7 +1,10 @@
|
||||
from unittest import mock
|
||||
from uuid import uuid4
|
||||
|
||||
from prowler.providers.azure.services.defender.defender_service import SecurityContacts
|
||||
from prowler.providers.azure.services.defender.defender_service import (
|
||||
NotificationsByRole,
|
||||
SecurityContactConfiguration,
|
||||
)
|
||||
from tests.providers.azure.azure_fixtures import (
|
||||
AZURE_SUBSCRIPTION_ID,
|
||||
set_mocked_azure_provider,
|
||||
@@ -10,8 +13,8 @@ from tests.providers.azure.azure_fixtures import (
|
||||
|
||||
class Test_defender_ensure_notify_alerts_severity_is_high:
|
||||
def test_defender_no_subscriptions(self):
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {}
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -33,18 +36,20 @@ class Test_defender_ensure_notify_alerts_severity_is_high:
|
||||
|
||||
def test_defender_severity_alerts_critical(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
resource_id: SecurityContactConfiguration(
|
||||
id=resource_id,
|
||||
name="default",
|
||||
emails="",
|
||||
enabled=True,
|
||||
emails=[""],
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="Critical",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Contributor"],
|
||||
notified_roles_state="On",
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=True, roles=["Contributor"]
|
||||
),
|
||||
alert_minimal_severity="Critical",
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -77,18 +82,21 @@ class Test_defender_ensure_notify_alerts_severity_is_high:
|
||||
|
||||
def test_defender_severity_alerts_high(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id: SecurityContactConfiguration(
|
||||
resource_id=resource_id,
|
||||
id=resource_id,
|
||||
name="default",
|
||||
emails="",
|
||||
enabled=True,
|
||||
emails=[""],
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Contributor"],
|
||||
notified_roles_state="On",
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=True, roles=["Contributor"]
|
||||
),
|
||||
alert_minimal_severity="High",
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -121,18 +129,21 @@ class Test_defender_ensure_notify_alerts_severity_is_high:
|
||||
|
||||
def test_defender_severity_alerts_low(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id: SecurityContactConfiguration(
|
||||
resource_id=resource_id,
|
||||
id=resource_id,
|
||||
name="default",
|
||||
emails="",
|
||||
enabled=True,
|
||||
emails=[""],
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="Low",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Contributor"],
|
||||
notified_roles_state="On",
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=True, roles=["Contributor"]
|
||||
),
|
||||
alert_minimal_severity="Low",
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -164,18 +175,19 @@ class Test_defender_ensure_notify_alerts_severity_is_high:
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
def test_defender_default_security_contact_not_found(self):
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default": SecurityContacts(
|
||||
f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default": SecurityContactConfiguration(
|
||||
resource_id=f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default",
|
||||
id=f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default",
|
||||
name="default",
|
||||
emails="",
|
||||
enabled=True,
|
||||
emails=[""],
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="",
|
||||
alert_notifications_state="",
|
||||
notified_roles=[""],
|
||||
notified_roles_state="",
|
||||
notifications_by_role=NotificationsByRole(state=True, roles=[""]),
|
||||
alert_minimal_severity="",
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -208,50 +220,3 @@ class Test_defender_ensure_notify_alerts_severity_is_high:
|
||||
result[0].resource_id
|
||||
== f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default"
|
||||
)
|
||||
|
||||
def test_defender_default_security_contact_not_found_empty_name(self):
|
||||
resource_id = f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default"
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
name="",
|
||||
emails="",
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="",
|
||||
alert_notifications_state="",
|
||||
notified_roles=[""],
|
||||
notified_roles_state="",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
contact = defender_client.security_contacts[AZURE_SUBSCRIPTION_ID][resource_id]
|
||||
contact.name = getattr(contact, "name", "default") or "default"
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_ensure_notify_alerts_severity_is_high.defender_ensure_notify_alerts_severity_is_high.defender_client",
|
||||
new=defender_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.defender.defender_ensure_notify_alerts_severity_is_high.defender_ensure_notify_alerts_severity_is_high import (
|
||||
defender_ensure_notify_alerts_severity_is_high,
|
||||
)
|
||||
|
||||
check = defender_ensure_notify_alerts_severity_is_high()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"Notifications are not enabled for alerts with a minimum severity of high or lower in subscription {AZURE_SUBSCRIPTION_ID}."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
+39
-129
@@ -1,7 +1,10 @@
|
||||
from unittest import mock
|
||||
from uuid import uuid4
|
||||
|
||||
from prowler.providers.azure.services.defender.defender_service import SecurityContacts
|
||||
from prowler.providers.azure.services.defender.defender_service import (
|
||||
NotificationsByRole,
|
||||
SecurityContactConfiguration,
|
||||
)
|
||||
from tests.providers.azure.azure_fixtures import (
|
||||
AZURE_SUBSCRIPTION_ID,
|
||||
set_mocked_azure_provider,
|
||||
@@ -10,8 +13,8 @@ from tests.providers.azure.azure_fixtures import (
|
||||
|
||||
class Test_defender_ensure_notify_emails_to_owners:
|
||||
def test_defender_no_subscriptions(self):
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {}
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
@@ -33,22 +36,23 @@ class Test_defender_ensure_notify_emails_to_owners:
|
||||
|
||||
def test_defender_no_notify_emails_to_owners(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
resource_id: SecurityContactConfiguration(
|
||||
id=resource_id,
|
||||
name="default",
|
||||
emails="",
|
||||
enabled=True,
|
||||
emails=[""],
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Contributor"],
|
||||
notified_roles_state="On",
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=True, roles=["Contributor"]
|
||||
),
|
||||
alert_minimal_severity="Critical",
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
@@ -67,28 +71,24 @@ class Test_defender_ensure_notify_emails_to_owners:
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"The Owner role is not notified for subscription {AZURE_SUBSCRIPTION_ID}."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
def test_defender_notify_emails_to_owners_off(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
resource_id: SecurityContactConfiguration(
|
||||
id=resource_id,
|
||||
name="default",
|
||||
emails="",
|
||||
enabled=True,
|
||||
emails=[""],
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Owner", "Contributor"],
|
||||
notified_roles_state="Off",
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=False, roles=["Owner", "Contributor"]
|
||||
),
|
||||
alert_minimal_severity="Critical",
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -121,18 +121,20 @@ class Test_defender_ensure_notify_emails_to_owners:
|
||||
|
||||
def test_defender_notify_emails_to_owners(self):
|
||||
resource_id = str(uuid4())
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
defender_client = mock.MagicMock()
|
||||
defender_client.security_contact_configurations = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
resource_id: SecurityContactConfiguration(
|
||||
id=resource_id,
|
||||
name="default",
|
||||
emails="test@test.es",
|
||||
enabled=True,
|
||||
emails=["test@test.es"],
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Owner", "Contributor"],
|
||||
notified_roles_state="On",
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=True, roles=["Owner", "Contributor"]
|
||||
),
|
||||
alert_minimal_severity="Critical",
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -162,95 +164,3 @@ class Test_defender_ensure_notify_emails_to_owners:
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
def test_defender_default_security_contact_not_found(self):
|
||||
defender_client = mock.MagicMock
|
||||
defender_client.security_contacts = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default": SecurityContacts(
|
||||
resource_id=f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default",
|
||||
name="default",
|
||||
emails="",
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="",
|
||||
alert_notifications_state="",
|
||||
notified_roles=[""],
|
||||
notified_roles_state="",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_ensure_notify_emails_to_owners.defender_ensure_notify_emails_to_owners.defender_client",
|
||||
new=defender_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.defender.defender_ensure_notify_emails_to_owners.defender_ensure_notify_emails_to_owners import (
|
||||
defender_ensure_notify_emails_to_owners,
|
||||
)
|
||||
|
||||
check = defender_ensure_notify_emails_to_owners()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"The Owner role is not notified for subscription {AZURE_SUBSCRIPTION_ID}."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert (
|
||||
result[0].resource_id
|
||||
== f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default"
|
||||
)
|
||||
|
||||
def test_defender_default_security_contact_not_found_empty_name(self):
|
||||
defender_client = mock.MagicMock()
|
||||
resource_id = f"/subscriptions/{AZURE_SUBSCRIPTION_ID}/providers/Microsoft.Security/securityContacts/default"
|
||||
defender_client.security_contacts = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
resource_id: SecurityContacts(
|
||||
resource_id=resource_id,
|
||||
name="",
|
||||
emails="",
|
||||
phone="",
|
||||
alert_notifications_minimal_severity="",
|
||||
alert_notifications_state="",
|
||||
notified_roles=[""],
|
||||
notified_roles_state="",
|
||||
)
|
||||
}
|
||||
}
|
||||
contact = defender_client.security_contacts[AZURE_SUBSCRIPTION_ID][resource_id]
|
||||
contact.name = getattr(contact, "name", "default") or "default"
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_ensure_notify_emails_to_owners.defender_ensure_notify_emails_to_owners.defender_client",
|
||||
new=defender_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.defender.defender_ensure_notify_emails_to_owners.defender_ensure_notify_emails_to_owners import (
|
||||
defender_ensure_notify_emails_to_owners,
|
||||
)
|
||||
|
||||
check = defender_ensure_notify_emails_to_owners()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"The Owner role is not notified for subscription {AZURE_SUBSCRIPTION_ID}."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == "default"
|
||||
assert result[0].resource_id == resource_id
|
||||
|
||||
@@ -7,7 +7,7 @@ from prowler.providers.azure.services.defender.defender_service import (
|
||||
Defender,
|
||||
IoTSecuritySolution,
|
||||
Pricing,
|
||||
SecurityContacts,
|
||||
SecurityContactConfiguration,
|
||||
Setting,
|
||||
)
|
||||
from tests.providers.azure.azure_fixtures import (
|
||||
@@ -55,18 +55,24 @@ def mock_defender_get_assessments(_):
|
||||
}
|
||||
|
||||
|
||||
def mock_defender_get_security_contacts(_):
|
||||
def mock_defender_get_security_contacts(*args, **kwargs):
|
||||
from prowler.providers.azure.services.defender.defender_service import (
|
||||
NotificationsByRole,
|
||||
)
|
||||
|
||||
return {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
"/subscriptions/resource_id": SecurityContacts(
|
||||
resource_id="/subscriptions/resource_id",
|
||||
"/subscriptions/resource_id": SecurityContactConfiguration(
|
||||
id="/subscriptions/resource_id",
|
||||
name="default",
|
||||
emails="user@user.com, test@test.es",
|
||||
enabled=True,
|
||||
emails=["user@user.com", "test@test.es"],
|
||||
phone="666666666",
|
||||
alert_notifications_minimal_severity="High",
|
||||
alert_notifications_state="On",
|
||||
notified_roles=["Owner", "Contributor"],
|
||||
notified_roles_state="On",
|
||||
notifications_by_role=NotificationsByRole(
|
||||
state=True, roles=["Owner", "Contributor"]
|
||||
),
|
||||
alert_minimal_severity="High",
|
||||
attack_path_minimal_risk_level=None,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -216,52 +222,17 @@ class Test_Defender_Service:
|
||||
|
||||
def test_get_security_contacts(self):
|
||||
defender = Defender(set_mocked_azure_provider())
|
||||
assert len(defender.security_contacts) == 1
|
||||
assert (
|
||||
defender.security_contacts[AZURE_SUBSCRIPTION_ID][
|
||||
"/subscriptions/resource_id"
|
||||
].resource_id
|
||||
== "/subscriptions/resource_id"
|
||||
)
|
||||
assert (
|
||||
defender.security_contacts[AZURE_SUBSCRIPTION_ID][
|
||||
"/subscriptions/resource_id"
|
||||
].name
|
||||
== "default"
|
||||
)
|
||||
assert (
|
||||
defender.security_contacts[AZURE_SUBSCRIPTION_ID][
|
||||
"/subscriptions/resource_id"
|
||||
].emails
|
||||
== "user@user.com, test@test.es"
|
||||
)
|
||||
assert (
|
||||
defender.security_contacts[AZURE_SUBSCRIPTION_ID][
|
||||
"/subscriptions/resource_id"
|
||||
].phone
|
||||
== "666666666"
|
||||
)
|
||||
assert (
|
||||
defender.security_contacts[AZURE_SUBSCRIPTION_ID][
|
||||
"/subscriptions/resource_id"
|
||||
].alert_notifications_minimal_severity
|
||||
== "High"
|
||||
)
|
||||
assert (
|
||||
defender.security_contacts[AZURE_SUBSCRIPTION_ID][
|
||||
"/subscriptions/resource_id"
|
||||
].alert_notifications_state
|
||||
== "On"
|
||||
)
|
||||
assert defender.security_contacts[AZURE_SUBSCRIPTION_ID][
|
||||
assert len(defender.security_contact_configurations) == 1
|
||||
contact = defender.security_contact_configurations[AZURE_SUBSCRIPTION_ID][
|
||||
"/subscriptions/resource_id"
|
||||
].notified_roles == ["Owner", "Contributor"]
|
||||
assert (
|
||||
defender.security_contacts[AZURE_SUBSCRIPTION_ID][
|
||||
"/subscriptions/resource_id"
|
||||
].notified_roles_state
|
||||
== "On"
|
||||
)
|
||||
]
|
||||
assert contact.id == "/subscriptions/resource_id"
|
||||
assert contact.name == "default"
|
||||
assert contact.emails == ["user@user.com", "test@test.es"]
|
||||
assert contact.phone == "666666666"
|
||||
assert contact.alert_minimal_severity == "High"
|
||||
assert contact.notifications_by_role.state is True
|
||||
assert contact.notifications_by_role.roles == ["Owner", "Contributor"]
|
||||
|
||||
def test_get_iot_security_solutions(self):
|
||||
defender = Defender(set_mocked_azure_provider())
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
import os
|
||||
import tempfile
|
||||
from unittest import mock
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
@@ -131,6 +134,63 @@ class TestIacProvider:
|
||||
assert report.status == "FAIL"
|
||||
assert report.check_metadata.RelatedUrl == ""
|
||||
|
||||
def test_provider_run_local_scan(self):
|
||||
scan_path = "."
|
||||
provider = IacProvider(scan_path=scan_path)
|
||||
with mock.patch(
|
||||
"prowler.providers.iac.iac_provider.IacProvider.run_scan",
|
||||
) as mock_run_scan:
|
||||
provider.run()
|
||||
mock_run_scan.assert_called_with(scan_path, ["all"], [])
|
||||
|
||||
@mock.patch.dict(os.environ, {}, clear=True)
|
||||
def test_provider_run_remote_scan(self):
|
||||
scan_repository_url = "https://github.com/user/repo"
|
||||
provider = IacProvider(scan_repository_url=scan_repository_url)
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.iac.iac_provider.IacProvider._clone_repository",
|
||||
return_value=temp_dir,
|
||||
) as mock_clone,
|
||||
mock.patch(
|
||||
"prowler.providers.iac.iac_provider.IacProvider.run_scan"
|
||||
) as mock_run_scan,
|
||||
):
|
||||
provider.run()
|
||||
mock_clone.assert_called_with(scan_repository_url, None, None, None)
|
||||
mock_run_scan.assert_called_with(temp_dir, ["all"], [])
|
||||
|
||||
@mock.patch.dict(os.environ, {}, clear=True)
|
||||
def test_print_credentials_local(self):
|
||||
scan_path = "/path/to/scan"
|
||||
provider = IacProvider(scan_path=scan_path)
|
||||
with mock.patch("builtins.print") as mock_print:
|
||||
provider.print_credentials()
|
||||
assert any(
|
||||
f"Directory: \x1b[33m{scan_path}\x1b[0m" in call.args[0]
|
||||
for call in mock_print.call_args_list
|
||||
)
|
||||
assert any(
|
||||
"Scanning local IaC directory:" in call.args[0]
|
||||
for call in mock_print.call_args_list
|
||||
)
|
||||
|
||||
@mock.patch.dict(os.environ, {}, clear=True)
|
||||
def test_print_credentials_remote(self):
|
||||
repo_url = "https://github.com/user/repo"
|
||||
provider = IacProvider(scan_repository_url=repo_url)
|
||||
with mock.patch("builtins.print") as mock_print:
|
||||
provider.print_credentials()
|
||||
assert any(
|
||||
f"Repository: \x1b[33m{repo_url}\x1b[0m" in call.args[0]
|
||||
for call in mock_print.call_args_list
|
||||
)
|
||||
assert any(
|
||||
"Scanning remote IaC repository:" in call.args[0]
|
||||
for call in mock_print.call_args_list
|
||||
)
|
||||
|
||||
def test_iac_provider_process_check_medium_severity(self):
|
||||
"""Test processing a medium severity check"""
|
||||
provider = IacProvider()
|
||||
@@ -543,3 +603,31 @@ class TestIacProvider:
|
||||
mock_run_scan.assert_called_once_with(
|
||||
"/custom/path", ["terraform"], ["exclude"]
|
||||
)
|
||||
|
||||
@mock.patch("prowler.providers.iac.iac_provider.porcelain.clone")
|
||||
@mock.patch("tempfile.mkdtemp", return_value="/tmp/fake-dir")
|
||||
def test_clone_repository_no_auth(self, _mock_mkdtemp, mock_clone):
|
||||
provider = IacProvider()
|
||||
url = "https://github.com/user/repo.git"
|
||||
provider._clone_repository(url)
|
||||
mock_clone.assert_called_with(url, "/tmp/fake-dir", depth=1)
|
||||
|
||||
@mock.patch("prowler.providers.iac.iac_provider.porcelain.clone")
|
||||
@mock.patch("tempfile.mkdtemp", return_value="/tmp/fake-dir")
|
||||
def test_clone_repository_with_pat(self, _mock_mkdtemp, mock_clone):
|
||||
provider = IacProvider()
|
||||
url = "https://github.com/user/repo.git"
|
||||
provider._clone_repository(
|
||||
url, github_username="user", personal_access_token="token123"
|
||||
)
|
||||
expected_url = "https://user:token123@github.com/user/repo.git"
|
||||
mock_clone.assert_called_with(expected_url, "/tmp/fake-dir", depth=1)
|
||||
|
||||
@mock.patch("prowler.providers.iac.iac_provider.porcelain.clone")
|
||||
@mock.patch("tempfile.mkdtemp", return_value="/tmp/fake-dir")
|
||||
def test_clone_repository_with_oauth(self, _mock_mkdtemp, mock_clone):
|
||||
provider = IacProvider()
|
||||
url = "https://github.com/user/repo.git"
|
||||
provider._clone_repository(url, oauth_app_token="oauth456")
|
||||
expected_url = "https://oauth2:oauth456@github.com/user/repo.git"
|
||||
mock_clone.assert_called_with(expected_url, "/tmp/fake-dir", depth=1)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user