Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-01-25 02:08:11 +00:00)

Compare commits: update-api ... 5.12.2 (14 commits)
| Author | SHA1 | Date |
|---|---|---|
| | ea82ae60ac | |
| | 3a214a3956 | |
| | 19534bcac7 | |
| | 6c521161d1 | |
| | a1168e3082 | |
| | f2341c9878 | |
| | 67b8e925e5 | |
| | ad4475efc9 | |
| | 4dd6547b9c | |
| | cc4d759f47 | |
| | e9aca866c8 | |
| | 12f9e477a3 | |
| | a2a3b7c125 | |
| | 31f34fd15e | |
.gitignore (vendored): 3 changes
@@ -78,3 +78,6 @@ _data/
# Claude
CLAUDE.md
+
+ # LLM's (Until we have a standard one)
+ AGENTS.md
@@ -2,6 +2,16 @@

All notable changes to the **Prowler API** are documented in this file.

## [1.13.1] (Prowler 5.12.2)

### Changed

- Renamed compliance overview task queue to `compliance` [(#8755)](https://github.com/prowler-cloud/prowler/pull/8755)

### Security

- Django updated to the latest 5.1 security release, 5.1.12, due to [problems](https://www.djangoproject.com/weblog/2025/sep/03/security-releases/) with potential SQL injection in FilteredRelation column aliases [(#8693)](https://github.com/prowler-cloud/prowler/pull/8693)

---

## [1.13.0] (Prowler 5.12.0)

### Added

@@ -21,6 +31,8 @@ All notable changes to the **Prowler API** are documented in this file.

### Fixed

- GitHub provider always scans user instead of organization when using provider UID [(#8587)](https://github.com/prowler-cloud/prowler/pull/8587)

---

## [1.11.0] (Prowler 5.10.0)

### Added
@@ -32,7 +32,7 @@ start_prod_server() {

start_worker() {
    echo "Starting the worker..."
-    poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview,integrations -E --max-tasks-per-child 1
+    poetry run python -m celery -A config.celery worker -l "${DJANGO_LOGGING_LEVEL:-info}" -Q celery,scans,scan-reports,deletion,backfill,overview,integrations,compliance -E --max-tasks-per-child 1
}

start_worker_beat() {
api/poetry.lock (generated): 77 changes
@@ -1,4 +1,4 @@
- # This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
+ # This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.

[[package]]
name = "about-time"

@@ -383,6 +383,24 @@ cryptography = ">=2.1.4"
isodate = ">=0.6.1"
typing-extensions = ">=4.0.1"

+ [[package]]
+ name = "azure-mgmt-apimanagement"
+ version = "5.0.0"
+ description = "Microsoft Azure API Management Client Library for Python"
+ optional = false
+ python-versions = ">=3.8"
+ groups = ["main"]
+ files = [
+     {file = "azure_mgmt_apimanagement-5.0.0-py3-none-any.whl", hash = "sha256:b88c42a392333b60722fb86f15d092dfc19a8d67510dccd15c217381dff4e6ec"},
+     {file = "azure_mgmt_apimanagement-5.0.0.tar.gz", hash = "sha256:0ab7fe17e70fe3154cd840ff47d19d7a4610217003eaa7c21acf3511a6e57999"},
+ ]
+
+ [package.dependencies]
+ azure-common = ">=1.1"
+ azure-mgmt-core = ">=1.3.2"
+ isodate = ">=0.6.1"
+ typing-extensions = ">=4.6.0"
+
[[package]]
name = "azure-mgmt-applicationinsights"
version = "4.1.0"

@@ -540,6 +558,23 @@ azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"

+ [[package]]
+ name = "azure-mgmt-loganalytics"
+ version = "12.0.0"
+ description = "Microsoft Azure Log Analytics Management Client Library for Python"
+ optional = false
+ python-versions = "*"
+ groups = ["main"]
+ files = [
+     {file = "azure-mgmt-loganalytics-12.0.0.zip", hash = "sha256:da128a7e0291be7fa2063848df92a9180cf5c16d42adc09d2bc2efd711536bfb"},
+     {file = "azure_mgmt_loganalytics-12.0.0-py2.py3-none-any.whl", hash = "sha256:75ac1d47dd81179905c40765be8834643d8994acff31056ddc1863017f3faa02"},
+ ]
+
+ [package.dependencies]
+ azure-common = ">=1.1,<2.0"
+ azure-mgmt-core = ">=1.2.0,<2.0.0"
+ msrest = ">=0.6.21"
+
[[package]]
name = "azure-mgmt-monitor"
version = "6.0.2"

@@ -750,6 +785,23 @@ azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"

+ [[package]]
+ name = "azure-monitor-query"
+ version = "2.0.0"
+ description = "Microsoft Corporation Azure Monitor Query Client Library for Python"
+ optional = false
+ python-versions = ">=3.9"
+ groups = ["main"]
+ files = [
+     {file = "azure_monitor_query-2.0.0-py3-none-any.whl", hash = "sha256:8f52d581271d785e12f49cd5aaa144b8910fb843db2373855a7ef94c7fc462ea"},
+     {file = "azure_monitor_query-2.0.0.tar.gz", hash = "sha256:7b05f2fcac4fb67fc9f77a7d4c5d98a0f3099fb73b57c69ec1b080773994671b"},
+ ]
+
+ [package.dependencies]
+ azure-core = ">=1.30.0"
+ isodate = ">=0.6.1"
+ typing-extensions = ">=4.6.0"
+
[[package]]
name = "azure-storage-blob"
version = "12.24.1"

@@ -1511,14 +1563,14 @@ with-social = ["django-allauth[socialaccount] (>=64.0.0)"]

[[package]]
name = "django"
- version = "5.1.10"
+ version = "5.1.12"
description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design."
optional = false
python-versions = ">=3.10"
groups = ["main", "dev"]
files = [
-     {file = "django-5.1.10-py3-none-any.whl", hash = "sha256:19c9b771e9cf4de91101861aadd2daaa159bcf10698ca909c5755c88e70ccb84"},
-     {file = "django-5.1.10.tar.gz", hash = "sha256:73e5d191421d177803dbd5495d94bc7d06d156df9561f4eea9e11b4994c07137"},
+     {file = "django-5.1.12-py3-none-any.whl", hash = "sha256:9eb695636cea3601b65690f1596993c042206729afb320ca0960b55f8ed4477b"},
+     {file = "django-5.1.12.tar.gz", hash = "sha256:8a8991b1ec052ef6a44fefd1ef336ab8daa221287bcb91a4a17d5e1abec5bbcc"},
]

[package.dependencies]

@@ -3987,7 +4039,7 @@ files = [

[[package]]
name = "prowler"
- version = "5.11.0"
+ version = "5.12.0"
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
optional = false
python-versions = ">3.9.1,<3.13"

@@ -4000,6 +4052,7 @@ alive-progress = "3.3.0"
awsipranges = "0.3.3"
azure-identity = "1.21.0"
azure-keyvault-keys = "4.10.0"
+ azure-mgmt-apimanagement = "5.0.0"
azure-mgmt-applicationinsights = "4.1.0"
azure-mgmt-authorization = "4.0.0"
azure-mgmt-compute = "34.0.0"

@@ -4008,6 +4061,7 @@ azure-mgmt-containerservice = "34.1.0"
azure-mgmt-cosmosdb = "9.7.0"
azure-mgmt-databricks = "2.0.0"
azure-mgmt-keyvault = "10.3.1"
+ azure-mgmt-loganalytics = "12.0.0"
azure-mgmt-monitor = "6.0.2"
azure-mgmt-network = "28.1.0"
azure-mgmt-rdbms = "10.1.0"

@@ -4020,6 +4074,7 @@ azure-mgmt-sql = "3.0.1"
azure-mgmt-storage = "22.1.1"
azure-mgmt-subscription = "3.1.1"
azure-mgmt-web = "8.0.0"
+ azure-monitor-query = "2.0.0"
azure-storage-blob = "12.24.1"
boto3 = "1.39.15"
botocore = "1.39.15"

@@ -4031,6 +4086,7 @@ detect-secrets = "1.5.0"
dulwich = "0.23.0"
google-api-python-client = "2.163.0"
google-auth-httplib2 = ">=0.1,<0.3"
+ h2 = "4.3.0"
jsonschema = "4.23.0"
kubernetes = "32.0.1"
microsoft-kiota-abstractions = "1.9.2"

@@ -4052,8 +4108,8 @@ tzlocal = "5.3.1"
[package.source]
type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
- reference = "master"
- resolved_reference = "525f152e51f82de2110ed158c8dc489e42c289cf"
+ reference = "v5.12"
+ resolved_reference = "3f5178bffb56a46396e0211f1d121496f8016afd"

[[package]]
name = "psutil"

@@ -5223,6 +5279,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
+     {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},

@@ -5231,6 +5288,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
+     {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},

@@ -5239,6 +5297,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
+     {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},

@@ -5247,6 +5306,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
+     {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},

@@ -5255,6 +5315,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
+     {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
    {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},

@@ -6160,4 +6221,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
- content-hash = "b954196aba7e108cacb94fd15732be7130b27379add09140fabbb55f7335bb7b"
+ content-hash = "0c8181f9bfd77a9dbe0e952cbbcafabbbde53561c56da5795bc5db45556f848f"
@@ -7,7 +7,7 @@ authors = [{name = "Prowler Engineering", email = "engineering@prowler.com"}]
dependencies = [
    "celery[pytest] (>=5.4.0,<6.0.0)",
    "dj-rest-auth[with_social,jwt] (==7.0.1)",
-     "django==5.1.10",
+     "django (==5.1.12)",
    "django-allauth[saml] (>=65.8.0,<66.0.0)",
    "django-celery-beat (>=2.7.0,<3.0.0)",
    "django-celery-results (>=2.5.1,<3.0.0)",

@@ -24,7 +24,7 @@ dependencies = [
    "drf-spectacular-jsonapi==0.5.1",
    "gunicorn==23.0.0",
    "lxml==5.3.2",
-     "prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
+     "prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.12",
    "psycopg2-binary==2.9.9",
    "pytest-celery[redis] (>=1.0.1,<2.0.0)",
    "sentry-sdk[django] (>=2.20.0,<3.0.0)",

@@ -39,7 +39,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
- version = "1.13.0"
+ version = "1.13.1"

[project.scripts]
celery = "src.backend.config.settings.celery"
File diff suppressed because one or more lines are too long
@@ -300,7 +300,7 @@ class SchemaView(SpectacularAPIView):

    def get(self, request, *args, **kwargs):
        spectacular_settings.TITLE = "Prowler API"
-        spectacular_settings.VERSION = "1.13.0"
+        spectacular_settings.VERSION = "1.13.1"
        spectacular_settings.DESCRIPTION = (
            "Prowler API specification.\n\nThis file is auto-generated."
        )

@@ -461,7 +461,7 @@ def backfill_scan_resource_summaries_task(tenant_id: str, scan_id: str):
    return backfill_resource_scan_summaries(tenant_id=tenant_id, scan_id=scan_id)


- @shared_task(base=RLSTask, name="scan-compliance-overviews", queue="overview")
+ @shared_task(base=RLSTask, name="scan-compliance-overviews", queue="compliance")
def create_compliance_requirements_task(tenant_id: str, scan_id: str):
    """
    Creates detailed compliance requirement records for a scan.
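For context, the rename above only changes the queue the task is routed to; the worker must also consume that queue, which is why the entrypoint diff adds `compliance` to the worker's -Q list. A minimal sketch of the same pattern, mirroring only what the diff shows (the RLSTask base and surrounding Celery app config are omitted):

    from celery import shared_task

    # Tasks routed to "compliance" are only picked up by workers started
    # with that queue in their -Q list, e.g.:
    #   celery -A config.celery worker -Q celery,compliance
    @shared_task(name="scan-compliance-overviews", queue="compliance")
    def create_compliance_requirements(tenant_id: str, scan_id: str):
        ...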
poetry.lock (generated): 9 changes
@@ -1,4 +1,4 @@
- # This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
+ # This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.

[[package]]
name = "about-time"

@@ -2404,6 +2404,8 @@ python-versions = "*"
groups = ["dev"]
files = [
    {file = "jsonpath-ng-1.7.0.tar.gz", hash = "sha256:f6f5f7fd4e5ff79c785f1573b394043b39849fb2bb47bcead935d12b00beab3c"},
+     {file = "jsonpath_ng-1.7.0-py2-none-any.whl", hash = "sha256:898c93fc173f0c336784a3fa63d7434297544b7198124a68f9a3ef9597b0ae6e"},
+     {file = "jsonpath_ng-1.7.0-py3-none-any.whl", hash = "sha256:f3d7f9e848cba1b6da28c55b1c26ff915dc9e0b1ba7e752a53d6da8d5cbd00b6"},
]

[package.dependencies]

@@ -5031,6 +5033,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"},
+     {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"},
    {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"},

@@ -5039,6 +5042,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"},
+     {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"},
    {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"},

@@ -5047,6 +5051,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"},
+     {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"},
    {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"},

@@ -5055,6 +5060,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"},
+     {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"},
    {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"},

@@ -5063,6 +5069,7 @@ files = [
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"},
+     {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"},
    {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"},
    {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"},
@@ -1,6 +1,14 @@
# Prowler SDK Changelog

All notable changes to the **Prowler SDK** are documented in this file.

## [v5.12.1] (Prowler v5.12.1)

### Fixed

- Replaced old check id with new ones for compliance files [(#8682)](https://github.com/prowler-cloud/prowler/pull/8682)
- `firehose_stream_encrypted_at_rest` check false positives and new api call in kafka service [(#8599)](https://github.com/prowler-cloud/prowler/pull/8599)
- Replace defender rules policies key to use old name [(#8702)](https://github.com/prowler-cloud/prowler/pull/8702)

## [v5.12.0] (Prowler v5.12.0)

### Added
@@ -364,8 +364,8 @@
"ec2_ami_public",
"ec2_instance_public_ip",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -721,8 +721,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1510,8 +1510,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1604,8 +1604,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1698,8 +1698,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1558,8 +1558,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1682,7 +1682,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601",

@@ -1814,7 +1814,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306",

@@ -1917,7 +1917,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23",

@@ -3024,8 +3024,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -4588,4 +4588,4 @@
]
}
]
}
}

@@ -1557,8 +1557,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1682,7 +1682,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601",

@@ -1816,7 +1816,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306",

@@ -1919,7 +1919,7 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23",

@@ -3028,8 +3028,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -4603,4 +4603,4 @@
]
}
]
}
}

@@ -107,8 +107,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1024,8 +1024,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1470,8 +1470,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1650,8 +1650,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -1902,8 +1902,8 @@
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -553,8 +553,8 @@
"Description": "Ensure that ec2 security groups do not allow ingress from internet to common ports",
"Checks": [
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -66,7 +66,7 @@
"elbv2_ssl_listeners",
"ssm_documents_set_as_public",
"vpc_subnet_no_public_ip_by_default",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306",
"s3_account_level_public_access_blocks"

@@ -253,8 +253,8 @@
"ec2_securitygroup_allow_ingress_from_internet_to_all_ports",
"ec2_securitygroup_allow_ingress_from_internet_to_any_port",
"ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports",
- "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018",
- "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mongodb_27017_27018",
+ "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_ftp_20_21",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389",
"ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888",

@@ -12,7 +12,7 @@ from prowler.lib.logger import logger

timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
- prowler_version = "5.12.0"
+ prowler_version = "5.12.2"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -3,6 +3,7 @@ from typing import List
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.firehose.firehose_client import firehose_client
from prowler.providers.aws.services.firehose.firehose_service import EncryptionStatus
+from prowler.providers.aws.services.kafka.kafka_client import kafka_client
from prowler.providers.aws.services.kinesis.kinesis_client import kinesis_client
from prowler.providers.aws.services.kinesis.kinesis_service import EncryptionType

@@ -37,7 +38,28 @@ class firehose_stream_encrypted_at_rest(Check):
                        report.status = "PASS"
                        report.status_extended = f"Firehose Stream {stream.name} does not have at rest encryption enabled but the source stream {source_stream.name} has at rest encryption enabled."

-            # Check if the stream has encryption enabled directly
+            # MSK source - check if the MSK cluster has encryption at rest with CMK
+            elif stream.delivery_stream_type == "MSKAsSource":
+                msk_cluster_arn = stream.source.msk.msk_cluster_arn
+                if msk_cluster_arn:
+                    msk_cluster = None
+                    for cluster in kafka_client.clusters.values():
+                        if cluster.arn == msk_cluster_arn:
+                            msk_cluster = cluster
+                            break
+
+                    if msk_cluster:
+                        # All MSK clusters (both provisioned and serverless) always have encryption at rest enabled by AWS
+                        # AWS MSK always encrypts data at rest - either with AWS managed keys or CMK
+                        report.status = "PASS"
+                        if msk_cluster.kafka_version == "SERVERLESS":
+                            report.status_extended = f"Firehose Stream {stream.name} uses MSK serverless source which always has encryption at rest enabled by default."
+                        else:
+                            report.status_extended = f"Firehose Stream {stream.name} uses MSK provisioned source which always has encryption at rest enabled by AWS (either with AWS managed keys or CMK)."
+                    else:
+                        report.status_extended = f"Firehose Stream {stream.name} uses MSK source which always has encryption at rest enabled by AWS."
+
+            # Check if the stream has encryption enabled directly (DirectPut or DatabaseAsSource cases)
            elif stream.kms_encryption == EncryptionStatus.ENABLED:
                report.status = "PASS"
                report.status_extended = f"Firehose Stream {stream.name} does have at rest encryption enabled."
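One note on the lookup above: kafka_client.clusters is keyed by ClusterArn (see the Kafka service diff below), so the linear scan over .values() is equivalent to a direct dictionary lookup. A sketch, assuming the dict stays keyed by ARN:

    # Same result as the for/break loop in the diff above, in one line.
    msk_cluster = kafka_client.clusters.get(msk_cluster_arn)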
@@ -12,7 +12,12 @@ class kafka_cluster_encryption_at_rest_uses_cmk(Check):
            report.status = "FAIL"
            report.status_extended = f"Kafka cluster '{cluster.name}' does not have encryption at rest enabled with a CMK."

-            if any(
+            # Serverless clusters always have encryption at rest enabled by default
+            if cluster.kafka_version == "SERVERLESS":
+                report.status = "PASS"
+                report.status_extended = f"Kafka cluster '{cluster.name}' is serverless and always has encryption at rest enabled by default."
+            # For provisioned clusters, check if they use a customer managed KMS key
+            elif any(
                (
                    cluster.data_volume_kms_key_id == key.arn
                    and getattr(key, "manager", "") == "CUSTOMER"
@@ -13,7 +13,12 @@ class kafka_cluster_enhanced_monitoring_enabled(Check):
                f"Kafka cluster '{cluster.name}' has enhanced monitoring enabled."
            )

-            if cluster.enhanced_monitoring == "DEFAULT":
+            # Serverless clusters always have enhanced monitoring enabled by default
+            if cluster.kafka_version == "SERVERLESS":
+                report.status = "PASS"
+                report.status_extended = f"Kafka cluster '{cluster.name}' is serverless and always has enhanced monitoring enabled by default."
+            # For provisioned clusters, check the enhanced monitoring configuration
+            elif cluster.enhanced_monitoring == "DEFAULT":
                report.status = "FAIL"
                report.status_extended = f"Kafka cluster '{cluster.name}' does not have enhanced monitoring enabled."
@@ -11,7 +11,12 @@ class kafka_cluster_in_transit_encryption_enabled(Check):
            report.status = "FAIL"
            report.status_extended = f"Kafka cluster '{cluster.name}' does not have encryption in transit enabled."

-            if (
+            # Serverless clusters always have encryption in transit enabled by default
+            if cluster.kafka_version == "SERVERLESS":
+                report.status = "PASS"
+                report.status_extended = f"Kafka cluster '{cluster.name}' is serverless and always has encryption in transit enabled by default."
+            # For provisioned clusters, check the encryption configuration
+            elif (
                cluster.encryption_in_transit.client_broker == "TLS"
                and cluster.encryption_in_transit.in_cluster
            ):
@@ -13,7 +13,12 @@ class kafka_cluster_is_public(Check):
                f"Kafka cluster {cluster.name} is publicly accessible."
            )

-            if not cluster.public_access:
+            # Serverless clusters are always private by default
+            if cluster.kafka_version == "SERVERLESS":
+                report.status = "PASS"
+                report.status_extended = f"Kafka cluster {cluster.name} is serverless and always private by default."
+            # For provisioned clusters, check the public access configuration
+            elif not cluster.public_access:
                report.status = "PASS"
                report.status_extended = (
                    f"Kafka cluster {cluster.name} is not publicly accessible."
@@ -11,7 +11,12 @@ class kafka_cluster_mutual_tls_authentication_enabled(Check):
            report.status = "FAIL"
            report.status_extended = f"Kafka cluster '{cluster.name}' does not have mutual TLS authentication enabled."

-            if cluster.tls_authentication:
+            # Serverless clusters always have TLS authentication enabled by default
+            if cluster.kafka_version == "SERVERLESS":
+                report.status = "PASS"
+                report.status_extended = f"Kafka cluster '{cluster.name}' is serverless and always has TLS authentication enabled by default."
+            # For provisioned clusters, check the TLS configuration
+            elif cluster.tls_authentication:
                report.status = "PASS"
                report.status_extended = f"Kafka cluster '{cluster.name}' has mutual TLS authentication enabled."
@@ -13,7 +13,12 @@ class kafka_cluster_unrestricted_access_disabled(Check):
                f"Kafka cluster '{cluster.name}' has unrestricted access enabled."
            )

-            if not cluster.unauthentication_access:
+            # Serverless clusters always require authentication by default
+            if cluster.kafka_version == "SERVERLESS":
+                report.status = "PASS"
+                report.status_extended = f"Kafka cluster '{cluster.name}' is serverless and always requires authentication by default."
+            # For provisioned clusters, check the unauthenticated access configuration
+            elif not cluster.unauthentication_access:
                report.status = "PASS"
                report.status_extended = f"Kafka cluster '{cluster.name}' does not have unrestricted access enabled."
@@ -13,7 +13,12 @@ class kafka_cluster_uses_latest_version(Check):
                f"Kafka cluster '{cluster.name}' is using the latest version."
            )

-            if cluster.kafka_version != kafka_client.kafka_versions[-1].version:
+            # Serverless clusters don't have specific Kafka versions - AWS manages them automatically
+            if cluster.kafka_version == "SERVERLESS":
+                report.status = "PASS"
+                report.status_extended = f"Kafka cluster '{cluster.name}' is serverless and AWS automatically manages the Kafka version."
+            # For provisioned clusters, check if they're using the latest version
+            elif cluster.kafka_version != kafka_client.kafka_versions[-1].version:
                report.status = "FAIL"
                report.status_extended = (
                    f"Kafka cluster '{cluster.name}' is not using the latest version."
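All seven Kafka check diffs above apply the same guard: handle the serverless case first, then fall through to the provisioned-only logic. Distilled into a sketch (evaluate and provisioned_condition are illustrative names, not Prowler APIs):

    def evaluate(cluster, report, provisioned_condition):
        # Serverless MSK manages encryption, authentication and versions
        # itself, so it short-circuits before any provisioned-only checks.
        if cluster.kafka_version == "SERVERLESS":
            report.status = "PASS"
        elif provisioned_condition(cluster):
            report.status = "PASS"
        else:
            report.status = "FAIL"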
@@ -15,61 +15,133 @@ class Kafka(AWSService):
        self.__threading_call__(self._list_kafka_versions)

    def _list_clusters(self, regional_client):
        logger.info(f"Kafka - Listing clusters in region {regional_client.region}...")
        try:
-            cluster_paginator = regional_client.get_paginator("list_clusters")
+            # Use list_clusters_v2 to support both provisioned and serverless clusters
+            cluster_paginator = regional_client.get_paginator("list_clusters_v2")
+            logger.info(
+                f"Kafka - Paginator created for region {regional_client.region}"
+            )

            for page in cluster_paginator.paginate():
+                logger.info(
+                    f"Kafka - Processing page with {len(page.get('ClusterInfoList', []))} clusters in region {regional_client.region}"
+                )
                for cluster in page["ClusterInfoList"]:
+                    logger.info(
+                        f"Kafka - Found cluster: {cluster.get('ClusterName', 'Unknown')} in region {regional_client.region}"
+                    )
                    arn = cluster.get(
                        "ClusterArn",
                        f"{self.account_arn_template}/{cluster.get('ClusterName', '')}",
                    )
+                    cluster_type = cluster.get("ClusterType", "UNKNOWN")

                    if not self.audit_resources or is_resource_filtered(
                        arn, self.audit_resources
                    ):
-                        self.clusters[cluster.get("ClusterArn", "")] = Cluster(
-                            id=arn.split(":")[-1].split("/")[-1],
-                            name=cluster.get("ClusterName", ""),
-                            arn=arn,
-                            region=regional_client.region,
-                            tags=list(cluster.get("Tags", {})),
-                            state=cluster.get("State", ""),
-                            kafka_version=cluster.get(
-                                "CurrentBrokerSoftwareInfo", {}
-                            ).get("KafkaVersion", ""),
-                            data_volume_kms_key_id=cluster.get("EncryptionInfo", {})
-                            .get("EncryptionAtRest", {})
-                            .get("DataVolumeKMSKeyId", ""),
-                            encryption_in_transit=EncryptionInTransit(
-                                client_broker=cluster.get("EncryptionInfo", {})
-                                .get("EncryptionInTransit", {})
-                                .get("ClientBroker", "PLAINTEXT"),
-                                in_cluster=cluster.get("EncryptionInfo", {})
-                                .get("EncryptionInTransit", {})
-                                .get("InCluster", False),
-                            ),
-                            tls_authentication=cluster.get("ClientAuthentication", {})
-                            .get("Tls", {})
-                            .get("Enabled", False),
-                            public_access=cluster.get("BrokerNodeGroupInfo", {})
-                            .get("ConnectivityInfo", {})
-                            .get("PublicAccess", {})
-                            .get("Type", "SERVICE_PROVIDED_EIPS")
-                            != "DISABLED",
-                            unauthentication_access=cluster.get(
-                                "ClientAuthentication", {}
-                            )
-                            .get("Unauthenticated", {})
-                            .get("Enabled", False),
-                            enhanced_monitoring=cluster.get(
-                                "EnhancedMonitoring", "DEFAULT"
-                            ),
-                        )
+                        # Handle provisioned clusters
+                        if cluster_type == "PROVISIONED" and "Provisioned" in cluster:
+                            provisioned = cluster["Provisioned"]
+                            self.clusters[cluster.get("ClusterArn", "")] = Cluster(
+                                id=arn.split(":")[-1].split("/")[-1],
+                                name=cluster.get("ClusterName", ""),
+                                arn=arn,
+                                region=regional_client.region,
+                                tags=(
+                                    list(cluster.get("Tags", {}).values())
+                                    if cluster.get("Tags")
+                                    else []
+                                ),
+                                state=cluster.get("State", ""),
+                                kafka_version=provisioned.get(
+                                    "CurrentBrokerSoftwareInfo", {}
+                                ).get("KafkaVersion", ""),
+                                data_volume_kms_key_id=provisioned.get(
+                                    "EncryptionInfo", {}
+                                )
+                                .get("EncryptionAtRest", {})
+                                .get("DataVolumeKMSKeyId", ""),
+                                encryption_in_transit=EncryptionInTransit(
+                                    client_broker=provisioned.get("EncryptionInfo", {})
+                                    .get("EncryptionInTransit", {})
+                                    .get("ClientBroker", "PLAINTEXT"),
+                                    in_cluster=provisioned.get("EncryptionInfo", {})
+                                    .get("EncryptionInTransit", {})
+                                    .get("InCluster", False),
+                                ),
+                                tls_authentication=provisioned.get(
+                                    "ClientAuthentication", {}
+                                )
+                                .get("Tls", {})
+                                .get("Enabled", False),
+                                public_access=provisioned.get("BrokerNodeGroupInfo", {})
+                                .get("ConnectivityInfo", {})
+                                .get("PublicAccess", {})
+                                .get("Type", "SERVICE_PROVIDED_EIPS")
+                                != "DISABLED",
+                                unauthentication_access=provisioned.get(
+                                    "ClientAuthentication", {}
+                                )
+                                .get("Unauthenticated", {})
+                                .get("Enabled", False),
+                                enhanced_monitoring=provisioned.get(
+                                    "EnhancedMonitoring", "DEFAULT"
+                                ),
+                            )
+                            logger.info(
+                                f"Kafka - Added provisioned cluster {cluster.get('ClusterName', 'Unknown')} to clusters dict"
+                            )
+
+                        # Handle serverless clusters
+                        elif cluster_type == "SERVERLESS" and "Serverless" in cluster:
+                            # For serverless clusters, encryption is always enabled by default
+                            # We'll create a Cluster object with default encryption values
+                            self.clusters[cluster.get("ClusterArn", "")] = Cluster(
+                                id=arn.split(":")[-1].split("/")[-1],
+                                name=cluster.get("ClusterName", ""),
+                                arn=arn,
+                                region=regional_client.region,
+                                tags=(
+                                    list(cluster.get("Tags", {}).values())
+                                    if cluster.get("Tags")
+                                    else []
+                                ),
+                                state=cluster.get("State", ""),
+                                kafka_version="SERVERLESS",  # Serverless doesn't have specific Kafka version
+                                data_volume_kms_key_id="AWS_MANAGED",  # Serverless uses AWS managed keys
+                                encryption_in_transit=EncryptionInTransit(
+                                    client_broker="TLS",  # Serverless always has TLS enabled
+                                    in_cluster=True,  # Serverless always has in-cluster encryption
+                                ),
+                                tls_authentication=True,  # Serverless always has TLS authentication
+                                public_access=False,  # Serverless clusters are always private
+                                unauthentication_access=False,  # Serverless requires authentication
+                                enhanced_monitoring="DEFAULT",
+                            )
+                            logger.info(
+                                f"Kafka - Added serverless cluster {cluster.get('ClusterName', 'Unknown')} to clusters dict"
+                            )
+
+                        else:
+                            logger.warning(
+                                f"Kafka - Unknown cluster type {cluster_type} for cluster {cluster.get('ClusterName', 'Unknown')}"
+                            )
+                    else:
+                        logger.info(
+                            f"Kafka - Cluster {cluster.get('ClusterName', 'Unknown')} filtered out by audit_resources"
+                        )
+
+            logger.info(
+                f"Kafka - Total clusters found in region {regional_client.region}: {len(self.clusters)}"
+            )
        except Exception as error:
            logger.error(
                f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
+            logger.error(
+                f"Kafka - Error details in region {regional_client.region}: {str(error)}"
+            )

    def _list_kafka_versions(self, regional_client):
        try:
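The switch matters because, as the source comment notes, list_clusters_v2 supports both provisioned and serverless clusters, while the older list_clusters call does not surface the serverless type. A minimal boto3 sketch of the V2 call the service now relies on (the region and printout are illustrative):

    import boto3

    client = boto3.client("kafka", region_name="us-east-1")
    paginator = client.get_paginator("list_clusters_v2")
    for page in paginator.paginate():
        for cluster in page["ClusterInfoList"]:
            # ClusterType is "PROVISIONED" or "SERVERLESS"; type-specific
            # details live under the matching "Provisioned"/"Serverless" key.
            details = cluster.get(cluster["ClusterType"].capitalize(), {})
            print(cluster["ClusterName"], cluster["ClusterType"], bool(details))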
@@ -91,7 +91,7 @@ class Defender(M365Service):
malware_rule = [malware_rule]
for rule in malware_rule:
if rule:
malware_rules[rule.get("Name", "")] = MalwareRule(
malware_rules[rule.get("MalwareFilterPolicy", "")] = MalwareRule(
state=rule.get("State", ""),
priority=rule.get("Priority", 0),
users=rule.get("SentTo", None),
@@ -152,12 +152,14 @@ class Defender(M365Service):
antiphishing_rule = [antiphishing_rule]
for rule in antiphishing_rule:
if rule:
antiphishing_rules[rule.get("Name", "")] = AntiphishingRule(
state=rule.get("State", ""),
priority=rule.get("Priority", 0),
users=rule.get("SentTo", None),
groups=rule.get("SentToMemberOf", None),
domains=rule.get("RecipientDomainIs", None),
antiphishing_rules[rule.get("AntiPhishPolicy", "")] = (
AntiphishingRule(
state=rule.get("State", ""),
priority=rule.get("Priority", 0),
users=rule.get("SentTo", None),
groups=rule.get("SentToMemberOf", None),
domains=rule.get("RecipientDomainIs", None),
)
)
except Exception as error:
logger.error(
@@ -250,7 +252,9 @@ class Defender(M365Service):
outbound_spam_rule = [outbound_spam_rule]
for rule in outbound_spam_rule:
if rule:
outbound_spam_rules[rule.get("Name", "")] = OutboundSpamRule(
outbound_spam_rules[
rule.get("HostedOutboundSpamFilterPolicy", "")
] = OutboundSpamRule(
state=rule.get("State", "Disabled"),
priority=rule.get("Priority", 0),
users=rule.get("From", None),
@@ -330,12 +334,14 @@ class Defender(M365Service):
inbound_spam_rule = [inbound_spam_rule]
for rule in inbound_spam_rule:
if rule:
inbound_spam_rules[rule.get("Name", "")] = InboundSpamRule(
state=rule.get("State", "Disabled"),
priority=rule.get("Priority", 0),
users=rule.get("SentTo", None),
groups=rule.get("SentToMemberOf", None),
domains=rule.get("RecipientDomainIs", None),
inbound_spam_rules[rule.get("HostedContentFilterPolicy", "")] = (
InboundSpamRule(
state=rule.get("State", "Disabled"),
priority=rule.get("Priority", 0),
users=rule.get("SentTo", None),
groups=rule.get("SentToMemberOf", None),
domains=rule.get("RecipientDomainIs", None),
)
)
except Exception as error:
logger.error(

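Note: the four Defender hunks above make the same change: each rules dict is keyed by the policy the rule applies (MalwareFilterPolicy, AntiPhishPolicy, HostedOutboundSpamFilterPolicy, HostedContentFilterPolicy) instead of the rule's own Name. A hedged sketch of the resulting lookup, with hypothetical data:

def index_rules_by_policy(rules, policy_field):
    # Re-keying by the target policy lets a check join a policy straight
    # to the rule that applies it. Helper and sample data are illustrative.
    rules_by_policy = {}
    for rule in rules if isinstance(rules, list) else [rules]:
        if rule:
            rules_by_policy[rule.get(policy_field, "")] = rule
    return rules_by_policy

rules = [{"Name": "Rule A", "MalwareFilterPolicy": "Default", "State": "Enabled"}]
print(index_rules_by_policy(rules, "MalwareFilterPolicy")["Default"]["State"])
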
@@ -74,7 +74,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">3.9.1,<3.13"
version = "5.12.0"
version = "5.12.2"

[project.scripts]
prowler = "prowler.__main__:prowler"

@@ -162,3 +162,64 @@ class Test_kafka_cluster_encryption_at_rest_uses_cmk:
)
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION_US_EAST_1

def test_kafka_cluster_serverless_encryption_at_rest(self):
kafka_client = MagicMock
kafka_client.clusters = {
"arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6": Cluster(
id="6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
name="serverless-cluster-1",
arn="arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
region=AWS_REGION_US_EAST_1,
tags=[],
state="ACTIVE",
kafka_version="SERVERLESS",
data_volume_kms_key_id="AWS_MANAGED",
encryption_in_transit=EncryptionInTransit(
client_broker="TLS",
in_cluster=True,
),
tls_authentication=True,
public_access=False,
unauthentication_access=False,
enhanced_monitoring="DEFAULT",
)
}

kms_client = MagicMock
kms_client.keys = []

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_cluster_encryption_at_rest_uses_cmk.kafka_cluster_encryption_at_rest_uses_cmk.kms_client",
new=kms_client,
),
):
from prowler.providers.aws.services.kafka.kafka_cluster_encryption_at_rest_uses_cmk.kafka_cluster_encryption_at_rest_uses_cmk import (
kafka_cluster_encryption_at_rest_uses_cmk,
)

check = kafka_cluster_encryption_at_rest_uses_cmk()
result = check.execute()

assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "Kafka cluster 'serverless-cluster-1' is serverless and always has encryption at rest enabled by default."
)
assert result[0].resource_id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
assert (
result[0].resource_arn
== "arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
)
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION_US_EAST_1

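Note: the new serverless test pins down the check's expected behavior. The branch below is reconstructed from the assertions above, not copied from the check's source; the names are illustrative.

def encryption_at_rest_status(cluster, cmk_arns):
    # Serverless clusters short-circuit to PASS: AWS manages their
    # encryption at rest, so there is no KMS key to inspect.
    if cluster.kafka_version == "SERVERLESS":
        return "PASS", (
            f"Kafka cluster '{cluster.name}' is serverless and always has "
            "encryption at rest enabled by default."
        )
    if cluster.data_volume_kms_key_id in cmk_arns:
        return "PASS", f"Kafka cluster '{cluster.name}' uses a customer managed key."
    return "FAIL", f"Kafka cluster '{cluster.name}' does not use a customer managed key."
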
@@ -4,7 +4,7 @@ from prowler.providers.aws.services.kafka.kafka_service import (
Cluster,
EncryptionInTransit,
)
from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider
from tests.providers.aws.utils import AWS_REGION_US_EAST_1


class Test_kafka_cluster_enhanced_monitoring_enabled:
@@ -14,11 +14,11 @@ class Test_kafka_cluster_enhanced_monitoring_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -56,11 +56,11 @@ class Test_kafka_cluster_enhanced_monitoring_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -110,11 +110,11 @@ class Test_kafka_cluster_enhanced_monitoring_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -138,3 +138,57 @@ class Test_kafka_cluster_enhanced_monitoring_enabled:
)
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION_US_EAST_1

def test_kafka_cluster_serverless_enhanced_monitoring(self):
kafka_client = MagicMock
kafka_client.clusters = {
"arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6": Cluster(
id="6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
name="serverless-cluster-1",
arn="arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
region=AWS_REGION_US_EAST_1,
tags=[],
state="ACTIVE",
kafka_version="SERVERLESS",
data_volume_kms_key_id="AWS_MANAGED",
encryption_in_transit=EncryptionInTransit(
client_broker="TLS",
in_cluster=True,
),
tls_authentication=True,
public_access=False,
unauthentication_access=False,
enhanced_monitoring="DEFAULT",
)
}

with (
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
from prowler.providers.aws.services.kafka.kafka_cluster_enhanced_monitoring_enabled.kafka_cluster_enhanced_monitoring_enabled import (
kafka_cluster_enhanced_monitoring_enabled,
)

check = kafka_cluster_enhanced_monitoring_enabled()
result = check.execute()

assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "Kafka cluster 'serverless-cluster-1' is serverless and always has enhanced monitoring enabled by default."
)
assert result[0].resource_id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
assert (
result[0].resource_arn
== "arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
)
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION_US_EAST_1

@@ -4,7 +4,7 @@ from prowler.providers.aws.services.kafka.kafka_service import (
Cluster,
EncryptionInTransit,
)
from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider
from tests.providers.aws.utils import AWS_REGION_US_EAST_1


class Test_kafka_cluster_in_transit_encryption_enabled:
@@ -14,11 +14,11 @@ class Test_kafka_cluster_in_transit_encryption_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -56,11 +56,11 @@ class Test_kafka_cluster_in_transit_encryption_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -110,11 +110,11 @@ class Test_kafka_cluster_in_transit_encryption_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -164,11 +164,11 @@ class Test_kafka_cluster_in_transit_encryption_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -191,3 +191,57 @@ class Test_kafka_cluster_in_transit_encryption_enabled:
== "arn:aws:kafka:us-east-1:123456789012:cluster/demo-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-5"
)
assert result[0].region == AWS_REGION_US_EAST_1

def test_kafka_cluster_serverless_in_transit_encryption(self):
kafka_client = MagicMock
kafka_client.clusters = {
"arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6": Cluster(
id="6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
name="serverless-cluster-1",
arn="arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
region=AWS_REGION_US_EAST_1,
tags=[],
state="ACTIVE",
kafka_version="SERVERLESS",
data_volume_kms_key_id="AWS_MANAGED",
encryption_in_transit=EncryptionInTransit(
client_broker="TLS",
in_cluster=True,
),
tls_authentication=True,
public_access=False,
unauthentication_access=False,
enhanced_monitoring="DEFAULT",
)
}

with (
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
from prowler.providers.aws.services.kafka.kafka_cluster_in_transit_encryption_enabled.kafka_cluster_in_transit_encryption_enabled import (
kafka_cluster_in_transit_encryption_enabled,
)

check = kafka_cluster_in_transit_encryption_enabled()
result = check.execute()

assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "Kafka cluster 'serverless-cluster-1' is serverless and always has encryption in transit enabled by default."
)
assert result[0].resource_id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
assert (
result[0].resource_arn
== "arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
)
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].resource_tags == []

@@ -4,7 +4,7 @@ from prowler.providers.aws.services.kafka.kafka_service import (
Cluster,
EncryptionInTransit,
)
from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider
from tests.providers.aws.utils import AWS_REGION_US_EAST_1


class Test_kafka_cluster_is_public:
@@ -14,11 +14,11 @@ class Test_kafka_cluster_is_public:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -56,11 +56,11 @@ class Test_kafka_cluster_is_public:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -110,11 +110,11 @@ class Test_kafka_cluster_is_public:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -138,3 +138,57 @@ class Test_kafka_cluster_is_public:
assert result[0].resource_id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-5"
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].resource_tags == []

def test_kafka_cluster_serverless_public(self):
kafka_client = MagicMock
kafka_client.clusters = {
"arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6": Cluster(
id="6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
name="serverless-cluster-1",
arn="arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
region=AWS_REGION_US_EAST_1,
tags=[],
state="ACTIVE",
kafka_version="SERVERLESS",
data_volume_kms_key_id="AWS_MANAGED",
encryption_in_transit=EncryptionInTransit(
client_broker="TLS",
in_cluster=True,
),
tls_authentication=True,
public_access=False,
unauthentication_access=False,
enhanced_monitoring="DEFAULT",
)
}

with (
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
from prowler.providers.aws.services.kafka.kafka_cluster_is_public.kafka_cluster_is_public import (
kafka_cluster_is_public,
)

check = kafka_cluster_is_public()
result = check.execute()

assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "Kafka cluster serverless-cluster-1 is serverless and always private by default."
)
assert (
result[0].resource_arn
== "arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
)
assert result[0].resource_id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].resource_tags == []

@@ -4,7 +4,7 @@ from prowler.providers.aws.services.kafka.kafka_service import (
Cluster,
EncryptionInTransit,
)
from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider
from tests.providers.aws.utils import AWS_REGION_US_EAST_1


class Test_kafka_cluster_mutual_tls_authentication_enabled:
@@ -14,11 +14,11 @@ class Test_kafka_cluster_mutual_tls_authentication_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -56,11 +56,11 @@ class Test_kafka_cluster_mutual_tls_authentication_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -110,11 +110,11 @@ class Test_kafka_cluster_mutual_tls_authentication_enabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -138,3 +138,57 @@ class Test_kafka_cluster_mutual_tls_authentication_enabled:
)
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].resource_tags == []

def test_kafka_cluster_serverless_mutual_tls_authentication(self):
kafka_client = MagicMock
kafka_client.clusters = {
"arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6": Cluster(
id="6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
name="serverless-cluster-1",
arn="arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
region=AWS_REGION_US_EAST_1,
tags=[],
state="ACTIVE",
kafka_version="SERVERLESS",
data_volume_kms_key_id="AWS_MANAGED",
encryption_in_transit=EncryptionInTransit(
client_broker="TLS",
in_cluster=True,
),
tls_authentication=True,
public_access=False,
unauthentication_access=False,
enhanced_monitoring="DEFAULT",
)
}

with (
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
from prowler.providers.aws.services.kafka.kafka_cluster_mutual_tls_authentication_enabled.kafka_cluster_mutual_tls_authentication_enabled import (
kafka_cluster_mutual_tls_authentication_enabled,
)

check = kafka_cluster_mutual_tls_authentication_enabled()
result = check.execute()

assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "Kafka cluster 'serverless-cluster-1' is serverless and always has TLS authentication enabled by default."
)
assert result[0].resource_id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
assert (
result[0].resource_arn
== "arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
)
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].resource_tags == []

@@ -4,7 +4,7 @@ from prowler.providers.aws.services.kafka.kafka_service import (
Cluster,
EncryptionInTransit,
)
from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider
from tests.providers.aws.utils import AWS_REGION_US_EAST_1


class Test_kafka_cluster_unrestricted_access_disabled:
@@ -14,11 +14,11 @@ class Test_kafka_cluster_unrestricted_access_disabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -56,11 +56,11 @@ class Test_kafka_cluster_unrestricted_access_disabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -110,11 +110,11 @@ class Test_kafka_cluster_unrestricted_access_disabled:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -138,3 +138,57 @@ class Test_kafka_cluster_unrestricted_access_disabled:
)
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].resource_tags == []

def test_kafka_cluster_serverless_unrestricted_access_disabled(self):
kafka_client = MagicMock
kafka_client.clusters = {
"arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6": Cluster(
id="6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
name="serverless-cluster-1",
arn="arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
region=AWS_REGION_US_EAST_1,
tags=[],
state="ACTIVE",
kafka_version="SERVERLESS",
data_volume_kms_key_id="AWS_MANAGED",
encryption_in_transit=EncryptionInTransit(
client_broker="TLS",
in_cluster=True,
),
tls_authentication=True,
public_access=False,
unauthentication_access=False,
enhanced_monitoring="DEFAULT",
)
}

with (
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
from prowler.providers.aws.services.kafka.kafka_cluster_unrestricted_access_disabled.kafka_cluster_unrestricted_access_disabled import (
kafka_cluster_unrestricted_access_disabled,
)

check = kafka_cluster_unrestricted_access_disabled()
result = check.execute()

assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "Kafka cluster 'serverless-cluster-1' is serverless and always requires authentication by default."
)
assert result[0].resource_id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
assert (
result[0].resource_arn
== "arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
)
assert result[0].region == AWS_REGION_US_EAST_1
assert result[0].resource_tags == []

@@ -5,7 +5,7 @@ from prowler.providers.aws.services.kafka.kafka_service import (
EncryptionInTransit,
KafkaVersion,
)
from tests.providers.aws.utils import AWS_REGION_US_EAST_1, set_mocked_aws_provider
from tests.providers.aws.utils import AWS_REGION_US_EAST_1


class Test_kafka_cluster_latest_version:
@@ -15,11 +15,11 @@ class Test_kafka_cluster_latest_version:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -62,11 +62,11 @@ class Test_kafka_cluster_latest_version:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -121,11 +121,11 @@ class Test_kafka_cluster_latest_version:

with (
patch(
"prowler.providers.common.provider.Provider.get_global_provider",
return_value=set_mocked_aws_provider([AWS_REGION_US_EAST_1]),
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
@@ -149,3 +149,62 @@ class Test_kafka_cluster_latest_version:
)
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION_US_EAST_1

def test_kafka_cluster_serverless_uses_latest_version(self):
kafka_client = MagicMock
kafka_client.clusters = {
"arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6": Cluster(
id="6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
name="serverless-cluster-1",
arn="arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
region=AWS_REGION_US_EAST_1,
tags=[],
state="ACTIVE",
kafka_version="SERVERLESS",
data_volume_kms_key_id="AWS_MANAGED",
encryption_in_transit=EncryptionInTransit(
client_broker="TLS",
in_cluster=True,
),
tls_authentication=True,
public_access=False,
unauthentication_access=False,
enhanced_monitoring="DEFAULT",
)
}

kafka_client.kafka_versions = [
KafkaVersion(version="1.0.0", status="DEPRECATED"),
KafkaVersion(version="2.8.0", status="ACTIVE"),
]

with (
patch(
"prowler.providers.aws.services.kafka.kafka_service.Kafka",
new=kafka_client,
),
patch(
"prowler.providers.aws.services.kafka.kafka_client.kafka_client",
new=kafka_client,
),
):
from prowler.providers.aws.services.kafka.kafka_cluster_uses_latest_version.kafka_cluster_uses_latest_version import (
kafka_cluster_uses_latest_version,
)

check = kafka_cluster_uses_latest_version()
result = check.execute()

assert len(result) == 1
assert result[0].status == "PASS"
assert (
result[0].status_extended
== "Kafka cluster 'serverless-cluster-1' is serverless and AWS automatically manages the Kafka version."
)
assert result[0].resource_id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
assert (
result[0].resource_arn
== "arn:aws:kafka:us-east-1:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
)
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION_US_EAST_1

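Note: a hedged sketch of the version logic these tests imply: serverless clusters pass unconditionally, while provisioned clusters are compared against the ACTIVE versions from ListKafkaVersions (assuming, as the mock below does, that versions arrive oldest first). This is inferred from the assertions, not copied from the check.

def uses_latest_version(cluster, kafka_versions):
    if cluster.kafka_version == "SERVERLESS":
        return True  # AWS automatically manages the Kafka version
    active = [v.version for v in kafka_versions if v.status == "ACTIVE"]
    return bool(active) and cluster.kafka_version == active[-1]
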
@@ -13,47 +13,67 @@ make_api_call = botocore.client.BaseClient._make_api_call


def mock_make_api_call(self, operation_name, kwarg):
if operation_name == "ListClusters":
if operation_name == "ListClustersV2":
return {
"ClusterInfoList": [
{
"BrokerNodeGroupInfo": {
"BrokerAZDistribution": "DEFAULT",
"ClientSubnets": ["subnet-cbfff283", "subnet-6746046b"],
"InstanceType": "kafka.m5.large",
"SecurityGroups": ["sg-f839b688"],
"StorageInfo": {"EbsStorageInfo": {"VolumeSize": 100}},
},
"ClusterType": "PROVISIONED",
"ClusterArn": f"arn:aws:kafka:{AWS_REGION_US_EAST_1}:123456789012:cluster/demo-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-5",
"ClusterName": "demo-cluster-1",
"CreationTime": "2020-07-09T02:31:36.223000+00:00",
"CurrentBrokerSoftwareInfo": {"KafkaVersion": "2.2.1"},
"CurrentVersion": "K3AEGXETSR30VB",
"EncryptionInfo": {
"EncryptionAtRest": {
"DataVolumeKMSKeyId": f"arn:aws:kms:{AWS_REGION_US_EAST_1}:123456789012:key/a7ca56d5-0768-4b64-a670-339a9fbef81c"
},
"EncryptionInTransit": {
"ClientBroker": "TLS_PLAINTEXT",
"InCluster": True,
},
},
"ClientAuthentication": {
"Tls": {"CertificateAuthorityArnList": [], "Enabled": True},
"Unauthenticated": {"Enabled": False},
},
"EnhancedMonitoring": "DEFAULT",
"OpenMonitoring": {
"Prometheus": {
"JmxExporter": {"EnabledInBroker": False},
"NodeExporter": {"EnabledInBroker": False},
}
},
"NumberOfBrokerNodes": 2,
"State": "ACTIVE",
"Tags": {},
"ZookeeperConnectString": f"z-2.demo-cluster-1.xuy0sb.c5.kafka.{AWS_REGION_US_EAST_1}.amazonaws.com:2181,z-1.demo-cluster-1.xuy0sb.c5.kafka.{AWS_REGION_US_EAST_1}.amazonaws.com:2181,z-3.demo-cluster-1.xuy0sb.c5.kafka.{AWS_REGION_US_EAST_1}.amazonaws.com:2181",
}
"Provisioned": {
"BrokerNodeGroupInfo": {
"BrokerAZDistribution": "DEFAULT",
"ClientSubnets": ["subnet-cbfff283", "subnet-6746046b"],
"InstanceType": "kafka.m5.large",
"SecurityGroups": ["sg-f839b688"],
"StorageInfo": {"EbsStorageInfo": {"VolumeSize": 100}},
"ConnectivityInfo": {
"PublicAccess": {"Type": "SERVICE_PROVIDED_EIPS"}
},
},
"CurrentBrokerSoftwareInfo": {"KafkaVersion": "2.2.1"},
"CurrentVersion": "K3AEGXETSR30VB",
"EncryptionInfo": {
"EncryptionAtRest": {
"DataVolumeKMSKeyId": f"arn:aws:kms:{AWS_REGION_US_EAST_1}:123456789012:key/a7ca56d5-0768-4b64-a670-339a9fbef81c"
},
"EncryptionInTransit": {
"ClientBroker": "TLS_PLAINTEXT",
"InCluster": True,
},
},
"ClientAuthentication": {
"Tls": {"CertificateAuthorityArnList": [], "Enabled": True},
"Unauthenticated": {"Enabled": False},
},
"EnhancedMonitoring": "DEFAULT",
"OpenMonitoring": {
"Prometheus": {
"JmxExporter": {"EnabledInBroker": False},
"NodeExporter": {"EnabledInBroker": False},
}
},
"NumberOfBrokerNodes": 2,
"ZookeeperConnectString": f"z-2.demo-cluster-1.xuy0sb.c5.kafka.{AWS_REGION_US_EAST_1}.amazonaws.com:2181,z-1.demo-cluster-1.xuy0sb.c5.kafka.{AWS_REGION_US_EAST_1}.amazonaws.com:2181,z-3.demo-cluster-1.xuy0sb.c5.kafka.{AWS_REGION_US_EAST_1}.amazonaws.com:2181",
},
},
{
"ClusterType": "SERVERLESS",
"ClusterArn": f"arn:aws:kafka:{AWS_REGION_US_EAST_1}:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6",
"ClusterName": "serverless-cluster-1",
"State": "ACTIVE",
"Tags": {},
"Serverless": {
"VpcConfigs": [
{
"SubnetIds": ["subnet-cbfff283", "subnet-6746046b"],
"SecurityGroups": ["sg-f839b688"],
}
],
},
},
]
}
elif operation_name == "ListKafkaVersions":
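Note: for readers unfamiliar with the interception trick this test file relies on: patching botocore.client.BaseClient._make_api_call routes every AWS SDK call through mock_make_api_call, so the canned ListClustersV2 and ListKafkaVersions payloads are served without credentials or network access. A usage sketch:

from unittest import mock

with mock.patch(
    "botocore.client.BaseClient._make_api_call", new=mock_make_api_call
):
    # Any boto3 "kafka" client created inside this block returns the
    # mocked responses defined above instead of calling AWS.
    pass
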
@@ -86,32 +86,53 @@ class TestKafkaService:
assert kafka.__class__.__name__ == "Kafka"
assert kafka.session.__class__.__name__ == "Session"
assert kafka.audited_account == AWS_ACCOUNT_NUMBER
# Clusters assertions
assert len(kafka.clusters) == 1
cluster_arn = f"arn:aws:kafka:{AWS_REGION_US_EAST_1}:123456789012:cluster/demo-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-5"
assert cluster_arn in kafka.clusters

# Clusters assertions - should now include both provisioned and serverless
assert len(kafka.clusters) == 2

# Check provisioned cluster
provisioned_cluster_arn = f"arn:aws:kafka:{AWS_REGION_US_EAST_1}:123456789012:cluster/demo-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-5"
assert provisioned_cluster_arn in kafka.clusters
provisioned_cluster = kafka.clusters[provisioned_cluster_arn]
assert provisioned_cluster.id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-5"
assert provisioned_cluster.arn == provisioned_cluster_arn
assert provisioned_cluster.name == "demo-cluster-1"
assert provisioned_cluster.region == AWS_REGION_US_EAST_1
assert provisioned_cluster.tags == []
assert provisioned_cluster.state == "ACTIVE"
assert provisioned_cluster.kafka_version == "2.2.1"
assert (
kafka.clusters[cluster_arn].id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-5"
)
assert kafka.clusters[cluster_arn].arn == cluster_arn
assert kafka.clusters[cluster_arn].name == "demo-cluster-1"
assert kafka.clusters[cluster_arn].region == AWS_REGION_US_EAST_1
assert kafka.clusters[cluster_arn].tags == []
assert kafka.clusters[cluster_arn].state == "ACTIVE"
assert kafka.clusters[cluster_arn].kafka_version == "2.2.1"
assert (
kafka.clusters[cluster_arn].data_volume_kms_key_id
provisioned_cluster.data_volume_kms_key_id
== f"arn:aws:kms:{AWS_REGION_US_EAST_1}:123456789012:key/a7ca56d5-0768-4b64-a670-339a9fbef81c"
)
assert (
kafka.clusters[cluster_arn].encryption_in_transit.client_broker
== "TLS_PLAINTEXT"
provisioned_cluster.encryption_in_transit.client_broker == "TLS_PLAINTEXT"
)
assert kafka.clusters[cluster_arn].encryption_in_transit.in_cluster
assert kafka.clusters[cluster_arn].enhanced_monitoring == "DEFAULT"
assert kafka.clusters[cluster_arn].tls_authentication
assert kafka.clusters[cluster_arn].public_access
assert not kafka.clusters[cluster_arn].unauthentication_access
assert provisioned_cluster.encryption_in_transit.in_cluster
assert provisioned_cluster.enhanced_monitoring == "DEFAULT"
assert provisioned_cluster.tls_authentication
assert provisioned_cluster.public_access
assert not provisioned_cluster.unauthentication_access

# Check serverless cluster
serverless_cluster_arn = f"arn:aws:kafka:{AWS_REGION_US_EAST_1}:123456789012:cluster/serverless-cluster-1/6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
assert serverless_cluster_arn in kafka.clusters
serverless_cluster = kafka.clusters[serverless_cluster_arn]
assert serverless_cluster.id == "6357e0b2-0e6a-4b86-a0b4-70df934c2e31-6"
assert serverless_cluster.arn == serverless_cluster_arn
assert serverless_cluster.name == "serverless-cluster-1"
assert serverless_cluster.region == AWS_REGION_US_EAST_1
assert serverless_cluster.tags == []
assert serverless_cluster.state == "ACTIVE"
assert serverless_cluster.kafka_version == "SERVERLESS"
assert serverless_cluster.data_volume_kms_key_id == "AWS_MANAGED"
assert serverless_cluster.encryption_in_transit.client_broker == "TLS"
assert serverless_cluster.encryption_in_transit.in_cluster
assert serverless_cluster.enhanced_monitoring == "DEFAULT"
assert serverless_cluster.tls_authentication
assert not serverless_cluster.public_access
assert not serverless_cluster.unauthentication_access

# Kafka versions assertions
assert len(kafka.kafka_versions) == 2
assert kafka.kafka_versions[0].version == "1.0.0"

@@ -2,6 +2,20 @@

All notable changes to the **Prowler UI** are documented in this file.


## [1.12.2] (Prowler v5.12.2)

### 🐞 Fixed

- Handle 4XX errors consistently and 204 responses properly [(#8722)](https://github.com/prowler-cloud/prowler/pull/8722)

## [1.12.1] (Prowler v5.12.1)

### 🐞 Fixed

- Field-level email validation message [(#8698)](https://github.com/prowler-cloud/prowler/pull/8698)
- POST method on auth form [(#8699)](https://github.com/prowler-cloud/prowler/pull/8699)

## [1.12.0] (Prowler v5.12.0)

### 🚀 Added

@@ -35,7 +35,7 @@ export async function authenticate(
message: "Credentials error",
errors: {
...defaultValues,
credentials: "Incorrect email or password",
credentials: "Invalid email or password",
},
};
case "CallbackRouteError":

@@ -159,8 +159,7 @@ export const updateRole = async (formData: FormData, roleId: string) => {
manage_providers: formData.get("manage_providers") === "true",
manage_account: formData.get("manage_account") === "true",
manage_scans: formData.get("manage_scans") === "true",
// TODO: Add back when we have integrations ready
// manage_integrations: formData.get("manage_integrations") === "true",
manage_integrations: formData.get("manage_integrations") === "true",
unlimited_visibility: formData.get("unlimited_visibility") === "true",
},
relationships: {},

@@ -12,7 +12,7 @@ import { authenticate, createNewUser } from "@/actions/auth";
import { initiateSamlAuth } from "@/actions/integrations/saml";
import { PasswordRequirementsMessage } from "@/components/auth/oss/password-validator";
import { SocialButtons } from "@/components/auth/oss/social-buttons";
import { NotificationIcon, ProwlerExtended } from "@/components/icons";
import { ProwlerExtended } from "@/components/icons";
import { ThemeSwitch } from "@/components/ThemeSwitch";
import { useToast } from "@/components/ui";
import { CustomButton, CustomInput } from "@/components/ui/custom";
@@ -65,6 +65,8 @@ export const AuthForm = ({

const form = useForm<z.infer<typeof formSchema>>({
resolver: zodResolver(formSchema),
mode: "onSubmit",
reValidateMode: "onSubmit",
defaultValues: {
email: "",
password: "",
@@ -111,10 +113,11 @@ export const AuthForm = ({
if (result?.message === "Success") {
router.push("/");
} else if (result?.errors && "credentials" in result.errors) {
form.setError("email", {
type: "server",
message: result.errors.credentials ?? "Incorrect email or password",
});
const message =
result.errors.credentials ?? "Invalid email or password";

form.setError("email", { type: "server", message });
form.setError("password", { type: "server", message });
} else if (result?.message === "User email is not verified") {
router.push("/email-verification");
} else {
@@ -144,7 +147,8 @@ export const AuthForm = ({
} else {
newUser.errors.forEach((error: ApiError) => {
const errorMessage = error.detail;
switch (error.source.pointer) {
const pointer = error.source?.pointer;
switch (pointer) {
case "/data/attributes/name":
form.setError("name", { type: "server", message: errorMessage });
break;
@@ -206,6 +210,8 @@ export const AuthForm = ({

<Form {...form}>
<form
noValidate
method="post"
className="flex flex-col gap-4"
onSubmit={form.handleSubmit(onSubmit)}
>
@@ -237,7 +243,8 @@ export const AuthForm = ({
label="Email"
placeholder="Enter your email"
isInvalid={!!form.formState.errors.email}
showFormMessage={type !== "sign-in"}
// Always show field validation message, including on sign-in
showFormMessage
/>
{!isSamlMode && (
<>
@@ -245,10 +252,8 @@ export const AuthForm = ({
control={form.control}
name="password"
password
isInvalid={
!!form.formState.errors.password ||
!!form.formState.errors.email
}
// Only mark invalid when the password field has an error
isInvalid={!!form.formState.errors.password}
/>
{type === "sign-up" && (
<PasswordRequirementsMessage
@@ -319,12 +324,7 @@ export const AuthForm = ({
)}
</>
)}
{type === "sign-in" && form.formState.errors?.email && (
<div className="flex flex-row items-center text-system-error">
<NotificationIcon size={16} />
<p className="text-small">Invalid email or password</p>
</div>
)}

<CustomButton
type="submit"
ariaLabel={type === "sign-in" ? "Log in" : "Sign up"}

@@ -53,7 +53,8 @@ export const SendInvitationForm = ({
if (data?.errors && data.errors.length > 0) {
data.errors.forEach((error: ApiError) => {
const errorMessage = error.detail;
switch (error.source.pointer) {
const pointer = error.source?.pointer;
switch (pointer) {
case "/data/attributes/email":
form.setError("email", {
type: "server",

@@ -69,7 +69,8 @@ export const AddGroupForm = ({
if (data?.errors && data.errors.length > 0) {
data.errors.forEach((error: ApiError) => {
const errorMessage = error.detail;
switch (error.source.pointer) {
const pointer = error.source?.pointer;
switch (pointer) {
case "/data/attributes/name":
form.setError("name", {
type: "server",

@@ -105,7 +105,8 @@ export const EditGroupForm = ({
if (data?.errors && data.errors.length > 0) {
data.errors.forEach((error: ApiError) => {
const errorMessage = error.detail;
switch (error.source.pointer) {
const pointer = error.source?.pointer;
switch (pointer) {
case "/data/attributes/name":
form.setError("name", {
type: "server",

@@ -17,7 +17,7 @@ import {
CustomInput,
} from "@/components/ui/custom";
import { Form } from "@/components/ui/form";
import { permissionFormFields } from "@/lib";
import { getErrorMessage, permissionFormFields } from "@/lib";
import { addRoleFormSchema, ApiError } from "@/types";

type FormValues = z.infer<typeof addRoleFormSchema>;
@@ -113,7 +113,8 @@ export const AddRoleForm = ({
if (data?.errors && data.errors.length > 0) {
data.errors.forEach((error: ApiError) => {
const errorMessage = error.detail;
switch (error.source.pointer) {
const pointer = error.source?.pointer;
switch (pointer) {
case "/data/attributes/name":
form.setError("name", {
type: "server",
@@ -139,7 +140,7 @@ export const AddRoleForm = ({
toast({
variant: "destructive",
title: "Error",
description: "An unexpected error occurred. Please try again.",
description: getErrorMessage(error),
});
}
};

@@ -17,7 +17,7 @@ import {
CustomInput,
} from "@/components/ui/custom";
import { Form } from "@/components/ui/form";
import { permissionFormFields } from "@/lib";
import { getErrorMessage, permissionFormFields } from "@/lib";
import { ApiError, editRoleFormSchema } from "@/types";

type FormValues = z.infer<typeof editRoleFormSchema>;
@@ -133,7 +133,8 @@ export const EditRoleForm = ({
if (data?.errors && data.errors.length > 0) {
data.errors.forEach((error: ApiError) => {
const errorMessage = error.detail;
switch (error.source.pointer) {
const pointer = error.source?.pointer;
switch (pointer) {
case "/data/attributes/name":
form.setError("name", {
type: "server",
@@ -159,7 +160,7 @@ export const EditRoleForm = ({
toast({
variant: "destructive",
title: "Error",
description: "An unexpected error occurred. Please try again.",
description: getErrorMessage(error),
});
}
};

@@ -19,7 +19,8 @@ export const useFormServerErrors = <T extends Record<string, any>>(
) => {
errors.forEach((error: ApiError) => {
const errorMessage = error.detail;
const fieldName = errorMapping?.[error.source.pointer];
const pointer = error.source?.pointer;
const fieldName = pointer ? errorMapping?.[pointer] : undefined;

if (fieldName && fieldName in form.formState.defaultValues!) {
form.setError(fieldName as any, {

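Note: the optional chaining added above guards against JSON:API error objects that omit source. The same pointer-to-field mapping idea, sketched in Python for illustration (helper names are hypothetical):

def map_errors_to_fields(errors, error_mapping):
    # Map JSON:API pointers like "/data/attributes/email" onto form
    # fields; errors without a usable pointer fall into a generic bucket.
    field_errors, generic = {}, []
    for error in errors:
        pointer = (error.get("source") or {}).get("pointer")
        field = error_mapping.get(pointer) if pointer else None
        if field:
            field_errors[field] = error.get("detail", "Invalid value")
        else:
            generic.append(error.get("detail", "Unexpected error"))
    return field_errors, generic

print(map_errors_to_fields(
    [{"source": {"pointer": "/data/attributes/email"}, "detail": "Taken"}],
    {"/data/attributes/email": "email"},
))
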
@@ -348,10 +348,27 @@ export const handleApiResponse = async (
parse = true,
) => {
if (!response.ok) {
const errorData = await response.json().catch(() => null);
const errorDetail = errorData?.errors?.[0]?.detail;
// Read error body safely; prefer JSON, fallback to plain text
const rawErrorText = await response.text().catch(() => "");
let errorData: any = null;
try {
errorData = rawErrorText ? JSON.parse(rawErrorText) : null;
} catch {
errorData = null;
}

// Special handling for server errors (500+)
const errorsArray = Array.isArray(errorData?.errors)
? (errorData.errors as any[])
: undefined;
const errorDetail =
errorsArray?.[0]?.detail ||
errorData?.error ||
errorData?.message ||
(rawErrorText && rawErrorText.trim()) ||
response.statusText ||
"Oops! Something went wrong.";

//5XX errors
if (response.status >= 500) {
throw new Error(
errorDetail ||
@@ -359,14 +376,37 @@
);
}

// Client errors (4xx)
throw new Error(
errorDetail ||
`Request failed (${response.status}): ${response.statusText}`,
);
return errorsArray
? { error: errorDetail, errors: errorsArray, status: response.status }
: ({ error: errorDetail, status: response.status } as any);
}

const data = await response.json();
// Handle empty or no-content responses gracefully (e.g., 204, empty body)
if (response.status === 204) {
if (pathToRevalidate && pathToRevalidate !== "") {
revalidatePath(pathToRevalidate);
}
return { success: true, status: response.status } as any;
}

// Read raw text to determine if there's a body to parse
const rawText = await response.text();
const hasBody = rawText && rawText.trim().length > 0;

if (!hasBody) {
if (pathToRevalidate && pathToRevalidate !== "") {
revalidatePath(pathToRevalidate);
}
return { success: true, status: response.status } as any;
}

let data: any;
try {
data = JSON.parse(rawText);
} catch (e) {
// If body isn't valid JSON, return as text payload
data = { data: rawText };
}

if (pathToRevalidate && pathToRevalidate !== "") {
revalidatePath(pathToRevalidate);

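Note: the rewritten handler above no longer assumes a JSON body. Its contract, restated in a hedged Python sketch (not the actual handler): 5XX raises, other non-OK statuses return a structured error, 204 or an empty body counts as success, and a non-JSON body degrades to text.

import json

def summarize_response(status, body):
    if status >= 500:
        raise RuntimeError(body or "Oops! Something went wrong.")
    if status >= 400:
        try:
            detail = json.loads(body)["errors"][0]["detail"]
        except Exception:
            detail = body or f"Request failed ({status})"
        return {"error": detail, "status": status}
    if status == 204 or not (body and body.strip()):
        return {"success": True, "status": status}
    try:
        return json.loads(body)
    except ValueError:
        return {"data": body}  # non-JSON payload degrades to text
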
@@ -48,22 +48,20 @@ test.describe("Login Flow", () => {
test("should handle empty form submission", async ({ page }) => {
// Submit empty form
await submitLoginForm(page);
await verifyLoginError(page, ERROR_MESSAGES.INVALID_CREDENTIALS);
await verifyLoginError(page, ERROR_MESSAGES.INVALID_EMAIL);

// Verify we're still on login page
await expect(page).toHaveURL(URLS.LOGIN);
});

/*
TODO: This test is failing, need UI work before.
test("should validate email format", async ({ page }) => {
// Attempt login with invalid email format
await login(page, TEST_CREDENTIALS.INVALID_EMAIL_FORMAT);
// Verify error message (application shows generic error for invalid email format too)
await verifyLoginError(page, ERROR_MESSAGES.INVALID_CREDENTIALS);
// Verify field-level email validation message
await verifyLoginError(page, ERROR_MESSAGES.INVALID_EMAIL);
// Verify we're still on login page
await expect(page).toHaveURL(URLS.LOGIN);
});
*/

test("should toggle SAML SSO mode", async ({ page }) => {
// Toggle to SAML mode

@@ -2,6 +2,7 @@ import { Page, expect } from "@playwright/test";

export const ERROR_MESSAGES = {
INVALID_CREDENTIALS: "Invalid email or password",
INVALID_EMAIL: "Please enter a valid email address.",
} as const;

export const URLS = {
@@ -69,7 +70,8 @@ export async function verifyLoginError(
page: Page,
errorMessage = "Invalid email or password",
) {
await expect(page.getByText(errorMessage)).toBeVisible();
// There may be multiple field-level errors with the same text; assert at least one is visible
await expect(page.getByText(errorMessage).first()).toBeVisible();
await expect(page).toHaveURL("/sign-in");
}


@@ -96,7 +96,12 @@ export const authFormSchema = (type: string) =>
}),

// Fields for Sign In and Sign Up
email: z.string().email(),
// Trim and normalize email, and provide consistent message
email: z
.string()
.trim()
.toLowerCase()
.email({ message: "Please enter a valid email address." }),
password: type === "sign-in" ? z.string() : validatePassword(),
isSamlMode: z.boolean().optional(),
})

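Note: the schema change above trims and lowercases the address before validating, so inputs that differ only in case or surrounding whitespace are treated identically. The equivalent normalization, sketched in Python with a deliberately simplified pattern:

import re

EMAIL_RE = re.compile(r"^[^@\s]+@[^@\s]+\.[^@\s]+$")  # simplified, illustrative

def normalize_and_validate_email(raw):
    email = raw.strip().lower()
    if not EMAIL_RE.match(email):
        raise ValueError("Please enter a valid email address.")
    return email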