Mirror of https://github.com/prowler-cloud/prowler.git (synced 2026-01-25 02:08:11 +00:00)

Compare commits: trigger-pr...5.10.2 (20 commits)
| Author | SHA1 | Date |
|---|---|---|
| | a872dff507 | |
| | 8c5fc108c4 | |
| | d9805e6308 | |
| | 7982bc3e18 | |
| | e506eb4dce | |
| | ab434bbb04 | |
| | 496d3dbb15 | |
| | a2065bc80b | |
| | 3d9d30111f | |
| | 2edf3ba2c1 | |
| | c0bd62f519 | |
| | 7f53ae7f62 | |
| | 6cd1bcaca8 | |
| | 3fbff8c8cd | |
| | c96e8eeeb1 | |
| | 953d518bf3 | |
| | 8cdcbc7e60 | |
| | e969b24652 | |
| | ec54a00f1d | |
| | 05eba69058 | |
.env (2 changes)
@@ -133,7 +133,7 @@ SENTRY_ENVIRONMENT=local
SENTRY_RELEASE=local

#### Prowler release version ####
-NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.7.5
+NEXT_PUBLIC_PROWLER_RELEASE_VERSION=v5.10.0

# Social login credentials
SOCIAL_GOOGLE_OAUTH_CALLBACK_URL="${AUTH_URL}/api/auth/callback/google"
api/poetry.lock (518 changes, generated)
@@ -150,19 +150,19 @@ typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""}

[[package]]
name = "alive-progress"
-version = "3.2.0"
+version = "3.3.0"
description = "A new kind of Progress Bar, with real-time throughput, ETA, and very cool animations!"
optional = false
python-versions = "<4,>=3.9"
groups = ["main"]
files = [
-    {file = "alive-progress-3.2.0.tar.gz", hash = "sha256:ede29d046ff454fe56b941f686f89dd9389430c4a5b7658e445cb0b80e0e4deb"},
-    {file = "alive_progress-3.2.0-py3-none-any.whl", hash = "sha256:0677929f8d3202572e9d142f08170b34dbbe256cc6d2afbf75ef187c7da964a8"},
+    {file = "alive-progress-3.3.0.tar.gz", hash = "sha256:457dd2428b48dacd49854022a46448d236a48f1b7277874071c39395307e830c"},
+    {file = "alive_progress-3.3.0-py3-none-any.whl", hash = "sha256:63dd33bb94cde15ad9e5b666dbba8fedf71b72a4935d6fb9a92931e69402c9ff"},
]

[package.dependencies]
about-time = "4.2.1"
-grapheme = "0.6.0"
+graphemeu = "0.7.2"

[[package]]
name = "amqp"
@@ -179,6 +179,18 @@ files = [
[package.dependencies]
vine = ">=5.0.0,<6.0.0"

+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+groups = ["main", "dev"]
+files = [
+    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
+    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
+]
+
[[package]]
name = "anyio"
version = "4.9.0"
@@ -495,6 +507,23 @@ azure-mgmt-core = ">=1.3.2"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"

+[[package]]
+name = "azure-mgmt-databricks"
+version = "2.0.0"
+description = "Microsoft Azure Data Bricks Management Client Library for Python"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "azure-mgmt-databricks-2.0.0.zip", hash = "sha256:70d11362dc2d17f5fb1db0cfe65c1af55b8f136f1a0db9a5b51e7acf760cf5b9"},
+    {file = "azure_mgmt_databricks-2.0.0-py3-none-any.whl", hash = "sha256:0c29434a7339e74231bd171a6c08dcdf8153abaebd332658d7f66b8ea143fa17"},
+]
+
+[package.dependencies]
+azure-common = ">=1.1,<2.0"
+azure-mgmt-core = ">=1.3.2,<2.0.0"
+isodate = ">=0.6.1,<1.0.0"
+
[[package]]
name = "azure-mgmt-keyvault"
version = "10.3.1"
@@ -565,6 +594,42 @@ azure-common = ">=1.1,<2.0"
azure-mgmt-core = ">=1.3.0,<2.0.0"
msrest = ">=0.6.21"

+[[package]]
+name = "azure-mgmt-recoveryservices"
+version = "3.1.0"
+description = "Microsoft Azure Recovery Services Client Library for Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "azure_mgmt_recoveryservices-3.1.0-py3-none-any.whl", hash = "sha256:21c58afdf4ae66806783e95f8cd17e3bec31be7178c48784db21f0b05de7fa66"},
+    {file = "azure_mgmt_recoveryservices-3.1.0.tar.gz", hash = "sha256:7f2db98401708cf145322f50bc491caf7967bec4af3bf7b0984b9f07d3092687"},
+]
+
+[package.dependencies]
+azure-common = ">=1.1"
+azure-mgmt-core = ">=1.5.0"
+isodate = ">=0.6.1"
+typing-extensions = ">=4.6.0"
+
+[[package]]
+name = "azure-mgmt-recoveryservicesbackup"
+version = "9.2.0"
+description = "Microsoft Azure Recovery Services Backup Management Client Library for Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "azure_mgmt_recoveryservicesbackup-9.2.0-py3-none-any.whl", hash = "sha256:c0002858d0166b6a10189a1fd580a49c83dc31b111e98010a5b2ea0f767dfff1"},
+    {file = "azure_mgmt_recoveryservicesbackup-9.2.0.tar.gz", hash = "sha256:c402b3e22a6c3879df56bc37e0063142c3352c5102599ff102d19824f1b32b29"},
+]
+
+[package.dependencies]
+azure-common = ">=1.1"
+azure-mgmt-core = ">=1.3.2"
+isodate = ">=0.6.1"
+typing-extensions = ">=4.6.0"
+
[[package]]
name = "azure-mgmt-resource"
version = "23.3.0"
@@ -759,34 +824,34 @@ files = [

[[package]]
name = "boto3"
-version = "1.35.99"
+version = "1.39.15"
description = "The AWS SDK for Python"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
groups = ["main"]
files = [
-    {file = "boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71"},
-    {file = "boto3-1.35.99.tar.gz", hash = "sha256:e0abd794a7a591d90558e92e29a9f8837d25ece8e3c120e530526fe27eba5fca"},
+    {file = "boto3-1.39.15-py3-none-any.whl", hash = "sha256:38fc54576b925af0075636752de9974e172c8a2cf7133400e3e09b150d20fb6a"},
+    {file = "boto3-1.39.15.tar.gz", hash = "sha256:b4483625f0d8c35045254dee46cd3c851bbc0450814f20b9b25bee1b5c0d8409"},
]

[package.dependencies]
-botocore = ">=1.35.99,<1.36.0"
+botocore = ">=1.39.15,<1.40.0"
jmespath = ">=0.7.1,<2.0.0"
-s3transfer = ">=0.10.0,<0.11.0"
+s3transfer = ">=0.13.0,<0.14.0"

[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]

[[package]]
name = "botocore"
-version = "1.35.99"
+version = "1.39.15"
description = "Low-level, data-driven core of boto 3."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
groups = ["main"]
files = [
-    {file = "botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445"},
-    {file = "botocore-1.35.99.tar.gz", hash = "sha256:1eab44e969c39c5f3d9a3104a0836c24715579a455f12b3979a31d7cde51b3c3"},
+    {file = "botocore-1.39.15-py3-none-any.whl", hash = "sha256:eb9cfe918ebfbfb8654e1b153b29f0c129d586d2c0d7fb4032731d49baf04cff"},
+    {file = "botocore-1.39.15.tar.gz", hash = "sha256:2aa29a717f14f8c7ca058c2e297aaed0aa10ecea24b91514eee802814d1b7600"},
]

[package.dependencies]
@@ -795,7 +860,7 @@ python-dateutil = ">=2.1,<3.0.0"
urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}

[package.extras]
-crt = ["awscrt (==0.22.0)"]
+crt = ["awscrt (==0.23.8)"]

[[package]]
name = "cachetools"
@@ -1142,6 +1207,18 @@ files = [
]
markers = {dev = "platform_system == \"Windows\" or sys_platform == \"win32\""}

+[[package]]
+name = "contextlib2"
+version = "21.6.0"
+description = "Backports and enhancements for the contextlib module"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+files = [
+    {file = "contextlib2-21.6.0-py2.py3-none-any.whl", hash = "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f"},
+    {file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"},
+]
+
[[package]]
name = "coverage"
version = "7.5.4"
@@ -1278,21 +1355,18 @@ test-randomorder = ["pytest-randomly"]

[[package]]
name = "dash"
-version = "2.18.2"
+version = "3.1.1"
description = "A Python framework for building reactive web-apps. Developed by Plotly."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
-    {file = "dash-2.18.2-py3-none-any.whl", hash = "sha256:0ce0479d1bc958e934630e2de7023b8a4558f23ce1f9f5a4b34b65eb3903a869"},
-    {file = "dash-2.18.2.tar.gz", hash = "sha256:20e8404f73d0fe88ce2eae33c25bbc513cbe52f30d23a401fa5f24dbb44296c8"},
+    {file = "dash-3.1.1-py3-none-any.whl", hash = "sha256:66fff37e79c6aa114cd55aea13683d1e9afe0e3f96b35388baca95ff6cfdad23"},
+    {file = "dash-3.1.1.tar.gz", hash = "sha256:916b31cec46da0a3339da0e9df9f446126aa7f293c0544e07adf9fe4ba060b18"},
]

[package.dependencies]
-dash-core-components = "2.0.0"
-dash-html-components = "2.0.0"
-dash-table = "5.0.0"
-Flask = ">=1.0.4,<3.1"
+Flask = ">=1.0.4,<3.2"
importlib-metadata = "*"
nest-asyncio = "*"
plotly = ">=5.0.0"
@@ -1300,11 +1374,12 @@ requests = "*"
retrying = "*"
setuptools = "*"
typing-extensions = ">=4.1.1"
-Werkzeug = "<3.1"
+Werkzeug = "<3.2"

[package.extras]
-celery = ["celery[redis] (>=5.1.2)", "redis (>=3.5.3)"]
-ci = ["black (==22.3.0)", "dash-dangerously-set-inner-html", "dash-flow-example (==0.0.5)", "flake8 (==7.0.0)", "flaky (==3.8.1)", "flask-talisman (==1.0.0)", "jupyterlab (<4.0.0)", "mimesis (<=11.1.0)", "mock (==4.0.3)", "numpy (<=1.26.3)", "openpyxl", "orjson (==3.10.3)", "pandas (>=1.4.0)", "pyarrow", "pylint (==3.0.3)", "pytest-mock", "pytest-rerunfailures", "pytest-sugar (==0.9.6)", "pyzmq (==25.1.2)", "xlrd (>=2.0.1)"]
+async = ["flask[async]"]
+celery = ["celery[redis] (>=5.1.2,<5.4.0)", "kombu (<5.4.0)", "redis (>=3.5.3,<=5.0.4)"]
+ci = ["black (==22.3.0)", "flake8 (==7.0.0)", "flaky (==3.8.1)", "flask-talisman (==1.0.0)", "ipython (<9.0.0)", "jupyterlab (<4.0.0)", "mimesis (<=11.1.0)", "mock (==4.0.3)", "mypy (==1.15.0) ; python_version >= \"3.12\"", "numpy (<=1.26.3)", "openpyxl", "orjson (==3.10.3)", "pandas (>=1.4.0)", "pyarrow", "pylint (==3.0.3)", "pyright (==1.1.398) ; python_version >= \"3.7\"", "pytest-mock", "pytest-rerunfailures", "pytest-sugar (==0.9.6)", "pyzmq (==25.1.2)", "xlrd (>=2.0.1)"]
compress = ["flask-compress"]
dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"]
diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"]
@@ -1312,57 +1387,21 @@ testing = ["beautifulsoup4 (>=4.8.2)", "cryptography", "dash-testing-stub (>=0.0

[[package]]
name = "dash-bootstrap-components"
-version = "1.6.0"
+version = "2.0.3"
description = "Bootstrap themed components for use in Plotly Dash"
optional = false
-python-versions = "<4,>=3.8"
+python-versions = ">=3.9"
groups = ["main"]
files = [
-    {file = "dash_bootstrap_components-1.6.0-py3-none-any.whl", hash = "sha256:97f0f47b38363f18863e1b247462229266ce12e1e171cfb34d3c9898e6e5cd1e"},
-    {file = "dash_bootstrap_components-1.6.0.tar.gz", hash = "sha256:960a1ec9397574792f49a8241024fa3cecde0f5930c971a3fc81f016cbeb1095"},
+    {file = "dash_bootstrap_components-2.0.3-py3-none-any.whl", hash = "sha256:82754d3d001ad5482b8a82b496c7bf98a1c68d2669d607a89dda7ec627304af5"},
+    {file = "dash_bootstrap_components-2.0.3.tar.gz", hash = "sha256:5c161b04a6e7ed19a7d54e42f070c29fd6c385d5a7797e7a82999aa2fc15b1de"},
]

[package.dependencies]
-dash = ">=2.0.0"
+dash = ">=3.0.4"

[package.extras]
-pandas = ["numpy", "pandas"]
-
-[[package]]
-name = "dash-core-components"
-version = "2.0.0"
-description = "Core component suite for Dash"
-optional = false
-python-versions = "*"
-groups = ["main"]
-files = [
-    {file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"},
-    {file = "dash_core_components-2.0.0.tar.gz", hash = "sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"},
-]
-
-[[package]]
-name = "dash-html-components"
-version = "2.0.0"
-description = "Vanilla HTML components for Dash"
-optional = false
-python-versions = "*"
-groups = ["main"]
-files = [
-    {file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"},
-    {file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"},
-]
-
-[[package]]
-name = "dash-table"
-version = "5.0.0"
-description = "Dash table"
-optional = false
-python-versions = "*"
-groups = ["main"]
-files = [
-    {file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"},
-    {file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"},
-]
+pandas = ["numpy (>=2.0.2)", "pandas (>=2.2.3)"]

[[package]]
name = "debugpy"
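The dash-core-components, dash-html-components, and dash-table packages removed above have been compatibility stubs since Dash 2.0, and Dash 3 no longer declares them as dependencies, which is why they drop out of the lock file. Application code imports these components from the dash package itself; a minimal sketch (the layout contents are illustrative, not taken from this repository):

```python
# The component modules formerly shipped as separate dash-core-components /
# dash-html-components / dash-table distributions are imported from "dash"
# itself (the documented style since Dash 2.0; Dash 3 drops the stubs).
from dash import Dash, dcc, dash_table, html

app = Dash(__name__)
app.layout = html.Div([
    dcc.Input(id="name", value="world"),              # was dash_core_components
    dash_table.DataTable(data=[{"greeting": "hi"}]),  # was the dash-table package
])
```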
@@ -1889,6 +1928,54 @@ djangorestframework-jsonapi = ">=6.0.0"
drf-extensions = ">=0.7.1"
drf-spectacular = ">=0.25.0"

+[[package]]
+name = "dulwich"
+version = "0.23.0"
+description = "Python Git Library"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "dulwich-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c13b0d5a9009cde23ecb8cb201df6e23e2a7a82c5e2d6ba6443fbb322c9befc6"},
+    {file = "dulwich-0.23.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:a68faf8612bf93de1285048d6ad13160f0fb3c5596a86e694e78f4e212886fa5"},
+    {file = "dulwich-0.23.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d971566826f16ec67c70641c1fbdb337323aa5b533799bc5a4641f4750e73b36"},
+    {file = "dulwich-0.23.0-cp310-cp310-win32.whl", hash = "sha256:27d970adf539806dfc4fe3e4c9e8dc6ebf0318977a56e24d22f13413535a51ba"},
+    {file = "dulwich-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:025178533e884ffdb0d9d8db4b8870745d438cbfecb782fd1b56c3b6438e86cf"},
+    {file = "dulwich-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d68498fdda13ab00791b483daab3bcfe9f9721c037aa458695e6ad81640c57cc"},
+    {file = "dulwich-0.23.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:cb7bb930b12471a1cfcea4b3d25a671dc0ad32573f0ad25684684298959a1527"},
+    {file = "dulwich-0.23.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a2abbce32fd2bc7902bcc5f69b10bf22576810de21651baaa864b78fd7aec261"},
+    {file = "dulwich-0.23.0-cp311-cp311-win32.whl", hash = "sha256:9e3151f10ce2a9ff91bca64c74345217f53bdd947dc958032343822009832f7a"},
+    {file = "dulwich-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:3ae9f1d9dc92d4e9a3f89ba2c55221f7b6442c5dd93b3f6f539a3c9eb3f37bdd"},
+    {file = "dulwich-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52cdef66a7994d29528ca79ca59452518bbba3fd56a9c61c61f6c467c1c7956e"},
+    {file = "dulwich-0.23.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d473888a6ab9ed5d4a4c3f053cbe5b77f72d54b6efdf5688fed76094316e571e"},
+    {file = "dulwich-0.23.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:19fcf20224c641a61c774da92f098fbaae9938c7e17a52841e64092adf7e78f9"},
+    {file = "dulwich-0.23.0-cp312-cp312-win32.whl", hash = "sha256:7fc8b76b704ef35cd001e993e3aa4e1d666a2064bf467c07c560f12b2959dcaf"},
+    {file = "dulwich-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:cb0566b888b578325350b4d67c61a0de35d417e9877560e3a6df88cae4576a59"},
+    {file = "dulwich-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:624e2223c8b705b3a217f9c8d3bfed3a573093be0b0ba033c46cba8411fb9630"},
+    {file = "dulwich-0.23.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:b4eaf326d15bb3fc5316c777b0312f0fe02f6f82a4368cd971d0ce2167b7ec34"},
+    {file = "dulwich-0.23.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:d754afaf7c133a015c75cc2be11703138b4be932e0eeeb2c70add56083f31109"},
+    {file = "dulwich-0.23.0-cp313-cp313-win32.whl", hash = "sha256:ac53ec438bde3c1f479782c34240479b36cd47230d091979137b7ecc12c0242e"},
+    {file = "dulwich-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:50d3b4ba45671fb8b7d2afbd02c10b4edbc3290a1f92260e64098b409e9ca35c"},
+    {file = "dulwich-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8e18ea3fa49f10932077f39c0b960b5045870c550c3d7c74f3cfaac09457cd6"},
+    {file = "dulwich-0.23.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3e6df0eb8cca21f210e3ddce2ccb64482646893dbec2fee9f3411d037595bf7b"},
+    {file = "dulwich-0.23.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:90c0064d7df8e7fe83d3a03c7d60b9e07a92698b18442f926199b2c3f0bf34d4"},
+    {file = "dulwich-0.23.0-cp39-cp39-win32.whl", hash = "sha256:84eef513aba501cbc1f223863f3b4b351fe732d3fb590cab9bdf5d33eb1a1248"},
+    {file = "dulwich-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:dce943da48217c26e15790fd6df62d27a7f1d067102780351ebf2635fc0ba482"},
+    {file = "dulwich-0.23.0-py3-none-any.whl", hash = "sha256:d8da6694ca332bb48775e35ee2215aa4673821164a91b83062f699c69f7cd135"},
+    {file = "dulwich-0.23.0.tar.gz", hash = "sha256:0aa6c2489dd5e978b27e9b75983b7331a66c999f0efc54ebe37cab808ed322ae"},
+]
+
+[package.dependencies]
+urllib3 = ">=1.25"
+
+[package.extras]
+dev = ["dissolve (>=0.1.1)", "mypy (==1.16.0)", "ruff (==0.11.13)"]
+fastimport = ["fastimport"]
+https = ["urllib3 (>=1.24.1)"]
+merge = ["merge3"]
+paramiko = ["paramiko"]
+pgp = ["gpg"]
+
[[package]]
name = "durationpy"
version = "0.9"
@@ -2219,18 +2306,20 @@ files = [
]

[[package]]
-name = "grapheme"
-version = "0.6.0"
+name = "graphemeu"
+version = "0.7.2"
description = "Unicode grapheme helpers"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
groups = ["main"]
files = [
-    {file = "grapheme-0.6.0.tar.gz", hash = "sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca"},
+    {file = "graphemeu-0.7.2-py3-none-any.whl", hash = "sha256:1444520f6899fd30114fc2a39f297d86d10fa0f23bf7579f772f8bc7efaa2542"},
+    {file = "graphemeu-0.7.2.tar.gz", hash = "sha256:42bbe373d7c146160f286cd5f76b1a8ad29172d7333ce10705c5cc282462a4f8"},
]

[package.extras]
-test = ["pytest", "sphinx", "sphinx-autobuild", "twine", "wheel"]
+dev = ["pytest"]
+docs = ["sphinx", "sphinx-autobuild"]

[[package]]
name = "gunicorn"
@@ -2369,6 +2458,18 @@ files = [
    {file = "hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"},
]

+[[package]]
+name = "iamdata"
+version = "0.1.202507291"
+description = "IAM data for AWS actions, resources, and conditions based on IAM policy documents. Checked for updates daily."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "iamdata-0.1.202507291-py3-none-any.whl", hash = "sha256:11dfdacc3ce0312468aa5ccafee461cd39b1deb7be112042deea91cbcd4b292b"},
+    {file = "iamdata-0.1.202507291.tar.gz", hash = "sha256:b386ce94819464554dc1258238ee1b232d86f0467edc13fffbf4de7332b3c7ad"},
+]
+
[[package]]
name = "idna"
version = "3.10"
@@ -3885,7 +3986,7 @@ files = [

[[package]]
name = "prowler"
-version = "5.8.0"
+version = "5.10.0"
description = "Prowler is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks."
optional = false
python-versions = ">3.9.1,<3.13"
@@ -3894,7 +3995,7 @@ files = []
develop = false

[package.dependencies]
-alive-progress = "3.2.0"
+alive-progress = "3.3.0"
awsipranges = "0.3.3"
azure-identity = "1.21.0"
azure-keyvault-keys = "4.10.0"
@@ -3904,10 +4005,13 @@ azure-mgmt-compute = "34.0.0"
azure-mgmt-containerregistry = "12.0.0"
azure-mgmt-containerservice = "34.1.0"
azure-mgmt-cosmosdb = "9.7.0"
+azure-mgmt-databricks = "2.0.0"
azure-mgmt-keyvault = "10.3.1"
azure-mgmt-monitor = "6.0.2"
azure-mgmt-network = "28.1.0"
azure-mgmt-rdbms = "10.1.0"
+azure-mgmt-recoveryservices = "3.1.0"
+azure-mgmt-recoveryservicesbackup = "9.2.0"
azure-mgmt-resource = "23.3.0"
azure-mgmt-search = "9.1.0"
azure-mgmt-security = "7.0.0"
@@ -3916,13 +4020,14 @@ azure-mgmt-storage = "22.1.1"
azure-mgmt-subscription = "3.1.1"
azure-mgmt-web = "8.0.0"
azure-storage-blob = "12.24.1"
-boto3 = "1.35.99"
-botocore = "1.35.99"
+boto3 = "1.39.15"
+botocore = "1.39.15"
colorama = "0.4.6"
cryptography = "44.0.1"
-dash = "2.18.2"
-dash-bootstrap-components = "1.6.0"
+dash = "3.1.1"
+dash-bootstrap-components = "2.0.3"
detect-secrets = "1.5.0"
+dulwich = "0.23.0"
google-api-python-client = "2.163.0"
google-auth-httplib2 = ">=0.1,<0.3"
jsonschema = "4.23.0"
@@ -3931,12 +4036,13 @@ microsoft-kiota-abstractions = "1.9.2"
msgraph-sdk = "1.23.0"
numpy = "2.0.2"
pandas = "2.2.3"
-py-ocsf-models = "0.3.1"
-pydantic = "1.10.21"
+py-iam-expand = "0.1.0"
+py-ocsf-models = "0.5.0"
+pydantic = ">=2.0,<3.0"
pygithub = "2.5.0"
python-dateutil = ">=2.9.0.post0,<3.0.0"
pytz = "2025.1"
-schema = "0.7.7"
+schema = "0.7.5"
shodan = "1.31.0"
slack-sdk = "3.34.0"
tabulate = "0.9.0"
@@ -3945,8 +4051,8 @@ tzlocal = "5.3.1"
[package.source]
type = "git"
url = "https://github.com/prowler-cloud/prowler.git"
-reference = "master"
-resolved_reference = "ea97de7f43a2063476b49f7697bb6c7b51137c11"
+reference = "v5.10"
+resolved_reference = "ff900a2a455def25eb7f5a7d25248e58eae24a34"

[[package]]
name = "psutil"
@@ -4061,21 +4167,36 @@ files = [
]

[[package]]
-name = "py-ocsf-models"
-version = "0.3.1"
-description = "This is a Python implementation of the OCSF models. The models are used to represent the data of the OCSF Schema defined in https://schema.ocsf.io/."
+name = "py-iam-expand"
+version = "0.1.0"
+description = "This is a Python package to expand and deobfuscate IAM policies."
optional = false
-python-versions = "<3.13,>3.9.1"
+python-versions = "<3.14,>3.9.1"
groups = ["main"]
files = [
-    {file = "py_ocsf_models-0.3.1-py3-none-any.whl", hash = "sha256:e722d567a7f3e5190fdd053c2e75a69cf33fab6f5c0a4b7de678768ba340ae3a"},
-    {file = "py_ocsf_models-0.3.1.tar.gz", hash = "sha256:60defd2cc86e8882f42dc9c6dacca6dc16d6bc05f9477c2a3486a0d4b5882b94"},
+    {file = "py_iam_expand-0.1.0-py3-none-any.whl", hash = "sha256:b845ce7b50ac895b02b4f338e09c62a68ea51849794f76e189b02009bd388510"},
+    {file = "py_iam_expand-0.1.0.tar.gz", hash = "sha256:5a2884dc267ac59a02c3a80fefc0b34c309dac681baa0f87c436067c6cf53a96"},
]

[package.dependencies]
+iamdata = ">=0.1.202504091"
+
+[[package]]
+name = "py-ocsf-models"
+version = "0.5.0"
+description = "This is a Python implementation of the OCSF models. The models are used to represent the data of the OCSF Schema defined in https://schema.ocsf.io/."
+optional = false
+python-versions = "<3.14,>3.9.1"
+groups = ["main"]
+files = [
+    {file = "py_ocsf_models-0.5.0-py3-none-any.whl", hash = "sha256:7933253f56782c04c412d976796db429577810b951fe4195351794500b5962d8"},
+    {file = "py_ocsf_models-0.5.0.tar.gz", hash = "sha256:bf05e955809d1ec3ab1007e4a4b2a8a0afa74b6e744ea8ffbf386e46b3af0a76"},
+]
+
+[package.dependencies]
cryptography = "44.0.1"
email-validator = "2.2.0"
-pydantic = "1.10.21"
+pydantic = ">=2.9.2,<3.0.0"

[[package]]
name = "pyasn1"
@@ -4173,70 +4294,137 @@ files = [

[[package]]
name = "pydantic"
-version = "1.10.21"
-description = "Data validation and settings management using python type hints"
+version = "2.11.7"
+description = "Data validation using Python type hints"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
-    {file = "pydantic-1.10.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:245e486e0fec53ec2366df9cf1cba36e0bbf066af7cd9c974bbbd9ba10e1e586"},
-    {file = "pydantic-1.10.21-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c54f8d4c151c1de784c5b93dfbb872067e3414619e10e21e695f7bb84d1d1fd"},
-    {file = "pydantic-1.10.21-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b64708009cfabd9c2211295144ff455ec7ceb4c4fb45a07a804309598f36187"},
-    {file = "pydantic-1.10.21-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a148410fa0e971ba333358d11a6dea7b48e063de127c2b09ece9d1c1137dde4"},
-    {file = "pydantic-1.10.21-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:36ceadef055af06e7756eb4b871cdc9e5a27bdc06a45c820cd94b443de019bbf"},
-    {file = "pydantic-1.10.21-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0501e1d12df6ab1211b8cad52d2f7b2cd81f8e8e776d39aa5e71e2998d0379f"},
-    {file = "pydantic-1.10.21-cp310-cp310-win_amd64.whl", hash = "sha256:c261127c275d7bce50b26b26c7d8427dcb5c4803e840e913f8d9df3f99dca55f"},
-    {file = "pydantic-1.10.21-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8b6350b68566bb6b164fb06a3772e878887f3c857c46c0c534788081cb48adf4"},
-    {file = "pydantic-1.10.21-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:935b19fdcde236f4fbf691959fa5c3e2b6951fff132964e869e57c70f2ad1ba3"},
-    {file = "pydantic-1.10.21-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b6a04efdcd25486b27f24c1648d5adc1633ad8b4506d0e96e5367f075ed2e0b"},
-    {file = "pydantic-1.10.21-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1ba253eb5af8d89864073e6ce8e6c8dec5f49920cff61f38f5c3383e38b1c9f"},
-    {file = "pydantic-1.10.21-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:57f0101e6c97b411f287a0b7cf5ebc4e5d3b18254bf926f45a11615d29475793"},
-    {file = "pydantic-1.10.21-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90e85834f0370d737c77a386ce505c21b06bfe7086c1c568b70e15a568d9670d"},
-    {file = "pydantic-1.10.21-cp311-cp311-win_amd64.whl", hash = "sha256:6a497bc66b3374b7d105763d1d3de76d949287bf28969bff4656206ab8a53aa9"},
-    {file = "pydantic-1.10.21-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2ed4a5f13cf160d64aa331ab9017af81f3481cd9fd0e49f1d707b57fe1b9f3ae"},
-    {file = "pydantic-1.10.21-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b7693bb6ed3fbe250e222f9415abb73111bb09b73ab90d2d4d53f6390e0ccc1"},
-    {file = "pydantic-1.10.21-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185d5f1dff1fead51766da9b2de4f3dc3b8fca39e59383c273f34a6ae254e3e2"},
-    {file = "pydantic-1.10.21-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38e6d35cf7cd1727822c79e324fa0677e1a08c88a34f56695101f5ad4d5e20e5"},
-    {file = "pydantic-1.10.21-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1d7c332685eafacb64a1a7645b409a166eb7537f23142d26895746f628a3149b"},
-    {file = "pydantic-1.10.21-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c9b782db6f993a36092480eeaab8ba0609f786041b01f39c7c52252bda6d85f"},
-    {file = "pydantic-1.10.21-cp312-cp312-win_amd64.whl", hash = "sha256:7ce64d23d4e71d9698492479505674c5c5b92cda02b07c91dfc13633b2eef805"},
-    {file = "pydantic-1.10.21-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0067935d35044950be781933ab91b9a708eaff124bf860fa2f70aeb1c4be7212"},
-    {file = "pydantic-1.10.21-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5e8148c2ce4894ce7e5a4925d9d3fdce429fb0e821b5a8783573f3611933a251"},
-    {file = "pydantic-1.10.21-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4973232c98b9b44c78b1233693e5e1938add5af18042f031737e1214455f9b8"},
-    {file = "pydantic-1.10.21-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:662bf5ce3c9b1cef32a32a2f4debe00d2f4839fefbebe1d6956e681122a9c839"},
-    {file = "pydantic-1.10.21-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:98737c3ab5a2f8a85f2326eebcd214510f898881a290a7939a45ec294743c875"},
-    {file = "pydantic-1.10.21-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0bb58bbe65a43483d49f66b6c8474424d551a3fbe8a7796c42da314bac712738"},
-    {file = "pydantic-1.10.21-cp313-cp313-win_amd64.whl", hash = "sha256:e622314542fb48542c09c7bd1ac51d71c5632dd3c92dc82ede6da233f55f4848"},
-    {file = "pydantic-1.10.21-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d356aa5b18ef5a24d8081f5c5beb67c0a2a6ff2a953ee38d65a2aa96526b274f"},
-    {file = "pydantic-1.10.21-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08caa8c0468172d27c669abfe9e7d96a8b1655ec0833753e117061febaaadef5"},
-    {file = "pydantic-1.10.21-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c677aa39ec737fec932feb68e4a2abe142682f2885558402602cd9746a1c92e8"},
-    {file = "pydantic-1.10.21-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:79577cc045d3442c4e845df53df9f9202546e2ba54954c057d253fc17cd16cb1"},
-    {file = "pydantic-1.10.21-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:b6b73ab347284719f818acb14f7cd80696c6fdf1bd34feee1955d7a72d2e64ce"},
-    {file = "pydantic-1.10.21-cp37-cp37m-win_amd64.whl", hash = "sha256:46cffa24891b06269e12f7e1ec50b73f0c9ab4ce71c2caa4ccf1fb36845e1ff7"},
-    {file = "pydantic-1.10.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:298d6f765e3c9825dfa78f24c1efd29af91c3ab1b763e1fd26ae4d9e1749e5c8"},
-    {file = "pydantic-1.10.21-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2f4a2305f15eff68f874766d982114ac89468f1c2c0b97640e719cf1a078374"},
-    {file = "pydantic-1.10.21-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35b263b60c519354afb3a60107d20470dd5250b3ce54c08753f6975c406d949b"},
-    {file = "pydantic-1.10.21-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e23a97a6c2f2db88995496db9387cd1727acdacc85835ba8619dce826c0b11a6"},
-    {file = "pydantic-1.10.21-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:3c96fed246ccc1acb2df032ff642459e4ae18b315ecbab4d95c95cfa292e8517"},
-    {file = "pydantic-1.10.21-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b92893ebefc0151474f682e7debb6ab38552ce56a90e39a8834734c81f37c8a9"},
-    {file = "pydantic-1.10.21-cp38-cp38-win_amd64.whl", hash = "sha256:b8460bc256bf0de821839aea6794bb38a4c0fbd48f949ea51093f6edce0be459"},
-    {file = "pydantic-1.10.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d387940f0f1a0adb3c44481aa379122d06df8486cc8f652a7b3b0caf08435f7"},
-    {file = "pydantic-1.10.21-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:266ecfc384861d7b0b9c214788ddff75a2ea123aa756bcca6b2a1175edeca0fe"},
-    {file = "pydantic-1.10.21-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61da798c05a06a362a2f8c5e3ff0341743e2818d0f530eaac0d6898f1b187f1f"},
-    {file = "pydantic-1.10.21-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a621742da75ce272d64ea57bd7651ee2a115fa67c0f11d66d9dcfc18c2f1b106"},
-    {file = "pydantic-1.10.21-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9e3e4000cd54ef455694b8be9111ea20f66a686fc155feda1ecacf2322b115da"},
-    {file = "pydantic-1.10.21-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f198c8206640f4c0ef5a76b779241efb1380a300d88b1bce9bfe95a6362e674d"},
-    {file = "pydantic-1.10.21-cp39-cp39-win_amd64.whl", hash = "sha256:e7f0cda108b36a30c8fc882e4fc5b7eec8ef584aa43aa43694c6a7b274fb2b56"},
-    {file = "pydantic-1.10.21-py3-none-any.whl", hash = "sha256:db70c920cba9d05c69ad4a9e7f8e9e83011abb2c6490e561de9ae24aee44925c"},
-    {file = "pydantic-1.10.21.tar.gz", hash = "sha256:64b48e2b609a6c22178a56c408ee1215a7206077ecb8a193e2fda31858b2362a"},
+    {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"},
+    {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"},
]

[package.dependencies]
-typing-extensions = ">=4.2.0"
+annotated-types = ">=0.6.0"
+pydantic-core = "2.33.2"
+typing-extensions = ">=4.12.2"
+typing-inspection = ">=0.4.0"

[package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
+email = ["email-validator (>=2.0.0)"]
+timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""]
+
+[[package]]
+name = "pydantic-core"
+version = "2.33.2"
+description = "Core functionality for Pydantic validation and serialization"
+optional = false
+python-versions = ">=3.9"
+groups = ["main", "dev"]
+files = [
+    {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"},
+    {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"},
+    {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"},
+    {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"},
+    {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"},
+    {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"},
+    {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"},
+    {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"},
+    {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"},
+    {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"},
+    {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"},
+    {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"},
+    {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"},
+    {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"},
+    {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"},
+    {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"},
+    {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"},
+    {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"},
+    {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"},
+    {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"},
+    {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"},
+    {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"},
+    {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"},
+    {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"},
+    {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"},
+    {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"},
+    {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"},
+    {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"},
+    {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"},
+    {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"},
+    {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"},
+    {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"},
+    {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"},
+    {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"},
+    {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"},
+    {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"},
+    {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"

[[package]]
name = "pygithub"
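The pydantic bump above (1.10.21 to 2.11.7, together with the new pydantic-core and typing-inspection wheels it pulls in) is the most breaking change in this lock refresh: v2 renames the everyday model APIs. A minimal sketch of the rename, using a hypothetical model for illustration:

```python
# Hypothetical model, illustrating only the pydantic v1 -> v2 API rename.
from pydantic import BaseModel, field_validator  # v2's field_validator replaces v1's validator

class Finding(BaseModel):
    check_id: str
    severity: str = "low"

    @field_validator("severity")
    @classmethod
    def normalize_severity(cls, value: str) -> str:
        return value.lower()

finding = Finding(check_id="kafka_cluster_is_public", severity="HIGH")
print(finding.model_dump())       # v2 spelling of v1's finding.dict()
print(finding.model_dump_json())  # v2 spelling of v1's finding.json()
```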
@@ -5066,21 +5254,21 @@ files = [

[[package]]
name = "s3transfer"
-version = "0.10.4"
+version = "0.13.1"
description = "An Amazon S3 Transfer Manager"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
groups = ["main"]
files = [
-    {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"},
-    {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"},
+    {file = "s3transfer-0.13.1-py3-none-any.whl", hash = "sha256:a981aa7429be23fe6dfc13e80e4020057cbab622b08c0315288758d67cabc724"},
+    {file = "s3transfer-0.13.1.tar.gz", hash = "sha256:c3fdba22ba1bd367922f27ec8032d6a1cf5f10c934fb5d68cf60fd5a23d936cf"},
]

[package.dependencies]
-botocore = ">=1.33.2,<2.0a.0"
+botocore = ">=1.37.4,<2.0a.0"

[package.extras]
-crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
+crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"]

[[package]]
name = "safety"
@@ -5139,16 +5327,19 @@ typing-extensions = ">=4.7.1"

[[package]]
name = "schema"
-version = "0.7.7"
+version = "0.7.5"
description = "Simple data validation library"
optional = false
python-versions = "*"
groups = ["main"]
files = [
-    {file = "schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde"},
-    {file = "schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807"},
+    {file = "schema-0.7.5-py2.py3-none-any.whl", hash = "sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c"},
+    {file = "schema-0.7.5.tar.gz", hash = "sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197"},
]

+[package.dependencies]
+contextlib2 = ">=0.5.5"
+
[[package]]
name = "sentry-sdk"
version = "2.26.1"
@@ -5458,6 +5649,21 @@ files = [
    {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"},
]

+[[package]]
+name = "typing-inspection"
+version = "0.4.1"
+description = "Runtime typing introspection tools"
+optional = false
+python-versions = ">=3.9"
+groups = ["main", "dev"]
+files = [
+    {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"},
+    {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.12.0"
+
[[package]]
name = "tzdata"
version = "2025.2"
@@ -5916,4 +6122,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
-content-hash = "6802b33984c2f8438c9dc02dac0a0c14d5a78af60251bd0c80ca59bc2182c48e"
+content-hash = "7aa50d0e8afd3dfa080541d0bfd7ea960720a9848d1e6f801bc082528f43c56b"
@@ -24,7 +24,7 @@ dependencies = [
    "drf-spectacular-jsonapi==0.5.1",
    "gunicorn==23.0.0",
    "lxml==5.3.2",
-    "prowler @ git+https://github.com/prowler-cloud/prowler.git@master",
+    "prowler @ git+https://github.com/prowler-cloud/prowler.git@v5.10",
    "psycopg2-binary==2.9.9",
    "pytest-celery[redis] (>=1.0.1,<2.0.0)",
    "sentry-sdk[django] (>=2.20.0,<3.0.0)",
@@ -38,7 +38,7 @@ name = "prowler-api"
package-mode = false
# Needed for the SDK compatibility
requires-python = ">=3.11,<3.13"
-version = "1.11.0"
+version = "1.11.2"

[project.scripts]
celery = "src.backend.config.settings.celery"
@@ -293,7 +293,7 @@ class SchemaView(SpectacularAPIView):

    def get(self, request, *args, **kwargs):
        spectacular_settings.TITLE = "Prowler API"
-        spectacular_settings.VERSION = "1.11.0"
+        spectacular_settings.VERSION = "1.11.2"
        spectacular_settings.DESCRIPTION = (
            "Prowler API specification.\n\nThis file is auto-generated."
        )
@@ -24,7 +24,56 @@ Personal Access Tokens provide the simplest GitHub authentication method and sup
    - Scroll down the left sidebar
    - Click "Developer settings"

3. **Generate New Token**
3. **Generate Fine-Grained Token**
    - Click "Personal access tokens"
    - Select "Fine-grained tokens"
    - Click "Generate new token"

4. **Configure Token Settings**
    - **Token name**: Give your token a descriptive name (e.g., "Prowler Security Scanner")
    - **Expiration**: Set an appropriate expiration date (recommended: 90 days or less)
    - **Repository access**: Choose "All repositories" or "Only select repositories" based on your needs

    ???+ note "Public repositories"
        Even if you select "Only select repositories", the token will still have access to the public repositories that you own or are a member of.

5. **Configure Token Permissions**
    To enable Prowler functionality, configure the following permissions:

    - **Repository permissions:**
        - **Contents**: Read-only access
        - **Metadata**: Read-only access
        - **Pull requests**: Read-only access
        - **Security advisories**: Read-only access
        - **Statuses**: Read-only access

    - **Organization permissions:**
        - **Members**: Read-only access

    - **Account permissions:**
        - **Email addresses**: Read-only access

6. **Copy and Store the Token**
    - Copy the generated token immediately (GitHub displays tokens only once)
    - Store tokens securely using environment variables (see the sketch below)

![GitHub PAT Permissions](img/github-pat-permissions.png)

#### **Option 2: Create a Classic Personal Access Token (Not Recommended)**

???+ warning "Security Risk"
    Classic tokens provide broad permissions that may exceed what Prowler actually needs. Use fine-grained tokens instead for better security.

1. **Navigate to GitHub Settings**
    - Open [GitHub](https://github.com) and sign in
    - Click the profile picture in the top right corner
    - Select "Settings" from the dropdown menu

2. **Access Developer Settings**
    - Scroll down the left sidebar
    - Click "Developer settings"

3. **Generate Classic Token**
    - Click "Personal access tokens"
    - Select "Tokens (classic)"
    - Click "Generate new token"
BIN docs/tutorials/github/img/github-pat-permissions.png (new file, 89 KiB, binary file not shown)
@@ -2,6 +2,27 @@

All notable changes to the **Prowler SDK** are documented in this file.

## [v5.10.2] (Prowler v5.10.2)

### Fixed
- Order requirements by ID in Prowler ThreatScore AWS compliance framework [(#8495)](https://github.com/prowler-cloud/prowler/pull/8495)
- Add explicit resource name to GCP and Azure Defender checks [(#8352)](https://github.com/prowler-cloud/prowler/pull/8352)
- Validation errors in Azure and M365 providers [(#8353)](https://github.com/prowler-cloud/prowler/pull/8353)
- Azure `app_http_logs_enabled` check false positives [(#8507)](https://github.com/prowler-cloud/prowler/pull/8507)
- Azure `storage_geo_redundant_enabled` check false positives [(#8504)](https://github.com/prowler-cloud/prowler/pull/8504)
- AWS `kafka_cluster_is_public` check false positives [(#8514)](https://github.com/prowler-cloud/prowler/pull/8514)
- List all accessible repositories in GitHub [(#8522)](https://github.com/prowler-cloud/prowler/pull/8522)
- GitHub CIS 1.0 Compliance Reports [(#8519)](https://github.com/prowler-cloud/prowler/pull/8519)

---

## [v5.10.1] (Prowler v5.10.1)

### Fixed
- Remove invalid requirements from CIS 1.0 for GitHub provider [(#8472)](https://github.com/prowler-cloud/prowler/pull/8472)

---

## [v5.10.0] (Prowler v5.10.0)

### Added

File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -12,7 +12,7 @@ from prowler.lib.logger import logger

timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "5.10.0"
prowler_version = "5.10.2"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
@@ -550,9 +550,7 @@ class Check_Report_GCP(Check_Report):
            or ""
        )
        self.resource_name = (
            resource_name
            or getattr(resource, "name", "")
            or getattr(resource, "id", "")
            resource_name or getattr(resource, "name", "") or "GCP Project"
        )
        self.project_id = project_id or getattr(resource, "project_id", "")
        self.location = (
@@ -10,13 +10,13 @@ class kafka_cluster_is_public(Check):
            report = Check_Report_AWS(metadata=self.metadata(), resource=cluster)
            report.status = "FAIL"
            report.status_extended = (
                f"Kafka cluster '{cluster.name}' is publicly accessible."
                f"Kafka cluster {cluster.name} is publicly accessible."
            )

            if cluster.public_access:
            if not cluster.public_access:
                report.status = "PASS"
                report.status_extended = (
                    f"Kafka cluster '{cluster.name}' is not publicly accessible."
                    f"Kafka cluster {cluster.name} is not publicly accessible."
                )

            findings.append(report)
@@ -22,6 +22,10 @@ class app_http_logs_enabled(Check):
                            report.status = "PASS"
                            report.status_extended = f"App {app.name} has HTTP Logs enabled in diagnostic setting {diagnostic_setting.name} in subscription {subscription_name}"
                            break
                        elif log.category_group == "allLogs" and log.enabled:
                            report.status = "PASS"
                            report.status_extended = f"App {app.name} has allLogs category group which includes HTTP Logs enabled in diagnostic setting {diagnostic_setting.name} in subscription {subscription_name}"
                            break
                findings.append(report)

        return findings
@@ -14,6 +14,11 @@ class defender_additional_email_configured_with_a_security_contact(Check):
                report = Check_Report_Azure(
                    metadata=self.metadata(), resource=contact_configuration
                )
                report.resource_name = (
                    contact_configuration.name
                    if contact_configuration.name
                    else "Security Contact"
                )
                report.subscription = subscription_name

                if len(contact_configuration.emails) > 0:
@@ -31,6 +31,11 @@ class defender_attack_path_notifications_properly_configured(Check):
                report = Check_Report_Azure(
                    metadata=self.metadata(), resource=contact_configuration
                )
                report.resource_name = (
                    contact_configuration.name
                    if contact_configuration.name
                    else "Security Contact"
                )
                report.subscription = subscription_name
                actual_risk_level = getattr(
                    contact_configuration, "attack_path_minimal_risk_level", None
@@ -14,6 +14,11 @@ class defender_ensure_notify_alerts_severity_is_high(Check):
                report = Check_Report_Azure(
                    metadata=self.metadata(), resource=contact_configuration
                )
                report.resource_name = (
                    contact_configuration.name
                    if contact_configuration.name
                    else "Security Contact"
                )
                report.subscription = subscription_name
                report.status = "FAIL"
                report.status_extended = f"Notifications are not enabled for alerts with a minimum severity of high or lower in subscription {subscription_name}."
@@ -12,7 +12,13 @@ class defender_ensure_notify_emails_to_owners(Check):
        ) in defender_client.security_contact_configurations.items():
            for contact_configuration in security_contact_configurations.values():
                report = Check_Report_Azure(
                    metadata=self.metadata(), resource=contact_configuration
                    metadata=self.metadata(),
                    resource=contact_configuration,
                )
                report.resource_name = (
                    contact_configuration.name
                    if contact_configuration.name
                    else "Security Contact"
                )
                report.subscription = subscription_name
                if (
@@ -1,6 +1,5 @@
from prowler.lib.check.models import Check, Check_Report_Azure
from prowler.providers.azure.services.storage.storage_client import storage_client
from prowler.providers.azure.services.storage.storage_service import ReplicationSettings


class storage_geo_redundant_enabled(Check):
@@ -27,14 +26,16 @@ class storage_geo_redundant_enabled(Check):
                report.subscription = subscription

                if (
                    storage_account.replication_settings
                    == ReplicationSettings.STANDARD_GRS
                    storage_account.replication_settings == "Standard_GRS"
                    or storage_account.replication_settings == "Standard_GZRS"
                    or storage_account.replication_settings == "Standard_RAGRS"
                    or storage_account.replication_settings == "Standard_RAGZRS"
                ):
                    report.status = "PASS"
                    report.status_extended = f"Storage account {storage_account.name} from subscription {subscription} has Geo-redundant storage (GRS) enabled."
                    report.status_extended = f"Storage account {storage_account.name} from subscription {subscription} has Geo-redundant storage {storage_account.replication_settings} enabled."
                else:
                    report.status = "FAIL"
                    report.status_extended = f"Storage account {storage_account.name} from subscription {subscription} does not have Geo-redundant storage (GRS) enabled."
                    report.status_extended = f"Storage account {storage_account.name} from subscription {subscription} does not have Geo-redundant storage enabled, it has {storage_account.replication_settings} instead."

                findings.append(report)
@@ -1,4 +1,3 @@
from enum import Enum
from typing import Optional

from azure.mgmt.storage import StorageManagementClient
@@ -35,7 +34,6 @@ class Storage(AzureService):
                        key_expiration_period_in_days = int(
                            storage_account.key_policy.key_expiration_period_in_days
                        )
                    replication_settings = ReplicationSettings(storage_account.sku.name)
                    storage_accounts[subscription].append(
                        Account(
                            id=storage_account.id,
@@ -84,7 +82,7 @@ class Storage(AzureService):
                                    False,
                                )
                            ),
                            replication_settings=replication_settings,
                            replication_settings=storage_account.sku.name,
                            allow_cross_tenant_replication=(
                                True
                                if getattr(
@@ -273,17 +271,6 @@ class PrivateEndpointConnection(BaseModel):
    type: str


class ReplicationSettings(Enum):
    STANDARD_LRS = "Standard_LRS"
    STANDARD_GRS = "Standard_GRS"
    STANDARD_RAGRS = "Standard_RAGRS"
    STANDARD_ZRS = "Standard_ZRS"
    PREMIUM_LRS = "Premium_LRS"
    PREMIUM_ZRS = "Premium_ZRS"
    STANDARD_GZRS = "Standard_GZRS"
    STANDARD_RAGZRS = "Standard_RAGZRS"

class SMBProtocolSettings(BaseModel):
    channel_encryption: list[str]
    supported_versions: list[str]
@@ -310,7 +297,7 @@ class Account(BaseModel):
    minimum_tls_version: str
    private_endpoint_connections: list[PrivateEndpointConnection]
    key_expiration_period_in_days: Optional[int] = None
    replication_settings: ReplicationSettings = ReplicationSettings.STANDARD_LRS
    replication_settings: str = "Standard_LRS"
    allow_cross_tenant_replication: bool = True
    allow_shared_key_access: bool = True
    blob_properties: Optional[BlobProperties] = None
@@ -659,6 +659,9 @@ class GcpProvider(Provider):
                        if asset["resource"]["data"].get("name")
                        else project_id
                    )
                    # Handle empty or null project names
                    if not project_name or project_name.strip() == "":
                        project_name = "GCP Project"
                    gcp_project = GCPProject(
                        number=project_number,
                        id=project_id,
@@ -717,6 +720,9 @@ class GcpProvider(Provider):
                        if project.get("name")
                        else project_id
                    )
                    # Handle empty or null project names
                    if not project_name or project_name.strip() == "":
                        project_name = "GCP Project"
                    project_id = project["projectId"]
                    gcp_project = GCPProject(
                        number=project_number,
@@ -757,9 +763,15 @@ class GcpProvider(Provider):
            # If no projects were able to be accessed via API, add them manually if provided by the user in arguments
            if project_ids:
                for input_project in project_ids:
                    # Handle empty or null project names
                    project_name = (
                        input_project
                        if input_project and input_project.strip() != ""
                        else "GCP Project"
                    )
                    projects[input_project] = GCPProject(
                        id=input_project,
                        name=input_project,
                        name=project_name,
                        number=0,
                        labels={},
                        lifecycle_state="ACTIVE",
@@ -768,9 +780,15 @@ class GcpProvider(Provider):
            elif credentials_file:
                with open(credentials_file, "r", encoding="utf-8") as file:
                    project_id = json.load(file)["project_id"]
                    # Handle empty or null project names
                    project_name = (
                        project_id
                        if project_id and project_id.strip() != ""
                        else "GCP Project"
                    )
                    projects[project_id] = GCPProject(
                        id=project_id,
                        name=project_id,
                        name=project_name,
                        number=0,
                        labels={},
                        lifecycle_state="ACTIVE",
@@ -13,7 +13,7 @@ class iam_no_service_roles_at_project_level(Check):
                metadata=self.metadata(),
                resource=binding,
                resource_id=binding.role,
                resource_name=binding.role,
                resource_name=binding.role if binding.role else "Service Role",
                location=cloudresourcemanager_client.region,
            )
            if binding.role in [
@@ -31,7 +31,6 @@ class iam_no_service_roles_at_project_level(Check):
                metadata=self.metadata(),
                resource=cloudresourcemanager_client.projects[project],
                project_id=project,
                resource_name=project,
                location=cloudresourcemanager_client.region,
            )
            report.status = "PASS"
@@ -20,6 +20,7 @@ class logging_log_metric_filter_and_alert_for_audit_configuration_changes_enable
                metadata=self.metadata(),
                resource=metric,
                location=logging_client.region,
                resource_name=metric.name if metric.name else "Log Metric Filter",
            )
            projects_with_metric.add(metric.project_id)
            report.status = "FAIL"

@@ -18,6 +18,7 @@ class logging_log_metric_filter_and_alert_for_bucket_permission_changes_enabled(
                metadata=self.metadata(),
                resource=metric,
                location=logging_client.region,
                resource_name=metric.name if metric.name else "Log Metric Filter",
            )
            projects_with_metric.add(metric.project_id)
            report.status = "FAIL"

@@ -18,6 +18,7 @@ class logging_log_metric_filter_and_alert_for_custom_role_changes_enabled(Check)
                metadata=self.metadata(),
                resource=metric,
                location=logging_client.region,
                resource_name=metric.name if metric.name else "Log Metric Filter",
            )
            projects_with_metric.add(metric.project_id)
            report.status = "FAIL"

@@ -18,6 +18,7 @@ class logging_log_metric_filter_and_alert_for_project_ownership_changes_enabled(
                metadata=self.metadata(),
                resource=metric,
                location=logging_client.region,
                resource_name=metric.name if metric.name else "Log Metric Filter",
            )
            projects_with_metric.add(metric.project_id)
            report.status = "FAIL"

@@ -17,6 +17,7 @@ class logging_log_metric_filter_and_alert_for_sql_instance_configuration_changes
                metadata=self.metadata(),
                resource=metric,
                location=logging_client.region,
                resource_name=metric.name if metric.name else "Log Metric Filter",
            )
            projects_with_metric.add(metric.project_id)
            report.status = "FAIL"

@@ -18,6 +18,7 @@ class logging_log_metric_filter_and_alert_for_vpc_firewall_rule_changes_enabled(
                metadata=self.metadata(),
                resource=metric,
                location=logging_client.region,
                resource_name=metric.name if metric.name else "Log Metric Filter",
            )
            projects_with_metric.add(metric.project_id)
            report.status = "FAIL"

@@ -18,6 +18,7 @@ class logging_log_metric_filter_and_alert_for_vpc_network_changes_enabled(Check)
                metadata=self.metadata(),
                resource=metric,
                location=logging_client.region,
                resource_name=metric.name if metric.name else "Log Metric Filter",
            )
            projects_with_metric.add(metric.project_id)
            report.status = "FAIL"

@@ -18,6 +18,7 @@ class logging_log_metric_filter_and_alert_for_vpc_network_route_changes_enabled(
                metadata=self.metadata(),
                resource=metric,
                location=logging_client.region,
                resource_name=metric.name if metric.name else "Log Metric Filter",
            )
            projects_with_metric.add(metric.project_id)
            report.status = "FAIL"

@@ -26,6 +26,11 @@ class logging_sink_created(Check):
                metadata=self.metadata(),
                resource=projects_with_logging_sink[project],
                location=logging_client.region,
                resource_name=(
                    projects_with_logging_sink[project].name
                    if projects_with_logging_sink[project].name
                    else "Logging Sink"
                ),
            )
            report.status = "PASS"
            report.status_extended = f"Sink {projects_with_logging_sink[project].name} is enabled exporting copies of all the log entries in project {project}."
@@ -133,6 +133,12 @@ class GithubProvider(Provider):
        """
        logger.info("Instantiating GitHub Provider...")

        # Mute GitHub library logs to reduce noise since it is already handled by the Prowler logger
        import logging

        logging.getLogger("github").setLevel(logging.CRITICAL)
        logging.getLogger("github.GithubRetry").setLevel(logging.CRITICAL)

        # Set repositories and organizations for scoping
        self._repositories = repositories or []
        self._organizations = organizations or []
@@ -2,6 +2,7 @@ from datetime import datetime
from typing import Optional

import github
import requests
from pydantic.v1 import BaseModel

from prowler.lib.logger import logger
@@ -50,25 +51,57 @@ class Repository(GithubService):

        return True

    def _get_accessible_repos_graphql(self) -> list[str]:
        """
        Use the GitHub GraphQL API to list all repositories that the authentication token has access to.
        This works with high-granularity (fine-grained) PATs.
        """
        graphql_url = "https://api.github.com/graphql"
        token = self.provider.session.token
        headers = {
            "Authorization": f"bearer {token}",
            "Content-Type": "application/json",
        }
        query = """
        {
          viewer {
            repositories(first: 100, affiliations: [OWNER, ORGANIZATION_MEMBER]) {
              nodes {
                nameWithOwner
              }
            }
          }
        }
        """

        try:
            response = requests.post(
                graphql_url, json={"query": query}, headers=headers
            )
            response.raise_for_status()
            data = response.json()

            if "errors" in data:
                logger.error(f"Error in GraphQL query: {data['errors']}")
                return []

            repo_nodes = (
                data.get("data", {})
                .get("viewer", {})
                .get("repositories", {})
                .get("nodes", [])
            )
            return [repo["nameWithOwner"] for repo in repo_nodes]

        except requests.exceptions.RequestException as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            return []

    def _list_repositories(self):
        """
        List repositories based on provider scoping configuration.

        Scoping behavior:
        - No scoping: Returns all accessible repositories for authenticated user
        - Repository scoping: Returns only specified repositories
          Example: --repository owner1/repo1 owner2/repo2
        - Organization scoping: Returns all repositories from specified organizations
          Example: --organization org1 org2
        - Combined scoping: Returns specified repositories + all repos from organizations
          Example: --repository owner1/repo1 --organization org2

        Returns:
            dict: Dictionary of repository ID to Repo objects

        Raises:
            github.GithubException: When GitHub API access fails
            github.RateLimitExceededException: When API rate limits are exceeded
        """
        logger.info("Repository - Listing Repositories...")
        repos = {}
@@ -109,11 +142,38 @@ class Repository(GithubService):
                            error, "processing organization", org_name
                        )
            else:
                for repo in client.get_user().get_repos():
                    self._process_repository(repo, repos)
                logger.info(
                    "No repository or organization specified, discovering accessible repositories via GraphQL API..."
                )
                accessible_repo_names = self._get_accessible_repos_graphql()

                if not accessible_repo_names:
                    logger.warning(
                        "Could not find any accessible repositories with the provided token."
                    )

                for repo_name in accessible_repo_names:
                    try:
                        repo = client.get_repo(repo_name)
                        logger.info(
                            f"Processing repository found via GraphQL: {repo.full_name}"
                        )
                        self._process_repository(repo, repos)
                    except Exception as error:
                        if hasattr(self, "_handle_github_api_error"):
                            self._handle_github_api_error(
                                error,
                                "accessing repository discovered via GraphQL",
                                repo_name,
                            )
                        else:
                            logger.error(
                                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                            )

        except github.RateLimitExceededException as error:
            logger.error(f"GitHub API rate limit exceeded: {error}")
            raise  # Re-raise rate limit errors as they need special handling
            raise
        except github.GithubException as error:
            logger.error(f"GitHub API error while listing repositories: {error}")
        except Exception as error:
@@ -124,158 +184,167 @@ class Repository(GithubService):

    def _process_repository(self, repo, repos):
        """Process a single repository and extract all its information."""
        default_branch = repo.default_branch
        securitymd_exists = self._file_exists(repo, "SECURITY.md")
        # CODEOWNERS file can be in .github/, root, or docs/
        # https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-file-location
        codeowners_paths = [
            ".github/CODEOWNERS",
            "CODEOWNERS",
            "docs/CODEOWNERS",
        ]
        codeowners_files = [self._file_exists(repo, path) for path in codeowners_paths]
        if True in codeowners_files:
            codeowners_exists = True
        elif all(file is None for file in codeowners_files):
            codeowners_exists = None
        else:
            codeowners_exists = False
        delete_branch_on_merge = (
            repo.delete_branch_on_merge
            if repo.delete_branch_on_merge is not None
            else False
        )

        require_pr = False
        approval_cnt = 0
        branch_protection = False
        required_linear_history = False
        allow_force_pushes = True
        branch_deletion = True
        require_code_owner_reviews = False
        require_signed_commits = False
        status_checks = False
        enforce_admins = False
        conversation_resolution = False
        try:
            branch = repo.get_branch(default_branch)
            if branch.protected:
                protection = branch.get_protection()
                if protection:
                    require_pr = protection.required_pull_request_reviews is not None
                    approval_cnt = (
                        protection.required_pull_request_reviews.required_approving_review_count
                        if require_pr
                        else 0
                    )
                    required_linear_history = protection.required_linear_history
                    allow_force_pushes = protection.allow_force_pushes
                    branch_deletion = protection.allow_deletions
                    status_checks = protection.required_status_checks is not None
                    enforce_admins = protection.enforce_admins
                    conversation_resolution = (
                        protection.required_conversation_resolution
                    )
                    branch_protection = True
                    require_code_owner_reviews = (
                        protection.required_pull_request_reviews.require_code_owner_reviews
                        if require_pr
                        else False
                    )
            require_signed_commits = branch.get_required_signatures()
        except Exception as error:
            # If the branch is not found, it is not protected
            if "404" in str(error):
                logger.warning(
                    f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                )
            # Any other error, we cannot know if the branch is protected or not
            default_branch = repo.default_branch
            securitymd_exists = self._file_exists(repo, "SECURITY.md")
            # CODEOWNERS file can be in .github/, root, or docs/
            # https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-file-location
            codeowners_paths = [
                ".github/CODEOWNERS",
                "CODEOWNERS",
                "docs/CODEOWNERS",
            ]
            codeowners_files = [
                self._file_exists(repo, path) for path in codeowners_paths
            ]
            if True in codeowners_files:
                codeowners_exists = True
            elif all(file is None for file in codeowners_files):
                codeowners_exists = None
            else:
                require_pr = None
                approval_cnt = None
                branch_protection = None
                required_linear_history = None
                allow_force_pushes = None
                branch_deletion = None
                require_code_owner_reviews = None
                require_signed_commits = None
                status_checks = None
                enforce_admins = None
                conversation_resolution = None
                logger.error(
                    f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                )
                codeowners_exists = False
            delete_branch_on_merge = (
                repo.delete_branch_on_merge
                if repo.delete_branch_on_merge is not None
                else False
            )

            secret_scanning_enabled = False
            dependabot_alerts_enabled = False
            try:
                if (
                    repo.security_and_analysis
                    and repo.security_and_analysis.secret_scanning
                ):
                    secret_scanning_enabled = (
                        repo.security_and_analysis.secret_scanning.status == "enabled"
                    )
            require_pr = False
            approval_cnt = 0
            branch_protection = False
            required_linear_history = False
            allow_force_pushes = True
            branch_deletion = True
            require_code_owner_reviews = False
            require_signed_commits = False
            status_checks = False
            enforce_admins = False
            conversation_resolution = False
            try:
                # Use get_dependabot_alerts to check if Dependabot alerts are enabled
                repo.get_dependabot_alerts().totalCount
                # If the call succeeds, Dependabot is enabled (even if no alerts)
                dependabot_alerts_enabled = True
                branch = repo.get_branch(default_branch)
                if branch.protected:
                    protection = branch.get_protection()
                    if protection:
                        require_pr = (
                            protection.required_pull_request_reviews is not None
                        )
                        approval_cnt = (
                            protection.required_pull_request_reviews.required_approving_review_count
                            if require_pr
                            else 0
                        )
                        required_linear_history = protection.required_linear_history
                        allow_force_pushes = protection.allow_force_pushes
                        branch_deletion = protection.allow_deletions
                        status_checks = protection.required_status_checks is not None
                        enforce_admins = protection.enforce_admins
                        conversation_resolution = (
                            protection.required_conversation_resolution
                        )
                        branch_protection = True
                        require_code_owner_reviews = (
                            protection.required_pull_request_reviews.require_code_owner_reviews
                            if require_pr
                            else False
                        )
                require_signed_commits = branch.get_required_signatures()
            except Exception as error:
                error_str = str(error)
                if (
                    "403" in error_str
                    and "Dependabot alerts are disabled for this repository."
                    in error_str
                ):
                    dependabot_alerts_enabled = False
                else:
                    logger.error(
                        f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                # If the branch is not found, it is not protected
                if "404" in str(error):
                    logger.warning(
                        f"{repo.full_name}: {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                    )
                    dependabot_alerts_enabled = None
                # Any other error, we cannot know if the branch is protected or not
                else:
                    require_pr = None
                    approval_cnt = None
                    branch_protection = None
                    required_linear_history = None
                    allow_force_pushes = None
                    branch_deletion = None
                    require_code_owner_reviews = None
                    require_signed_commits = None
                    status_checks = None
                    enforce_admins = None
                    conversation_resolution = None
                    logger.error(
                        f"{repo.full_name}: {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                    )

            secret_scanning_enabled = False
            dependabot_alerts_enabled = False
            try:
                if (
                    repo.security_and_analysis
                    and repo.security_and_analysis.secret_scanning
                ):
                    secret_scanning_enabled = (
                        repo.security_and_analysis.secret_scanning.status == "enabled"
                    )
                try:
                    # Use get_dependabot_alerts to check if Dependabot alerts are enabled
                    repo.get_dependabot_alerts().totalCount
                    # If the call succeeds, Dependabot is enabled (even if no alerts)
                    dependabot_alerts_enabled = True
                except Exception as error:
                    error_str = str(error)
                    if (
                        "403" in error_str
                        and "Dependabot alerts are disabled for this repository."
                        in error_str
                    ):
                        dependabot_alerts_enabled = False
                    else:
                        logger.error(
                            f"{repo.full_name}: {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                        )
                        dependabot_alerts_enabled = None
            except Exception as error:
                logger.error(
                    f"{repo.full_name}: {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                )
                secret_scanning_enabled = None
                dependabot_alerts_enabled = None
            repos[repo.id] = Repo(
                id=repo.id,
                name=repo.name,
                owner=repo.owner.login,
                full_name=repo.full_name,
                default_branch=Branch(
                    name=default_branch,
                    protected=branch_protection,
                    default_branch=True,
                    require_pull_request=require_pr,
                    approval_count=approval_cnt,
                    required_linear_history=required_linear_history,
                    allow_force_pushes=allow_force_pushes,
                    branch_deletion=branch_deletion,
                    status_checks=status_checks,
                    enforce_admins=enforce_admins,
                    conversation_resolution=conversation_resolution,
                    require_code_owner_reviews=require_code_owner_reviews,
                    require_signed_commits=require_signed_commits,
                ),
                private=repo.private,
                archived=repo.archived,
                pushed_at=repo.pushed_at,
                securitymd=securitymd_exists,
                codeowners_exists=codeowners_exists,
                secret_scanning_enabled=secret_scanning_enabled,
                dependabot_alerts_enabled=dependabot_alerts_enabled,
                delete_branch_on_merge=delete_branch_on_merge,
            )
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
                f"{repo.full_name}: {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
            secret_scanning_enabled = None
            dependabot_alerts_enabled = None
        repos[repo.id] = Repo(
            id=repo.id,
            name=repo.name,
            owner=repo.owner.login,
            full_name=repo.full_name,
            default_branch=Branch(
                name=default_branch,
                protected=branch_protection,
                default_branch=True,
                require_pull_request=require_pr,
                approval_count=approval_cnt,
                required_linear_history=required_linear_history,
                allow_force_pushes=allow_force_pushes,
                branch_deletion=branch_deletion,
                status_checks=status_checks,
                enforce_admins=enforce_admins,
                conversation_resolution=conversation_resolution,
                require_code_owner_reviews=require_code_owner_reviews,
                require_signed_commits=require_signed_commits,
            ),
            private=repo.private,
            archived=repo.archived,
            pushed_at=repo.pushed_at,
            securitymd=securitymd_exists,
            codeowners_exists=codeowners_exists,
            secret_scanning_enabled=secret_scanning_enabled,
            dependabot_alerts_enabled=dependabot_alerts_enabled,
            delete_branch_on_merge=delete_branch_on_merge,
        )


class Branch(BaseModel):
    """Model for Github Branch"""

    name: str
    protected: bool
    protected: Optional[bool]
    default_branch: bool
    require_pull_request: Optional[bool]
    approval_count: Optional[int]
@@ -87,9 +87,11 @@ class AdminCenter(M365Service):
                        {
                            user.id: User(
                                id=user.id,
                                name=user.display_name,
                                name=getattr(user, "display_name", ""),
                                license=(
                                    license_details.value[0].sku_part_number
                                    getattr(
                                        license_details.value[0], "sku_part_number", None
                                    )
                                    if license_details.value
                                    else None
                                ),
@@ -149,8 +151,8 @@ class AdminCenter(M365Service):
                        {
                            group.id: Group(
                                id=group.id,
                                name=group.display_name,
                                visibility=group.visibility,
                                name=getattr(group, "display_name", ""),
                                visibility=getattr(group, "visibility", ""),
                            )
                        }
                    )
@@ -168,14 +170,21 @@ class AdminCenter(M365Service):
            domains_list = await self.client.domains.get()
            domains.update({})
            for domain in domains_list.value:
                domains.update(
                    {
                        domain.id: Domain(
                            id=domain.id,
                            password_validity_period=domain.password_validity_period_in_days,
                        )
                    }
                )
                if domain:
                    password_validity_period = getattr(
                        domain, "password_validity_period_in_days", None
                    )
                    if password_validity_period is None:
                        password_validity_period = 0

                    domains.update(
                        {
                            domain.id: Domain(
                                id=domain.id,
                                password_validity_period=password_validity_period,
                            )
                        }
                    )

        except Exception as error:
            logger.error(
@@ -166,4 +166,4 @@ class defender_antispam_outbound_policy_forwarding_disabled(Check):
                policy.default
                or defender_client.outbound_spam_rules[policy.name].state.lower()
                == "enabled"
            ) and not policy.auto_forwarding_mode
            ) and policy.auto_forwarding_mode == "Off"
@@ -44,6 +44,23 @@ class Defender(M365Service):
                    malware_policy = [malware_policy]
                for policy in malware_policy:
                    if policy:
                        file_types_raw = policy.get("FileTypes", [])
                        file_types = []
                        if file_types_raw is not None:
                            if isinstance(file_types_raw, list):
                                file_types = file_types_raw
                            else:
                                try:
                                    if isinstance(file_types_raw, str):
                                        file_types = [file_types_raw]
                                    else:
                                        file_types = [str(file_types_raw)]
                                except (ValueError, TypeError):
                                    logger.warning(
                                        f"Skipping invalid file_types value: {file_types_raw}"
                                    )
                                    file_types = []

                        malware_policies.append(
                            MalwarePolicy(
                                enable_file_filter=policy.get("EnableFileFilter", False),
@@ -54,7 +71,7 @@ class Defender(M365Service):
                                internal_sender_admin_address=policy.get(
                                    "InternalSenderAdminAddress", ""
                                ),
                                file_types=policy.get("FileTypes", []),
                                file_types=file_types,
                                is_default=policy.get("IsDefault", False),
                            )
                        )
@@ -207,7 +224,7 @@ class Defender(M365Service):
                                notify_sender_blocked_addresses=policy.get(
                                    "NotifyOutboundSpamRecipients", []
                                ),
                                auto_forwarding_mode=policy.get("AutoForwardingMode", True),
                                auto_forwarding_mode=policy.get("AutoForwardingMode", "On"),
                                default=policy.get("IsDefault", False),
                            )
@@ -257,12 +274,43 @@ class Defender(M365Service):
                    inbound_spam_policy = [inbound_spam_policy]
                for policy in inbound_spam_policy:
                    if policy:
                        allowed_domains_raw = policy.get("AllowedSenderDomains", [])
                        allowed_domains = []

                        if isinstance(allowed_domains_raw, str):
                            try:
                                import json

                                parsed_domains = json.loads(allowed_domains_raw)
                                if isinstance(parsed_domains, list):
                                    allowed_domains_raw = parsed_domains
                                else:
                                    logger.warning(
                                        f"Expected list from JSON string, got: {type(parsed_domains)}"
                                    )
                                    allowed_domains_raw = []
                            except (json.JSONDecodeError, ValueError) as e:
                                logger.warning(
                                    f"Failed to parse AllowedSenderDomains as JSON: {e}"
                                )
                                allowed_domains_raw = []

                        if allowed_domains_raw:
                            for domain in allowed_domains_raw:
                                if isinstance(domain, str):
                                    allowed_domains.append(domain)
                                else:
                                    try:
                                        allowed_domains.append(str(domain))
                                    except (ValueError, TypeError):
                                        logger.warning(
                                            f"Skipping invalid domain value: {domain}"
                                        )

                        inbound_spam_policies.append(
                            DefenderInboundSpamPolicy(
                                identity=policy.get("Identity", ""),
                                allowed_sender_domains=policy.get(
                                    "AllowedSenderDomains", []
                                ),
                                allowed_sender_domains=allowed_domains,
                                default=policy.get("IsDefault", False),
                            )
                        )
@@ -389,7 +437,7 @@ class OutboundSpamPolicy(BaseModel):
    notify_limit_exceeded: bool
    notify_limit_exceeded_addresses: List[str]
    notify_sender_blocked_addresses: List[str]
    auto_forwarding_mode: bool
    auto_forwarding_mode: str
    default: bool
@@ -123,12 +123,20 @@ class Exchange(M365Service):
                    rules_data = [rules_data]
                for rule in rules_data:
                    if rule:
                        sender_domain_is = rule.get("SenderDomainIs", [])
                        if sender_domain_is is None:
                            sender_domain_is = []

                        redirect_message_to = rule.get("RedirectMessageTo", [])
                        if redirect_message_to is None:
                            redirect_message_to = []

                        transport_rules.append(
                            TransportRule(
                                name=rule.get("Name", ""),
                                scl=rule.get("SetSCL", None),
                                sender_domain_is=rule.get("SenderDomainIs", []),
                                redirect_message_to=rule.get("RedirectMessageTo", None),
                                sender_domain_is=sender_domain_is,
                                redirect_message_to=redirect_message_to,
                            )
                        )
        except Exception as error:
@@ -70,7 +70,7 @@ maintainers = [{name = "Prowler Engineering", email = "engineering@prowler.com"}
name = "prowler"
readme = "README.md"
requires-python = ">3.9.1,<3.13"
version = "5.10.0"
version = "5.10.2"

[project.scripts]
prowler = "prowler.__main__:prowler"
@@ -72,10 +72,10 @@ class Test_kafka_cluster_is_public:
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert result[0].status == "PASS"
            assert (
                result[0].status_extended
                == "Kafka cluster 'demo-cluster-1' is publicly accessible."
                == "Kafka cluster demo-cluster-1 is not publicly accessible."
            )
            assert (
                result[0].resource_arn
@@ -126,10 +126,10 @@ class Test_kafka_cluster_is_public:
            result = check.execute()

            assert len(result) == 1
            assert result[0].status == "PASS"
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == "Kafka cluster 'demo-cluster-1' is not publicly accessible."
                == "Kafka cluster demo-cluster-1 is publicly accessible."
            )
            assert (
                result[0].resource_arn
@@ -136,26 +136,32 @@ class Test_app_http_logs_enabled:
                            logs=[
                                mock.MagicMock(
                                    category="AppServiceHTTPLogs",
                                    category_group=None,
                                    enabled=True,
                                ),
                                mock.MagicMock(
                                    category="AppServiceConsoleLogs",
                                    category_group=None,
                                    enabled=False,
                                ),
                                mock.MagicMock(
                                    category="AppServiceAppLogs",
                                    category_group=None,
                                    enabled=True,
                                ),
                                mock.MagicMock(
                                    category="AppServiceAuditLogs",
                                    category_group=None,
                                    enabled=False,
                                ),
                                mock.MagicMock(
                                    category="AppServiceIPSecAuditLogs",
                                    category_group=None,
                                    enabled=False,
                                ),
                                mock.MagicMock(
                                    category="AppServicePlatformLogs",
                                    category_group=None,
                                    enabled=False,
                                ),
                            ],
@@ -181,26 +187,32 @@ class Test_app_http_logs_enabled:
                            logs=[
                                mock.MagicMock(
                                    category="AppServiceHTTPLogs",
                                    category_group=None,
                                    enabled=True,
                                ),
                                mock.MagicMock(
                                    category="AppServiceConsoleLogs",
                                    category_group=None,
                                    enabled=True,
                                ),
                                mock.MagicMock(
                                    category="AppServiceAppLogs",
                                    category_group=None,
                                    enabled=True,
                                ),
                                mock.MagicMock(
                                    category="AppServiceAuditLogs",
                                    category_group=None,
                                    enabled=False,
                                ),
                                mock.MagicMock(
                                    category="AppServiceIPSecAuditLogs",
                                    category_group=None,
                                    enabled=True,
                                ),
                                mock.MagicMock(
                                    category="AppServicePlatformLogs",
                                    category_group=None,
                                    enabled=False,
                                ),
                            ],
@@ -223,3 +235,129 @@ class Test_app_http_logs_enabled:
            result[0].status_extended
            == f"App app_id-2 has HTTP Logs enabled in diagnostic setting name_diagnostic_setting2 in subscription {AZURE_SUBSCRIPTION_ID}"
        )

    def test_diagnostic_setting_with_all_logs_category_group(self):
        app_client = mock.MagicMock

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.app.app_http_logs_enabled.app_http_logs_enabled.app_client",
                new=app_client,
            ),
        ):
            from prowler.providers.azure.services.app.app_http_logs_enabled.app_http_logs_enabled import (
                app_http_logs_enabled,
            )
            from prowler.providers.azure.services.app.app_service import WebApp
            from prowler.providers.azure.services.monitor.monitor_service import (
                DiagnosticSetting,
            )

            app_client.apps = {
                AZURE_SUBSCRIPTION_ID: {
                    "resource_id3": WebApp(
                        resource_id="resource_id3",
                        name="app_id-3",
                        auth_enabled=True,
                        configurations=None,
                        client_cert_mode="Ignore",
                        https_only=False,
                        kind="WebApp",
                        identity=mock.MagicMock,
                        location="West Europe",
                        monitor_diagnostic_settings=[
                            DiagnosticSetting(
                                id="id3/id3",
                                logs=[
                                    mock.MagicMock(
                                        category=None,
                                        category_group="allLogs",
                                        enabled=True,
                                    ),
                                ],
                                storage_account_name="storage_account_name3",
                                storage_account_id="storage_account_id3",
                                name="name_diagnostic_setting3",
                            ),
                        ],
                    ),
                }
            }
            check = app_http_logs_enabled()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "PASS"
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            assert result[0].resource_name == "app_id-3"
            assert result[0].resource_id == "resource_id3"
            assert (
                result[0].status_extended
                == f"App app_id-3 has allLogs category group which includes HTTP Logs enabled in diagnostic setting name_diagnostic_setting3 in subscription {AZURE_SUBSCRIPTION_ID}"
            )

    def test_diagnostic_setting_with_all_logs_category_group_disabled(self):
        app_client = mock.MagicMock

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.app.app_http_logs_enabled.app_http_logs_enabled.app_client",
                new=app_client,
            ),
        ):
            from prowler.providers.azure.services.app.app_http_logs_enabled.app_http_logs_enabled import (
                app_http_logs_enabled,
            )
            from prowler.providers.azure.services.app.app_service import WebApp
            from prowler.providers.azure.services.monitor.monitor_service import (
                DiagnosticSetting,
            )

            app_client.apps = {
                AZURE_SUBSCRIPTION_ID: {
                    "resource_id4": WebApp(
                        resource_id="resource_id4",
                        name="app_id-4",
                        auth_enabled=True,
                        configurations=None,
                        client_cert_mode="Ignore",
                        https_only=False,
                        kind="WebApp",
                        identity=mock.MagicMock,
                        location="West Europe",
                        monitor_diagnostic_settings=[
                            DiagnosticSetting(
                                id="id4/id4",
                                logs=[
                                    mock.MagicMock(
                                        category=None,
                                        category_group="allLogs",
                                        enabled=False,  # Disabled
                                    ),
                                ],
                                storage_account_name="storage_account_name4",
                                storage_account_id="storage_account_id4",
                                name="name_diagnostic_setting4",
                            ),
                        ],
                    ),
                }
            }
            check = app_http_logs_enabled()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            assert result[0].resource_name == "app_id-4"
            assert result[0].resource_id == "resource_id4"
            assert (
                result[0].status_extended
                == f"App app_id-4 does not have HTTP Logs enabled in diagnostic setting name_diagnostic_setting4 in subscription {AZURE_SUBSCRIPTION_ID}"
            )
@@ -4,7 +4,6 @@ from uuid import uuid4
|
||||
from prowler.providers.azure.services.storage.storage_service import (
|
||||
Account,
|
||||
NetworkRuleSet,
|
||||
ReplicationSettings,
|
||||
)
|
||||
from tests.providers.azure.azure_fixtures import (
|
||||
AZURE_SUBSCRIPTION_ID,
|
||||
@@ -35,10 +34,11 @@ class Test_storage_geo_redundant_enabled:
|
||||
result = check.execute()
|
||||
assert len(result) == 0
|
||||
|
||||
def test_storage_geo_redundant_enabled(self):
|
||||
def test_storage_account_standard_grs_enabled(self):
|
||||
storage_account_id = str(uuid4())
|
||||
storage_account_name = "Test Storage Account GRS"
|
||||
storage_client = mock.MagicMock()
|
||||
replication_setting = "Standard_GRS"
|
||||
storage_client.storage_accounts = {
|
||||
AZURE_SUBSCRIPTION_ID: [
|
||||
Account(
|
||||
@@ -56,7 +56,7 @@ class Test_storage_geo_redundant_enabled:
|
||||
private_endpoint_connections=[],
|
||||
key_expiration_period_in_days=None,
|
||||
location="westeurope",
|
||||
replication_settings=ReplicationSettings.STANDARD_GRS,
|
||||
replication_settings=replication_setting,
|
||||
)
|
||||
]
|
||||
}
|
||||
@@ -81,17 +81,18 @@ class Test_storage_geo_redundant_enabled:
|
||||
assert result[0].status == "PASS"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} has Geo-redundant storage (GRS) enabled."
|
||||
== f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} has Geo-redundant storage {replication_setting} enabled."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == storage_account_name
|
||||
assert result[0].resource_id == storage_account_id
|
||||
assert result[0].location == "westeurope"
|
||||
|
||||
def test_storage_account_geo_redundant_disabled(self):
|
||||
def test_storage_account_standard_ragrs_enabled(self):
|
||||
storage_account_id = str(uuid4())
|
||||
storage_account_name = "Test Storage Account LRS"
|
||||
storage_account_name = "Test Storage Account RAGRS"
|
||||
storage_client = mock.MagicMock()
|
||||
replication_setting = "Standard_RAGRS"
|
||||
storage_client.storage_accounts = {
|
||||
AZURE_SUBSCRIPTION_ID: [
|
||||
Account(
|
||||
@@ -109,7 +110,169 @@ class Test_storage_geo_redundant_enabled:
|
||||
private_endpoint_connections=[],
|
||||
key_expiration_period_in_days=None,
|
||||
location="westeurope",
|
||||
replication_settings=ReplicationSettings.STANDARD_LRS,
|
||||
replication_settings=replication_setting,
|
||||
)
|
||||
]
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled.storage_client",
|
||||
new=storage_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled import (
|
||||
storage_geo_redundant_enabled,
|
||||
)
|
||||
|
||||
check = storage_geo_redundant_enabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} has Geo-redundant storage {replication_setting} enabled."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == storage_account_name
|
||||
assert result[0].resource_id == storage_account_id
|
||||
assert result[0].location == "westeurope"
|
||||
|
||||
def test_storage_account_standard_gzrs_enabled(self):
|
||||
storage_account_id = str(uuid4())
|
||||
storage_account_name = "Test Storage Account GZRS"
|
||||
storage_client = mock.MagicMock()
|
||||
replication_setting = "Standard_GZRS"
|
||||
storage_client.storage_accounts = {
|
||||
AZURE_SUBSCRIPTION_ID: [
|
||||
Account(
|
||||
id=storage_account_id,
|
||||
name=storage_account_name,
|
||||
resouce_group_name="rg",
|
||||
enable_https_traffic_only=False,
|
||||
infrastructure_encryption=False,
|
||||
allow_blob_public_access=False,
|
||||
network_rule_set=NetworkRuleSet(
|
||||
bypass="AzureServices", default_action="Allow"
|
||||
),
|
||||
encryption_type="None",
|
||||
minimum_tls_version="TLS1_2",
|
||||
private_endpoint_connections=[],
|
||||
key_expiration_period_in_days=None,
|
||||
location="westeurope",
|
||||
replication_settings=replication_setting,
|
||||
)
|
||||
]
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled.storage_client",
|
||||
new=storage_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled import (
|
||||
storage_geo_redundant_enabled,
|
||||
)
|
||||
|
||||
check = storage_geo_redundant_enabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} has Geo-redundant storage {replication_setting} enabled."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == storage_account_name
|
||||
assert result[0].resource_id == storage_account_id
|
||||
assert result[0].location == "westeurope"
|
||||
|
||||
def test_storage_account_standard_ragzrs_enabled(self):
|
||||
storage_account_id = str(uuid4())
|
||||
storage_account_name = "Test Storage Account RAGZRS"
|
||||
storage_client = mock.MagicMock()
|
||||
replication_setting = "Standard_RAGZRS"
|
||||
storage_client.storage_accounts = {
|
||||
AZURE_SUBSCRIPTION_ID: [
|
||||
Account(
|
||||
id=storage_account_id,
|
||||
name=storage_account_name,
|
||||
resouce_group_name="rg",
|
||||
enable_https_traffic_only=False,
|
||||
infrastructure_encryption=False,
|
||||
allow_blob_public_access=False,
|
||||
network_rule_set=NetworkRuleSet(
|
||||
bypass="AzureServices", default_action="Allow"
|
||||
),
|
||||
encryption_type="None",
|
||||
minimum_tls_version="TLS1_2",
|
||||
private_endpoint_connections=[],
|
||||
key_expiration_period_in_days=None,
|
||||
location="westeurope",
|
||||
replication_settings=replication_setting,
|
||||
)
|
||||
]
|
||||
}
|
||||
|
||||
with (
|
||||
mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
),
|
||||
mock.patch(
|
||||
"prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled.storage_client",
|
||||
new=storage_client,
|
||||
),
|
||||
):
|
||||
from prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled import (
|
||||
storage_geo_redundant_enabled,
|
||||
)
|
||||
|
||||
check = storage_geo_redundant_enabled()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} has Geo-redundant storage {replication_setting} enabled."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].resource_name == storage_account_name
|
||||
assert result[0].resource_id == storage_account_id
|
||||
assert result[0].location == "westeurope"
|
||||
|
||||
def test_storage_account_standard_lrs_disabled(self):
|
||||
storage_account_id = str(uuid4())
|
||||
storage_account_name = "Test Storage Account LRS"
|
||||
storage_client = mock.MagicMock()
|
||||
replication_setting = "Standard_LRS"
|
||||
storage_client.storage_accounts = {
|
||||
AZURE_SUBSCRIPTION_ID: [
|
||||
Account(
|
||||
id=storage_account_id,
|
||||
name=storage_account_name,
|
||||
resouce_group_name="rg",
|
||||
enable_https_traffic_only=False,
|
||||
infrastructure_encryption=False,
|
||||
allow_blob_public_access=False,
|
||||
network_rule_set=NetworkRuleSet(
|
||||
bypass="AzureServices", default_action="Allow"
|
||||
),
|
||||
encryption_type="None",
|
||||
minimum_tls_version="TLS1_2",
|
||||
private_endpoint_connections=[],
|
||||
key_expiration_period_in_days=None,
|
||||
location="westeurope",
|
||||
replication_settings=replication_setting,
|
||||
)
|
||||
]
|
||||
}
|
||||
@@ -134,7 +297,169 @@ class Test_storage_geo_redundant_enabled:
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
-               == f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} does not have Geo-redundant storage (GRS) enabled."
+               == f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} does not have Geo-redundant storage enabled, it has {replication_setting} instead."
            )
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            assert result[0].resource_name == storage_account_name
            assert result[0].resource_id == storage_account_id
            assert result[0].location == "westeurope"

    def test_storage_account_standard_zrs_disabled(self):
        storage_account_id = str(uuid4())
        storage_account_name = "Test Storage Account ZRS"
        storage_client = mock.MagicMock()
        replication_setting = "Standard_ZRS"
        storage_client.storage_accounts = {
            AZURE_SUBSCRIPTION_ID: [
                Account(
                    id=storage_account_id,
                    name=storage_account_name,
                    resouce_group_name="rg",
                    enable_https_traffic_only=False,
                    infrastructure_encryption=False,
                    allow_blob_public_access=False,
                    network_rule_set=NetworkRuleSet(
                        bypass="AzureServices", default_action="Allow"
                    ),
                    encryption_type="None",
                    minimum_tls_version="TLS1_2",
                    private_endpoint_connections=[],
                    key_expiration_period_in_days=None,
                    location="westeurope",
                    replication_settings=replication_setting,
                )
            ]
        }

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled.storage_client",
                new=storage_client,
            ),
        ):
            from prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled import (
                storage_geo_redundant_enabled,
            )

            check = storage_geo_redundant_enabled()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} does not have Geo-redundant storage enabled, it has {replication_setting} instead."
            )
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            assert result[0].resource_name == storage_account_name
            assert result[0].resource_id == storage_account_id
            assert result[0].location == "westeurope"

    def test_storage_account_premium_lrs_disabled(self):
        storage_account_id = str(uuid4())
        storage_account_name = "Test Storage Account Premium LRS"
        storage_client = mock.MagicMock()
        replication_setting = "Premium_LRS"
        storage_client.storage_accounts = {
            AZURE_SUBSCRIPTION_ID: [
                Account(
                    id=storage_account_id,
                    name=storage_account_name,
                    resouce_group_name="rg",
                    enable_https_traffic_only=False,
                    infrastructure_encryption=False,
                    allow_blob_public_access=False,
                    network_rule_set=NetworkRuleSet(
                        bypass="AzureServices", default_action="Allow"
                    ),
                    encryption_type="None",
                    minimum_tls_version="TLS1_2",
                    private_endpoint_connections=[],
                    key_expiration_period_in_days=None,
                    location="westeurope",
                    replication_settings=replication_setting,
                )
            ]
        }

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled.storage_client",
                new=storage_client,
            ),
        ):
            from prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled import (
                storage_geo_redundant_enabled,
            )

            check = storage_geo_redundant_enabled()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} does not have Geo-redundant storage enabled, it has {replication_setting} instead."
            )
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            assert result[0].resource_name == storage_account_name
            assert result[0].resource_id == storage_account_id
            assert result[0].location == "westeurope"

    def test_storage_account_premium_zrs_disabled(self):
        storage_account_id = str(uuid4())
        storage_account_name = "Test Storage Account Premium ZRS"
        storage_client = mock.MagicMock()
        replication_setting = "Premium_ZRS"
        storage_client.storage_accounts = {
            AZURE_SUBSCRIPTION_ID: [
                Account(
                    id=storage_account_id,
                    name=storage_account_name,
                    resouce_group_name="rg",
                    enable_https_traffic_only=False,
                    infrastructure_encryption=False,
                    allow_blob_public_access=False,
                    network_rule_set=NetworkRuleSet(
                        bypass="AzureServices", default_action="Allow"
                    ),
                    encryption_type="None",
                    minimum_tls_version="TLS1_2",
                    private_endpoint_connections=[],
                    key_expiration_period_in_days=None,
                    location="westeurope",
                    replication_settings=replication_setting,
                )
            ]
        }

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_azure_provider(),
            ),
            mock.patch(
                "prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled.storage_client",
                new=storage_client,
            ),
        ):
            from prowler.providers.azure.services.storage.storage_geo_redundant_enabled.storage_geo_redundant_enabled import (
                storage_geo_redundant_enabled,
            )

            check = storage_geo_redundant_enabled()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert (
                result[0].status_extended
                == f"Storage account {storage_account_name} from subscription {AZURE_SUBSCRIPTION_ID} does not have Geo-redundant storage enabled, it has {replication_setting} instead."
            )
            assert result[0].subscription == AZURE_SUBSCRIPTION_ID
            assert result[0].resource_name == storage_account_name
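Taken together, these FAIL cases feed every non-geo-redundant SKU (Standard_LRS, Standard_ZRS, Premium_LRS, Premium_ZRS) through the check, so the implementation under test presumably passes only replication settings in the GRS family and echoes the configured SKU back in the status message. A minimal sketch of that decision, assuming the usual Azure SKU names (the constant below is illustrative, not code from this diff):

# Hedged sketch of the logic these tests exercise; the exact SKU list is
# an assumption, not taken from the storage_geo_redundant_enabled check.
GEO_REDUNDANT_SKUS = {"Standard_GRS", "Standard_RAGRS", "Standard_GZRS", "Standard_RAGZRS"}

def is_geo_redundant(replication_setting: str) -> bool:
    # PASS only when data is replicated to a secondary region.
    return replication_setting in GEO_REDUNDANT_SKUS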
@@ -6,7 +6,6 @@ from prowler.providers.azure.services.storage.storage_service import (
    DeleteRetentionPolicy,
    FileServiceProperties,
    NetworkRuleSet,
-   ReplicationSettings,
    SMBProtocolSettings,
    Storage,
)

@@ -53,7 +52,7 @@ def mock_storage_get_storage_accounts(_):
            location="westeurope",
            blob_properties=blob_properties,
            default_to_entra_authorization=True,
-           replication_settings=ReplicationSettings.STANDARD_LRS,
+           replication_settings="Standard_LRS",
            allow_cross_tenant_replication=True,
            allow_shared_key_access=True,
            file_service_properties=file_service_properties,

@@ -150,7 +149,7 @@ class Test_Storage_Service:
        ].default_to_entra_authorization
        assert (
            storage.storage_accounts[AZURE_SUBSCRIPTION_ID][0].replication_settings
-           == ReplicationSettings.STANDARD_LRS
+           == "Standard_LRS"
        )
        assert (
            storage.storage_accounts[AZURE_SUBSCRIPTION_ID][
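The hunks above drop the ReplicationSettings enum and compare against the raw SKU string, which suggests the service model now stores whatever SKU name the Azure SDK reports. A hedged sketch of the resulting field (abbreviated; the sku.name origin is an assumption):

# Hedged sketch: the Account model after dropping the enum, keeping the
# replication setting as a plain string (most fields omitted for brevity).
from pydantic import BaseModel

class Account(BaseModel):
    id: str
    name: str
    location: str
    replication_settings: str  # e.g. "Standard_LRS", presumably from sku.name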
@@ -175,7 +175,7 @@ class Test_compute_project_os_login_enabled:
                result[0].status_extended,
            )
            assert result[0].resource_id == project.id
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].location == "global"
            assert result[0].project_id == GCP_PROJECT_ID

@@ -225,6 +225,6 @@ class Test_compute_project_os_login_enabled:
                result[0].status_extended,
            )
            assert result[0].resource_id == project.id
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].location == "global"
            assert result[0].project_id == GCP_PROJECT_ID

@@ -44,6 +44,7 @@ class Test_iam_account_access_approval_enabled:
                result[0].status_extended,
            )
            assert result[0].resource_id == GCP_PROJECT_ID
            assert result[0].resource_name == "test"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == "global"

@@ -95,5 +96,58 @@ class Test_iam_account_access_approval_enabled:
                result[0].status_extended,
            )
            assert result[0].resource_id == GCP_PROJECT_ID
            assert result[0].resource_name == "test"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == "global"

    def test_iam_project_with_settings_empty_project_name(self):
        cloudresourcemanager_client = mock.MagicMock()
        accessapproval_client = mock.MagicMock()
        accessapproval_client.project_ids = [GCP_PROJECT_ID]
        accessapproval_client.region = "global"
        accessapproval_client.projects = {
            GCP_PROJECT_ID: GCPProject(
                id=GCP_PROJECT_ID,
                number="123456789012",
                name="",
                labels={},
                lifecycle_state="ACTIVE",
            )
        }

        with (
            mock.patch(
                "prowler.providers.common.provider.Provider.get_global_provider",
                return_value=set_mocked_gcp_provider(),
            ),
            mock.patch(
                "prowler.providers.gcp.services.iam.iam_account_access_approval_enabled.iam_account_access_approval_enabled.accessapproval_client",
                new=accessapproval_client,
            ),
            mock.patch(
                "prowler.providers.gcp.services.iam.iam_service.cloudresourcemanager_client",
                new=cloudresourcemanager_client,
            ),
        ):
            from prowler.providers.gcp.services.iam.iam_service import Setting

            accessapproval_client.settings = {
                GCP_PROJECT_ID: Setting(name="test", project_id=GCP_PROJECT_ID)
            }

            from prowler.providers.gcp.services.iam.iam_account_access_approval_enabled.iam_account_access_approval_enabled import (
                iam_account_access_approval_enabled,
            )

            check = iam_account_access_approval_enabled()
            result = check.execute()
            assert len(result) == 1
            assert result[0].status == "PASS"
            assert search(
                "has Access Approval enabled",
                result[0].status_extended,
            )
            assert result[0].resource_id == GCP_PROJECT_ID
            assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == "global"

@@ -176,7 +176,7 @@ class Test_iam_audit_logs_enabled:
                r.status_extended,
            )
            assert r.resource_id == GCP_PROJECT_ID
-           assert r.resource_name == GCP_PROJECT_ID
+           assert r.resource_name == "GCP Project"
            assert r.project_id == GCP_PROJECT_ID
            assert r.location == cloudresourcemanager_client.region

@@ -226,6 +226,6 @@ class Test_iam_audit_logs_enabled:
                r.status_extended,
            )
            assert r.resource_id == GCP_PROJECT_ID
-           assert r.resource_name == GCP_PROJECT_ID
+           assert r.resource_name == "GCP Project"
            assert r.project_id == GCP_PROJECT_ID
            assert r.location == cloudresourcemanager_client.region

@@ -113,7 +113,7 @@ class Test_iam_no_service_roles_at_project_level:
                result[0].status_extended,
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "test"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == cloudresourcemanager_client.region

@@ -255,6 +255,6 @@ class Test_iam_no_service_roles_at_project_level:
                result[0].status_extended,
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == cloudresourcemanager_client.region

@@ -213,7 +213,7 @@ class Test_iam_role_kms_enforce_separation_of_duties:
                r.status_extended,
            )
            assert r.resource_id == GCP_PROJECT_ID
-           assert r.resource_name == GCP_PROJECT_ID
+           assert r.resource_name == "GCP Project"
            assert r.project_id == GCP_PROJECT_ID
            assert r.location == cloudresourcemanager_client.region

@@ -277,6 +277,6 @@ class Test_iam_role_kms_enforce_separation_of_duties:
                r.status_extended,
            )
            assert r.resource_id == GCP_PROJECT_ID
-           assert r.resource_name == GCP_PROJECT_ID
+           assert r.resource_name == "GCP Project"
            assert r.project_id == GCP_PROJECT_ID
            assert r.location == cloudresourcemanager_client.region

@@ -213,7 +213,7 @@ class Test_iam_role_sa_enforce_separation_of_duties:
                r.status_extended,
            )
            assert r.resource_id == GCP_PROJECT_ID
-           assert r.resource_name == GCP_PROJECT_ID
+           assert r.resource_name == "GCP Project"
            assert r.project_id == GCP_PROJECT_ID
            assert r.location == cloudresourcemanager_client.region

@@ -277,6 +277,6 @@ class Test_iam_role_sa_enforce_separation_of_duties:
                r.status_extended,
            )
            assert r.resource_id == GCP_PROJECT_ID
-           assert r.resource_name == GCP_PROJECT_ID
+           assert r.resource_name == "GCP Project"
            assert r.project_id == GCP_PROJECT_ID
            assert r.location == cloudresourcemanager_client.region

@@ -141,7 +141,7 @@ class Test_logging_log_metric_filter_and_alert_for_audit_configuration_changes_e
                == f"There are no log metric filters or alerts associated in project {GCP_PROJECT_ID}."
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == GCP_EU1_LOCATION

@@ -141,7 +141,7 @@ class Test_logging_log_metric_filter_and_alert_for_bucket_permission_changes_ena
                == f"There are no log metric filters or alerts associated in project {GCP_PROJECT_ID}."
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == GCP_EU1_LOCATION

@@ -141,7 +141,7 @@ class Test_logging_log_metric_filter_and_alert_for_custom_role_changes_enabled:
                == f"There are no log metric filters or alerts associated in project {GCP_PROJECT_ID}."
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == GCP_EU1_LOCATION

@@ -141,7 +141,7 @@ class Test_logging_log_metric_filter_and_alert_for_project_ownership_changes_ena
                == f"There are no log metric filters or alerts associated in project {GCP_PROJECT_ID}."
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == GCP_EU1_LOCATION

@@ -141,7 +141,7 @@ class Test_logging_log_metric_filter_and_alert_for_sql_instance_configuration_ch
                == f"There are no log metric filters or alerts associated in project {GCP_PROJECT_ID}."
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == GCP_EU1_LOCATION

@@ -141,7 +141,7 @@ class Test_logging_log_metric_filter_and_alert_for_vpc_firewall_rule_changes_ena
                == f"There are no log metric filters or alerts associated in project {GCP_PROJECT_ID}."
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == GCP_EU1_LOCATION

@@ -141,7 +141,7 @@ class Test_logging_log_metric_filter_and_alert_for_vpc_network_changes_enabled:
                == f"There are no log metric filters or alerts associated in project {GCP_PROJECT_ID}."
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == GCP_EU1_LOCATION

@@ -141,7 +141,7 @@ class Test_logging_log_metric_filter_and_alert_for_vpc_network_route_changes_ena
                == f"There are no log metric filters or alerts associated in project {GCP_PROJECT_ID}."
            )
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == GCP_EU1_LOCATION

@@ -204,7 +204,7 @@ class Test_logging_sink_created:
            assert len(result) == 1
            assert result[0].status == "FAIL"
            assert result[0].resource_id == GCP_PROJECT_ID
-           assert result[0].resource_name == GCP_PROJECT_ID
+           assert result[0].resource_name == "GCP Project"
            assert result[0].project_id == GCP_PROJECT_ID
            assert result[0].location == GCP_EU1_LOCATION
            assert (
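A single pattern runs through all of these GCP hunks: the expected resource_name is no longer the project ID but the project's display name ("test" where the mock has one), with a generic label when the mocked project has an empty name. A plausible sketch of the fallback, inferred from the assertions rather than lifted from the checks:

# Hedged sketch of the apparent resource_name fallback in the GCP checks.
def report_resource_name(project) -> str:
    # Prefer the project's display name; otherwise use a generic label.
    return project.name if project.name else "GCP Project"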
@@ -1,6 +1,7 @@
from datetime import datetime, timezone
from unittest.mock import MagicMock, patch

+import requests
from github import GithubException, RateLimitExceededException

from prowler.providers.github.services.repository.repository_service import (

@@ -40,6 +41,7 @@ def mock_list_repositories(_):
            archived=False,
            pushed_at=datetime.now(timezone.utc),
            delete_branch_on_merge=True,
+           dependabot_alerts_enabled=True,
        ),
    }

@@ -110,6 +112,105 @@ class Test_Repository_FileExists:
            assert mock_logger.error.called


class Test_Repository_GraphQL:
    def setup_method(self):
        self.mock_repo1 = MagicMock()
        self.mock_repo1.id = 1
        self.mock_repo1.name = "repo1"
        self.mock_repo1.owner.login = "owner1"
        self.mock_repo1.full_name = "owner1/repo1"
        self.mock_repo1.default_branch = "main"
        self.mock_repo1.private = False
        self.mock_repo1.archived = False
        self.mock_repo1.pushed_at = datetime.now(timezone.utc)
        self.mock_repo1.delete_branch_on_merge = False
        self.mock_repo1.security_and_analysis = None
        self.mock_repo1.get_contents.side_effect = [None, None, None]
        self.mock_repo1.get_branch.side_effect = Exception("404 Not Found")
        self.mock_repo1.get_dependabot_alerts.side_effect = Exception("403 Forbidden")

    def test_no_scoping_uses_graphql(self):
        """Test that no scoping triggers the GraphQL discovery method successfully"""
        provider = set_mocked_github_provider()
        provider.repositories = []
        provider.organizations = []

        with patch.object(Repository, "__init__", lambda x, y: None):
            repository_service = Repository(provider)
            mock_client = MagicMock()
            repository_service.clients = [mock_client]
            repository_service.provider = provider

            with patch.object(
                repository_service,
                "_get_accessible_repos_graphql",
                return_value=["owner1/repo1"],
            ) as mock_graphql_call:
                mock_client.get_repo.return_value = self.mock_repo1

                repos = repository_service._list_repositories()

                assert len(repos) == 1
                assert 1 in repos
                assert repos[1].name == "repo1"

                mock_graphql_call.assert_called_once()
                mock_client.get_repo.assert_called_once_with("owner1/repo1")

    def test_graphql_call_api_error(self):
        """Test that an error during the GraphQL call is handled gracefully"""
        provider = set_mocked_github_provider()
        provider.repositories = []
        provider.organizations = []

        with patch.object(Repository, "__init__", lambda x, y: None):
            repository_service = Repository(provider)
            repository_service.clients = [MagicMock()]
            repository_service.provider = provider

            with patch(
                "requests.post",
                side_effect=requests.exceptions.RequestException("API Error"),
            ):
                with patch(
                    "prowler.providers.github.services.repository.repository_service.logger"
                ) as mock_logger:

                    repos = repository_service._list_repositories()

                    assert len(repos) == 0
                    mock_logger.error.assert_called_once()

                    log_output = str(mock_logger.error.call_args)
                    assert "RequestException" in log_output
                    assert "API Error" in log_output

    def test_graphql_returns_empty_list(self):
        """Test the case where GraphQL returns no repositories"""
        provider = set_mocked_github_provider()
        provider.repositories = []
        provider.organizations = []

        with patch.object(Repository, "__init__", lambda x, y: None):
            repository_service = Repository(provider)
            repository_service.clients = [MagicMock()]
            repository_service.provider = provider

            with patch.object(
                repository_service, "_get_accessible_repos_graphql", return_value=[]
            ):
                with patch(
                    "prowler.providers.github.services.repository.repository_service.logger"
                ) as mock_logger:

                    repos = repository_service._list_repositories()

                    assert len(repos) == 0
                    mock_logger.warning.assert_called_with(
                        "Could not find any accessible repositories with the provided token."
                    )


class Test_Repository_Scoping:
    def setup_method(self):
        self.mock_repo1 = MagicMock()

@@ -123,7 +224,7 @@ class Test_Repository_Scoping:
        self.mock_repo1.pushed_at = datetime.now(timezone.utc)
        self.mock_repo1.delete_branch_on_merge = True
        self.mock_repo1.security_and_analysis = None
-       self.mock_repo1.get_contents.return_value = None
+       self.mock_repo1.get_contents.side_effect = [None, None, None]
        self.mock_repo1.get_branch.side_effect = Exception("404 Not Found")
        self.mock_repo1.get_dependabot_alerts.side_effect = Exception("404 Not Found")

@@ -138,200 +239,10 @@ class Test_Repository_Scoping:
        self.mock_repo2.pushed_at = datetime.now(timezone.utc)
        self.mock_repo2.delete_branch_on_merge = True
        self.mock_repo2.security_and_analysis = None
-       self.mock_repo2.get_contents.return_value = None
+       self.mock_repo2.get_contents.side_effect = [None, None, None]
        self.mock_repo2.get_branch.side_effect = Exception("404 Not Found")
        self.mock_repo2.get_dependabot_alerts.side_effect = Exception("404 Not Found")

    def test_no_repository_scoping(self):
        """Test that all repositories are returned when no scoping is specified"""
        provider = set_mocked_github_provider()
        provider.repositories = []
        provider.organizations = []

        mock_client = MagicMock()
        mock_user = MagicMock()
        mock_user.get_repos.return_value = [self.mock_repo1, self.mock_repo2]
        mock_client.get_user.return_value = mock_user

        with patch(
            "prowler.providers.github.services.repository.repository_service.GithubService.__init__"
        ):
            repository_service = Repository(provider)
            repository_service.clients = [mock_client]
            repository_service.provider = provider

            repos = repository_service._list_repositories()

            assert len(repos) == 2
            assert 1 in repos
            assert 2 in repos
            assert repos[1].name == "repo1"
            assert repos[2].name == "repo2"

    def test_specific_repository_scoping(self):
        """Test that only specified repositories are returned"""
        provider = set_mocked_github_provider()
        provider.repositories = ["owner1/repo1"]
        provider.organizations = []

        mock_client = MagicMock()
        mock_client.get_repo.return_value = self.mock_repo1

        with patch(
            "prowler.providers.github.services.repository.repository_service.GithubService.__init__"
        ):
            repository_service = Repository(provider)
            repository_service.clients = [mock_client]
            repository_service.provider = provider

            repos = repository_service._list_repositories()

            assert len(repos) == 1
            assert 1 in repos
            assert repos[1].name == "repo1"
            assert repos[1].full_name == "owner1/repo1"
            mock_client.get_repo.assert_called_once_with("owner1/repo1")

    def test_multiple_repository_scoping(self):
        """Test that multiple specified repositories are returned"""
        provider = set_mocked_github_provider()
        provider.repositories = ["owner1/repo1", "owner2/repo2"]
        provider.organizations = []

        mock_client = MagicMock()
        mock_client.get_repo.side_effect = [self.mock_repo1, self.mock_repo2]

        with patch(
            "prowler.providers.github.services.repository.repository_service.GithubService.__init__"
        ):
            repository_service = Repository(provider)
            repository_service.clients = [mock_client]
            repository_service.provider = provider

            repos = repository_service._list_repositories()

            assert len(repos) == 2
            assert 1 in repos
            assert 2 in repos
            assert repos[1].name == "repo1"
            assert repos[2].name == "repo2"
            assert mock_client.get_repo.call_count == 2

    def test_invalid_repository_format(self):
        """Test that invalid repository formats are skipped with warning"""
        provider = set_mocked_github_provider()
        provider.repositories = ["invalid-repo-name", "owner/valid-repo"]
        provider.organizations = []

        mock_client = MagicMock()
        mock_client.get_repo.return_value = self.mock_repo1

        with patch(
            "prowler.providers.github.services.repository.repository_service.GithubService.__init__"
        ):
            repository_service = Repository(provider)
            repository_service.clients = [mock_client]
            repository_service.provider = provider

            with patch(
                "prowler.providers.github.services.repository.repository_service.logger"
            ) as mock_logger:
                repos = repository_service._list_repositories()

                # Should only have the valid repository
                assert len(repos) == 1
                assert 1 in repos
                # Should log warning for invalid format
                assert mock_logger.warning.call_count >= 1
                # Check that at least one warning is about invalid format
                warning_calls = [
                    call[0][0] for call in mock_logger.warning.call_args_list
                ]
                assert any(
                    "should be in 'owner/repo-name' format" in call
                    for call in warning_calls
                )

    def test_repository_not_found(self):
        """Test that inaccessible repositories are skipped with warning"""
        provider = set_mocked_github_provider()
        provider.repositories = ["owner/nonexistent-repo"]
        provider.organizations = []

        mock_client = MagicMock()
        mock_client.get_repo.side_effect = Exception("404 Not Found")

        with patch(
            "prowler.providers.github.services.repository.repository_service.GithubService.__init__"
        ):
            repository_service = Repository(provider)
            repository_service.clients = [mock_client]
            repository_service.provider = provider

            repos = repository_service._list_repositories()

            # Should be empty since repository wasn't found
            assert len(repos) == 0

    def test_organization_scoping(self):
        """Test that repositories from specified organizations are returned"""
        provider = set_mocked_github_provider()
        provider.repositories = []
        provider.organizations = ["org1"]

        mock_client = MagicMock()
        mock_org = MagicMock()
        mock_org.get_repos.return_value = [self.mock_repo1]
        mock_client.get_organization.return_value = mock_org

        with patch(
            "prowler.providers.github.services.repository.repository_service.GithubService.__init__"
        ):
            repository_service = Repository(provider)
            repository_service.clients = [mock_client]
            repository_service.provider = provider

            repos = repository_service._list_repositories()

            assert len(repos) == 1
            assert 1 in repos
            assert repos[1].name == "repo1"
            mock_client.get_organization.assert_called_once_with("org1")

    def test_organization_as_user_fallback(self):
        """Test that organization scoping falls back to user when organization not found"""
        provider = set_mocked_github_provider()
        provider.repositories = []
        provider.organizations = ["user1"]

        mock_client = MagicMock()
        # Organization lookup fails
        mock_client.get_organization.side_effect = GithubException(
            404, "Not Found", None
        )
        # User lookup succeeds
        mock_user = MagicMock()
        mock_user.get_repos.return_value = [self.mock_repo1]
        mock_client.get_user.return_value = mock_user

        # Create service without calling the parent constructor
        repository_service = Repository.__new__(Repository)
        repository_service.clients = [mock_client]
        repository_service.provider = provider

        with patch(
            "prowler.providers.github.services.repository.repository_service.logger"
        ) as mock_logger:
            repos = repository_service._list_repositories()

            assert len(repos) == 1
            assert 1 in repos
            assert repos[1].name == "repo1"
            mock_client.get_organization.assert_called_once_with("user1")
            mock_client.get_user.assert_called_once_with("user1")
            # Should log info about trying as user
            mock_logger.info.assert_called()

    def test_combined_repository_and_organization_scoping(self):
        """Test that both repository and organization scoping can be used together"""
        provider = set_mocked_github_provider()
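The new Test_Repository_GraphQL class patches a _get_accessible_repos_graphql helper and requests.post, implying that unscoped runs now discover repositories through GitHub's GraphQL API. A minimal sketch of what such a helper could look like, assuming the standard viewer.repositories connection (the query shape, endpoint constant, and omitted pagination are assumptions, not code from this diff):

# Hedged sketch of a GraphQL-based repository discovery helper.
import requests

GITHUB_GRAPHQL_URL = "https://api.github.com/graphql"  # assumed endpoint

def get_accessible_repos_graphql(token: str) -> list[str]:
    # Ask GraphQL for the full names of repositories visible to the token;
    # a real implementation would page through `pageInfo.endCursor`.
    query = """
    query {
      viewer {
        repositories(first: 100, affiliations: [OWNER, COLLABORATOR, ORGANIZATION_MEMBER]) {
          nodes { nameWithOwner }
        }
      }
    }
    """
    response = requests.post(
        GITHUB_GRAPHQL_URL,
        json={"query": query},
        headers={"Authorization": f"Bearer {token}"},
        timeout=30,
    )
    response.raise_for_status()
    nodes = response.json()["data"]["viewer"]["repositories"]["nodes"]
    return [node["nameWithOwner"] for node in nodes]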
@@ -36,7 +36,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],
                notify_sender_blocked_addresses=["admin@example.com"],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=True,
            )
        }

@@ -93,7 +93,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],
                notify_sender_blocked_addresses=["admin@example.com"],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=True,
            ),
            "Policy1": OutboundSpamPolicy(

@@ -102,7 +102,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],
                notify_sender_blocked_addresses=["admin@example.com"],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=False,
            ),
        }

@@ -177,7 +177,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],
                notify_sender_blocked_addresses=["admin@example.com"],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=True,
            ),
            "Policy1": OutboundSpamPolicy(

@@ -186,7 +186,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],
                notify_sender_blocked_addresses=[],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=False,
            ),
        }

@@ -261,7 +261,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],
                notify_sender_blocked_addresses=[],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=True,
            ),
            "Policy1": OutboundSpamPolicy(

@@ -270,7 +270,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],
                notify_sender_blocked_addresses=["admin@example.com"],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=False,
            ),
        }

@@ -344,7 +344,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],
                notify_sender_blocked_addresses=[],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=True,
            )
        }

@@ -398,7 +398,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],
                notify_sender_blocked_addresses=[],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=True,
            ),
            "Policy1": OutboundSpamPolicy(

@@ -407,7 +407,7 @@ class Test_defender_antispam_outbound_policy_configured:
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],
                notify_sender_blocked_addresses=[],
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                default=False,
            ),
        }

@@ -32,7 +32,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
        defender_client.outbound_spam_policies = {
            "Default": OutboundSpamPolicy(
                name="Default",
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                notify_limit_exceeded=True,
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],

@@ -86,7 +86,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
        defender_client.outbound_spam_policies = {
            "Default": OutboundSpamPolicy(
                name="Default",
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                notify_limit_exceeded=True,
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],

@@ -95,7 +95,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
            ),
            "Policy1": OutboundSpamPolicy(
                name="Policy1",
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                notify_limit_exceeded=True,
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],

@@ -172,7 +172,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
        defender_client.outbound_spam_policies = {
            "Default": OutboundSpamPolicy(
                name="Default",
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                notify_limit_exceeded=True,
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],

@@ -181,7 +181,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
            ),
            "Policy1": OutboundSpamPolicy(
                name="Policy1",
-               auto_forwarding_mode=True,
+               auto_forwarding_mode="On",
                notify_limit_exceeded=False,
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],

@@ -258,7 +258,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
        defender_client.outbound_spam_policies = {
            "Default": OutboundSpamPolicy(
                name="Default",
-               auto_forwarding_mode=True,
+               auto_forwarding_mode="On",
                notify_limit_exceeded=False,
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],

@@ -267,7 +267,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
            ),
            "Policy1": OutboundSpamPolicy(
                name="Policy1",
-               auto_forwarding_mode=False,
+               auto_forwarding_mode="Off",
                notify_limit_exceeded=True,
                notify_sender_blocked=True,
                notify_limit_exceeded_addresses=["admin@example.com"],

@@ -343,7 +343,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
        defender_client.outbound_spam_policies = {
            "Default": OutboundSpamPolicy(
                name="Default",
-               auto_forwarding_mode=True,
+               auto_forwarding_mode="On",
                notify_limit_exceeded=False,
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],

@@ -397,7 +397,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
        defender_client.outbound_spam_policies = {
            "Default": OutboundSpamPolicy(
                name="Default",
-               auto_forwarding_mode=True,
+               auto_forwarding_mode="On",
                notify_limit_exceeded=False,
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],

@@ -406,7 +406,7 @@ class Test_defender_antispam_outbound_policy_forwarding_disabled:
            ),
            "Policy1": OutboundSpamPolicy(
                name="Policy1",
-               auto_forwarding_mode=True,
+               auto_forwarding_mode="On",
                notify_limit_exceeded=False,
                notify_sender_blocked=False,
                notify_limit_exceeded_addresses=[],

@@ -180,7 +180,7 @@ def mock_defender_get_outbound_spam_filter_policy(_):
            notify_limit_exceeded=True,
            notify_limit_exceeded_addresses=["security@example.com"],
            notify_sender_blocked_addresses=["security@example.com"],
-           auto_forwarding_mode=False,
+           auto_forwarding_mode="Off",
            default=False,
        ),
        "Policy2": OutboundSpamPolicy(

@@ -189,7 +189,7 @@ def mock_defender_get_outbound_spam_filter_policy(_):
            notify_limit_exceeded=False,
            notify_limit_exceeded_addresses=[],
            notify_sender_blocked_addresses=[],
-           auto_forwarding_mode=True,
+           auto_forwarding_mode="On",
            default=True,
        ),
    }

@@ -438,7 +438,7 @@ class Test_Defender_Service:
            assert outbound_spam_policies[
                "Policy1"
            ].notify_sender_blocked_addresses == ["security@example.com"]
-           assert outbound_spam_policies["Policy1"].auto_forwarding_mode is False
+           assert outbound_spam_policies["Policy1"].auto_forwarding_mode == "Off"
            assert outbound_spam_policies["Policy1"].default is False
            assert outbound_spam_policies["Policy2"].name == "Policy2"
            assert outbound_spam_policies["Policy2"].notify_sender_blocked is False

@@ -449,7 +449,7 @@ class Test_Defender_Service:
            assert (
                outbound_spam_policies["Policy2"].notify_sender_blocked_addresses == []
            )
-           assert outbound_spam_policies["Policy2"].auto_forwarding_mode is True
+           assert outbound_spam_policies["Policy2"].auto_forwarding_mode == "On"
            assert outbound_spam_policies["Policy2"].default is True
            defender_client.powershell.close()
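Every hunk in these Defender tests swaps auto_forwarding_mode from a boolean to the string value Exchange Online reports ("Off" or "On"), so the model field and the assertions now compare strings. A hedged sketch of the model side (the possible "Automatic" value and the check function are assumptions, not code from this diff):

# Hedged sketch: OutboundSpamPolicy storing the raw AutoForwardingMode string.
from pydantic import BaseModel

class OutboundSpamPolicy(BaseModel):
    name: str
    auto_forwarding_mode: str  # "Off", "On" (possibly also "Automatic")
    default: bool = False

def forwarding_disabled(policy: OutboundSpamPolicy) -> bool:
    # A check like the one tested above would only pass "Off".
    return policy.auto_forwarding_mode == "Off"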
@@ -12,6 +12,18 @@ All notable changes to the **Prowler UI** are documented in this file.

### ❌ Removed

---

## [1.10.1] (Prowler v5.10.1)

### 🐞 Fixed

- Field for `Assume Role` in AWS role credentials form shown again [(#8484)](https://github.com/prowler-cloud/prowler/pull/8484)
- `GitHub` submenu added to High Risk Findings [(#8488)](https://github.com/prowler-cloud/prowler/pull/8488)
- Improved Overview chart `Findings by Severity` spacing [(#8491)](https://github.com/prowler-cloud/prowler/pull/8491)

---

## [1.10.0] (Prowler v5.10.0)
@@ -73,15 +73,15 @@ export const FindingsBySeverityChart = ({
    <Card className="h-full dark:bg-prowler-blue-400">
      <CardBody>
        <div className="my-auto">
-         <ChartContainer config={chartConfig}>
+         <ChartContainer
+           config={chartConfig}
+           className="aspect-auto h-[450px] w-full"
+         >
            <BarChart
              accessibilityLayer
              data={chartData}
              layout="vertical"
              barGap={2}
-             height={200}
-             margin={{ left: 50 }}
-             width={500}
+             margin={{ left: 72, right: 16, top: 8, bottom: 8 }}
            >
              <YAxis
                dataKey="severity"

@@ -104,7 +104,7 @@ export const FindingsBySeverityChart = ({
                dataKey="findings"
                layout="vertical"
                radius={12}
-               barSize={20}
+               barSize={26}
                onClick={(data) => {
                  const severity = data.severity as keyof typeof chartConfig;
                  const link = chartConfig[severity]?.link;

@@ -119,6 +119,14 @@ export const FindingsBySeverityChart = ({
                  offset={5}
                  className="fill-foreground font-bold"
                  fontSize={11}
+                 formatter={(value: number) => (value === 0 ? "" : value)}
                />
+               <LabelList
+                 position="insideLeft"
+                 offset={6}
+                 className="fill-foreground font-bold"
+                 fontSize={11}
+                 formatter={(value: number) => (value === 0 ? "0" : "")}
+               />
              </Bar>
            </BarChart>

@@ -121,7 +121,7 @@ export const FindingsByStatusChart: React.FC<FindingsByStatusChartProps> = ({
  return (
    <Card className="h-full dark:bg-prowler-blue-400">
      <CardBody>
-       <div className="flex h-full flex-col items-center justify-between">
+       <div className="flex h-full flex-col items-center justify-around">
          <ChartContainer
            config={chartConfig}
            className="aspect-square w-[250px] min-w-[250px]"

@@ -35,7 +35,9 @@ export const AWSRoleCredentialsForm = ({
  });
  const [showOptionalRole, setShowOptionalRole] = useState(false);
  const showRoleSection =
-   (isCloudEnv && credentialsType === "aws-sdk-default") || showOptionalRole;
+   type === "providers" ||
+   (isCloudEnv && credentialsType === "aws-sdk-default") ||
+   showOptionalRole;

  return (
    <>
@@ -27,6 +27,7 @@ import {
  CircleHelpIcon,
  DocIcon,
  GCPIcon,
+ GithubIcon,
  KubernetesIcon,
  LighthouseIcon,
  M365Icon,

@@ -117,6 +118,11 @@ export const getMenuList = (pathname: string): GroupProps[] => {
          label: "Kubernetes",
          icon: KubernetesIcon,
        },
+       {
+         href: "/findings?filter[status__in]=FAIL&filter[severity__in]=critical%2Chigh%2Cmedium&filter[provider_type__in]=github&sort=severity,-inserted_at",
+         label: "Github",
+         icon: GithubIcon,
+       },
      ],
      defaultOpen: false,
    },
@@ -10,13 +10,14 @@ import sys
file_name = sys.argv[1]

# read the CSV file rows and use the column fields to form the Prowler compliance JSON file 'cis_1.0_github.json'
-output = {"Framework": "CIS-GitHub", "Version": "1.5", "Requirements": []}
+output = {"Framework": "CIS-GitHub", "Version": "1.0", "Requirements": []}
with open(file_name, newline="", encoding="utf-8") as f:
-   reader = csv.reader(f, delimiter=",")
+   reader = csv.reader(f, delimiter=";")
    for row in reader:
        attribute = {
-           "Section": row[3],
-           "Profile": row[4],
+           "Section": row[0],
+           "Subsection": row[1],
+           "Profile": row[3],
            "AssessmentStatus": row[5],
            "Description": row[6],
            "RationaleStatement": row[7],

@@ -24,17 +25,19 @@ with open(file_name, newline="", encoding="utf-8") as f:
            "RemediationProcedure": row[9],
            "AuditProcedure": row[10],
            "AdditionalInformation": row[11],
-           "References": row[12],
+           "References": row[25],
+           "DefaultValue": row[26],
        }
        output["Requirements"].append(
            {
-               "Id": row[0],
-               "Description": row[1],
-               "Checks": list(map(str.strip, row[2].split(","))),
+               "Id": row[2],
+               "Description": row[6],
+               "Checks": [],
                "Attributes": [attribute],
            }
        )


# Write the output Prowler compliance JSON file 'cis_1.0_github.json' locally
with open("cis_1.0_github.json", "w", encoding="utf-8") as outfile:
    json.dump(output, outfile, indent=4, ensure_ascii=False)
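After this rewrite the script reads a semicolon-delimited CSV, passed as its single command-line argument, pulling Section/Subsection/Id/Profile from the leading columns and References/DefaultValue from columns 25 and 26. A hedged illustration of one input row and the requirement entry it produces (sample values are made up, hidden columns are left blank, and the attribute dict is abbreviated):

# Hedged illustration of the column layout the rewritten script assumes.
row = [""] * 27
row[0], row[1], row[2] = "1", "1.1", "1.1.3"             # Section, Subsection, Id
row[3], row[5] = "Level 1", "Automated"                  # Profile, AssessmentStatus
row[6] = "Ensure any change to code receives approval"   # Description
row[25], row[26] = "https://example.com/ref", "N/A"      # References, DefaultValue

requirement = {
    "Id": row[2],
    "Description": row[6],
    "Checks": [],
    "Attributes": [{"Section": row[0], "Subsection": row[1], "Profile": row[3]}],
}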