mirror of
https://github.com/prowler-cloud/prowler.git
synced 2026-03-22 03:08:23 +00:00
Compare commits
76 Commits
PRWLR-5956
...
fix-audit-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
70fde82284 | ||
|
|
620de6f68e | ||
|
|
20495d2b1f | ||
|
|
2db9c359a0 | ||
|
|
1584ac3dec | ||
|
|
5cf72e5a27 | ||
|
|
de01f45f6e | ||
|
|
be24317733 | ||
|
|
e7b2b344e8 | ||
|
|
34c01d2ee4 | ||
|
|
3a0dcba279 | ||
|
|
dda8c0264c | ||
|
|
f1cea0c3cd | ||
|
|
f7766fa4de | ||
|
|
fdcc2ac5cb | ||
|
|
9099bd79f8 | ||
|
|
a01683d8f6 | ||
|
|
6d2b2a9a93 | ||
|
|
de4166bf0d | ||
|
|
1cbef30788 | ||
|
|
89c6e27489 | ||
|
|
f74ffc530d | ||
|
|
441d4d6a38 | ||
|
|
3c6b9d63a6 | ||
|
|
254d8616b7 | ||
|
|
d3bc6fda74 | ||
|
|
e4a5d9376f | ||
|
|
523605e3e7 | ||
|
|
ed33fac337 | ||
|
|
bf0e62aca5 | ||
|
|
60c0b79b10 | ||
|
|
f9d2e7aa93 | ||
|
|
0646748e24 | ||
|
|
f6408e9df7 | ||
|
|
5769bc815c | ||
|
|
5a3e3e9b1f | ||
|
|
26cbafa204 | ||
|
|
d14541d1de | ||
|
|
3955ebd56c | ||
|
|
e212645cf0 | ||
|
|
db9c1c24d3 | ||
|
|
0a305c281f | ||
|
|
43c96a7875 | ||
|
|
3a93aba7d7 | ||
|
|
3d563356e5 | ||
|
|
9205ef30f8 | ||
|
|
19c2dccc6d | ||
|
|
8f819048ed | ||
|
|
3a3bb44f11 | ||
|
|
f8e713a544 | ||
|
|
573f1eba56 | ||
|
|
a36be258d8 | ||
|
|
690ec057c3 | ||
|
|
2681feb1f6 | ||
|
|
e662adb8c5 | ||
|
|
c94bd96c93 | ||
|
|
6d85433194 | ||
|
|
7a6092a779 | ||
|
|
4c84529aed | ||
|
|
512d3e018f | ||
|
|
c6aff985c9 | ||
|
|
7fadf31a2b | ||
|
|
e7d098ed1e | ||
|
|
21fba27355 | ||
|
|
74e37307f7 | ||
|
|
d9d7c009a5 | ||
|
|
2220cf9733 | ||
|
|
3325b72b86 | ||
|
|
9182d56246 | ||
|
|
299ece19a8 | ||
|
|
0a0732d7c0 | ||
|
|
28011d97a9 | ||
|
|
e71b0d1b6a | ||
|
|
ec01b62a82 | ||
|
|
12b45c6896 | ||
|
|
51c60dd4ee |
@@ -178,11 +178,7 @@ Prowler will follow the same credentials search as [Google authentication librar
|
||||
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
|
||||
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
|
||||
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
|
||||
|
||||
- Viewer
|
||||
- Security Reviewer
|
||||
- Stackdriver Account Viewer
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.
|
||||
|
||||
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
|
||||
|
||||
|
||||
@@ -97,10 +97,6 @@ Prowler will follow the same credentials search as [Google authentication librar
|
||||
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
|
||||
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
|
||||
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
|
||||
|
||||
- Viewer
|
||||
- Security Reviewer
|
||||
- Stackdriver Account Viewer
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.
|
||||
|
||||
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
|
||||
|
||||
16
docs/tutorials/azure/use-non-default-cloud.md
Normal file
16
docs/tutorials/azure/use-non-default-cloud.md
Normal file
@@ -0,0 +1,16 @@
|
||||
# Use non default Azure regions
|
||||
|
||||
Microsoft provides clouds for compliance with regional laws, which are available for your use.
|
||||
By default, Prowler uses the `AzureCloud` cloud, which is the commercial one (you can list all the available clouds with `az cloud list --output table`).
|
||||
|
||||
At the time of writing this documentation the available Azure Clouds from different regions are the following:
|
||||
- AzureCloud
|
||||
- AzureChinaCloud
|
||||
- AzureUSGovernment
|
||||
- AzureGermanCloud
|
||||
|
||||
If you want to change the default one you must include the flag `--azure-region`, e.g.:
|
||||
|
||||
```console
|
||||
prowler azure --az-cli-auth --azure-region AzureChinaCloud
|
||||
```
|
||||
43
docs/tutorials/custom-checks-metadata.md
Normal file
43
docs/tutorials/custom-checks-metadata.md
Normal file
@@ -0,0 +1,43 @@
|
||||
# Custom Checks Metadata
|
||||
|
||||
In certain organizations, the severity of specific checks might differ from the default values defined in the check's metadata. For instance, while `s3_bucket_level_public_access_block` could be deemed `critical` for some organizations, others might assign a different severity level.
|
||||
|
||||
The custom metadata option offers a means to override the default metadata set by Prowler.
|
||||
|
||||
You can utilize `--custom-checks-metadata-file` followed by the path to your custom checks metadata YAML file.
|
||||
|
||||
## Available Fields
|
||||
|
||||
The list of supported check metadata fields that can be overridden is as follows:
|
||||
|
||||
- Severity
|
||||
|
||||
## File Syntax
|
||||
|
||||
This feature is available for all the providers supported in Prowler since the metadata format is common between all the providers. The following is the YAML format for the custom checks metadata file:
|
||||
```yaml title="custom_checks_metadata.yaml"
|
||||
CustomChecksMetadata:
|
||||
aws:
|
||||
Checks:
|
||||
s3_bucket_level_public_access_block:
|
||||
Severity: high
|
||||
s3_bucket_no_mfa_delete:
|
||||
Severity: high
|
||||
azure:
|
||||
Checks:
|
||||
storage_infrastructure_encryption_is_enabled:
|
||||
Severity: medium
|
||||
gcp:
|
||||
Checks:
|
||||
compute_instance_public_ip:
|
||||
Severity: critical
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
Executing the following command will assess all checks and generate a report while overriding the metadata for those checks:
|
||||
```sh
|
||||
prowler <provider> --custom-checks-metadata-file <path/to/custom/metadata>
|
||||
```
|
||||
|
||||
This customization feature enables organizations to tailor the severity of specific checks based on their unique requirements, providing greater flexibility in security assessment and reporting.
|
||||
@@ -22,8 +22,4 @@ Prowler will follow the same credentials search as [Google authentication librar
|
||||
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
|
||||
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
|
||||
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
|
||||
|
||||
- Viewer
|
||||
- Security Reviewer
|
||||
- Stackdriver Account Viewer
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.
|
||||
|
||||
@@ -38,6 +38,7 @@ nav:
|
||||
- Logging: tutorials/logging.md
|
||||
- Allowlist: tutorials/allowlist.md
|
||||
- Check Aliases: tutorials/check-aliases.md
|
||||
- Custom Metadata: tutorials/custom-checks-metadata.md
|
||||
- Ignore Unused Services: tutorials/ignore-unused-services.md
|
||||
- Pentesting: tutorials/pentesting.md
|
||||
- Developer Guide: developer-guide/introduction.md
|
||||
@@ -56,6 +57,7 @@ nav:
|
||||
- Boto3 Configuration: tutorials/aws/boto3-configuration.md
|
||||
- Azure:
|
||||
- Authentication: tutorials/azure/authentication.md
|
||||
- Non default clouds: tutorials/azure/use-non-default-cloud.md
|
||||
- Subscriptions: tutorials/azure/subscriptions.md
|
||||
- Google Cloud:
|
||||
- Authentication: tutorials/gcp/authentication.md
|
||||
|
||||
543
poetry.lock
generated
543
poetry.lock
generated
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "about-time"
|
||||
@@ -11,15 +11,32 @@ files = [
|
||||
{file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "adal"
|
||||
version = "1.2.7"
|
||||
description = "Note: This library is already replaced by MSAL Python, available here: https://pypi.org/project/msal/ .ADAL Python remains available here as a legacy. The ADAL for Python library makes it easy for python application to authenticate to Azure Active Directory (AAD) in order to access AAD protected web resources."
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "adal-1.2.7-py2.py3-none-any.whl", hash = "sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d"},
|
||||
{file = "adal-1.2.7.tar.gz", hash = "sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = ">=1.1.0"
|
||||
PyJWT = ">=1.0.0,<3"
|
||||
python-dateutil = ">=2.1.0,<3"
|
||||
requests = ">=2.0.0,<3"
|
||||
|
||||
[[package]]
|
||||
name = "alive-progress"
|
||||
version = "3.1.4"
|
||||
version = "3.1.5"
|
||||
description = "A new kind of Progress Bar, with real-time throughput, ETA, and very cool animations!"
|
||||
optional = false
|
||||
python-versions = ">=3.7, <4"
|
||||
files = [
|
||||
{file = "alive-progress-3.1.4.tar.gz", hash = "sha256:74a95d8d0d42bc99d3a3725dbd06ebb852245f1b64e301a7c375b92b22663f7b"},
|
||||
{file = "alive_progress-3.1.4-py3-none-any.whl", hash = "sha256:c80ad87ce9c1054b01135a87fae69ecebbfc2107497ae87cbe6aec7e534903db"},
|
||||
{file = "alive-progress-3.1.5.tar.gz", hash = "sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98"},
|
||||
{file = "alive_progress-3.1.5-py3-none-any.whl", hash = "sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -58,6 +75,41 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-
|
||||
tests = ["attrs[tests-no-zope]", "zope-interface"]
|
||||
tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
|
||||
[[package]]
|
||||
name = "aws-sam-translator"
|
||||
version = "1.80.0"
|
||||
description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates"
|
||||
optional = false
|
||||
python-versions = ">=3.7, <=4.0, !=4.0"
|
||||
files = [
|
||||
{file = "aws-sam-translator-1.80.0.tar.gz", hash = "sha256:36afb8b802af0180a35efa68a8ab19d5d929d0a6a649a0101e8a4f8e1f05681f"},
|
||||
{file = "aws_sam_translator-1.80.0-py3-none-any.whl", hash = "sha256:f00215f9314cef1bbbdbd7520e3b0c75a76b88bdc3f0dedb6a2c69a12e904b12"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
boto3 = ">=1.19.5,<2.dev0"
|
||||
jsonschema = ">=3.2,<5"
|
||||
pydantic = ">=1.8,<3"
|
||||
typing-extensions = ">=4.4,<5"
|
||||
|
||||
[package.extras]
|
||||
dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "aws-xray-sdk"
|
||||
version = "2.12.1"
|
||||
description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "aws-xray-sdk-2.12.1.tar.gz", hash = "sha256:0bbfdbc773cfef4061062ac940b85e408297a2242f120bcdfee2593209b1e432"},
|
||||
{file = "aws_xray_sdk-2.12.1-py2.py3-none-any.whl", hash = "sha256:f6803832dc08d18cc265e2327a69bfa9ee41c121fac195edc9745d04b7a566c3"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
botocore = ">=1.11.3"
|
||||
wrapt = "*"
|
||||
|
||||
[[package]]
|
||||
name = "awsipranges"
|
||||
version = "0.3.3"
|
||||
@@ -212,13 +264,13 @@ msrest = ">=0.7.1"
|
||||
|
||||
[[package]]
|
||||
name = "azure-storage-blob"
|
||||
version = "12.18.3"
|
||||
version = "12.19.0"
|
||||
description = "Microsoft Azure Blob Storage Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "azure-storage-blob-12.18.3.tar.gz", hash = "sha256:d8ced0deee3367fa3d4f3d1a03cd9edadf4440c0a371f503d623fa6c807554ee"},
|
||||
{file = "azure_storage_blob-12.18.3-py3-none-any.whl", hash = "sha256:c278dde2ac41857a68d615c9f2b36d894ba877a7e84d62795603c7e79d0bb5e9"},
|
||||
{file = "azure-storage-blob-12.19.0.tar.gz", hash = "sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897"},
|
||||
{file = "azure_storage_blob-12.19.0-py3-none-any.whl", hash = "sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -435,6 +487,29 @@ files = [
|
||||
[package.dependencies]
|
||||
pycparser = "*"
|
||||
|
||||
[[package]]
|
||||
name = "cfn-lint"
|
||||
version = "0.83.3"
|
||||
description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved"
|
||||
optional = false
|
||||
python-versions = ">=3.7, <=4.0, !=4.0"
|
||||
files = [
|
||||
{file = "cfn-lint-0.83.3.tar.gz", hash = "sha256:cb1b5da6f3f15742f07f89006b9cc6ca459745f350196b559688ac0982111c5f"},
|
||||
{file = "cfn_lint-0.83.3-py3-none-any.whl", hash = "sha256:7acb5c40b6ae454006bfa19d586c67d0c4ed9a6dbb344fd470bc773981a0642a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aws-sam-translator = ">=1.79.0"
|
||||
jschema-to-python = ">=1.2.3,<1.3.0"
|
||||
jsonpatch = "*"
|
||||
jsonschema = ">=3.0,<5"
|
||||
junit-xml = ">=1.9,<2.0"
|
||||
networkx = ">=2.4,<4"
|
||||
pyyaml = ">5.4"
|
||||
regex = ">=2021.7.1"
|
||||
sarif-om = ">=1.0.4,<1.1.0"
|
||||
sympy = ">=1.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.1.0"
|
||||
@@ -641,34 +716,34 @@ toml = ["tomli"]
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "41.0.4"
|
||||
version = "41.0.6"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"},
|
||||
{file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"},
|
||||
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"},
|
||||
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"},
|
||||
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"},
|
||||
{file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"},
|
||||
{file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"},
|
||||
{file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"},
|
||||
{file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"},
|
||||
{file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"},
|
||||
{file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"},
|
||||
{file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"},
|
||||
{file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"},
|
||||
{file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"},
|
||||
{file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"},
|
||||
{file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"},
|
||||
{file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"},
|
||||
{file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"},
|
||||
{file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"},
|
||||
{file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"},
|
||||
{file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"},
|
||||
{file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"},
|
||||
{file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-win32.whl", hash = "sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660"},
|
||||
{file = "cryptography-41.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7"},
|
||||
{file = "cryptography-41.0.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c"},
|
||||
{file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9"},
|
||||
{file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da"},
|
||||
{file = "cryptography-41.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36"},
|
||||
{file = "cryptography-41.0.6-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65"},
|
||||
{file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead"},
|
||||
{file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09"},
|
||||
{file = "cryptography-41.0.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c"},
|
||||
{file = "cryptography-41.0.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed"},
|
||||
{file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6"},
|
||||
{file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43"},
|
||||
{file = "cryptography-41.0.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4"},
|
||||
{file = "cryptography-41.0.6.tar.gz", hash = "sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -757,6 +832,24 @@ toml = "*"
|
||||
conda = ["pyyaml"]
|
||||
pipenv = ["pipenv"]
|
||||
|
||||
[[package]]
|
||||
name = "ecdsa"
|
||||
version = "0.18.0"
|
||||
description = "ECDSA cryptographic signature library (pure python)"
|
||||
optional = false
|
||||
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
files = [
|
||||
{file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"},
|
||||
{file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.9.0"
|
||||
|
||||
[package.extras]
|
||||
gmpy = ["gmpy"]
|
||||
gmpy2 = ["gmpy2"]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.1.1"
|
||||
@@ -902,13 +995,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-python-client"
|
||||
version = "2.105.0"
|
||||
version = "2.108.0"
|
||||
description = "Google API Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "google-api-python-client-2.105.0.tar.gz", hash = "sha256:0a8b32cfc2d9b3c1868ae6faef7ee1ab9c89a6cec30be709ea9c97f9a3e5902d"},
|
||||
{file = "google_api_python_client-2.105.0-py2.py3-none-any.whl", hash = "sha256:571ce7c41e53415e385aab5a955725f71780550683ffcb71596f5809677d40b7"},
|
||||
{file = "google-api-python-client-2.108.0.tar.gz", hash = "sha256:6396efca83185fb205c0abdbc1c2ee57b40475578c6af37f6d0e30a639aade99"},
|
||||
{file = "google_api_python_client-2.108.0-py2.py3-none-any.whl", hash = "sha256:9d1327213e388943ebcd7db5ce6e7f47987a7e6874e3e1f6116010eea4a0e75d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -987,6 +1080,17 @@ files = [
|
||||
[package.extras]
|
||||
test = ["pytest", "sphinx", "sphinx-autobuild", "twine", "wheel"]
|
||||
|
||||
[[package]]
|
||||
name = "graphql-core"
|
||||
version = "3.2.3"
|
||||
description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL."
|
||||
optional = false
|
||||
python-versions = ">=3.6,<4"
|
||||
files = [
|
||||
{file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"},
|
||||
{file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httplib2"
|
||||
version = "0.22.0"
|
||||
@@ -1101,6 +1205,74 @@ files = [
|
||||
{file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jschema-to-python"
|
||||
version = "1.2.3"
|
||||
description = "Generate source code for Python classes from a JSON schema."
|
||||
optional = false
|
||||
python-versions = ">= 2.7"
|
||||
files = [
|
||||
{file = "jschema_to_python-1.2.3-py3-none-any.whl", hash = "sha256:8a703ca7604d42d74b2815eecf99a33359a8dccbb80806cce386d5e2dd992b05"},
|
||||
{file = "jschema_to_python-1.2.3.tar.gz", hash = "sha256:76ff14fe5d304708ccad1284e4b11f96a658949a31ee7faed9e0995279549b91"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = "*"
|
||||
jsonpickle = "*"
|
||||
pbr = "*"
|
||||
|
||||
[[package]]
|
||||
name = "jsondiff"
|
||||
version = "2.0.0"
|
||||
description = "Diff JSON and JSON-like structures in Python"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "jsondiff-2.0.0-py3-none-any.whl", hash = "sha256:689841d66273fc88fc79f7d33f4c074774f4f214b6466e3aff0e5adaf889d1e0"},
|
||||
{file = "jsondiff-2.0.0.tar.gz", hash = "sha256:2795844ef075ec8a2b8d385c4d59f5ea48b08e7180fce3cb2787be0db00b1fb4"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonpatch"
|
||||
version = "1.33"
|
||||
description = "Apply JSON-Patches (RFC 6902)"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
|
||||
files = [
|
||||
{file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"},
|
||||
{file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
jsonpointer = ">=1.9"
|
||||
|
||||
[[package]]
|
||||
name = "jsonpickle"
|
||||
version = "3.0.2"
|
||||
description = "Python library for serializing any arbitrary object graph into JSON"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "jsonpickle-3.0.2-py3-none-any.whl", hash = "sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f"},
|
||||
{file = "jsonpickle-3.0.2.tar.gz", hash = "sha256:e37abba4bfb3ca4a4647d28bb9f4706436f7b46c8a8333b4a718abafa8e46b37"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"]
|
||||
testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"]
|
||||
testing-libs = ["simplejson", "ujson"]
|
||||
|
||||
[[package]]
|
||||
name = "jsonpointer"
|
||||
version = "2.4"
|
||||
description = "Identify specific nodes in a JSON document (RFC 6901)"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
|
||||
files = [
|
||||
{file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"},
|
||||
{file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonschema"
|
||||
version = "4.18.0"
|
||||
@@ -1153,6 +1325,20 @@ files = [
|
||||
[package.dependencies]
|
||||
referencing = ">=0.28.0"
|
||||
|
||||
[[package]]
|
||||
name = "junit-xml"
|
||||
version = "1.9"
|
||||
description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f"},
|
||||
{file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = "*"
|
||||
|
||||
[[package]]
|
||||
name = "lazy-object-proxy"
|
||||
version = "1.9.0"
|
||||
@@ -1365,13 +1551,13 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp
|
||||
|
||||
[[package]]
|
||||
name = "mkdocs-material"
|
||||
version = "9.4.7"
|
||||
version = "9.4.14"
|
||||
description = "Documentation that simply works"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "mkdocs_material-9.4.7-py3-none-any.whl", hash = "sha256:4d698d52bb6a6a3c452ab854481c4cdb68453a0420956a6aee2de55fe15fe610"},
|
||||
{file = "mkdocs_material-9.4.7.tar.gz", hash = "sha256:e704e001c9ef17291e1d3462c202425217601653e18f68f85d28eff4690e662b"},
|
||||
{file = "mkdocs_material-9.4.14-py3-none-any.whl", hash = "sha256:dbc78a4fea97b74319a6aa9a2f0be575a6028be6958f813ba367188f7b8428f6"},
|
||||
{file = "mkdocs_material-9.4.14.tar.gz", hash = "sha256:a511d3ff48fa8718b033e7e37d17abd9cc1de0fdf0244a625ca2ae2387e2416d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1421,53 +1607,84 @@ test = ["pytest", "pytest-cov"]
|
||||
|
||||
[[package]]
|
||||
name = "moto"
|
||||
version = "4.2.7"
|
||||
version = "4.2.10"
|
||||
description = ""
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "moto-4.2.7-py2.py3-none-any.whl", hash = "sha256:3e0ef388900448485cd6eff18e9f7fcaa6cf4560b6fb536ba2e2e1278a5ecc59"},
|
||||
{file = "moto-4.2.7.tar.gz", hash = "sha256:1298006aaa6996b886658eb194cac0e3a5679c9fcce6cb13e741ccc5a7247abb"},
|
||||
{file = "moto-4.2.10-py2.py3-none-any.whl", hash = "sha256:5cf0736d1f43cb887498d00b00ae522774bfddb7db1f4994fedea65b290b9f0e"},
|
||||
{file = "moto-4.2.10.tar.gz", hash = "sha256:92595fe287474a31ac3ef847941ebb097e8ffb0c3d6c106e47cf573db06933b2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aws-xray-sdk = {version = ">=0.93,<0.96 || >0.96", optional = true, markers = "extra == \"all\""}
|
||||
boto3 = ">=1.9.201"
|
||||
botocore = ">=1.12.201"
|
||||
cfn-lint = {version = ">=0.40.0", optional = true, markers = "extra == \"all\""}
|
||||
cryptography = ">=3.3.1"
|
||||
docker = {version = ">=3.0.0", optional = true, markers = "extra == \"all\""}
|
||||
ecdsa = {version = "!=0.15", optional = true, markers = "extra == \"all\""}
|
||||
graphql-core = {version = "*", optional = true, markers = "extra == \"all\""}
|
||||
Jinja2 = ">=2.10.1"
|
||||
jsondiff = {version = ">=1.1.2", optional = true, markers = "extra == \"all\""}
|
||||
multipart = {version = "*", optional = true, markers = "extra == \"all\""}
|
||||
openapi-spec-validator = {version = ">=0.5.0", optional = true, markers = "extra == \"all\""}
|
||||
py-partiql-parser = {version = "0.4.2", optional = true, markers = "extra == \"all\""}
|
||||
pyparsing = {version = ">=3.0.7", optional = true, markers = "extra == \"all\""}
|
||||
python-dateutil = ">=2.1,<3.0.0"
|
||||
python-jose = {version = ">=3.1.0,<4.0.0", extras = ["cryptography"], optional = true, markers = "extra == \"all\""}
|
||||
PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"all\""}
|
||||
requests = ">=2.5"
|
||||
responses = ">=0.13.0"
|
||||
setuptools = {version = "*", optional = true, markers = "extra == \"all\""}
|
||||
sshpubkeys = {version = ">=3.1.0", optional = true, markers = "extra == \"all\""}
|
||||
werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1"
|
||||
xmltodict = "*"
|
||||
|
||||
[package.extras]
|
||||
all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
|
||||
all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
|
||||
apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
|
||||
apigatewayv2 = ["PyYAML (>=5.1)"]
|
||||
appsync = ["graphql-core"]
|
||||
awslambda = ["docker (>=3.0.0)"]
|
||||
batch = ["docker (>=3.0.0)"]
|
||||
cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
|
||||
cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
|
||||
cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
|
||||
ds = ["sshpubkeys (>=3.1.0)"]
|
||||
dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.1)"]
|
||||
dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.1)"]
|
||||
dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.2)"]
|
||||
dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.2)"]
|
||||
ebs = ["sshpubkeys (>=3.1.0)"]
|
||||
ec2 = ["sshpubkeys (>=3.1.0)"]
|
||||
efs = ["sshpubkeys (>=3.1.0)"]
|
||||
eks = ["sshpubkeys (>=3.1.0)"]
|
||||
glue = ["pyparsing (>=3.0.7)"]
|
||||
iotdata = ["jsondiff (>=1.1.2)"]
|
||||
proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
|
||||
resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"]
|
||||
proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
|
||||
resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"]
|
||||
route53resolver = ["sshpubkeys (>=3.1.0)"]
|
||||
s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.4.1)"]
|
||||
s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.4.1)"]
|
||||
server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
|
||||
s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.4.2)"]
|
||||
s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.4.2)"]
|
||||
server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
|
||||
ssm = ["PyYAML (>=5.1)"]
|
||||
xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"]
|
||||
|
||||
[[package]]
|
||||
name = "mpmath"
|
||||
version = "1.3.0"
|
||||
description = "Python library for arbitrary-precision floating-point arithmetic"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
|
||||
{file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"]
|
||||
docs = ["sphinx"]
|
||||
gmpy = ["gmpy2 (>=2.1.0a4)"]
|
||||
tests = ["pytest (>=4.6)"]
|
||||
|
||||
[[package]]
|
||||
name = "msal"
|
||||
version = "1.24.1"
|
||||
@@ -1537,6 +1754,33 @@ requests-oauthlib = ">=0.5.0"
|
||||
[package.extras]
|
||||
async = ["aiodns", "aiohttp (>=3.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "msrestazure"
|
||||
version = "0.6.4"
|
||||
description = "AutoRest swagger generator Python client runtime. Azure-specific module."
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "msrestazure-0.6.4-py2.py3-none-any.whl", hash = "sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9"},
|
||||
{file = "msrestazure-0.6.4.tar.gz", hash = "sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
adal = ">=0.6.0,<2.0.0"
|
||||
msrest = ">=0.6.0,<2.0.0"
|
||||
six = "*"
|
||||
|
||||
[[package]]
|
||||
name = "multipart"
|
||||
version = "0.2.4"
|
||||
description = "Parser for multipart/form-data."
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "multipart-0.2.4-py3-none-any.whl", hash = "sha256:5aec990820b8a9e94f9c164fbeb58cf118cfbde2854865b67a9a730edd1fb9d1"},
|
||||
{file = "multipart-0.2.4.tar.gz", hash = "sha256:06ba205360bc7096fefe618e4f1e9b2cdb890b4f2157053a81f386912a2522cb"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mypy-extensions"
|
||||
version = "1.0.0"
|
||||
@@ -1548,6 +1792,24 @@ files = [
|
||||
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "networkx"
|
||||
version = "3.2.1"
|
||||
description = "Python package for creating and manipulating graphs and networks"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
files = [
|
||||
{file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"},
|
||||
{file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
default = ["matplotlib (>=3.5)", "numpy (>=1.22)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"]
|
||||
developer = ["changelist (==0.4)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"]
|
||||
doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"]
|
||||
extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"]
|
||||
test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "oauthlib"
|
||||
version = "3.2.2"
|
||||
@@ -1725,6 +1987,20 @@ files = [
|
||||
{file = "protobuf-4.23.0.tar.gz", hash = "sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "py-partiql-parser"
|
||||
version = "0.4.2"
|
||||
description = "Pure Python PartiQL Parser"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "py-partiql-parser-0.4.2.tar.gz", hash = "sha256:9c99d545be7897c6bfa97a107f6cfbcd92e359d394e4f3b95430e6409e8dd1e1"},
|
||||
{file = "py_partiql_parser-0.4.2-py3-none-any.whl", hash = "sha256:f3f34de8dddf65ed2d47b4263560bbf97be1ecc6bd5c61da039ede90f26a10ce"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["black (==22.6.0)", "flake8", "mypy (==0.971)", "pytest"]
|
||||
|
||||
[[package]]
|
||||
name = "pyasn1"
|
||||
version = "0.5.0"
|
||||
@@ -1885,8 +2161,7 @@ astroid = ">=3.0.1,<=3.1.0-dev0"
|
||||
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
|
||||
dill = [
|
||||
{version = ">=0.2", markers = "python_version < \"3.11\""},
|
||||
{version = ">=0.3.7", markers = "python_version >= \"3.12\""},
|
||||
{version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""},
|
||||
{version = ">=0.3.6", markers = "python_version >= \"3.11\""},
|
||||
]
|
||||
isort = ">=4.2.5,<6"
|
||||
mccabe = ">=0.6,<0.8"
|
||||
@@ -1988,13 +2263,13 @@ pytest = "*"
|
||||
|
||||
[[package]]
|
||||
name = "pytest-xdist"
|
||||
version = "3.3.1"
|
||||
version = "3.5.0"
|
||||
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"},
|
||||
{file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"},
|
||||
{file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"},
|
||||
{file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2020,6 +2295,28 @@ files = [
|
||||
[package.dependencies]
|
||||
six = ">=1.5"
|
||||
|
||||
[[package]]
|
||||
name = "python-jose"
|
||||
version = "3.3.0"
|
||||
description = "JOSE implementation in Python"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"},
|
||||
{file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""}
|
||||
ecdsa = "!=0.15"
|
||||
pyasn1 = "*"
|
||||
rsa = "*"
|
||||
|
||||
[package.extras]
|
||||
cryptography = ["cryptography (>=3.4.0)"]
|
||||
pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"]
|
||||
pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "pywin32"
|
||||
version = "306"
|
||||
@@ -2125,7 +2422,7 @@ rpds-py = ">=0.7.0"
|
||||
name = "regex"
|
||||
version = "2023.8.8"
|
||||
description = "Alternative regular expression module, to replace re."
|
||||
optional = true
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "regex-2023.8.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88900f521c645f784260a8d346e12a1590f79e96403971241e64c3a265c8ecdb"},
|
||||
@@ -2548,6 +2845,21 @@ setuptools = ">=19.3"
|
||||
github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"]
|
||||
gitlab = ["python-gitlab (>=1.3.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "sarif-om"
|
||||
version = "1.0.4"
|
||||
description = "Classes implementing the SARIF 2.1.0 object model."
|
||||
optional = false
|
||||
python-versions = ">= 2.7"
|
||||
files = [
|
||||
{file = "sarif_om-1.0.4-py3-none-any.whl", hash = "sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911"},
|
||||
{file = "sarif_om-1.0.4.tar.gz", hash = "sha256:cd5f416b3083e00d402a92e449a7ff67af46f11241073eea0461802a3b5aef98"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = "*"
|
||||
pbr = "*"
|
||||
|
||||
[[package]]
|
||||
name = "schema"
|
||||
version = "0.7.5"
|
||||
@@ -2609,13 +2921,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "slack-sdk"
|
||||
version = "3.23.0"
|
||||
version = "3.26.0"
|
||||
description = "The Slack API Platform SDK for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.6.0"
|
||||
files = [
|
||||
{file = "slack_sdk-3.23.0-py2.py3-none-any.whl", hash = "sha256:2a8513505cced20ceee22b5b49c11d9545caa6234b56bf0ad47133ea5b357d10"},
|
||||
{file = "slack_sdk-3.23.0.tar.gz", hash = "sha256:9d6ebc4ff74e7983e1b27dbdb0f2bb6fc3c2a2451694686eaa2be23bbb085a73"},
|
||||
{file = "slack_sdk-3.26.0-py2.py3-none-any.whl", hash = "sha256:b84c2d93163166eb682e290c19334683c2d0f0cb4a5479c809706b44038fdda1"},
|
||||
{file = "slack_sdk-3.26.0.tar.gz", hash = "sha256:147946f388ce73b17c377b823759fcb39c0eca7444ca0a942dc12a3940a4f44f"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -2633,6 +2945,24 @@ files = [
|
||||
{file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sshpubkeys"
|
||||
version = "3.3.1"
|
||||
description = "SSH public key parser"
|
||||
optional = false
|
||||
python-versions = ">=3"
|
||||
files = [
|
||||
{file = "sshpubkeys-3.3.1-py2.py3-none-any.whl", hash = "sha256:946f76b8fe86704b0e7c56a00d80294e39bc2305999844f079a217885060b1ac"},
|
||||
{file = "sshpubkeys-3.3.1.tar.gz", hash = "sha256:3020ed4f8c846849299370fbe98ff4157b0ccc1accec105e07cfa9ae4bb55064"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cryptography = ">=2.1.4"
|
||||
ecdsa = ">=0.13"
|
||||
|
||||
[package.extras]
|
||||
dev = ["twine", "wheel", "yapf"]
|
||||
|
||||
[[package]]
|
||||
name = "stevedore"
|
||||
version = "5.0.0"
|
||||
@@ -2647,6 +2977,20 @@ files = [
|
||||
[package.dependencies]
|
||||
pbr = ">=2.0.0,<2.1.0 || >2.1.0"
|
||||
|
||||
[[package]]
|
||||
name = "sympy"
|
||||
version = "1.12"
|
||||
description = "Computer algebra system (CAS) in Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"},
|
||||
{file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
mpmath = ">=0.19"
|
||||
|
||||
[[package]]
|
||||
name = "tabulate"
|
||||
version = "0.9.0"
|
||||
@@ -2846,6 +3190,85 @@ MarkupSafe = ">=2.1.1"
|
||||
[package.extras]
|
||||
watchdog = ["watchdog (>=2.3)"]
|
||||
|
||||
[[package]]
|
||||
name = "wrapt"
|
||||
version = "1.16.0"
|
||||
description = "Module for decorators, wrappers and monkey patching."
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
|
||||
{file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
|
||||
{file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"},
|
||||
{file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"},
|
||||
{file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"},
|
||||
{file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"},
|
||||
{file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"},
|
||||
{file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"},
|
||||
{file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"},
|
||||
{file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"},
|
||||
{file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"},
|
||||
{file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"},
|
||||
{file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"},
|
||||
{file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"},
|
||||
{file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"},
|
||||
{file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"},
|
||||
{file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"},
|
||||
{file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"},
|
||||
{file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"},
|
||||
{file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"},
|
||||
{file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"},
|
||||
{file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"},
|
||||
{file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"},
|
||||
{file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"},
|
||||
{file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"},
|
||||
{file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"},
|
||||
{file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"},
|
||||
{file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"},
|
||||
{file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"},
|
||||
{file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"},
|
||||
{file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"},
|
||||
{file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"},
|
||||
{file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"},
|
||||
{file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xlsxwriter"
|
||||
version = "3.1.0"
|
||||
@@ -2888,5 +3311,5 @@ docs = ["mkdocs", "mkdocs-material"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.9"
|
||||
content-hash = "6ed432b0310655c247da3b4f542b9410842fb46de838408b99b6e61fb367cf38"
|
||||
python-versions = ">=3.9,<3.12"
|
||||
content-hash = "7e28daf704e53d057e66bc8fb71558361ab36a7cca85c7498a963f6406f54ef4"
|
||||
|
||||
@@ -26,6 +26,10 @@ from prowler.lib.check.check import (
|
||||
)
|
||||
from prowler.lib.check.checks_loader import load_checks_to_execute
|
||||
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
|
||||
from prowler.lib.check.custom_checks_metadata import (
|
||||
parse_custom_checks_metadata_file,
|
||||
update_checks_metadata,
|
||||
)
|
||||
from prowler.lib.cli.parser import ProwlerArgumentParser
|
||||
from prowler.lib.logger import logger, set_logging_config
|
||||
from prowler.lib.outputs.compliance import display_compliance_table
|
||||
@@ -67,6 +71,7 @@ def prowler():
|
||||
checks_folder = args.checks_folder
|
||||
severities = args.severity
|
||||
compliance_framework = args.compliance
|
||||
custom_checks_metadata_file = args.custom_checks_metadata_file
|
||||
|
||||
if not args.no_banner:
|
||||
print_banner(args)
|
||||
@@ -96,9 +101,19 @@ def prowler():
|
||||
|
||||
bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
|
||||
# Complete checks metadata with the compliance framework specification
|
||||
update_checks_metadata_with_compliance(
|
||||
bulk_checks_metadata = update_checks_metadata_with_compliance(
|
||||
bulk_compliance_frameworks, bulk_checks_metadata
|
||||
)
|
||||
# Update checks metadata if the --custom-checks-metadata-file is present
|
||||
custom_checks_metadata = None
|
||||
if custom_checks_metadata_file:
|
||||
custom_checks_metadata = parse_custom_checks_metadata_file(
|
||||
provider, custom_checks_metadata_file
|
||||
)
|
||||
bulk_checks_metadata = update_checks_metadata(
|
||||
bulk_checks_metadata, custom_checks_metadata
|
||||
)
|
||||
|
||||
if args.list_compliance:
|
||||
print_compliance_frameworks(bulk_compliance_frameworks)
|
||||
sys.exit()
|
||||
@@ -174,7 +189,11 @@ def prowler():
|
||||
findings = []
|
||||
if len(checks_to_execute):
|
||||
findings = execute_checks(
|
||||
checks_to_execute, provider, audit_info, audit_output_options
|
||||
checks_to_execute,
|
||||
provider,
|
||||
audit_info,
|
||||
audit_output_options,
|
||||
custom_checks_metadata,
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
@@ -246,7 +265,10 @@ def prowler():
|
||||
for region in security_hub_regions:
|
||||
# Save the regions where AWS Security Hub is enabled
|
||||
if verify_security_hub_integration_enabled_per_region(
|
||||
region, audit_info.audit_session
|
||||
audit_info.audited_partition,
|
||||
region,
|
||||
audit_info.audit_session,
|
||||
audit_info.audited_account,
|
||||
):
|
||||
aws_security_enabled_regions.append(region)
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ from prowler.lib.logger import logger
|
||||
|
||||
timestamp = datetime.today()
|
||||
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
|
||||
prowler_version = "3.11.0"
|
||||
prowler_version = "3.11.3"
|
||||
html_logo_url = "https://github.com/prowler-cloud/prowler/"
|
||||
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
|
||||
square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png"
|
||||
@@ -22,6 +22,9 @@ gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accd
|
||||
orange_color = "\033[38;5;208m"
|
||||
banner_color = "\033[1;92m"
|
||||
|
||||
# Severities
|
||||
valid_severities = ["critical", "high", "medium", "low", "informational"]
|
||||
|
||||
# Compliance
|
||||
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
|
||||
|
||||
@@ -70,7 +73,9 @@ def check_current_version():
|
||||
if latest_version != prowler_version:
|
||||
return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
|
||||
else:
|
||||
return f"{prowler_version_string} (it is the latest version, yay!)"
|
||||
return (
|
||||
f"{prowler_version_string} (You are running the latest version, yay!)"
|
||||
)
|
||||
except requests.RequestException:
|
||||
return f"{prowler_version_string}"
|
||||
except Exception:
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
aws:
|
||||
|
||||
# AWS Global Configuration
|
||||
# aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
|
||||
# aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
|
||||
allowlist_non_default_regions: False
|
||||
# If you want to allowlist/mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w allowlist.yaml`:
|
||||
# Allowlist:
|
||||
|
||||
15
prowler/config/custom_checks_metadata_example.yaml
Normal file
15
prowler/config/custom_checks_metadata_example.yaml
Normal file
@@ -0,0 +1,15 @@
|
||||
CustomChecksMetadata:
|
||||
aws:
|
||||
Checks:
|
||||
s3_bucket_level_public_access_block:
|
||||
Severity: high
|
||||
s3_bucket_no_mfa_delete:
|
||||
Severity: high
|
||||
azure:
|
||||
Checks:
|
||||
storage_infrastructure_encryption_is_enabled:
|
||||
Severity: medium
|
||||
gcp:
|
||||
Checks:
|
||||
compute_instance_public_ip:
|
||||
Severity: critical
|
||||
@@ -16,6 +16,7 @@ from colorama import Fore, Style
|
||||
import prowler
|
||||
from prowler.config.config import orange_color
|
||||
from prowler.lib.check.compliance_models import load_compliance_framework
|
||||
from prowler.lib.check.custom_checks_metadata import update_check_metadata
|
||||
from prowler.lib.check.models import Check, load_check_metadata
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.outputs import report
|
||||
@@ -106,14 +107,20 @@ def exclude_services_to_run(
|
||||
|
||||
# Load checks from checklist.json
|
||||
def parse_checks_from_file(input_file: str, provider: str) -> set:
|
||||
checks_to_execute = set()
|
||||
with open_file(input_file) as f:
|
||||
json_file = parse_json_file(f)
|
||||
"""parse_checks_from_file returns a set of checks read from the given file"""
|
||||
try:
|
||||
checks_to_execute = set()
|
||||
with open_file(input_file) as f:
|
||||
json_file = parse_json_file(f)
|
||||
|
||||
for check_name in json_file[provider]:
|
||||
checks_to_execute.add(check_name)
|
||||
for check_name in json_file[provider]:
|
||||
checks_to_execute.add(check_name)
|
||||
|
||||
return checks_to_execute
|
||||
return checks_to_execute
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
|
||||
|
||||
# Load checks from custom folder
|
||||
@@ -309,7 +316,7 @@ def print_checks(
|
||||
def parse_checks_from_compliance_framework(
|
||||
compliance_frameworks: list, bulk_compliance_frameworks: dict
|
||||
) -> list:
|
||||
"""Parse checks from compliance frameworks specification"""
|
||||
"""parse_checks_from_compliance_framework returns a set of checks from the given compliance_frameworks"""
|
||||
checks_to_execute = set()
|
||||
try:
|
||||
for framework in compliance_frameworks:
|
||||
@@ -416,6 +423,7 @@ def execute_checks(
|
||||
provider: str,
|
||||
audit_info: Any,
|
||||
audit_output_options: Provider_Output_Options,
|
||||
custom_checks_metadata: Any,
|
||||
) -> list:
|
||||
# List to store all the check's findings
|
||||
all_findings = []
|
||||
@@ -461,6 +469,7 @@ def execute_checks(
|
||||
audit_info,
|
||||
services_executed,
|
||||
checks_executed,
|
||||
custom_checks_metadata,
|
||||
)
|
||||
all_findings.extend(check_findings)
|
||||
|
||||
@@ -506,6 +515,7 @@ def execute_checks(
|
||||
audit_info,
|
||||
services_executed,
|
||||
checks_executed,
|
||||
custom_checks_metadata,
|
||||
)
|
||||
all_findings.extend(check_findings)
|
||||
|
||||
@@ -531,6 +541,7 @@ def execute(
|
||||
audit_info: Any,
|
||||
services_executed: set,
|
||||
checks_executed: set,
|
||||
custom_checks_metadata: Any,
|
||||
):
|
||||
# Import check module
|
||||
check_module_path = (
|
||||
@@ -541,6 +552,10 @@ def execute(
|
||||
check_to_execute = getattr(lib, check_name)
|
||||
c = check_to_execute()
|
||||
|
||||
# Update check metadata to reflect that in the outputs
|
||||
if custom_checks_metadata and custom_checks_metadata["Checks"].get(c.CheckID):
|
||||
c = update_check_metadata(c, custom_checks_metadata["Checks"][c.CheckID])
|
||||
|
||||
# Run check
|
||||
check_findings = run_check(c, audit_output_options)
|
||||
|
||||
@@ -598,22 +613,32 @@ def update_audit_metadata(
|
||||
)
|
||||
|
||||
|
||||
def recover_checks_from_service(service_list: list, provider: str) -> list:
|
||||
checks = set()
|
||||
service_list = [
|
||||
"awslambda" if service == "lambda" else service for service in service_list
|
||||
]
|
||||
for service in service_list:
|
||||
modules = recover_checks_from_provider(provider, service)
|
||||
if not modules:
|
||||
logger.error(f"Service '{service}' does not have checks.")
|
||||
def recover_checks_from_service(service_list: list, provider: str) -> set:
|
||||
"""
|
||||
Recover all checks from the selected provider and service
|
||||
|
||||
else:
|
||||
for check_module in modules:
|
||||
# Recover check name and module name from import path
|
||||
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_name = check_module[0].split(".")[-1]
|
||||
# If the service is present in the group list passed as parameters
|
||||
# if service_name in group_list: checks_from_arn.add(check_name)
|
||||
checks.add(check_name)
|
||||
return checks
|
||||
Returns a set of checks from the given services
|
||||
"""
|
||||
try:
|
||||
checks = set()
|
||||
service_list = [
|
||||
"awslambda" if service == "lambda" else service for service in service_list
|
||||
]
|
||||
for service in service_list:
|
||||
service_checks = recover_checks_from_provider(provider, service)
|
||||
if not service_checks:
|
||||
logger.error(f"Service '{service}' does not have checks.")
|
||||
|
||||
else:
|
||||
for check in service_checks:
|
||||
# Recover check name and module name from import path
|
||||
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_name = check[0].split(".")[-1]
|
||||
# If the service is present in the group list passed as parameters
|
||||
# if service_name in group_list: checks_from_arn.add(check_name)
|
||||
checks.add(check_name)
|
||||
return checks
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from colorama import Fore, Style
|
||||
|
||||
from prowler.config.config import valid_severities
|
||||
from prowler.lib.check.check import (
|
||||
parse_checks_from_compliance_framework,
|
||||
parse_checks_from_file,
|
||||
@@ -10,7 +11,6 @@ from prowler.lib.logger import logger
|
||||
|
||||
|
||||
# Generate the list of checks to execute
|
||||
# PENDING Test for this function
|
||||
def load_checks_to_execute(
|
||||
bulk_checks_metadata: dict,
|
||||
bulk_compliance_frameworks: dict,
|
||||
@@ -22,69 +22,93 @@ def load_checks_to_execute(
|
||||
categories: set,
|
||||
provider: str,
|
||||
) -> set:
|
||||
"""Generate the list of checks to execute based on the cloud provider and input arguments specified"""
|
||||
checks_to_execute = set()
|
||||
"""Generate the list of checks to execute based on the cloud provider and the input arguments given"""
|
||||
try:
|
||||
# Local subsets
|
||||
checks_to_execute = set()
|
||||
check_aliases = {}
|
||||
check_severities = {key: [] for key in valid_severities}
|
||||
check_categories = {}
|
||||
|
||||
# Handle if there are checks passed using -c/--checks
|
||||
if check_list:
|
||||
for check_name in check_list:
|
||||
checks_to_execute.add(check_name)
|
||||
# First, loop over the bulk_checks_metadata to extract the needed subsets
|
||||
for check, metadata in bulk_checks_metadata.items():
|
||||
# Aliases
|
||||
for alias in metadata.CheckAliases:
|
||||
check_aliases[alias] = check
|
||||
|
||||
# Handle if there are some severities passed using --severity
|
||||
elif severities:
|
||||
for check in bulk_checks_metadata:
|
||||
# Check check's severity
|
||||
if bulk_checks_metadata[check].Severity in severities:
|
||||
checks_to_execute.add(check)
|
||||
# Severities
|
||||
if metadata.Severity:
|
||||
check_severities[metadata.Severity].append(check)
|
||||
|
||||
# Handle if there are checks passed using -C/--checks-file
|
||||
elif checks_file:
|
||||
try:
|
||||
# Categories
|
||||
for category in metadata.Categories:
|
||||
if category not in check_categories:
|
||||
check_categories[category] = []
|
||||
check_categories[category].append(check)
|
||||
|
||||
# Handle if there are checks passed using -c/--checks
|
||||
if check_list:
|
||||
for check_name in check_list:
|
||||
checks_to_execute.add(check_name)
|
||||
|
||||
# Handle if there are some severities passed using --severity
|
||||
elif severities:
|
||||
for severity in severities:
|
||||
checks_to_execute.update(check_severities[severity])
|
||||
|
||||
if service_list:
|
||||
checks_to_execute = (
|
||||
recover_checks_from_service(service_list, provider)
|
||||
& checks_to_execute
|
||||
)
|
||||
|
||||
# Handle if there are checks passed using -C/--checks-file
|
||||
elif checks_file:
|
||||
checks_to_execute = parse_checks_from_file(checks_file, provider)
|
||||
except Exception as e:
|
||||
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
|
||||
|
||||
# Handle if there are services passed using -s/--services
|
||||
elif service_list:
|
||||
checks_to_execute = recover_checks_from_service(service_list, provider)
|
||||
# Handle if there are services passed using -s/--services
|
||||
elif service_list:
|
||||
checks_to_execute = recover_checks_from_service(service_list, provider)
|
||||
|
||||
# Handle if there are compliance frameworks passed using --compliance
|
||||
elif compliance_frameworks:
|
||||
try:
|
||||
# Handle if there are compliance frameworks passed using --compliance
|
||||
elif compliance_frameworks:
|
||||
checks_to_execute = parse_checks_from_compliance_framework(
|
||||
compliance_frameworks, bulk_compliance_frameworks
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
|
||||
|
||||
# Handle if there are categories passed using --categories
|
||||
elif categories:
|
||||
for cat in categories:
|
||||
for check in bulk_checks_metadata:
|
||||
# Check check's categories
|
||||
if cat in bulk_checks_metadata[check].Categories:
|
||||
checks_to_execute.add(check)
|
||||
# Handle if there are categories passed using --categories
|
||||
elif categories:
|
||||
for category in categories:
|
||||
checks_to_execute.update(check_categories[category])
|
||||
|
||||
# If there are no checks passed as argument
|
||||
else:
|
||||
try:
|
||||
# If there are no checks passed as argument
|
||||
else:
|
||||
# Get all check modules to run with the specific provider
|
||||
checks = recover_checks_from_provider(provider)
|
||||
except Exception as e:
|
||||
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
|
||||
else:
|
||||
|
||||
for check_info in checks:
|
||||
# Recover check name from import path (last part)
|
||||
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_name = check_info[0]
|
||||
checks_to_execute.add(check_name)
|
||||
|
||||
# Get Check Aliases mapping
|
||||
check_aliases = {}
|
||||
for check, metadata in bulk_checks_metadata.items():
|
||||
for alias in metadata.CheckAliases:
|
||||
check_aliases[alias] = check
|
||||
# Check Aliases
|
||||
checks_to_execute = update_checks_to_execute_with_aliases(
|
||||
checks_to_execute, check_aliases
|
||||
)
|
||||
|
||||
return checks_to_execute
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
|
||||
|
||||
def update_checks_to_execute_with_aliases(
|
||||
checks_to_execute: set, check_aliases: dict
|
||||
) -> set:
|
||||
"""update_checks_to_execute_with_aliases returns the checks_to_execute updated using the check aliases."""
|
||||
# Verify if any input check is an alias of another check
|
||||
for input_check in checks_to_execute:
|
||||
if (
|
||||
@@ -97,5 +121,4 @@ def load_checks_to_execute(
|
||||
print(
|
||||
f"\nUsing alias {Fore.YELLOW}{input_check}{Style.RESET_ALL} for check {Fore.YELLOW}{check_aliases[input_check]}{Style.RESET_ALL}...\n"
|
||||
)
|
||||
|
||||
return checks_to_execute
|
||||
|
||||
77
prowler/lib/check/custom_checks_metadata.py
Normal file
77
prowler/lib/check/custom_checks_metadata.py
Normal file
@@ -0,0 +1,77 @@
|
||||
import sys
|
||||
|
||||
import yaml
|
||||
from jsonschema import validate
|
||||
|
||||
from prowler.config.config import valid_severities
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
custom_checks_metadata_schema = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Checks": {
|
||||
"type": "object",
|
||||
"patternProperties": {
|
||||
".*": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Severity": {
|
||||
"type": "string",
|
||||
"enum": valid_severities,
|
||||
}
|
||||
},
|
||||
"required": ["Severity"],
|
||||
"additionalProperties": False,
|
||||
}
|
||||
},
|
||||
"additionalProperties": False,
|
||||
}
|
||||
},
|
||||
"required": ["Checks"],
|
||||
"additionalProperties": False,
|
||||
}
|
||||
|
||||
|
||||
def parse_custom_checks_metadata_file(provider: str, parse_custom_checks_metadata_file):
|
||||
"""parse_custom_checks_metadata_file returns the custom_checks_metadata object if it is valid, otherwise aborts the execution returning the ValidationError."""
|
||||
try:
|
||||
with open(parse_custom_checks_metadata_file) as f:
|
||||
custom_checks_metadata = yaml.safe_load(f)["CustomChecksMetadata"][provider]
|
||||
validate(custom_checks_metadata, schema=custom_checks_metadata_schema)
|
||||
return custom_checks_metadata
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def update_checks_metadata(bulk_checks_metadata, custom_checks_metadata):
|
||||
"""update_checks_metadata returns the bulk_checks_metadata with the check's metadata updated based on the custom_checks_metadata provided."""
|
||||
try:
|
||||
# Update checks metadata from CustomChecksMetadata file
|
||||
for check, custom_metadata in custom_checks_metadata["Checks"].items():
|
||||
check_metadata = bulk_checks_metadata.get(check)
|
||||
if check_metadata:
|
||||
bulk_checks_metadata[check] = update_check_metadata(
|
||||
check_metadata, custom_metadata
|
||||
)
|
||||
return bulk_checks_metadata
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def update_check_metadata(check_metadata, custom_metadata):
|
||||
"""update_check_metadata updates the check_metadata fields present in the custom_metadata and returns the updated version of the check_metadata. If some field is not present or valid the check_metadata is returned with the original fields."""
|
||||
try:
|
||||
if custom_metadata:
|
||||
for attribute in custom_metadata:
|
||||
try:
|
||||
setattr(check_metadata, attribute, custom_metadata[attribute])
|
||||
except ValueError:
|
||||
pass
|
||||
finally:
|
||||
return check_metadata
|
||||
@@ -7,6 +7,7 @@ from prowler.config.config import (
|
||||
check_current_version,
|
||||
default_config_file_path,
|
||||
default_output_directory,
|
||||
valid_severities,
|
||||
)
|
||||
from prowler.providers.common.arguments import (
|
||||
init_providers_parser,
|
||||
@@ -49,6 +50,7 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
self.__init_exclude_checks_parser__()
|
||||
self.__init_list_checks_parser__()
|
||||
self.__init_config_parser__()
|
||||
self.__init_custom_checks_metadata_parser__()
|
||||
|
||||
# Init Providers Arguments
|
||||
init_providers_parser(self)
|
||||
@@ -220,11 +222,11 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
group.add_argument(
|
||||
"-s", "--services", nargs="+", help="List of services to be executed."
|
||||
)
|
||||
group.add_argument(
|
||||
common_checks_parser.add_argument(
|
||||
"--severity",
|
||||
nargs="+",
|
||||
help="List of severities to be executed [informational, low, medium, high, critical]",
|
||||
choices=["informational", "low", "medium", "high", "critical"],
|
||||
help=f"List of severities to be executed {valid_severities}",
|
||||
choices=valid_severities,
|
||||
)
|
||||
group.add_argument(
|
||||
"--compliance",
|
||||
@@ -286,3 +288,15 @@ Detailed documentation at https://docs.prowler.cloud
|
||||
default=default_config_file_path,
|
||||
help="Set configuration file path",
|
||||
)
|
||||
|
||||
def __init_custom_checks_metadata_parser__(self):
|
||||
# CustomChecksMetadata
|
||||
custom_checks_metadata_subparser = (
|
||||
self.common_providers_parser.add_argument_group("Custom Checks Metadata")
|
||||
)
|
||||
custom_checks_metadata_subparser.add_argument(
|
||||
"--custom-checks-metadata-file",
|
||||
nargs="?",
|
||||
default=None,
|
||||
help="Path for the custom checks metadata YAML file. See example prowler/config/custom_checks_metadata_example.yaml for reference and format. See more in https://docs.prowler.cloud/en/latest/tutorials/custom-checks-metadata/",
|
||||
)
|
||||
|
||||
@@ -12,8 +12,6 @@ from prowler.config.config import (
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.html import add_html_header
|
||||
from prowler.lib.outputs.models import (
|
||||
Aws_Check_Output_CSV,
|
||||
Azure_Check_Output_CSV,
|
||||
Check_Output_CSV_AWS_CIS,
|
||||
Check_Output_CSV_AWS_ISO27001_2013,
|
||||
Check_Output_CSV_AWS_Well_Architected,
|
||||
@@ -21,19 +19,18 @@ from prowler.lib.outputs.models import (
|
||||
Check_Output_CSV_GCP_CIS,
|
||||
Check_Output_CSV_Generic_Compliance,
|
||||
Check_Output_MITRE_ATTACK,
|
||||
Gcp_Check_Output_CSV,
|
||||
generate_csv_fields,
|
||||
)
|
||||
from prowler.lib.utils.utils import file_exists, open_file
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
|
||||
from prowler.providers.common.outputs import get_provider_output_model
|
||||
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
|
||||
|
||||
|
||||
def initialize_file_descriptor(
|
||||
filename: str,
|
||||
output_mode: str,
|
||||
audit_info: AWS_Audit_Info,
|
||||
audit_info: Any,
|
||||
format: Any = None,
|
||||
) -> TextIOWrapper:
|
||||
"""Open/Create the output file. If needed include headers or the required format"""
|
||||
@@ -75,27 +72,15 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
|
||||
for output_mode in output_modes:
|
||||
if output_mode == "csv":
|
||||
filename = f"{output_directory}/{output_filename}{csv_file_suffix}"
|
||||
if isinstance(audit_info, AWS_Audit_Info):
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename,
|
||||
output_mode,
|
||||
audit_info,
|
||||
Aws_Check_Output_CSV,
|
||||
)
|
||||
if isinstance(audit_info, Azure_Audit_Info):
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename,
|
||||
output_mode,
|
||||
audit_info,
|
||||
Azure_Check_Output_CSV,
|
||||
)
|
||||
if isinstance(audit_info, GCP_Audit_Info):
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename,
|
||||
output_mode,
|
||||
audit_info,
|
||||
Gcp_Check_Output_CSV,
|
||||
)
|
||||
output_model = get_provider_output_model(
|
||||
audit_info.__class__.__name__
|
||||
)
|
||||
file_descriptor = initialize_file_descriptor(
|
||||
filename,
|
||||
output_mode,
|
||||
audit_info,
|
||||
output_model,
|
||||
)
|
||||
file_descriptors.update({output_mode: file_descriptor})
|
||||
|
||||
elif output_mode == "json":
|
||||
|
||||
@@ -338,8 +338,9 @@ def add_html_footer(output_filename, output_directory):
|
||||
def get_aws_html_assessment_summary(audit_info):
|
||||
try:
|
||||
if isinstance(audit_info, AWS_Audit_Info):
|
||||
if not audit_info.profile:
|
||||
audit_info.profile = "ENV"
|
||||
profile = (
|
||||
audit_info.profile if audit_info.profile is not None else "default"
|
||||
)
|
||||
if isinstance(audit_info.audited_regions, list):
|
||||
audited_regions = " ".join(audit_info.audited_regions)
|
||||
elif not audit_info.audited_regions:
|
||||
@@ -361,7 +362,7 @@ def get_aws_html_assessment_summary(audit_info):
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
<b>AWS-CLI Profile:</b> """
|
||||
+ audit_info.profile
|
||||
+ profile
|
||||
+ """
|
||||
</li>
|
||||
<li class="list-group-item">
|
||||
|
||||
@@ -31,6 +31,7 @@ from prowler.lib.outputs.models import (
|
||||
unroll_dict_to_list,
|
||||
)
|
||||
from prowler.lib.utils.utils import hash_sha512, open_file, outputs_unix_timestamp
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
|
||||
|
||||
def fill_json_asff(finding_output, audit_info, finding, output_options):
|
||||
@@ -155,6 +156,11 @@ def fill_json_ocsf(audit_info, finding, output_options) -> Check_Output_JSON_OCS
|
||||
aws_org_uid = ""
|
||||
account = None
|
||||
org = None
|
||||
profile = ""
|
||||
if isinstance(audit_info, AWS_Audit_Info):
|
||||
profile = (
|
||||
audit_info.profile if audit_info.profile is not None else "default"
|
||||
)
|
||||
if (
|
||||
hasattr(audit_info, "organizations_metadata")
|
||||
and audit_info.organizations_metadata
|
||||
@@ -249,9 +255,7 @@ def fill_json_ocsf(audit_info, finding, output_options) -> Check_Output_JSON_OCS
|
||||
original_time=outputs_unix_timestamp(
|
||||
output_options.unix_timestamp, timestamp
|
||||
),
|
||||
profiles=[audit_info.profile]
|
||||
if hasattr(audit_info, "organizations_metadata")
|
||||
else [],
|
||||
profiles=[profile],
|
||||
)
|
||||
compliance = Compliance_OCSF(
|
||||
status=generate_json_ocsf_status(finding.status),
|
||||
|
||||
@@ -157,18 +157,24 @@ def generate_regional_clients(
|
||||
try:
|
||||
regional_clients = {}
|
||||
service_regions = get_available_aws_service_regions(service, audit_info)
|
||||
|
||||
# Check if it is global service to gather only one region
|
||||
if global_service:
|
||||
if service_regions:
|
||||
if audit_info.profile_region in service_regions:
|
||||
service_regions = [audit_info.profile_region]
|
||||
service_regions = service_regions[:1]
|
||||
for region in service_regions:
|
||||
|
||||
# Get the regions enabled for the account and get the intersection with the service available regions
|
||||
enabled_regions = service_regions.intersection(audit_info.enabled_regions)
|
||||
|
||||
for region in enabled_regions:
|
||||
regional_client = audit_info.audit_session.client(
|
||||
service, region_name=region, config=audit_info.session_config
|
||||
)
|
||||
regional_client.region = region
|
||||
regional_clients[region] = regional_client
|
||||
|
||||
return regional_clients
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
@@ -176,6 +182,22 @@ def generate_regional_clients(
|
||||
)
|
||||
|
||||
|
||||
def get_aws_enabled_regions(audit_info: AWS_Audit_Info) -> set:
|
||||
"""get_aws_enabled_regions returns a set of enabled AWS regions"""
|
||||
|
||||
# EC2 Client to check enabled regions
|
||||
service = "ec2"
|
||||
default_region = get_default_region(service, audit_info)
|
||||
ec2_client = audit_info.audit_session.client(service, region_name=default_region)
|
||||
|
||||
enabled_regions = set()
|
||||
# With AllRegions=False we only get the enabled regions for the account
|
||||
for region in ec2_client.describe_regions(AllRegions=False).get("Regions", []):
|
||||
enabled_regions.add(region.get("RegionName"))
|
||||
|
||||
return enabled_regions
|
||||
|
||||
|
||||
def get_aws_available_regions():
|
||||
try:
|
||||
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
|
||||
@@ -267,17 +289,19 @@ def get_regions_from_audit_resources(audit_resources: list) -> set:
|
||||
return audited_regions
|
||||
|
||||
|
||||
def get_available_aws_service_regions(service: str, audit_info: AWS_Audit_Info) -> list:
|
||||
def get_available_aws_service_regions(service: str, audit_info: AWS_Audit_Info) -> set:
|
||||
# Get json locally
|
||||
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
|
||||
with open_file(f"{actual_directory}/{aws_services_json_file}") as f:
|
||||
data = parse_json_file(f)
|
||||
# Check if it is a subservice
|
||||
json_regions = data["services"][service]["regions"][audit_info.audited_partition]
|
||||
if audit_info.audited_regions: # Check for input aws audit_info.audited_regions
|
||||
regions = list(
|
||||
set(json_regions).intersection(audit_info.audited_regions)
|
||||
) # Get common regions between input and json
|
||||
json_regions = set(
|
||||
data["services"][service]["regions"][audit_info.audited_partition]
|
||||
)
|
||||
# Check for input aws audit_info.audited_regions
|
||||
if audit_info.audited_regions:
|
||||
# Get common regions between input and json
|
||||
regions = json_regions.intersection(audit_info.audited_regions)
|
||||
else: # Get all regions from json of the service and partition
|
||||
regions = json_regions
|
||||
return regions
|
||||
|
||||
@@ -498,17 +498,6 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"appfabric": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"ap-northeast-1",
|
||||
"eu-west-1",
|
||||
"us-east-1"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"appflow": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -674,10 +663,13 @@
|
||||
"regions": {
|
||||
"aws": [
|
||||
"ap-northeast-1",
|
||||
"ap-south-1",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"eu-central-1",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-west-2"
|
||||
@@ -805,7 +797,10 @@
|
||||
"cn-north-1",
|
||||
"cn-northwest-1"
|
||||
],
|
||||
"aws-us-gov": []
|
||||
"aws-us-gov": [
|
||||
"us-gov-east-1",
|
||||
"us-gov-west-1"
|
||||
]
|
||||
}
|
||||
},
|
||||
"artifact": {
|
||||
@@ -1013,6 +1008,17 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"aws-appfabric": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"ap-northeast-1",
|
||||
"eu-west-1",
|
||||
"us-east-1"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"awshealthdashboard": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -2068,17 +2074,24 @@
|
||||
"ap-east-1",
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"il-central-1",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
@@ -2299,15 +2312,22 @@
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"il-central-1",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
@@ -2467,6 +2487,7 @@
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
@@ -2941,6 +2962,7 @@
|
||||
"cn-northwest-1"
|
||||
],
|
||||
"aws-us-gov": [
|
||||
"us-gov-east-1",
|
||||
"us-gov-west-1"
|
||||
]
|
||||
}
|
||||
@@ -2978,7 +3000,10 @@
|
||||
"us-west-2"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
"aws-us-gov": [
|
||||
"us-gov-east-1",
|
||||
"us-gov-west-1"
|
||||
]
|
||||
}
|
||||
},
|
||||
"ds": {
|
||||
@@ -3615,6 +3640,7 @@
|
||||
"ap-south-1",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-north-1",
|
||||
@@ -3622,6 +3648,7 @@
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
@@ -3642,15 +3669,19 @@
|
||||
"emr-serverless": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"af-south-1",
|
||||
"ap-east-1",
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
@@ -4336,16 +4367,6 @@
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"gamesparks": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"ap-northeast-1",
|
||||
"us-east-1"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"glacier": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -5605,6 +5626,44 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"launch-wizard": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"af-south-1",
|
||||
"ap-east-1",
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-west-1",
|
||||
"us-west-2"
|
||||
],
|
||||
"aws-cn": [
|
||||
"cn-north-1",
|
||||
"cn-northwest-1"
|
||||
],
|
||||
"aws-us-gov": [
|
||||
"us-gov-east-1",
|
||||
"us-gov-west-1"
|
||||
]
|
||||
}
|
||||
},
|
||||
"launchwizard": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -5718,6 +5777,7 @@
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
@@ -5801,6 +5861,7 @@
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
@@ -6062,6 +6123,15 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"managedblockchain-query": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"us-east-1"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"managedservices": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -6388,11 +6458,18 @@
|
||||
"aws": [
|
||||
"af-south-1",
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-north-1",
|
||||
"eu-west-1",
|
||||
"eu-west-3",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-west-2"
|
||||
@@ -6703,6 +6780,7 @@
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"il-central-1",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
@@ -7079,8 +7157,11 @@
|
||||
"regions": {
|
||||
"aws": [
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-south-1",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
@@ -7152,6 +7233,41 @@
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"pca-connector-ad": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"af-south-1",
|
||||
"ap-east-1",
|
||||
"ap-northeast-1",
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-3",
|
||||
"ap-southeast-4",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-central-2",
|
||||
"eu-north-1",
|
||||
"eu-south-1",
|
||||
"eu-south-2",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
"us-east-2",
|
||||
"us-west-1",
|
||||
"us-west-2"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"personalize": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -7740,6 +7856,20 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"redshift-serverless": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"ap-south-1",
|
||||
"ca-central-1",
|
||||
"eu-west-3",
|
||||
"us-west-1"
|
||||
],
|
||||
"aws-cn": [
|
||||
"cn-north-1"
|
||||
],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"rekognition": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -8801,6 +8931,7 @@
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"eu-west-3",
|
||||
"il-central-1",
|
||||
"me-central-1",
|
||||
"me-south-1",
|
||||
"sa-east-1",
|
||||
@@ -9631,6 +9762,21 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"thinclient": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
"ap-south-1",
|
||||
"ca-central-1",
|
||||
"eu-central-1",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"us-east-1",
|
||||
"us-west-2"
|
||||
],
|
||||
"aws-cn": [],
|
||||
"aws-us-gov": []
|
||||
}
|
||||
},
|
||||
"timestream": {
|
||||
"regions": {
|
||||
"aws": [
|
||||
@@ -9928,6 +10074,7 @@
|
||||
"ap-northeast-2",
|
||||
"ap-northeast-3",
|
||||
"ap-south-1",
|
||||
"ap-south-2",
|
||||
"ap-southeast-1",
|
||||
"ap-southeast-2",
|
||||
"ap-southeast-4",
|
||||
@@ -10344,6 +10491,7 @@
|
||||
"eu-central-1",
|
||||
"eu-west-1",
|
||||
"eu-west-2",
|
||||
"il-central-1",
|
||||
"sa-east-1",
|
||||
"us-east-1",
|
||||
"us-west-2"
|
||||
|
||||
@@ -135,7 +135,12 @@ def allowlist_findings(
|
||||
|
||||
|
||||
def is_allowlisted(
|
||||
allowlist: dict, audited_account: str, check: str, region: str, resource: str, tags
|
||||
allowlist: dict,
|
||||
audited_account: str,
|
||||
check: str,
|
||||
finding_region: str,
|
||||
finding_resource: str,
|
||||
finding_tags,
|
||||
):
|
||||
try:
|
||||
allowlisted_checks = {}
|
||||
@@ -150,15 +155,15 @@ def is_allowlisted(
|
||||
if "*" in allowlist["Accounts"]:
|
||||
checks_multi_account = allowlist["Accounts"]["*"]["Checks"]
|
||||
allowlisted_checks.update(checks_multi_account)
|
||||
|
||||
# Test if it is allowlisted
|
||||
if is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
audited_account,
|
||||
audited_account,
|
||||
check,
|
||||
region,
|
||||
resource,
|
||||
tags,
|
||||
finding_region,
|
||||
finding_resource,
|
||||
finding_tags,
|
||||
):
|
||||
is_finding_allowlisted = True
|
||||
|
||||
@@ -171,23 +176,29 @@ def is_allowlisted(
|
||||
|
||||
|
||||
def is_allowlisted_in_check(
|
||||
allowlisted_checks, audited_account, account, check, region, resource, tags
|
||||
allowlisted_checks,
|
||||
audited_account,
|
||||
check,
|
||||
finding_region,
|
||||
finding_resource,
|
||||
finding_tags,
|
||||
):
|
||||
try:
|
||||
# Default value is not allowlisted
|
||||
is_check_allowlisted = False
|
||||
|
||||
for allowlisted_check, allowlisted_check_info in allowlisted_checks.items():
|
||||
# map lambda to awslambda
|
||||
allowlisted_check = re.sub("^lambda", "awslambda", allowlisted_check)
|
||||
# extract the exceptions
|
||||
|
||||
# Check if the finding is excepted
|
||||
exceptions = allowlisted_check_info.get("Exceptions")
|
||||
# Check if there are exceptions
|
||||
if is_excepted(
|
||||
exceptions,
|
||||
audited_account,
|
||||
region,
|
||||
resource,
|
||||
tags,
|
||||
finding_region,
|
||||
finding_resource,
|
||||
finding_tags,
|
||||
):
|
||||
# Break loop and return default value since is excepted
|
||||
break
|
||||
@@ -201,13 +212,27 @@ def is_allowlisted_in_check(
|
||||
or check == allowlisted_check
|
||||
or re.search(allowlisted_check, check)
|
||||
):
|
||||
if is_allowlisted_in_region(
|
||||
allowlisted_regions,
|
||||
allowlisted_resources,
|
||||
allowlisted_tags,
|
||||
region,
|
||||
resource,
|
||||
tags,
|
||||
allowlisted_in_check = True
|
||||
allowlisted_in_region = is_allowlisted_in_region(
|
||||
allowlisted_regions, finding_region
|
||||
)
|
||||
allowlisted_in_resource = is_allowlisted_in_resource(
|
||||
allowlisted_resources, finding_resource
|
||||
)
|
||||
allowlisted_in_tags = is_allowlisted_in_tags(
|
||||
allowlisted_tags, finding_tags
|
||||
)
|
||||
|
||||
# For a finding to be allowlisted requires the following set to True:
|
||||
# - allowlisted_in_check -> True
|
||||
# - allowlisted_in_region -> True
|
||||
# - allowlisted_in_tags -> True or allowlisted_in_resource -> True
|
||||
# - excepted -> False
|
||||
|
||||
if (
|
||||
allowlisted_in_check
|
||||
and allowlisted_in_region
|
||||
and (allowlisted_in_tags or allowlisted_in_resource)
|
||||
):
|
||||
is_check_allowlisted = True
|
||||
|
||||
@@ -220,25 +245,11 @@ def is_allowlisted_in_check(
|
||||
|
||||
|
||||
def is_allowlisted_in_region(
|
||||
allowlist_regions, allowlist_resources, allowlisted_tags, region, resource, tags
|
||||
allowlisted_regions,
|
||||
finding_region,
|
||||
):
|
||||
try:
|
||||
# By default is not allowlisted
|
||||
is_region_allowlisted = False
|
||||
# If there is a *, it affects to all regions
|
||||
if "*" in allowlist_regions or region in allowlist_regions:
|
||||
for elem in allowlist_resources:
|
||||
if is_allowlisted_in_tags(
|
||||
allowlisted_tags,
|
||||
elem,
|
||||
resource,
|
||||
tags,
|
||||
):
|
||||
is_region_allowlisted = True
|
||||
# if we find the element there is no point in continuing with the loop
|
||||
break
|
||||
|
||||
return is_region_allowlisted
|
||||
return __is_item_matched__(allowlisted_regions, finding_region)
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
@@ -246,25 +257,9 @@ def is_allowlisted_in_region(
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def is_allowlisted_in_tags(allowlisted_tags, elem, resource, tags):
|
||||
def is_allowlisted_in_tags(allowlisted_tags, finding_tags):
|
||||
try:
|
||||
# By default is not allowlisted
|
||||
is_tag_allowlisted = False
|
||||
# Check if it is an *
|
||||
if elem == "*":
|
||||
elem = ".*"
|
||||
# Check if there are allowlisted tags
|
||||
if allowlisted_tags:
|
||||
for allowlisted_tag in allowlisted_tags:
|
||||
if re.search(allowlisted_tag, tags):
|
||||
is_tag_allowlisted = True
|
||||
break
|
||||
|
||||
else:
|
||||
if re.search(elem, resource):
|
||||
is_tag_allowlisted = True
|
||||
|
||||
return is_tag_allowlisted
|
||||
return __is_item_matched__(allowlisted_tags, finding_tags)
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
@@ -272,7 +267,25 @@ def is_allowlisted_in_tags(allowlisted_tags, elem, resource, tags):
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def is_excepted(exceptions, audited_account, region, resource, tags):
|
||||
def is_allowlisted_in_resource(allowlisted_resources, finding_resource):
|
||||
try:
|
||||
return __is_item_matched__(allowlisted_resources, finding_resource)
|
||||
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def is_excepted(
|
||||
exceptions,
|
||||
audited_account,
|
||||
finding_region,
|
||||
finding_resource,
|
||||
finding_tags,
|
||||
):
|
||||
"""is_excepted returns True if the account, region, resource and tags are excepted"""
|
||||
try:
|
||||
excepted = False
|
||||
is_account_excepted = False
|
||||
@@ -281,39 +294,50 @@ def is_excepted(exceptions, audited_account, region, resource, tags):
|
||||
is_tag_excepted = False
|
||||
if exceptions:
|
||||
excepted_accounts = exceptions.get("Accounts", [])
|
||||
is_account_excepted = __is_item_matched__(
|
||||
excepted_accounts, audited_account
|
||||
)
|
||||
|
||||
excepted_regions = exceptions.get("Regions", [])
|
||||
is_region_excepted = __is_item_matched__(excepted_regions, finding_region)
|
||||
|
||||
excepted_resources = exceptions.get("Resources", [])
|
||||
is_resource_excepted = __is_item_matched__(
|
||||
excepted_resources, finding_resource
|
||||
)
|
||||
|
||||
excepted_tags = exceptions.get("Tags", [])
|
||||
if exceptions:
|
||||
if audited_account in excepted_accounts:
|
||||
is_account_excepted = True
|
||||
if region in excepted_regions:
|
||||
is_region_excepted = True
|
||||
for excepted_resource in excepted_resources:
|
||||
if re.search(excepted_resource, resource):
|
||||
is_resource_excepted = True
|
||||
for tag in excepted_tags:
|
||||
if tag in tags:
|
||||
is_tag_excepted = True
|
||||
if (
|
||||
(
|
||||
(excepted_accounts and is_account_excepted)
|
||||
or not excepted_accounts
|
||||
)
|
||||
and (
|
||||
(excepted_regions and is_region_excepted)
|
||||
or not excepted_regions
|
||||
)
|
||||
and (
|
||||
(excepted_resources and is_resource_excepted)
|
||||
or not excepted_resources
|
||||
)
|
||||
and ((excepted_tags and is_tag_excepted) or not excepted_tags)
|
||||
):
|
||||
excepted = True
|
||||
is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags)
|
||||
|
||||
if (
|
||||
is_account_excepted
|
||||
and is_region_excepted
|
||||
and is_resource_excepted
|
||||
and is_tag_excepted
|
||||
):
|
||||
excepted = True
|
||||
return excepted
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def __is_item_matched__(matched_items, finding_items):
|
||||
"""__is_item_matched__ return True if any of the matched_items are present in the finding_items, otherwise returns False."""
|
||||
try:
|
||||
is_item_matched = False
|
||||
if matched_items and (finding_items or finding_items == ""):
|
||||
for item in matched_items:
|
||||
if item == "*":
|
||||
item = ".*"
|
||||
if re.search(item, finding_items):
|
||||
is_item_matched = True
|
||||
break
|
||||
return is_item_matched
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
@@ -126,6 +126,7 @@ def init_parser(self):
|
||||
default=None,
|
||||
help="Path for allowlist yaml file. See example prowler/config/aws_allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/",
|
||||
)
|
||||
|
||||
# Based Scans
|
||||
aws_based_scans_subparser = aws_parser.add_argument_group("AWS Based Scans")
|
||||
aws_based_scans_parser = aws_based_scans_subparser.add_mutually_exclusive_group()
|
||||
|
||||
@@ -38,4 +38,5 @@ current_audit_info = AWS_Audit_Info(
|
||||
audit_metadata=None,
|
||||
audit_config=None,
|
||||
ignore_unused_services=False,
|
||||
enabled_regions=set(),
|
||||
)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from dataclasses import dataclass
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
@@ -53,3 +53,4 @@ class AWS_Audit_Info:
|
||||
audit_metadata: Optional[Any] = None
|
||||
audit_config: Optional[dict] = None
|
||||
ignore_unused_services: bool = False
|
||||
enabled_regions: set = field(default_factory=set)
|
||||
|
||||
@@ -56,12 +56,15 @@ def is_account_only_allowed_in_condition(
|
||||
):
|
||||
# if there is an arn/account without the source account -> we do not consider it safe
|
||||
# here by default we assume is true and look for false entries
|
||||
is_condition_valid = True
|
||||
is_condition_key_restrictive = True
|
||||
for item in condition_statement[condition_operator][value]:
|
||||
if source_account not in item:
|
||||
is_condition_valid = False
|
||||
is_condition_key_restrictive = False
|
||||
break
|
||||
|
||||
if is_condition_key_restrictive:
|
||||
is_condition_valid = True
|
||||
|
||||
# value is a string
|
||||
elif isinstance(
|
||||
condition_statement[condition_operator][value],
|
||||
|
||||
@@ -14,9 +14,11 @@ def prepare_security_hub_findings(
|
||||
findings: [], audit_info: AWS_Audit_Info, output_options, enabled_regions: []
|
||||
) -> dict:
|
||||
security_hub_findings_per_region = {}
|
||||
# Create a key per region
|
||||
for region in audit_info.audited_regions:
|
||||
|
||||
# Create a key per audited region
|
||||
for region in enabled_regions:
|
||||
security_hub_findings_per_region[region] = []
|
||||
|
||||
for finding in findings:
|
||||
# We don't send the INFO findings to AWS Security Hub
|
||||
if finding.status == "INFO":
|
||||
@@ -47,8 +49,10 @@ def prepare_security_hub_findings(
|
||||
|
||||
|
||||
def verify_security_hub_integration_enabled_per_region(
|
||||
partition: str,
|
||||
region: str,
|
||||
session: session.Session,
|
||||
aws_account_number: str,
|
||||
) -> bool:
|
||||
f"""verify_security_hub_integration_enabled returns True if the {SECURITY_HUB_INTEGRATION_NAME} is enabled for the given region. Otherwise returns false."""
|
||||
prowler_integration_enabled = False
|
||||
@@ -62,7 +66,8 @@ def verify_security_hub_integration_enabled_per_region(
|
||||
security_hub_client.describe_hub()
|
||||
|
||||
# Check if Prowler integration is enabled in Security Hub
|
||||
if "prowler/prowler" not in str(
|
||||
security_hub_prowler_integration_arn = f"arn:{partition}:securityhub:{region}:{aws_account_number}:product-subscription/{SECURITY_HUB_INTEGRATION_NAME}"
|
||||
if security_hub_prowler_integration_arn not in str(
|
||||
security_hub_client.list_enabled_products_for_import()
|
||||
):
|
||||
logger.error(
|
||||
|
||||
@@ -19,17 +19,23 @@ class accessanalyzer_enabled(Check):
|
||||
f"IAM Access Analyzer {analyzer.name} is enabled."
|
||||
)
|
||||
|
||||
elif analyzer.status == "NOT_AVAILABLE":
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"IAM Access Analyzer in account {analyzer.name} is not enabled."
|
||||
)
|
||||
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"IAM Access Analyzer {analyzer.name} is not active."
|
||||
)
|
||||
if analyzer.status == "NOT_AVAILABLE":
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"IAM Access Analyzer in account {analyzer.name} is not enabled."
|
||||
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.status_extended = (
|
||||
f"IAM Access Analyzer {analyzer.name} is not active."
|
||||
)
|
||||
if (
|
||||
accessanalyzer_client.audit_config.get(
|
||||
"allowlist_non_default_regions", False
|
||||
)
|
||||
and not analyzer.region == accessanalyzer_client.region
|
||||
):
|
||||
report.status = "WARNING"
|
||||
|
||||
findings.append(report)
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ class codeartifact_packages_external_public_publishing_disabled(Check):
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = repository.region
|
||||
report.resource_id = package.name
|
||||
report.resource_arn = repository.arn
|
||||
report.resource_arn = f"{repository.arn}/{package.namespace + ':' if package.namespace else ''}{package.name}"
|
||||
report.resource_tags = repository.tags
|
||||
|
||||
if package.latest_version.origin.origin_type in (
|
||||
|
||||
@@ -63,7 +63,7 @@ class CodeArtifact(AWSService):
|
||||
list_packages_parameters = {
|
||||
"domain": self.repositories[repository].domain_name,
|
||||
"domainOwner": self.repositories[repository].domain_owner,
|
||||
"repository": repository,
|
||||
"repository": self.repositories[repository].name,
|
||||
}
|
||||
packages = []
|
||||
for page in list_packages_paginator.paginate(
|
||||
@@ -83,18 +83,37 @@ class CodeArtifact(AWSService):
|
||||
]
|
||||
)
|
||||
# Get Latest Package Version
|
||||
latest_version_information = (
|
||||
regional_client.list_package_versions(
|
||||
domain=self.repositories[repository].domain_name,
|
||||
domainOwner=self.repositories[
|
||||
repository
|
||||
].domain_owner,
|
||||
repository=repository,
|
||||
format=package_format,
|
||||
package=package_name,
|
||||
sortBy="PUBLISHED_TIME",
|
||||
if package_namespace:
|
||||
latest_version_information = (
|
||||
regional_client.list_package_versions(
|
||||
domain=self.repositories[
|
||||
repository
|
||||
].domain_name,
|
||||
domainOwner=self.repositories[
|
||||
repository
|
||||
].domain_owner,
|
||||
repository=self.repositories[repository].name,
|
||||
format=package_format,
|
||||
namespace=package_namespace,
|
||||
package=package_name,
|
||||
sortBy="PUBLISHED_TIME",
|
||||
)
|
||||
)
|
||||
else:
|
||||
latest_version_information = (
|
||||
regional_client.list_package_versions(
|
||||
domain=self.repositories[
|
||||
repository
|
||||
].domain_name,
|
||||
domainOwner=self.repositories[
|
||||
repository
|
||||
].domain_owner,
|
||||
repository=self.repositories[repository].name,
|
||||
format=package_format,
|
||||
package=package_name,
|
||||
sortBy="PUBLISHED_TIME",
|
||||
)
|
||||
)
|
||||
)
|
||||
latest_version = ""
|
||||
latest_origin_type = "UNKNOWN"
|
||||
latest_status = "Published"
|
||||
|
||||
@@ -18,10 +18,18 @@ class ec2_securitygroup_not_used(Check):
|
||||
report.status = "PASS"
|
||||
report.status_extended = f"Security group {security_group.name} ({security_group.id}) it is being used."
|
||||
sg_in_lambda = False
|
||||
sg_associated = False
|
||||
for function in awslambda_client.functions.values():
|
||||
if security_group.id in function.security_groups:
|
||||
sg_in_lambda = True
|
||||
if len(security_group.network_interfaces) == 0 and not sg_in_lambda:
|
||||
for sg in ec2_client.security_groups:
|
||||
if security_group.id in sg.associated_sgs:
|
||||
sg_associated = True
|
||||
if (
|
||||
len(security_group.network_interfaces) == 0
|
||||
and not sg_in_lambda
|
||||
and not sg_associated
|
||||
):
|
||||
report.status = "FAIL"
|
||||
report.status_extended = f"Security group {security_group.name} ({security_group.id}) it is not being used."
|
||||
|
||||
|
||||
@@ -117,6 +117,7 @@ class EC2(AWSService):
|
||||
if not self.audit_resources or (
|
||||
is_resource_filtered(arn, self.audit_resources)
|
||||
):
|
||||
associated_sgs = []
|
||||
# check if sg has public access to all ports
|
||||
all_public_ports = False
|
||||
for ingress_rule in sg["IpPermissions"]:
|
||||
@@ -128,7 +129,10 @@ class EC2(AWSService):
|
||||
in self.audited_checks
|
||||
):
|
||||
all_public_ports = True
|
||||
break
|
||||
# check associated security groups
|
||||
for sg_group in ingress_rule.get("UserIdGroupPairs", []):
|
||||
if sg_group.get("GroupId"):
|
||||
associated_sgs.append(sg_group["GroupId"])
|
||||
self.security_groups.append(
|
||||
SecurityGroup(
|
||||
name=sg["GroupName"],
|
||||
@@ -138,6 +142,7 @@ class EC2(AWSService):
|
||||
ingress_rules=sg["IpPermissions"],
|
||||
egress_rules=sg["IpPermissionsEgress"],
|
||||
public_ports=all_public_ports,
|
||||
associated_sgs=associated_sgs,
|
||||
vpc_id=sg["VpcId"],
|
||||
tags=sg.get("Tags"),
|
||||
)
|
||||
@@ -464,6 +469,7 @@ class SecurityGroup(BaseModel):
|
||||
id: str
|
||||
vpc_id: str
|
||||
public_ports: bool
|
||||
associated_sgs: list
|
||||
network_interfaces: list[str] = []
|
||||
ingress_rules: list[dict]
|
||||
egress_rules: list[dict]
|
||||
|
||||
@@ -6,7 +6,7 @@ class guardduty_centrally_managed(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for detector in guardduty_client.detectors:
|
||||
if detector.id:
|
||||
if detector.id and detector.enabled_in_account:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = detector.region
|
||||
report.resource_id = detector.id
|
||||
|
||||
@@ -6,7 +6,7 @@ class guardduty_no_high_severity_findings(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
for detector in guardduty_client.detectors:
|
||||
if detector.id:
|
||||
if detector.id and detector.enabled_in_account:
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = detector.region
|
||||
report.resource_id = detector.id
|
||||
|
||||
@@ -139,7 +139,10 @@ class IAM(AWSService):
|
||||
logger.warning(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
@@ -208,14 +211,24 @@ class IAM(AWSService):
|
||||
reuse_prevention=reuse_prevention,
|
||||
hard_expiry=hard_expiry,
|
||||
)
|
||||
except Exception as error:
|
||||
if "NoSuchEntity" in str(error):
|
||||
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "NoSuchEntity":
|
||||
# Password policy does not exist
|
||||
stored_password_policy = None
|
||||
logger.warning(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
finally:
|
||||
return stored_password_policy
|
||||
|
||||
@@ -268,17 +281,22 @@ class IAM(AWSService):
|
||||
logger.info("IAM - List Attached Group Policies...")
|
||||
try:
|
||||
for group in self.groups:
|
||||
list_attached_group_policies_paginator = self.client.get_paginator(
|
||||
"list_attached_group_policies"
|
||||
)
|
||||
attached_group_policies = []
|
||||
for page in list_attached_group_policies_paginator.paginate(
|
||||
GroupName=group.name
|
||||
):
|
||||
for attached_group_policy in page["AttachedPolicies"]:
|
||||
attached_group_policies.append(attached_group_policy)
|
||||
try:
|
||||
list_attached_group_policies_paginator = self.client.get_paginator(
|
||||
"list_attached_group_policies"
|
||||
)
|
||||
attached_group_policies = []
|
||||
for page in list_attached_group_policies_paginator.paginate(
|
||||
GroupName=group.name
|
||||
):
|
||||
for attached_group_policy in page["AttachedPolicies"]:
|
||||
attached_group_policies.append(attached_group_policy)
|
||||
|
||||
group.attached_policies = attached_group_policies
|
||||
group.attached_policies = attached_group_policies
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
@@ -337,18 +355,33 @@ class IAM(AWSService):
|
||||
logger.info("IAM - List Attached User Policies...")
|
||||
try:
|
||||
for user in self.users:
|
||||
attached_user_policies = []
|
||||
get_user_attached_policies_paginator = self.client.get_paginator(
|
||||
"list_attached_user_policies"
|
||||
)
|
||||
for page in get_user_attached_policies_paginator.paginate(
|
||||
UserName=user.name
|
||||
):
|
||||
for policy in page["AttachedPolicies"]:
|
||||
attached_user_policies.append(policy)
|
||||
try:
|
||||
attached_user_policies = []
|
||||
get_user_attached_policies_paginator = self.client.get_paginator(
|
||||
"list_attached_user_policies"
|
||||
)
|
||||
for page in get_user_attached_policies_paginator.paginate(
|
||||
UserName=user.name
|
||||
):
|
||||
for policy in page["AttachedPolicies"]:
|
||||
attached_user_policies.append(policy)
|
||||
|
||||
user.attached_policies = attached_user_policies
|
||||
user.attached_policies = attached_user_policies
|
||||
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "NoSuchEntity":
|
||||
logger.warning(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
@@ -371,10 +404,19 @@ class IAM(AWSService):
|
||||
|
||||
role.attached_policies = attached_role_policies
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "NoSuchEntityException":
|
||||
if error.response["Error"]["Code"] == "NoSuchEntity":
|
||||
logger.warning(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
@@ -639,8 +681,16 @@ class IAM(AWSService):
|
||||
response = self.client.list_role_tags(RoleName=role.name)["Tags"]
|
||||
role.tags = response
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "NoSuchEntityException":
|
||||
if error.response["Error"]["Code"] == "NoSuchEntity":
|
||||
role.tags = []
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
@@ -653,8 +703,12 @@ class IAM(AWSService):
|
||||
response = self.client.list_user_tags(UserName=user.name)["Tags"]
|
||||
user.tags = response
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "NoSuchEntityException":
|
||||
if error.response["Error"]["Code"] == "NoSuchEntity":
|
||||
user.tags = []
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
@@ -664,13 +718,22 @@ class IAM(AWSService):
|
||||
try:
|
||||
for policy in self.policies:
|
||||
try:
|
||||
response = self.client.list_policy_tags(PolicyArn=policy.arn)[
|
||||
"Tags"
|
||||
]
|
||||
policy.tags = response
|
||||
if policy.type != "Inline":
|
||||
response = self.client.list_policy_tags(PolicyArn=policy.arn)[
|
||||
"Tags"
|
||||
]
|
||||
policy.tags = response
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "NoSuchEntityException":
|
||||
if error.response["Error"]["Code"] == "NoSuchEntity":
|
||||
policy.tags = []
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
@@ -697,9 +760,19 @@ class IAM(AWSService):
|
||||
]
|
||||
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "NoSuchEntity":
|
||||
logger.warning(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
@@ -717,6 +790,15 @@ class IAM(AWSService):
|
||||
"AccessKeyMetadata"
|
||||
]
|
||||
except ClientError as error:
|
||||
if error.response["Error"]["Code"] == "NoSuchEntity":
|
||||
logger.warning(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
@@ -13,9 +13,14 @@ class rds_instance_deprecated_engine_version(Check):
|
||||
report.resource_arn = db_instance.arn
|
||||
report.resource_tags = db_instance.tags
|
||||
report.status_extended = f"RDS instance {db_instance.id} is using a deprecated engine {db_instance.engine} with version {db_instance.engine_version}."
|
||||
|
||||
if (
|
||||
db_instance.engine_version
|
||||
hasattr(
|
||||
rds_client.db_engines.get(db_instance.region, {}).get(
|
||||
db_instance.engine, {}
|
||||
),
|
||||
"engine_versions",
|
||||
)
|
||||
and db_instance.engine_version
|
||||
in rds_client.db_engines[db_instance.region][
|
||||
db_instance.engine
|
||||
].engine_versions
|
||||
|
||||
@@ -16,23 +16,30 @@ class SQS(AWSService):
|
||||
super().__init__(__class__.__name__, audit_info)
|
||||
self.queues = []
|
||||
self.__threading_call__(self.__list_queues__)
|
||||
self.__get_queue_attributes__(self.regional_clients)
|
||||
self.__get_queue_attributes__()
|
||||
self.__list_queue_tags__()
|
||||
|
||||
def __list_queues__(self, regional_client):
|
||||
logger.info("SQS - describing queues...")
|
||||
try:
|
||||
list_queues_paginator = regional_client.get_paginator("list_queues")
|
||||
for page in list_queues_paginator.paginate():
|
||||
# The SQS API uses nonstandard pagination
|
||||
# you must specify a PageSize if there are more than 1000 queues
|
||||
for page in list_queues_paginator.paginate(
|
||||
PaginationConfig={"PageSize": 1000}
|
||||
):
|
||||
if "QueueUrls" in page:
|
||||
for queue in page["QueueUrls"]:
|
||||
arn = f"arn:{self.audited_partition}:sqs:{regional_client.region}:{self.audited_account}:{queue}"
|
||||
# the queue name is the last path segment of the url
|
||||
queue_name = queue.split("/")[-1]
|
||||
arn = f"arn:{self.audited_partition}:sqs:{regional_client.region}:{self.audited_account}:{queue_name}"
|
||||
if not self.audit_resources or (
|
||||
is_resource_filtered(arn, self.audit_resources)
|
||||
):
|
||||
self.queues.append(
|
||||
Queue(
|
||||
arn=arn,
|
||||
name=queue_name,
|
||||
id=queue,
|
||||
region=regional_client.region,
|
||||
)
|
||||
@@ -42,28 +49,46 @@ class SQS(AWSService):
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
def __get_queue_attributes__(self, regional_clients):
|
||||
def __get_queue_attributes__(self):
|
||||
try:
|
||||
logger.info("SQS - describing queue attributes...")
|
||||
for queue in self.queues:
|
||||
regional_client = regional_clients[queue.region]
|
||||
queue_attributes = regional_client.get_queue_attributes(
|
||||
QueueUrl=queue.id, AttributeNames=["All"]
|
||||
)
|
||||
if "Attributes" in queue_attributes:
|
||||
if "Policy" in queue_attributes["Attributes"]:
|
||||
queue.policy = loads(queue_attributes["Attributes"]["Policy"])
|
||||
if "KmsMasterKeyId" in queue_attributes["Attributes"]:
|
||||
queue.kms_key_id = queue_attributes["Attributes"][
|
||||
"KmsMasterKeyId"
|
||||
]
|
||||
if "SqsManagedSseEnabled" in queue_attributes["Attributes"]:
|
||||
if (
|
||||
queue_attributes["Attributes"]["SqsManagedSseEnabled"]
|
||||
== "true"
|
||||
):
|
||||
queue.kms_key_id = "SqsManagedSseEnabled"
|
||||
|
||||
try:
|
||||
regional_client = self.regional_clients[queue.region]
|
||||
queue_attributes = regional_client.get_queue_attributes(
|
||||
QueueUrl=queue.id, AttributeNames=["All"]
|
||||
)
|
||||
if "Attributes" in queue_attributes:
|
||||
if "Policy" in queue_attributes["Attributes"]:
|
||||
queue.policy = loads(
|
||||
queue_attributes["Attributes"]["Policy"]
|
||||
)
|
||||
if "KmsMasterKeyId" in queue_attributes["Attributes"]:
|
||||
queue.kms_key_id = queue_attributes["Attributes"][
|
||||
"KmsMasterKeyId"
|
||||
]
|
||||
if "SqsManagedSseEnabled" in queue_attributes["Attributes"]:
|
||||
if (
|
||||
queue_attributes["Attributes"]["SqsManagedSseEnabled"]
|
||||
== "true"
|
||||
):
|
||||
queue.kms_key_id = "SqsManagedSseEnabled"
|
||||
except ClientError as error:
|
||||
if (
|
||||
error.response["Error"]["Code"]
|
||||
== "AWS.SimpleQueueService.NonExistentQueue"
|
||||
):
|
||||
logger.warning(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
@@ -87,6 +112,14 @@ class SQS(AWSService):
|
||||
logger.warning(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
@@ -96,6 +129,7 @@ class SQS(AWSService):
|
||||
|
||||
class Queue(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
arn: str
|
||||
region: str
|
||||
policy: dict = None
|
||||
|
||||
@@ -34,9 +34,9 @@ class TrustedAdvisor(AWSService):
|
||||
def __describe_trusted_advisor_checks__(self):
|
||||
logger.info("TrustedAdvisor - Describing Checks...")
|
||||
try:
|
||||
for check in self.client.describe_trusted_advisor_checks(language="en")[
|
||||
"checks"
|
||||
]:
|
||||
for check in self.client.describe_trusted_advisor_checks(language="en").get(
|
||||
"checks", []
|
||||
):
|
||||
self.checks.append(
|
||||
Check(
|
||||
id=check["id"],
|
||||
|
||||
@@ -5,22 +5,23 @@ from prowler.providers.aws.services.vpc.vpc_client import vpc_client
|
||||
class vpc_different_regions(Check):
|
||||
def execute(self):
|
||||
findings = []
|
||||
vpc_regions = set()
|
||||
for vpc in vpc_client.vpcs.values():
|
||||
if not vpc.default:
|
||||
vpc_regions.add(vpc.region)
|
||||
if len(vpc_client.vpcs) > 0:
|
||||
vpc_regions = set()
|
||||
for vpc in vpc_client.vpcs.values():
|
||||
if not vpc.default:
|
||||
vpc_regions.add(vpc.region)
|
||||
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
# This is a global check under the vpc service: region, resource_id and tags are not relevant here but we keep them for consistency
|
||||
report.region = vpc_client.region
|
||||
report.resource_id = vpc_client.audited_account
|
||||
report.resource_arn = vpc_client.audited_account_arn
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "VPCs found only in one region."
|
||||
if len(vpc_regions) > 1:
|
||||
report.status = "PASS"
|
||||
report.status_extended = "VPCs found in more than one region."
|
||||
report = Check_Report_AWS(self.metadata())
|
||||
report.region = vpc_client.region
|
||||
report.resource_id = vpc_client.audited_account
|
||||
report.resource_arn = vpc_client.audited_account_arn
|
||||
|
||||
findings.append(report)
|
||||
report.status = "FAIL"
|
||||
report.status_extended = "VPCs found only in one region."
|
||||
|
||||
if len(vpc_regions) > 1:
|
||||
report.status = "PASS"
|
||||
report.status_extended = "VPCs found in more than one region."
|
||||
findings.append(report)
|
||||
|
||||
return findings
|
||||
|
||||
@@ -7,6 +7,7 @@ from msgraph.core import GraphClient
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.azure.lib.audit_info.models import Azure_Identity_Info
|
||||
from prowler.providers.azure.lib.regions.regions import get_regions_config
|
||||
|
||||
|
||||
class Azure_Provider:
|
||||
@@ -18,12 +19,14 @@ class Azure_Provider:
|
||||
managed_entity_auth: bool,
|
||||
subscription_ids: list,
|
||||
tenant_id: str,
|
||||
region: str,
|
||||
):
|
||||
logger.info("Instantiating Azure Provider ...")
|
||||
self.credentials = self.__set_credentials__(
|
||||
self.region_config = self.__get_region_config__(region)
|
||||
self.credentials = self.__get_credentials__(
|
||||
az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id
|
||||
)
|
||||
self.identity = self.__set_identity_info__(
|
||||
self.identity = self.__get_identity_info__(
|
||||
self.credentials,
|
||||
az_cli_auth,
|
||||
sp_env_auth,
|
||||
@@ -32,7 +35,10 @@ class Azure_Provider:
|
||||
subscription_ids,
|
||||
)
|
||||
|
||||
def __set_credentials__(
|
||||
def __get_region_config__(self, region):
|
||||
return get_regions_config(region)
|
||||
|
||||
def __get_credentials__(
|
||||
self, az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id
|
||||
):
|
||||
# Browser auth creds cannot be set with DefaultAzureCredentials()
|
||||
@@ -52,6 +58,8 @@ class Azure_Provider:
|
||||
exclude_shared_token_cache_credential=True,
|
||||
# Azure Auth using PowerShell is not supported
|
||||
exclude_powershell_credential=True,
|
||||
# set Authority of a Microsoft Entra endpoint
|
||||
authority=self.region_config["authority"],
|
||||
)
|
||||
except Exception as error:
|
||||
logger.critical("Failed to retrieve azure credentials")
|
||||
@@ -61,7 +69,6 @@ class Azure_Provider:
|
||||
sys.exit(1)
|
||||
else:
|
||||
try:
|
||||
print(tenant_id)
|
||||
credentials = InteractiveBrowserCredential(tenant_id=tenant_id)
|
||||
except Exception as error:
|
||||
logger.critical("Failed to retrieve azure credentials")
|
||||
@@ -83,7 +90,7 @@ class Azure_Provider:
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
def __set_identity_info__(
|
||||
def __get_identity_info__(
|
||||
self,
|
||||
credentials,
|
||||
az_cli_auth,
|
||||
@@ -153,7 +160,11 @@ class Azure_Provider:
|
||||
logger.info(
|
||||
"Trying to subscriptions and tenant ids to populate identity structure ..."
|
||||
)
|
||||
subscriptions_client = SubscriptionClient(credential=credentials)
|
||||
subscriptions_client = SubscriptionClient(
|
||||
credential=credentials,
|
||||
base_url=self.region_config["base_url"],
|
||||
credential_scopes=self.region_config["credential_scopes"],
|
||||
)
|
||||
if not subscription_ids:
|
||||
logger.info("Scanning all the Azure subscriptions...")
|
||||
for subscription in subscriptions_client.subscriptions.list():
|
||||
@@ -195,3 +206,6 @@ class Azure_Provider:
|
||||
|
||||
def get_identity(self):
|
||||
return self.identity
|
||||
|
||||
def get_region_config(self):
|
||||
return self.region_config
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
from argparse import ArgumentTypeError
|
||||
|
||||
|
||||
def init_parser(self):
|
||||
"""Init the Azure Provider CLI parser"""
|
||||
azure_parser = self.subparsers.add_parser(
|
||||
@@ -40,3 +43,27 @@ def init_parser(self):
|
||||
default=None,
|
||||
help="Azure Tenant ID to be used with --browser-auth option",
|
||||
)
|
||||
# Regions
|
||||
azure_regions_subparser = azure_parser.add_argument_group("Regions")
|
||||
azure_regions_subparser.add_argument(
|
||||
"--azure-region",
|
||||
nargs="?",
|
||||
default="AzureCloud",
|
||||
type=validate_azure_region,
|
||||
help="Azure region from `az cloud list --output table`, by default AzureCloud",
|
||||
)
|
||||
|
||||
|
||||
def validate_azure_region(region):
|
||||
"""validate_azure_region validates if the region passed as argument is valid"""
|
||||
regions_allowed = [
|
||||
"AzureChinaCloud",
|
||||
"AzureUSGovernment",
|
||||
"AzureGermanCloud",
|
||||
"AzureCloud",
|
||||
]
|
||||
if region not in regions_allowed:
|
||||
raise ArgumentTypeError(
|
||||
f"Region {region} not allowed, allowed regions are {' '.join(regions_allowed)}"
|
||||
)
|
||||
return region
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from prowler.providers.azure.lib.audit_info.models import (
|
||||
Azure_Audit_Info,
|
||||
Azure_Identity_Info,
|
||||
Azure_Region_Config,
|
||||
)
|
||||
|
||||
azure_audit_info = Azure_Audit_Info(
|
||||
@@ -9,4 +10,5 @@ azure_audit_info = Azure_Audit_Info(
|
||||
audit_resources=None,
|
||||
audit_metadata=None,
|
||||
audit_config=None,
|
||||
azure_region_config=Azure_Region_Config(),
|
||||
)
|
||||
|
||||
@@ -13,6 +13,13 @@ class Azure_Identity_Info(BaseModel):
|
||||
subscriptions: dict = {}
|
||||
|
||||
|
||||
class Azure_Region_Config(BaseModel):
|
||||
name: str = ""
|
||||
authority: str = None
|
||||
base_url: str = ""
|
||||
credential_scopes: list = []
|
||||
|
||||
|
||||
@dataclass
|
||||
class Azure_Audit_Info:
|
||||
credentials: DefaultAzureCredential
|
||||
@@ -20,12 +27,20 @@ class Azure_Audit_Info:
|
||||
audit_resources: Optional[Any]
|
||||
audit_metadata: Optional[Any]
|
||||
audit_config: dict
|
||||
azure_region_config: Azure_Region_Config
|
||||
|
||||
def __init__(
|
||||
self, credentials, identity, audit_metadata, audit_resources, audit_config
|
||||
self,
|
||||
credentials,
|
||||
identity,
|
||||
audit_metadata,
|
||||
audit_resources,
|
||||
audit_config,
|
||||
azure_region_config,
|
||||
):
|
||||
self.credentials = credentials
|
||||
self.identity = identity
|
||||
self.audit_metadata = audit_metadata
|
||||
self.audit_resources = audit_resources
|
||||
self.audit_config = audit_config
|
||||
self.azure_region_config = azure_region_config
|
||||
|
||||
0
prowler/providers/azure/lib/exception/__init__.py
Normal file
0
prowler/providers/azure/lib/exception/__init__.py
Normal file
11
prowler/providers/azure/lib/exception/exception.py
Normal file
11
prowler/providers/azure/lib/exception/exception.py
Normal file
@@ -0,0 +1,11 @@
|
||||
class AzureException(Exception):
|
||||
"""
|
||||
Exception raised when dealing with Azure Provider/Azure audit info instance
|
||||
|
||||
Attributes:
|
||||
message -- message to be displayed
|
||||
"""
|
||||
|
||||
def __init__(self, message):
|
||||
self.message = message
|
||||
super().__init__(self.message)
|
||||
0
prowler/providers/azure/lib/regions/__init__.py
Normal file
0
prowler/providers/azure/lib/regions/__init__.py
Normal file
38
prowler/providers/azure/lib/regions/regions.py
Normal file
38
prowler/providers/azure/lib/regions/regions.py
Normal file
@@ -0,0 +1,38 @@
|
||||
from azure.identity import AzureAuthorityHosts
|
||||
from msrestazure.azure_cloud import (
|
||||
AZURE_CHINA_CLOUD,
|
||||
AZURE_GERMAN_CLOUD,
|
||||
AZURE_US_GOV_CLOUD,
|
||||
)
|
||||
|
||||
|
||||
def get_regions_config(region):
|
||||
allowed_regions = {
|
||||
"AzureCloud": {
|
||||
"authority": None,
|
||||
"base_url": "https://management.azure.com",
|
||||
"credential_scopes": ["https://management.azure.com/.default"],
|
||||
},
|
||||
"AzureChinaCloud": {
|
||||
"authority": AzureAuthorityHosts.AZURE_CHINA,
|
||||
"base_url": AZURE_CHINA_CLOUD.endpoints.resource_manager,
|
||||
"credential_scopes": [
|
||||
AZURE_CHINA_CLOUD.endpoints.resource_manager + "/.default"
|
||||
],
|
||||
},
|
||||
"AzureUSGovernment": {
|
||||
"authority": AzureAuthorityHosts.AZURE_GOVERNMENT,
|
||||
"base_url": AZURE_US_GOV_CLOUD.endpoints.resource_manager,
|
||||
"credential_scopes": [
|
||||
AZURE_US_GOV_CLOUD.endpoints.resource_manager + "/.default"
|
||||
],
|
||||
},
|
||||
"AzureGermanCloud": {
|
||||
"authority": AzureAuthorityHosts.AZURE_GERMANY,
|
||||
"base_url": AZURE_GERMAN_CLOUD.endpoints.resource_manager,
|
||||
"credential_scopes": [
|
||||
AZURE_GERMAN_CLOUD.endpoints.resource_manager + "/.default"
|
||||
],
|
||||
},
|
||||
}
|
||||
return allowed_regions[region]
|
||||
@@ -9,17 +9,27 @@ class AzureService:
|
||||
audit_info: Azure_Audit_Info,
|
||||
):
|
||||
self.clients = self.__set_clients__(
|
||||
audit_info.identity.subscriptions, audit_info.credentials, service
|
||||
audit_info.identity.subscriptions,
|
||||
audit_info.credentials,
|
||||
service,
|
||||
audit_info.azure_region_config,
|
||||
)
|
||||
|
||||
self.subscriptions = audit_info.identity.subscriptions
|
||||
|
||||
def __set_clients__(self, subscriptions, credentials, service):
|
||||
def __set_clients__(self, subscriptions, credentials, service, region_config):
|
||||
clients = {}
|
||||
try:
|
||||
for display_name, id in subscriptions.items():
|
||||
clients.update(
|
||||
{display_name: service(credential=credentials, subscription_id=id)}
|
||||
{
|
||||
display_name: service(
|
||||
credential=credentials,
|
||||
subscription_id=id,
|
||||
base_url=region_config.base_url,
|
||||
credential_scopes=region_config.credential_scopes,
|
||||
)
|
||||
}
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
|
||||
@@ -8,6 +8,7 @@ from prowler.lib.logger import logger
|
||||
from prowler.providers.aws.aws_provider import (
|
||||
AWS_Provider,
|
||||
assume_role,
|
||||
get_aws_enabled_regions,
|
||||
get_checks_from_input_arn,
|
||||
get_regions_from_audit_resources,
|
||||
)
|
||||
@@ -26,7 +27,11 @@ from prowler.providers.aws.lib.resource_api_tagging.resource_api_tagging import
|
||||
)
|
||||
from prowler.providers.azure.azure_provider import Azure_Provider
|
||||
from prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info
|
||||
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
|
||||
from prowler.providers.azure.lib.audit_info.models import (
|
||||
Azure_Audit_Info,
|
||||
Azure_Region_Config,
|
||||
)
|
||||
from prowler.providers.azure.lib.exception.exception import AzureException
|
||||
from prowler.providers.gcp.gcp_provider import GCP_Provider
|
||||
from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info
|
||||
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
|
||||
@@ -63,7 +68,7 @@ GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: {Fore.Y
|
||||
report = f"""
|
||||
This report is being generated using the identity below:
|
||||
|
||||
Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL}
|
||||
Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL} Azure Region: {Fore.YELLOW}[{audit_info.azure_region_config.name}]{Style.RESET_ALL}
|
||||
Azure Subscriptions: {Fore.YELLOW}{printed_subscriptions}{Style.RESET_ALL}
|
||||
Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RESET_ALL} Azure Identity ID: {Fore.YELLOW}[{audit_info.identity.identity_id}]{Style.RESET_ALL}
|
||||
"""
|
||||
@@ -253,6 +258,9 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE
|
||||
if arguments.get("resource_arn"):
|
||||
current_audit_info.audit_resources = arguments.get("resource_arn")
|
||||
|
||||
# Get Enabled Regions
|
||||
current_audit_info.enabled_regions = get_aws_enabled_regions(current_audit_info)
|
||||
|
||||
return current_audit_info
|
||||
|
||||
def set_aws_execution_parameters(self, provider, audit_info) -> list[str]:
|
||||
@@ -282,17 +290,21 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE
|
||||
browser_auth = arguments.get("browser_auth")
|
||||
managed_entity_auth = arguments.get("managed_entity_auth")
|
||||
tenant_id = arguments.get("tenant_id")
|
||||
|
||||
logger.info("Checking if region is different than default one")
|
||||
region = arguments.get("azure_region")
|
||||
|
||||
if (
|
||||
not az_cli_auth
|
||||
and not sp_env_auth
|
||||
and not browser_auth
|
||||
and not managed_entity_auth
|
||||
):
|
||||
raise Exception(
|
||||
raise AzureException(
|
||||
"Azure provider requires at least one authentication method set: [--az-cli-auth | --sp-env-auth | --browser-auth | --managed-identity-auth]"
|
||||
)
|
||||
if (not browser_auth and tenant_id) or (browser_auth and not tenant_id):
|
||||
raise Exception(
|
||||
raise AzureException(
|
||||
"Azure Tenant ID (--tenant-id) is required only for browser authentication mode"
|
||||
)
|
||||
|
||||
@@ -303,9 +315,17 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE
|
||||
managed_entity_auth,
|
||||
subscription_ids,
|
||||
tenant_id,
|
||||
region,
|
||||
)
|
||||
azure_audit_info.credentials = azure_provider.get_credentials()
|
||||
azure_audit_info.identity = azure_provider.get_identity()
|
||||
region_config = azure_provider.get_region_config()
|
||||
azure_audit_info.azure_region_config = Azure_Region_Config(
|
||||
name=region,
|
||||
authority=region_config["authority"],
|
||||
base_url=region_config["base_url"],
|
||||
credential_scopes=region_config["credential_scopes"],
|
||||
)
|
||||
|
||||
if not arguments.get("only_logs"):
|
||||
self.print_azure_credentials(azure_audit_info)
|
||||
|
||||
@@ -29,6 +29,21 @@ def set_provider_output_options(
|
||||
return provider_output_options
|
||||
|
||||
|
||||
def get_provider_output_model(audit_info_class_name):
|
||||
"""
|
||||
get_provider_output_model returns the model _Check_Output_CSV for each provider
|
||||
"""
|
||||
# from AWS_Audit_Info -> AWS -> aws -> Aws
|
||||
output_provider = audit_info_class_name.split("_", 1)[0].lower().capitalize()
|
||||
output_provider_model_name = f"{output_provider}_Check_Output_CSV"
|
||||
output_provider_models_path = "prowler.lib.outputs.models"
|
||||
output_provider_model = getattr(
|
||||
importlib.import_module(output_provider_models_path), output_provider_model_name
|
||||
)
|
||||
|
||||
return output_provider_model
|
||||
|
||||
|
||||
@dataclass
|
||||
class Provider_Output_Options:
|
||||
is_quiet: bool
|
||||
|
||||
@@ -3,10 +3,8 @@ import sys
|
||||
|
||||
from google import auth
|
||||
from googleapiclient import discovery
|
||||
from googleapiclient.discovery import Resource
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
|
||||
|
||||
|
||||
class GCP_Provider:
|
||||
@@ -92,16 +90,3 @@ class GCP_Provider:
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
return []
|
||||
|
||||
|
||||
def generate_client(
|
||||
service: str,
|
||||
api_version: str,
|
||||
audit_info: GCP_Audit_Info,
|
||||
) -> Resource:
|
||||
try:
|
||||
return discovery.build(service, api_version, credentials=audit_info.credentials)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
@@ -3,10 +3,11 @@ import threading
|
||||
import google_auth_httplib2
|
||||
import httplib2
|
||||
from colorama import Fore, Style
|
||||
from google.oauth2.credentials import Credentials
|
||||
from googleapiclient import discovery
|
||||
from googleapiclient.discovery import Resource
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.gcp.gcp_provider import generate_client
|
||||
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
|
||||
|
||||
|
||||
@@ -25,7 +26,9 @@ class GCPService:
|
||||
self.api_version = api_version
|
||||
self.default_project_id = audit_info.default_project_id
|
||||
self.region = region
|
||||
self.client = generate_client(service, api_version, audit_info)
|
||||
self.client = self.__generate_client__(
|
||||
service, api_version, audit_info.credentials
|
||||
)
|
||||
# Only project ids that have their API enabled will be scanned
|
||||
self.project_ids = self.__is_api_active__(audit_info.project_ids)
|
||||
|
||||
@@ -66,3 +69,16 @@ class GCPService:
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
return project_ids
|
||||
|
||||
def __generate_client__(
|
||||
self,
|
||||
service: str,
|
||||
api_version: str,
|
||||
credentials: Credentials,
|
||||
) -> Resource:
|
||||
try:
|
||||
return discovery.build(service, api_version, credentials=credentials)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
@@ -22,10 +22,10 @@ packages = [
|
||||
{include = "prowler"}
|
||||
]
|
||||
readme = "README.md"
|
||||
version = "3.11.0"
|
||||
version = "3.11.3"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
alive-progress = "3.1.4"
|
||||
alive-progress = "3.1.5"
|
||||
awsipranges = "0.3.3"
|
||||
azure-identity = "1.15.0"
|
||||
azure-mgmt-authorization = "4.0.0"
|
||||
@@ -33,21 +33,23 @@ azure-mgmt-security = "5.0.0"
|
||||
azure-mgmt-sql = "3.0.1"
|
||||
azure-mgmt-storage = "21.1.0"
|
||||
azure-mgmt-subscription = "3.1.1"
|
||||
azure-storage-blob = "12.18.3"
|
||||
azure-storage-blob = "12.19.0"
|
||||
boto3 = "1.26.165"
|
||||
botocore = "1.29.165"
|
||||
colorama = "0.4.6"
|
||||
detect-secrets = "1.4.0"
|
||||
google-api-python-client = "2.105.0"
|
||||
google-api-python-client = "2.108.0"
|
||||
google-auth-httplib2 = "^0.1.0"
|
||||
jsonschema = "4.18.0"
|
||||
mkdocs = {version = "1.5.3", optional = true}
|
||||
mkdocs-material = {version = "9.4.7", optional = true}
|
||||
mkdocs-material = {version = "9.4.14", optional = true}
|
||||
msgraph-core = "0.2.2"
|
||||
msrestazure = "^0.6.4"
|
||||
pydantic = "1.10.13"
|
||||
python = "^3.9"
|
||||
python = ">=3.9,<3.12"
|
||||
schema = "0.7.5"
|
||||
shodan = "1.30.1"
|
||||
slack-sdk = "3.23.0"
|
||||
slack-sdk = "3.26.0"
|
||||
tabulate = "0.9.0"
|
||||
|
||||
[tool.poetry.extras]
|
||||
@@ -61,13 +63,13 @@ docker = "6.1.3"
|
||||
flake8 = "6.1.0"
|
||||
freezegun = "1.2.2"
|
||||
mock = "5.1.0"
|
||||
moto = "4.2.7"
|
||||
moto = {extras = ["all"], version = "4.2.10"}
|
||||
openapi-spec-validator = "0.7.1"
|
||||
pylint = "3.0.2"
|
||||
pytest = "7.4.3"
|
||||
pytest-cov = "4.1.0"
|
||||
pytest-randomly = "3.15.0"
|
||||
pytest-xdist = "3.3.1"
|
||||
pytest-xdist = "3.5.0"
|
||||
safety = "2.3.5"
|
||||
vulture = "2.10"
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ class Test_Config:
|
||||
def test_check_current_version_with_latest(self):
|
||||
assert (
|
||||
check_current_version()
|
||||
== f"Prowler {MOCK_PROWLER_VERSION} (it is the latest version, yay!)"
|
||||
== f"Prowler {MOCK_PROWLER_VERSION} (You are running the latest version, yay!)"
|
||||
)
|
||||
|
||||
@mock.patch(
|
||||
|
||||
319
tests/lib/check/check_loader_test.py
Normal file
319
tests/lib/check/check_loader_test.py
Normal file
@@ -0,0 +1,319 @@
|
||||
from mock import patch
|
||||
|
||||
from prowler.lib.check.checks_loader import (
|
||||
load_checks_to_execute,
|
||||
update_checks_to_execute_with_aliases,
|
||||
)
|
||||
from prowler.lib.check.models import (
|
||||
Check_Metadata_Model,
|
||||
Code,
|
||||
Recommendation,
|
||||
Remediation,
|
||||
)
|
||||
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME = "s3_bucket_level_public_access_block"
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_CUSTOM_ALIAS = (
|
||||
"s3_bucket_level_public_access_block"
|
||||
)
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY = "medium"
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE = "s3"
|
||||
|
||||
|
||||
class TestCheckLoader:
|
||||
provider = "aws"
|
||||
|
||||
def get_custom_check_metadata(self):
|
||||
return Check_Metadata_Model(
|
||||
Provider="aws",
|
||||
CheckID=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME,
|
||||
CheckTitle="Check S3 Bucket Level Public Access Block.",
|
||||
CheckType=["Data Protection"],
|
||||
CheckAliases=[S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_CUSTOM_ALIAS],
|
||||
ServiceName=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE,
|
||||
SubServiceName="",
|
||||
ResourceIdTemplate="arn:partition:s3:::bucket_name",
|
||||
Severity=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY,
|
||||
ResourceType="AwsS3Bucket",
|
||||
Description="Check S3 Bucket Level Public Access Block.",
|
||||
Risk="Public access policies may be applied to sensitive data buckets.",
|
||||
RelatedUrl="https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html",
|
||||
Remediation=Remediation(
|
||||
Code=Code(
|
||||
NativeIaC="",
|
||||
Terraform="https://docs.bridgecrew.io/docs/bc_aws_s3_20#terraform",
|
||||
CLI="aws s3api put-public-access-block --region <REGION_NAME> --public-access-block-configuration BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true --bucket <BUCKET_NAME>",
|
||||
Other="https://github.com/cloudmatos/matos/tree/master/remediations/aws/s3/s3/block-public-access",
|
||||
),
|
||||
Recommendation=Recommendation(
|
||||
Text="You can enable Public Access Block at the bucket level to prevent the exposure of your data stored in S3.",
|
||||
Url="https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html",
|
||||
),
|
||||
),
|
||||
Categories=["internet-exposed"],
|
||||
DependsOn=[],
|
||||
RelatedTo=[],
|
||||
Notes="",
|
||||
Compliance=[],
|
||||
)
|
||||
|
||||
def test_load_checks_to_execute(self):
|
||||
bulk_checks_metatada = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
|
||||
}
|
||||
bulk_compliance_frameworks = None
|
||||
checks_file = None
|
||||
check_list = None
|
||||
service_list = None
|
||||
severities = None
|
||||
compliance_frameworks = None
|
||||
categories = None
|
||||
|
||||
with patch(
|
||||
"prowler.lib.check.checks_loader.recover_checks_from_provider",
|
||||
return_value=[
|
||||
(
|
||||
f"{S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME}",
|
||||
"path/to/{S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME}",
|
||||
)
|
||||
],
|
||||
):
|
||||
assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
|
||||
bulk_checks_metatada,
|
||||
bulk_compliance_frameworks,
|
||||
checks_file,
|
||||
check_list,
|
||||
service_list,
|
||||
severities,
|
||||
compliance_frameworks,
|
||||
categories,
|
||||
self.provider,
|
||||
)
|
||||
|
||||
def test_load_checks_to_execute_with_check_list(self):
|
||||
bulk_checks_metatada = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
|
||||
}
|
||||
bulk_compliance_frameworks = None
|
||||
checks_file = None
|
||||
check_list = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME]
|
||||
service_list = None
|
||||
severities = None
|
||||
compliance_frameworks = None
|
||||
categories = None
|
||||
|
||||
assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
|
||||
bulk_checks_metatada,
|
||||
bulk_compliance_frameworks,
|
||||
checks_file,
|
||||
check_list,
|
||||
service_list,
|
||||
severities,
|
||||
compliance_frameworks,
|
||||
categories,
|
||||
self.provider,
|
||||
)
|
||||
|
||||
def test_load_checks_to_execute_with_severities(self):
|
||||
bulk_checks_metatada = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
|
||||
}
|
||||
bulk_compliance_frameworks = None
|
||||
checks_file = None
|
||||
check_list = []
|
||||
service_list = None
|
||||
severities = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY]
|
||||
compliance_frameworks = None
|
||||
categories = None
|
||||
|
||||
assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
|
||||
bulk_checks_metatada,
|
||||
bulk_compliance_frameworks,
|
||||
checks_file,
|
||||
check_list,
|
||||
service_list,
|
||||
severities,
|
||||
compliance_frameworks,
|
||||
categories,
|
||||
self.provider,
|
||||
)
|
||||
|
||||
def test_load_checks_to_execute_with_severities_and_services(self):
|
||||
bulk_checks_metatada = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
|
||||
}
|
||||
bulk_compliance_frameworks = None
|
||||
checks_file = None
|
||||
check_list = []
|
||||
service_list = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE]
|
||||
severities = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY]
|
||||
compliance_frameworks = None
|
||||
categories = None
|
||||
|
||||
with patch(
|
||||
"prowler.lib.check.checks_loader.recover_checks_from_service",
|
||||
return_value={S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME},
|
||||
):
|
||||
assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
|
||||
bulk_checks_metatada,
|
||||
bulk_compliance_frameworks,
|
||||
checks_file,
|
||||
check_list,
|
||||
service_list,
|
||||
severities,
|
||||
compliance_frameworks,
|
||||
categories,
|
||||
self.provider,
|
||||
)
|
||||
|
||||
def test_load_checks_to_execute_with_severities_and_services_not_within_severity(
|
||||
self,
|
||||
):
|
||||
bulk_checks_metatada = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
|
||||
}
|
||||
bulk_compliance_frameworks = None
|
||||
checks_file = None
|
||||
check_list = []
|
||||
service_list = ["ec2"]
|
||||
severities = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY]
|
||||
compliance_frameworks = None
|
||||
categories = None
|
||||
|
||||
with patch(
|
||||
"prowler.lib.check.checks_loader.recover_checks_from_service",
|
||||
return_value={"ec2_ami_public"},
|
||||
):
|
||||
assert set() == load_checks_to_execute(
|
||||
bulk_checks_metatada,
|
||||
bulk_compliance_frameworks,
|
||||
checks_file,
|
||||
check_list,
|
||||
service_list,
|
||||
severities,
|
||||
compliance_frameworks,
|
||||
categories,
|
||||
self.provider,
|
||||
)
|
||||
|
||||
def test_load_checks_to_execute_with_checks_file(
|
||||
self,
|
||||
):
|
||||
bulk_checks_metatada = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
|
||||
}
|
||||
bulk_compliance_frameworks = None
|
||||
checks_file = "path/to/test_file"
|
||||
check_list = []
|
||||
service_list = []
|
||||
severities = []
|
||||
compliance_frameworks = None
|
||||
categories = None
|
||||
|
||||
with patch(
|
||||
"prowler.lib.check.checks_loader.parse_checks_from_file",
|
||||
return_value={S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME},
|
||||
):
|
||||
assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
|
||||
bulk_checks_metatada,
|
||||
bulk_compliance_frameworks,
|
||||
checks_file,
|
||||
check_list,
|
||||
service_list,
|
||||
severities,
|
||||
compliance_frameworks,
|
||||
categories,
|
||||
self.provider,
|
||||
)
|
||||
|
||||
def test_load_checks_to_execute_with_service_list(
|
||||
self,
|
||||
):
|
||||
bulk_checks_metatada = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
|
||||
}
|
||||
bulk_compliance_frameworks = None
|
||||
checks_file = None
|
||||
check_list = []
|
||||
service_list = [S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME_SERVICE]
|
||||
severities = []
|
||||
compliance_frameworks = None
|
||||
categories = None
|
||||
|
||||
with patch(
|
||||
"prowler.lib.check.checks_loader.recover_checks_from_service",
|
||||
return_value={S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME},
|
||||
):
|
||||
assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
|
||||
bulk_checks_metatada,
|
||||
bulk_compliance_frameworks,
|
||||
checks_file,
|
||||
check_list,
|
||||
service_list,
|
||||
severities,
|
||||
compliance_frameworks,
|
||||
categories,
|
||||
self.provider,
|
||||
)
|
||||
|
||||
def test_load_checks_to_execute_with_compliance_frameworks(
|
||||
self,
|
||||
):
|
||||
bulk_checks_metatada = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
|
||||
}
|
||||
bulk_compliance_frameworks = None
|
||||
checks_file = None
|
||||
check_list = []
|
||||
service_list = []
|
||||
severities = []
|
||||
compliance_frameworks = ["test-compliance-framework"]
|
||||
categories = None
|
||||
|
||||
with patch(
|
||||
"prowler.lib.check.checks_loader.parse_checks_from_compliance_framework",
|
||||
return_value={S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME},
|
||||
):
|
||||
assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
|
||||
bulk_checks_metatada,
|
||||
bulk_compliance_frameworks,
|
||||
checks_file,
|
||||
check_list,
|
||||
service_list,
|
||||
severities,
|
||||
compliance_frameworks,
|
||||
categories,
|
||||
self.provider,
|
||||
)
|
||||
|
||||
def test_load_checks_to_execute_with_categories(
|
||||
self,
|
||||
):
|
||||
bulk_checks_metatada = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata()
|
||||
}
|
||||
bulk_compliance_frameworks = None
|
||||
checks_file = None
|
||||
check_list = []
|
||||
service_list = []
|
||||
severities = []
|
||||
compliance_frameworks = []
|
||||
categories = {"internet-exposed"}
|
||||
|
||||
assert {S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME} == load_checks_to_execute(
|
||||
bulk_checks_metatada,
|
||||
bulk_compliance_frameworks,
|
||||
checks_file,
|
||||
check_list,
|
||||
service_list,
|
||||
severities,
|
||||
compliance_frameworks,
|
||||
categories,
|
||||
self.provider,
|
||||
)
|
||||
|
||||
def test_update_checks_to_execute_with_aliases(self):
|
||||
checks_to_execute = {"renamed_check"}
|
||||
check_aliases = {"renamed_check": "check_name"}
|
||||
assert {"check_name"} == update_checks_to_execute_with_aliases(
|
||||
checks_to_execute, check_aliases
|
||||
)
|
||||
@@ -3,7 +3,7 @@ import pathlib
|
||||
from importlib.machinery import FileFinder
|
||||
from pkgutil import ModuleInfo
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from fixtures.bulk_checks_metadata import test_bulk_checks_metadata
|
||||
from mock import patch
|
||||
from moto import mock_s3
|
||||
@@ -27,8 +27,7 @@ from prowler.providers.aws.aws_provider import (
|
||||
get_checks_from_input_arn,
|
||||
get_regions_from_audit_resources,
|
||||
)
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
from tests.providers.aws.audit_info_utils import set_mocked_aws_audit_info
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_REGION = "us-east-1"
|
||||
@@ -258,36 +257,6 @@ def mock_recover_checks_from_aws_provider_rds_service(*_):
|
||||
|
||||
|
||||
class Test_Check:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
def test_load_check_metadata(self):
|
||||
test_cases = [
|
||||
{
|
||||
@@ -363,7 +332,7 @@ class Test_Check:
|
||||
provider = test["input"]["provider"]
|
||||
assert (
|
||||
parse_checks_from_folder(
|
||||
self.set_mocked_audit_info(), check_folder, provider
|
||||
set_mocked_aws_audit_info(), check_folder, provider
|
||||
)
|
||||
== test["expected"]
|
||||
)
|
||||
|
||||
164
tests/lib/check/custom_checks_metadata_test.py
Normal file
164
tests/lib/check/custom_checks_metadata_test.py
Normal file
@@ -0,0 +1,164 @@
|
||||
import logging
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
from prowler.lib.check.custom_checks_metadata import (
|
||||
parse_custom_checks_metadata_file,
|
||||
update_check_metadata,
|
||||
update_checks_metadata,
|
||||
)
|
||||
from prowler.lib.check.models import (
|
||||
Check_Metadata_Model,
|
||||
Code,
|
||||
Recommendation,
|
||||
Remediation,
|
||||
)
|
||||
|
||||
CUSTOM_CHECKS_METADATA_FIXTURE_FILE = f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/custom_checks_metadata_example.yaml"
|
||||
CUSTOM_CHECKS_METADATA_FIXTURE_FILE_NOT_VALID = f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/custom_checks_metadata_example_not_valid.yaml"
|
||||
|
||||
AWS_PROVIDER = "aws"
|
||||
AZURE_PROVIDER = "azure"
|
||||
GCP_PROVIDER = "gcp"
|
||||
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME = "s3_bucket_level_public_access_block"
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY = "medium"
|
||||
|
||||
|
||||
class TestCustomChecksMetadata:
|
||||
def get_custom_check_metadata(self):
|
||||
return Check_Metadata_Model(
|
||||
Provider="aws",
|
||||
CheckID=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME,
|
||||
CheckTitle="Check S3 Bucket Level Public Access Block.",
|
||||
CheckType=["Data Protection"],
|
||||
CheckAliases=[],
|
||||
ServiceName="s3",
|
||||
SubServiceName="",
|
||||
ResourceIdTemplate="arn:partition:s3:::bucket_name",
|
||||
Severity=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY,
|
||||
ResourceType="AwsS3Bucket",
|
||||
Description="Check S3 Bucket Level Public Access Block.",
|
||||
Risk="Public access policies may be applied to sensitive data buckets.",
|
||||
RelatedUrl="https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html",
|
||||
Remediation=Remediation(
|
||||
Code=Code(
|
||||
NativeIaC="",
|
||||
Terraform="https://docs.bridgecrew.io/docs/bc_aws_s3_20#terraform",
|
||||
CLI="aws s3api put-public-access-block --region <REGION_NAME> --public-access-block-configuration BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true --bucket <BUCKET_NAME>",
|
||||
Other="https://github.com/cloudmatos/matos/tree/master/remediations/aws/s3/s3/block-public-access",
|
||||
),
|
||||
Recommendation=Recommendation(
|
||||
Text="You can enable Public Access Block at the bucket level to prevent the exposure of your data stored in S3.",
|
||||
Url="https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html",
|
||||
),
|
||||
),
|
||||
Categories=[],
|
||||
DependsOn=[],
|
||||
RelatedTo=[],
|
||||
Notes="",
|
||||
Compliance=[],
|
||||
)
|
||||
|
||||
def test_parse_custom_checks_metadata_file_for_aws(self):
|
||||
assert parse_custom_checks_metadata_file(
|
||||
AWS_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE
|
||||
) == {
|
||||
"Checks": {
|
||||
"s3_bucket_level_public_access_block": {"Severity": "high"},
|
||||
"s3_bucket_no_mfa_delete": {"Severity": "high"},
|
||||
}
|
||||
}
|
||||
|
||||
def test_parse_custom_checks_metadata_file_for_azure(self):
|
||||
assert parse_custom_checks_metadata_file(
|
||||
AZURE_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE
|
||||
) == {"Checks": {"sqlserver_auditing_enabled": {"Severity": "high"}}}
|
||||
|
||||
def test_parse_custom_checks_metadata_file_for_gcp(self):
|
||||
assert parse_custom_checks_metadata_file(
|
||||
GCP_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE
|
||||
) == {"Checks": {"bigquery_dataset_cmk_encryption": {"Severity": "low"}}}
|
||||
|
||||
def test_parse_custom_checks_metadata_file_for_aws_validation_error(self, caplog):
|
||||
caplog.set_level(logging.CRITICAL)
|
||||
|
||||
with pytest.raises(SystemExit) as error:
|
||||
parse_custom_checks_metadata_file(
|
||||
AWS_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE_NOT_VALID
|
||||
)
|
||||
assert error.type == SystemExit
|
||||
assert error.value.code == 1
|
||||
assert "'Checks' is a required property" in caplog.text
|
||||
|
||||
def test_update_checks_metadata(self):
|
||||
updated_severity = "high"
|
||||
bulk_checks_metadata = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata(),
|
||||
}
|
||||
custom_checks_metadata = {
|
||||
"Checks": {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: {
|
||||
"Severity": updated_severity
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
bulk_checks_metadata_updated = update_checks_metadata(
|
||||
bulk_checks_metadata, custom_checks_metadata
|
||||
).get(S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME)
|
||||
|
||||
assert bulk_checks_metadata_updated.Severity == updated_severity
|
||||
|
||||
def test_update_checks_metadata_not_present_field(self):
|
||||
bulk_checks_metadata = {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata(),
|
||||
}
|
||||
custom_checks_metadata = {
|
||||
"Checks": {
|
||||
S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: {
|
||||
"RandomField": "random_value"
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
bulk_checks_metadata_updated = update_checks_metadata(
|
||||
bulk_checks_metadata, custom_checks_metadata
|
||||
).get(S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME)
|
||||
|
||||
assert (
|
||||
bulk_checks_metadata_updated.Severity
|
||||
== S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY
|
||||
)
|
||||
|
||||
def test_update_check_metadata(self):
|
||||
updated_severity = "high"
|
||||
custom_checks_metadata = {"Severity": updated_severity}
|
||||
|
||||
check_metadata_updated = update_check_metadata(
|
||||
self.get_custom_check_metadata(), custom_checks_metadata
|
||||
)
|
||||
assert check_metadata_updated.Severity == updated_severity
|
||||
|
||||
def test_update_check_metadata_not_present_field(self):
|
||||
custom_checks_metadata = {"RandomField": "random_value"}
|
||||
|
||||
check_metadata_updated = update_check_metadata(
|
||||
self.get_custom_check_metadata(), custom_checks_metadata
|
||||
)
|
||||
assert (
|
||||
check_metadata_updated.Severity
|
||||
== S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY
|
||||
)
|
||||
|
||||
def test_update_check_metadata_none_custom_metadata(self):
|
||||
custom_checks_metadata = None
|
||||
|
||||
check_metadata_updated = update_check_metadata(
|
||||
self.get_custom_check_metadata(), custom_checks_metadata
|
||||
)
|
||||
assert (
|
||||
check_metadata_updated.Severity
|
||||
== S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY
|
||||
)
|
||||
15
tests/lib/check/fixtures/custom_checks_metadata_example.yaml
Normal file
15
tests/lib/check/fixtures/custom_checks_metadata_example.yaml
Normal file
@@ -0,0 +1,15 @@
|
||||
CustomChecksMetadata:
|
||||
aws:
|
||||
Checks:
|
||||
s3_bucket_level_public_access_block:
|
||||
Severity: high
|
||||
s3_bucket_no_mfa_delete:
|
||||
Severity: high
|
||||
azure:
|
||||
Checks:
|
||||
sqlserver_auditing_enabled:
|
||||
Severity: high
|
||||
gcp:
|
||||
Checks:
|
||||
bigquery_dataset_cmk_encryption:
|
||||
Severity: low
|
||||
@@ -0,0 +1,5 @@
|
||||
CustomChecksMetadata:
|
||||
aws:
|
||||
Check:
|
||||
s3_bucket_level_public_access_block:
|
||||
Severity: high
|
||||
@@ -1,9 +1,11 @@
|
||||
import uuid
|
||||
from argparse import ArgumentTypeError
|
||||
|
||||
import pytest
|
||||
from mock import patch
|
||||
|
||||
from prowler.lib.cli.parser import ProwlerArgumentParser
|
||||
from prowler.providers.azure.lib.arguments.arguments import validate_azure_region
|
||||
|
||||
prowler_command = "prowler"
|
||||
|
||||
@@ -502,6 +504,18 @@ class Test_Parser:
|
||||
assert service_1 in parsed.services
|
||||
assert service_2 in parsed.services
|
||||
|
||||
def test_checks_parser_services_with_severity(self):
|
||||
argument1 = "--services"
|
||||
service_1 = "iam"
|
||||
argument2 = "--severity"
|
||||
severity = "low"
|
||||
command = [prowler_command, argument1, service_1, argument2, severity]
|
||||
parsed = self.parser.parse(command)
|
||||
assert len(parsed.services) == 1
|
||||
assert service_1 in parsed.services
|
||||
assert len(parsed.severity) == 1
|
||||
assert severity in parsed.severity
|
||||
|
||||
def test_checks_parser_informational_severity(self):
|
||||
argument = "--severity"
|
||||
severity = "informational"
|
||||
@@ -1038,6 +1052,14 @@ class Test_Parser:
|
||||
assert parsed.subscription_ids[0] == subscription_1
|
||||
assert parsed.subscription_ids[1] == subscription_2
|
||||
|
||||
def test_parser_azure_region(self):
|
||||
argument = "--azure-region"
|
||||
region = "AzureChinaCloud"
|
||||
command = [prowler_command, "azure", argument, region]
|
||||
parsed = self.parser.parse(command)
|
||||
assert parsed.provider == "azure"
|
||||
assert parsed.azure_region == region
|
||||
|
||||
# Test AWS flags with Azure provider
|
||||
def test_parser_azure_with_aws_flag(self, capsys):
|
||||
command = [prowler_command, "azure", "-p"]
|
||||
@@ -1080,3 +1102,33 @@ class Test_Parser:
|
||||
assert len(parsed.project_ids) == 2
|
||||
assert parsed.project_ids[0] == project_1
|
||||
assert parsed.project_ids[1] == project_2
|
||||
|
||||
def test_validate_azure_region_valid_regions(self):
|
||||
expected_regions = [
|
||||
"AzureChinaCloud",
|
||||
"AzureUSGovernment",
|
||||
"AzureGermanCloud",
|
||||
"AzureCloud",
|
||||
]
|
||||
input_regions = [
|
||||
"AzureChinaCloud",
|
||||
"AzureUSGovernment",
|
||||
"AzureGermanCloud",
|
||||
"AzureCloud",
|
||||
]
|
||||
for region in input_regions:
|
||||
assert validate_azure_region(region) in expected_regions
|
||||
|
||||
def test_validate_azure_region_invalid_regions(self):
|
||||
expected_regions = [
|
||||
"AzureChinaCloud",
|
||||
"AzureUSGovernment",
|
||||
"AzureGermanCloud",
|
||||
"AzureCloud",
|
||||
]
|
||||
invalid_region = "non-valid-region"
|
||||
with pytest.raises(
|
||||
ArgumentTypeError,
|
||||
match=f"Region {invalid_region} not allowed, allowed regions are {' '.join(expected_regions)}",
|
||||
):
|
||||
validate_azure_region(invalid_region)
|
||||
|
||||
@@ -11,6 +11,7 @@ from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.azure.lib.audit_info.models import (
|
||||
Azure_Audit_Info,
|
||||
Azure_Identity_Info,
|
||||
Azure_Region_Config,
|
||||
)
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
|
||||
@@ -76,6 +77,7 @@ class Test_Slack_Integration:
|
||||
audit_resources=None,
|
||||
audit_metadata=None,
|
||||
audit_config=None,
|
||||
azure_region_config=Azure_Region_Config(),
|
||||
)
|
||||
assert create_message_identity("aws", aws_audit_info) == (
|
||||
f"AWS Account *{aws_audit_info.audited_account}*",
|
||||
|
||||
@@ -5,9 +5,10 @@ from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION_US_EAST_1 = "us-east-1"
|
||||
AWS_REGION_EU_WEST_1 = "eu-west-1"
|
||||
AWS_PARTITION = "aws"
|
||||
AWS_REGION_EU_WEST_2 = "eu-west-2"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
AWS_COMMERCIAL_PARTITION = "aws"
|
||||
|
||||
|
||||
# Mocked AWS Audit Info
|
||||
@@ -15,6 +16,8 @@ def set_mocked_aws_audit_info(
|
||||
audited_regions: [str] = [],
|
||||
audited_account: str = AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn: str = AWS_ACCOUNT_ARN,
|
||||
expected_checks: [str] = [],
|
||||
audit_config: dict = {},
|
||||
):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
@@ -26,8 +29,9 @@ def set_mocked_aws_audit_info(
|
||||
audited_account=audited_account,
|
||||
audited_account_arn=audited_account_arn,
|
||||
audited_user_id=None,
|
||||
audited_partition=AWS_PARTITION,
|
||||
audited_partition=AWS_COMMERCIAL_PARTITION,
|
||||
audited_identity_arn=None,
|
||||
audit_config=audit_config,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
@@ -38,9 +42,10 @@ def set_mocked_aws_audit_info(
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
expected_checks=expected_checks,
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
enabled_regions=set(audited_regions),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
@@ -14,9 +14,10 @@ from prowler.providers.aws.aws_provider import (
|
||||
)
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
ACCOUNT_ID = 123456789012
|
||||
AWS_REGION = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
|
||||
class Test_AWS_Provider:
|
||||
@@ -26,7 +27,7 @@ class Test_AWS_Provider:
|
||||
audited_regions = ["eu-west-1"]
|
||||
# sessionName = "ProwlerAsessmentSession"
|
||||
# Boto 3 client to create our user
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION)
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION_EU_WEST_1)
|
||||
# IAM user
|
||||
iam_user = iam_client.create_user(UserName="test-user")["User"]
|
||||
access_key = iam_client.create_access_key(UserName=iam_user["UserName"])[
|
||||
@@ -38,7 +39,7 @@ class Test_AWS_Provider:
|
||||
session = boto3.session.Session(
|
||||
aws_access_key_id=access_key_id,
|
||||
aws_secret_access_key=secret_access_key,
|
||||
region_name=AWS_REGION,
|
||||
region_name=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
# Fulfil the input session object for Prowler
|
||||
@@ -75,7 +76,10 @@ class Test_AWS_Provider:
|
||||
# Call assume_role
|
||||
with patch(
|
||||
"prowler.providers.aws.aws_provider.input_role_mfa_token_and_code",
|
||||
return_value=(f"arn:aws:iam::{ACCOUNT_ID}:mfa/test-role-mfa", "111111"),
|
||||
return_value=(
|
||||
f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:mfa/test-role-mfa",
|
||||
"111111",
|
||||
),
|
||||
):
|
||||
aws_provider = AWS_Provider(audit_info)
|
||||
assert aws_provider.aws_session.region_name is None
|
||||
@@ -91,7 +95,7 @@ class Test_AWS_Provider:
|
||||
def test_aws_provider_user_with_mfa(self):
|
||||
audited_regions = "eu-west-1"
|
||||
# Boto 3 client to create our user
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION)
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION_EU_WEST_1)
|
||||
# IAM user
|
||||
iam_user = iam_client.create_user(UserName="test-user")["User"]
|
||||
access_key = iam_client.create_access_key(UserName=iam_user["UserName"])[
|
||||
@@ -103,7 +107,7 @@ class Test_AWS_Provider:
|
||||
session = boto3.session.Session(
|
||||
aws_access_key_id=access_key_id,
|
||||
aws_secret_access_key=secret_access_key,
|
||||
region_name=AWS_REGION,
|
||||
region_name=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
# Fulfil the input session object for Prowler
|
||||
@@ -117,7 +121,7 @@ class Test_AWS_Provider:
|
||||
audited_identity_arn=None,
|
||||
audited_user_id=None,
|
||||
profile=None,
|
||||
profile_region=AWS_REGION,
|
||||
profile_region=AWS_REGION_EU_WEST_1,
|
||||
credentials=None,
|
||||
assumed_role_info=AWS_Assume_Role(
|
||||
role_arn=None,
|
||||
@@ -134,7 +138,10 @@ class Test_AWS_Provider:
|
||||
# # Call assume_role
|
||||
with patch(
|
||||
"prowler.providers.aws.aws_provider.input_role_mfa_token_and_code",
|
||||
return_value=(f"arn:aws:iam::{ACCOUNT_ID}:mfa/test-role-mfa", "111111"),
|
||||
return_value=(
|
||||
f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:mfa/test-role-mfa",
|
||||
"111111",
|
||||
),
|
||||
):
|
||||
aws_provider = AWS_Provider(audit_info)
|
||||
assert aws_provider.aws_session.region_name is None
|
||||
@@ -150,12 +157,12 @@ class Test_AWS_Provider:
|
||||
def test_aws_provider_assume_role_with_mfa(self):
|
||||
# Variables
|
||||
role_name = "test-role"
|
||||
role_arn = f"arn:aws:iam::{ACCOUNT_ID}:role/{role_name}"
|
||||
role_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:role/{role_name}"
|
||||
session_duration_seconds = 900
|
||||
audited_regions = ["eu-west-1"]
|
||||
sessionName = "ProwlerAsessmentSession"
|
||||
# Boto 3 client to create our user
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION)
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION_EU_WEST_1)
|
||||
# IAM user
|
||||
iam_user = iam_client.create_user(UserName="test-user")["User"]
|
||||
access_key = iam_client.create_access_key(UserName=iam_user["UserName"])[
|
||||
@@ -167,7 +174,7 @@ class Test_AWS_Provider:
|
||||
session = boto3.session.Session(
|
||||
aws_access_key_id=access_key_id,
|
||||
aws_secret_access_key=secret_access_key,
|
||||
region_name=AWS_REGION,
|
||||
region_name=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
# Fulfil the input session object for Prowler
|
||||
@@ -206,7 +213,10 @@ class Test_AWS_Provider:
|
||||
# Patch MFA
|
||||
with patch(
|
||||
"prowler.providers.aws.aws_provider.input_role_mfa_token_and_code",
|
||||
return_value=(f"arn:aws:iam::{ACCOUNT_ID}:mfa/test-role-mfa", "111111"),
|
||||
return_value=(
|
||||
f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:mfa/test-role-mfa",
|
||||
"111111",
|
||||
),
|
||||
):
|
||||
assume_role_response = assume_role(
|
||||
aws_provider.aws_session, aws_provider.role_info
|
||||
@@ -225,7 +235,7 @@ class Test_AWS_Provider:
|
||||
# Assumed Role
|
||||
assert (
|
||||
assume_role_response["AssumedRoleUser"]["Arn"]
|
||||
== f"arn:aws:sts::{ACCOUNT_ID}:assumed-role/{role_name}/{sessionName}"
|
||||
== f"arn:aws:sts::{AWS_ACCOUNT_NUMBER}:assumed-role/{role_name}/{sessionName}"
|
||||
)
|
||||
|
||||
# AssumedRoleUser
|
||||
@@ -245,12 +255,12 @@ class Test_AWS_Provider:
|
||||
def test_aws_provider_assume_role_without_mfa(self):
|
||||
# Variables
|
||||
role_name = "test-role"
|
||||
role_arn = f"arn:aws:iam::{ACCOUNT_ID}:role/{role_name}"
|
||||
role_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:role/{role_name}"
|
||||
session_duration_seconds = 900
|
||||
audited_regions = "eu-west-1"
|
||||
sessionName = "ProwlerAsessmentSession"
|
||||
# Boto 3 client to create our user
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION)
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION_EU_WEST_1)
|
||||
# IAM user
|
||||
iam_user = iam_client.create_user(UserName="test-user")["User"]
|
||||
access_key = iam_client.create_access_key(UserName=iam_user["UserName"])[
|
||||
@@ -262,7 +272,7 @@ class Test_AWS_Provider:
|
||||
session = boto3.session.Session(
|
||||
aws_access_key_id=access_key_id,
|
||||
aws_secret_access_key=secret_access_key,
|
||||
region_name=AWS_REGION,
|
||||
region_name=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
# Fulfil the input session object for Prowler
|
||||
@@ -315,7 +325,7 @@ class Test_AWS_Provider:
|
||||
# Assumed Role
|
||||
assert (
|
||||
assume_role_response["AssumedRoleUser"]["Arn"]
|
||||
== f"arn:aws:sts::{ACCOUNT_ID}:assumed-role/{role_name}/{sessionName}"
|
||||
== f"arn:aws:sts::{AWS_ACCOUNT_NUMBER}:assumed-role/{role_name}/{sessionName}"
|
||||
)
|
||||
|
||||
# AssumedRoleUser
|
||||
@@ -335,14 +345,14 @@ class Test_AWS_Provider:
|
||||
def test_assume_role_with_sts_endpoint_region(self):
|
||||
# Variables
|
||||
role_name = "test-role"
|
||||
role_arn = f"arn:aws:iam::{ACCOUNT_ID}:role/{role_name}"
|
||||
role_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:role/{role_name}"
|
||||
session_duration_seconds = 900
|
||||
aws_region = "eu-west-1"
|
||||
sts_endpoint_region = aws_region
|
||||
audited_regions = [aws_region]
|
||||
sessionName = "ProwlerAsessmentSession"
|
||||
# Boto 3 client to create our user
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION)
|
||||
iam_client = boto3.client("iam", region_name=AWS_REGION_EU_WEST_1)
|
||||
# IAM user
|
||||
iam_user = iam_client.create_user(UserName="test-user")["User"]
|
||||
access_key = iam_client.create_access_key(UserName=iam_user["UserName"])[
|
||||
@@ -354,7 +364,7 @@ class Test_AWS_Provider:
|
||||
session = boto3.session.Session(
|
||||
aws_access_key_id=access_key_id,
|
||||
aws_secret_access_key=secret_access_key,
|
||||
region_name=AWS_REGION,
|
||||
region_name=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
# Fulfil the input session object for Prowler
|
||||
@@ -407,7 +417,7 @@ class Test_AWS_Provider:
|
||||
# Assumed Role
|
||||
assert (
|
||||
assume_role_response["AssumedRoleUser"]["Arn"]
|
||||
== f"arn:aws:sts::{ACCOUNT_ID}:assumed-role/{role_name}/{sessionName}"
|
||||
== f"arn:aws:sts::{AWS_ACCOUNT_NUMBER}:assumed-role/{role_name}/{sessionName}"
|
||||
)
|
||||
|
||||
# AssumedRoleUser
|
||||
@@ -425,9 +435,9 @@ class Test_AWS_Provider:
|
||||
def test_generate_regional_clients(self):
|
||||
# New Boto3 session with the previously create user
|
||||
session = boto3.session.Session(
|
||||
region_name=AWS_REGION,
|
||||
region_name=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
audited_regions = ["eu-west-1", AWS_REGION]
|
||||
audited_regions = ["eu-west-1", AWS_REGION_EU_WEST_1]
|
||||
# Fulfil the input session object for Prowler
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
@@ -462,10 +472,10 @@ class Test_AWS_Provider:
|
||||
def test_generate_regional_clients_global_service(self):
|
||||
# New Boto3 session with the previously create user
|
||||
session = boto3.session.Session(
|
||||
region_name=AWS_REGION,
|
||||
region_name=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
audited_regions = ["eu-west-1", AWS_REGION]
|
||||
profile_region = AWS_REGION
|
||||
audited_regions = ["eu-west-1", AWS_REGION_EU_WEST_1]
|
||||
profile_region = AWS_REGION_EU_WEST_1
|
||||
# Fulfil the input session object for Prowler
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
@@ -500,7 +510,7 @@ class Test_AWS_Provider:
|
||||
def test_generate_regional_clients_cn_partition(self):
|
||||
# New Boto3 session with the previously create user
|
||||
session = boto3.session.Session(
|
||||
region_name=AWS_REGION,
|
||||
region_name=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
audited_regions = ["cn-northwest-1", "cn-north-1"]
|
||||
# Fulfil the input session object for Prowler
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import yaml
|
||||
from boto3 import resource, session
|
||||
from boto3 import resource
|
||||
from mock import MagicMock
|
||||
from moto import mock_dynamodb, mock_s3
|
||||
|
||||
@@ -8,55 +8,26 @@ from prowler.providers.aws.lib.allowlist.allowlist import (
|
||||
is_allowlisted,
|
||||
is_allowlisted_in_check,
|
||||
is_allowlisted_in_region,
|
||||
is_allowlisted_in_resource,
|
||||
is_allowlisted_in_tags,
|
||||
is_excepted,
|
||||
parse_allowlist_file,
|
||||
)
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_REGION = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
class Test_Allowlist:
|
||||
# Mocked Audit Info
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
# Test S3 allowlist
|
||||
@mock_s3
|
||||
def test_s3_allowlist(self):
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
# Create bucket and upload allowlist yaml
|
||||
s3_resource = resource("s3", region_name=AWS_REGION)
|
||||
s3_resource = resource("s3", region_name=AWS_REGION_US_EAST_1)
|
||||
s3_resource.create_bucket(Bucket="test-allowlist")
|
||||
s3_resource.Object("test-allowlist", "allowlist.yaml").put(
|
||||
Body=open(
|
||||
@@ -73,9 +44,9 @@ class Test_Allowlist:
|
||||
# Test DynamoDB allowlist
|
||||
@mock_dynamodb
|
||||
def test_dynamo_allowlist(self):
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
# Create table and put item
|
||||
dynamodb_resource = resource("dynamodb", region_name=AWS_REGION)
|
||||
dynamodb_resource = resource("dynamodb", region_name=AWS_REGION_US_EAST_1)
|
||||
table_name = "test-allowlist"
|
||||
params = {
|
||||
"TableName": table_name,
|
||||
@@ -97,7 +68,7 @@ class Test_Allowlist:
|
||||
Item={
|
||||
"Accounts": "*",
|
||||
"Checks": "iam_user_hardware_mfa_enabled",
|
||||
"Regions": ["eu-west-1", AWS_REGION],
|
||||
"Regions": [AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1],
|
||||
"Resources": ["keyword"],
|
||||
}
|
||||
)
|
||||
@@ -107,7 +78,7 @@ class Test_Allowlist:
|
||||
in parse_allowlist_file(
|
||||
audit_info,
|
||||
"arn:aws:dynamodb:"
|
||||
+ AWS_REGION
|
||||
+ AWS_REGION_US_EAST_1
|
||||
+ ":"
|
||||
+ str(AWS_ACCOUNT_NUMBER)
|
||||
+ ":table/"
|
||||
@@ -117,9 +88,9 @@ class Test_Allowlist:
|
||||
|
||||
@mock_dynamodb
|
||||
def test_dynamo_allowlist_with_tags(self):
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
# Create table and put item
|
||||
dynamodb_resource = resource("dynamodb", region_name=AWS_REGION)
|
||||
dynamodb_resource = resource("dynamodb", region_name=AWS_REGION_US_EAST_1)
|
||||
table_name = "test-allowlist"
|
||||
params = {
|
||||
"TableName": table_name,
|
||||
@@ -152,7 +123,7 @@ class Test_Allowlist:
|
||||
in parse_allowlist_file(
|
||||
audit_info,
|
||||
"arn:aws:dynamodb:"
|
||||
+ AWS_REGION
|
||||
+ AWS_REGION_US_EAST_1
|
||||
+ ":"
|
||||
+ str(AWS_ACCOUNT_NUMBER)
|
||||
+ ":table/"
|
||||
@@ -169,7 +140,7 @@ class Test_Allowlist:
|
||||
"*": {
|
||||
"Checks": {
|
||||
"check_test": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": ["prowler", "^test", "prowler-pro"],
|
||||
}
|
||||
}
|
||||
@@ -183,7 +154,7 @@ class Test_Allowlist:
|
||||
finding_1.check_metadata = MagicMock
|
||||
finding_1.check_metadata.CheckID = "check_test"
|
||||
finding_1.status = "FAIL"
|
||||
finding_1.region = AWS_REGION
|
||||
finding_1.region = AWS_REGION_US_EAST_1
|
||||
finding_1.resource_id = "prowler"
|
||||
finding_1.resource_tags = []
|
||||
|
||||
@@ -195,6 +166,66 @@ class Test_Allowlist:
|
||||
assert len(allowlisted_findings) == 1
|
||||
assert allowlisted_findings[0].status == "WARNING"
|
||||
|
||||
def test_is_allowlisted_with_everything_excepted(self):
|
||||
allowlist = {
|
||||
"Accounts": {
|
||||
"*": {
|
||||
"Checks": {
|
||||
"athena_*": {
|
||||
"Regions": "*",
|
||||
"Resources": "*",
|
||||
"Tags": "*",
|
||||
"Exceptions": {
|
||||
"Accounts": ["*"],
|
||||
"Regions": ["*"],
|
||||
"Resources": ["*"],
|
||||
"Tags": ["*"],
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert not is_allowlisted(
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"athena_1",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
def test_is_allowlisted_with_default_allowlist(self):
|
||||
allowlist = {
|
||||
"Accounts": {
|
||||
"*": {
|
||||
"Checks": {
|
||||
"*": {
|
||||
"Tags": ["*"],
|
||||
"Regions": ["*"],
|
||||
"Resources": ["*"],
|
||||
"Exceptions": {
|
||||
"Tags": [],
|
||||
"Regions": [],
|
||||
"Accounts": [],
|
||||
"Resources": [],
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"athena_1",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
def test_is_allowlisted(self):
|
||||
# Allowlist example
|
||||
allowlist = {
|
||||
@@ -202,7 +233,7 @@ class Test_Allowlist:
|
||||
"*": {
|
||||
"Checks": {
|
||||
"check_test": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": ["prowler", "^test", "prowler-pro"],
|
||||
}
|
||||
}
|
||||
@@ -211,22 +242,37 @@ class Test_Allowlist:
|
||||
}
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", ""
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler-test",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"test-prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler-pro-test",
|
||||
"",
|
||||
)
|
||||
@@ -244,7 +290,7 @@ class Test_Allowlist:
|
||||
"*": {
|
||||
"Checks": {
|
||||
"check_test": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": [".*"],
|
||||
}
|
||||
}
|
||||
@@ -253,15 +299,30 @@ class Test_Allowlist:
|
||||
}
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler-test",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"test-prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert not (
|
||||
@@ -277,7 +338,7 @@ class Test_Allowlist:
|
||||
"*": {
|
||||
"Checks": {
|
||||
"check_test": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": ["*"],
|
||||
}
|
||||
}
|
||||
@@ -286,15 +347,30 @@ class Test_Allowlist:
|
||||
}
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler-test",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"test-prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert not (
|
||||
@@ -310,7 +386,7 @@ class Test_Allowlist:
|
||||
"*": {
|
||||
"Checks": {
|
||||
"check_test_2": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": ["*"],
|
||||
}
|
||||
}
|
||||
@@ -318,7 +394,7 @@ class Test_Allowlist:
|
||||
AWS_ACCOUNT_NUMBER: {
|
||||
"Checks": {
|
||||
"check_test": {
|
||||
"Regions": [AWS_REGION],
|
||||
"Regions": [AWS_REGION_US_EAST_1],
|
||||
"Resources": ["*"],
|
||||
}
|
||||
}
|
||||
@@ -327,19 +403,39 @@ class Test_Allowlist:
|
||||
}
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test_2", AWS_REGION, "prowler", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test_2",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler-test",
|
||||
"",
|
||||
)
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"test-prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert not (
|
||||
@@ -354,7 +450,7 @@ class Test_Allowlist:
|
||||
AWS_ACCOUNT_NUMBER: {
|
||||
"Checks": {
|
||||
"check_test": {
|
||||
"Regions": [AWS_REGION],
|
||||
"Regions": [AWS_REGION_US_EAST_1],
|
||||
"Resources": ["prowler"],
|
||||
}
|
||||
}
|
||||
@@ -363,7 +459,12 @@ class Test_Allowlist:
|
||||
}
|
||||
|
||||
assert is_allowlisted(
|
||||
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
|
||||
assert not (
|
||||
@@ -373,47 +474,27 @@ class Test_Allowlist:
|
||||
)
|
||||
|
||||
def test_is_allowlisted_in_region(self):
|
||||
# Allowlist example
|
||||
allowlisted_regions = [AWS_REGION, "eu-west-1"]
|
||||
allowlisted_resources = ["*"]
|
||||
allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1]
|
||||
finding_region = AWS_REGION_US_EAST_1
|
||||
|
||||
assert is_allowlisted_in_region(
|
||||
allowlisted_regions, allowlisted_resources, None, AWS_REGION, "prowler", ""
|
||||
)
|
||||
assert is_allowlisted_in_region(allowlisted_regions, finding_region)
|
||||
|
||||
assert is_allowlisted_in_region(
|
||||
allowlisted_regions,
|
||||
allowlisted_resources,
|
||||
None,
|
||||
AWS_REGION,
|
||||
"prowler-test",
|
||||
"",
|
||||
)
|
||||
def test_is_allowlisted_in_region_wildcard(self):
|
||||
allowlisted_regions = ["*"]
|
||||
finding_region = AWS_REGION_US_EAST_1
|
||||
|
||||
assert is_allowlisted_in_region(
|
||||
allowlisted_regions,
|
||||
allowlisted_resources,
|
||||
None,
|
||||
AWS_REGION,
|
||||
"test-prowler",
|
||||
"",
|
||||
)
|
||||
assert is_allowlisted_in_region(allowlisted_regions, finding_region)
|
||||
|
||||
assert not (
|
||||
is_allowlisted_in_region(
|
||||
allowlisted_regions,
|
||||
allowlisted_resources,
|
||||
None,
|
||||
"us-east-2",
|
||||
"test",
|
||||
"",
|
||||
)
|
||||
)
|
||||
def test_is_not_allowlisted_in_region(self):
|
||||
allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1]
|
||||
finding_region = "eu-west-2"
|
||||
|
||||
assert not is_allowlisted_in_region(allowlisted_regions, finding_region)
|
||||
|
||||
def test_is_allowlisted_in_check(self):
|
||||
allowlisted_checks = {
|
||||
"check_test": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": ["*"],
|
||||
}
|
||||
}
|
||||
@@ -421,9 +502,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -431,9 +511,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler-test",
|
||||
"",
|
||||
)
|
||||
@@ -441,9 +520,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"test-prowler",
|
||||
"",
|
||||
)
|
||||
@@ -452,7 +530,6 @@ class Test_Allowlist:
|
||||
is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
"us-east-2",
|
||||
"test",
|
||||
@@ -464,7 +541,7 @@ class Test_Allowlist:
|
||||
# Allowlist example
|
||||
allowlisted_checks = {
|
||||
"s3_*": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": ["*"],
|
||||
}
|
||||
}
|
||||
@@ -472,9 +549,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"s3_bucket_public_access",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -482,9 +558,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"s3_bucket_no_mfa_delete",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler-test",
|
||||
"",
|
||||
)
|
||||
@@ -492,9 +567,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"s3_bucket_policy_public_write_access",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"test-prowler",
|
||||
"",
|
||||
)
|
||||
@@ -503,9 +577,8 @@ class Test_Allowlist:
|
||||
is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"iam_user_hardware_mfa_enabled",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"test",
|
||||
"",
|
||||
)
|
||||
@@ -514,7 +587,7 @@ class Test_Allowlist:
|
||||
def test_is_allowlisted_lambda_generic_check(self):
|
||||
allowlisted_checks = {
|
||||
"lambda_*": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": ["*"],
|
||||
}
|
||||
}
|
||||
@@ -522,9 +595,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -532,9 +604,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"awslambda_function_no_secrets_in_code",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -542,9 +613,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"awslambda_function_no_secrets_in_variables",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -552,9 +622,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"awslambda_function_not_publicly_accessible",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -562,9 +631,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"awslambda_function_url_cors_policy",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -572,9 +640,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"awslambda_function_url_public",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -582,9 +649,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"awslambda_function_using_supported_runtimes",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -592,7 +658,7 @@ class Test_Allowlist:
|
||||
def test_is_allowlisted_lambda_concrete_check(self):
|
||||
allowlisted_checks = {
|
||||
"lambda_function_no_secrets_in_variables": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": ["*"],
|
||||
}
|
||||
}
|
||||
@@ -600,9 +666,8 @@ class Test_Allowlist:
|
||||
assert is_allowlisted_in_check(
|
||||
allowlisted_checks,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"awslambda_function_no_secrets_in_variables",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"",
|
||||
)
|
||||
@@ -614,7 +679,7 @@ class Test_Allowlist:
|
||||
"*": {
|
||||
"Checks": {
|
||||
"check_test": {
|
||||
"Regions": [AWS_REGION, "eu-west-1"],
|
||||
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
|
||||
"Resources": ["*"],
|
||||
"Tags": ["environment=dev", "project=.*"],
|
||||
}
|
||||
@@ -627,7 +692,7 @@ class Test_Allowlist:
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler",
|
||||
"environment=dev",
|
||||
)
|
||||
@@ -636,7 +701,7 @@ class Test_Allowlist:
|
||||
allowlist,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
"check_test",
|
||||
AWS_REGION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
"prowler-test",
|
||||
"environment=dev | project=prowler",
|
||||
)
|
||||
@@ -654,56 +719,45 @@ class Test_Allowlist:
|
||||
|
||||
def test_is_allowlisted_in_tags(self):
|
||||
allowlist_tags = ["environment=dev", "project=prowler"]
|
||||
allowlist_resource = "*"
|
||||
|
||||
assert is_allowlisted_in_tags(allowlist_tags, "environment=dev")
|
||||
|
||||
assert is_allowlisted_in_tags(
|
||||
allowlist_tags,
|
||||
"*",
|
||||
"prowler",
|
||||
"environment=dev",
|
||||
)
|
||||
|
||||
assert is_allowlisted_in_tags(
|
||||
allowlist_tags,
|
||||
allowlist_resource,
|
||||
"prowler-test",
|
||||
"environment=dev | project=prowler",
|
||||
)
|
||||
|
||||
assert not (
|
||||
is_allowlisted_in_tags(
|
||||
allowlist_tags,
|
||||
allowlist_resource,
|
||||
"test",
|
||||
"environment=pro",
|
||||
)
|
||||
)
|
||||
|
||||
def test_is_allowlisted_in_tags_regex(self):
|
||||
allowlist_tags = ["environment=(dev|test)", ".*=prowler"]
|
||||
allowlist_resource = "*"
|
||||
|
||||
assert is_allowlisted_in_tags(
|
||||
allowlist_tags,
|
||||
allowlist_resource,
|
||||
"prowler-test",
|
||||
"environment=test | proj=prowler",
|
||||
)
|
||||
|
||||
assert is_allowlisted_in_tags(
|
||||
allowlist_tags,
|
||||
allowlist_resource,
|
||||
"prowler-test",
|
||||
"env=prod | project=prowler",
|
||||
)
|
||||
|
||||
assert not is_allowlisted_in_tags(
|
||||
allowlist_tags,
|
||||
allowlist_resource,
|
||||
"prowler-test",
|
||||
"environment=prod | project=myproj",
|
||||
)
|
||||
|
||||
def test_is_allowlisted_in_tags_with_no_tags_in_finding(self):
|
||||
allowlist_tags = ["environment=(dev|test)", ".*=prowler"]
|
||||
finding_tags = ""
|
||||
|
||||
assert not is_allowlisted_in_tags(allowlist_tags, finding_tags)
|
||||
|
||||
def test_is_excepted(self):
|
||||
# Allowlist example
|
||||
exceptions = {
|
||||
@@ -737,6 +791,28 @@ class Test_Allowlist:
|
||||
"environment=test",
|
||||
)
|
||||
|
||||
def test_is_excepted_all_wildcard(self):
|
||||
exceptions = {
|
||||
"Accounts": ["*"],
|
||||
"Regions": ["*"],
|
||||
"Resources": ["*"],
|
||||
"Tags": ["*"],
|
||||
}
|
||||
assert is_excepted(
|
||||
exceptions, AWS_ACCOUNT_NUMBER, "eu-south-2", "test", "environment=test"
|
||||
)
|
||||
assert not is_excepted(
|
||||
exceptions, AWS_ACCOUNT_NUMBER, "eu-south-2", "test", None
|
||||
)
|
||||
|
||||
def test_is_not_excepted(self):
|
||||
exceptions = {
|
||||
"Accounts": [AWS_ACCOUNT_NUMBER],
|
||||
"Regions": ["eu-central-1", "eu-south-3"],
|
||||
"Resources": ["test"],
|
||||
"Tags": ["environment=test", "project=.*"],
|
||||
}
|
||||
|
||||
assert not is_excepted(
|
||||
exceptions,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
@@ -760,3 +836,11 @@ class Test_Allowlist:
|
||||
"test",
|
||||
"environment=pro",
|
||||
)
|
||||
|
||||
def test_is_allowlisted_in_resource(self):
|
||||
allowlist_resources = ["prowler", "^test", "prowler-pro"]
|
||||
|
||||
assert is_allowlisted_in_resource(allowlist_resources, "prowler")
|
||||
assert is_allowlisted_in_resource(allowlist_resources, "prowler-test")
|
||||
assert is_allowlisted_in_resource(allowlist_resources, "test-prowler")
|
||||
assert not is_allowlisted_in_resource(allowlist_resources, "random")
|
||||
|
||||
@@ -287,7 +287,7 @@ class Test_ARN_Parsing:
|
||||
assert error._excinfo[0] == RoleArnParsingServiceNotIAMnorSTS
|
||||
|
||||
def test_iam_credentials_arn_parsing_raising_RoleArnParsingInvalidAccountID(self):
|
||||
input_arn = "arn:aws:iam::AWS_ACCOUNT_ID:user/prowler"
|
||||
input_arn = "arn:aws:iam::AWS_ACCOUNT_NUMBER:user/prowler"
|
||||
with raises(RoleArnParsingInvalidAccountID) as error:
|
||||
parse_iam_credentials_arn(input_arn)
|
||||
|
||||
|
||||
@@ -1282,3 +1282,75 @@ class Test_policy_condition_parser:
|
||||
assert not is_account_only_allowed_in_condition(
|
||||
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
|
||||
)
|
||||
|
||||
def test_condition_parser_two_lists_unrestrictive(self):
|
||||
condition_statement = {
|
||||
"StringLike": {
|
||||
"AWS:ResourceAccount": [
|
||||
TRUSTED_AWS_ACCOUNT_NUMBER,
|
||||
NON_TRUSTED_AWS_ACCOUNT_NUMBER,
|
||||
]
|
||||
},
|
||||
"ArnLike": {
|
||||
"AWS:SourceArn": [
|
||||
f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
|
||||
f"arn:aws:cloudtrail:*:{NON_TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
|
||||
]
|
||||
},
|
||||
}
|
||||
assert not is_account_only_allowed_in_condition(
|
||||
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
|
||||
)
|
||||
|
||||
def test_condition_parser_two_lists_both_restrictive(self):
|
||||
condition_statement = {
|
||||
"StringLike": {
|
||||
"AWS:ResourceAccount": [
|
||||
TRUSTED_AWS_ACCOUNT_NUMBER,
|
||||
]
|
||||
},
|
||||
"ArnLike": {
|
||||
"AWS:SourceArn": [
|
||||
f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
|
||||
]
|
||||
},
|
||||
}
|
||||
assert is_account_only_allowed_in_condition(
|
||||
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
|
||||
)
|
||||
|
||||
def test_condition_parser_two_lists_first_restrictive(self):
|
||||
condition_statement = {
|
||||
"StringLike": {
|
||||
"AWS:ResourceAccount": [
|
||||
TRUSTED_AWS_ACCOUNT_NUMBER,
|
||||
]
|
||||
},
|
||||
"ArnLike": {
|
||||
"AWS:SourceArn": [
|
||||
f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
|
||||
f"arn:aws:cloudtrail:*:{NON_TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
|
||||
]
|
||||
},
|
||||
}
|
||||
assert is_account_only_allowed_in_condition(
|
||||
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
|
||||
)
|
||||
|
||||
def test_condition_parser_two_lists_second_restrictive(self):
|
||||
condition_statement = {
|
||||
"StringLike": {
|
||||
"AWS:ResourceAccount": [
|
||||
TRUSTED_AWS_ACCOUNT_NUMBER,
|
||||
NON_TRUSTED_AWS_ACCOUNT_NUMBER,
|
||||
]
|
||||
},
|
||||
"ArnLike": {
|
||||
"AWS:SourceArn": [
|
||||
f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
|
||||
]
|
||||
},
|
||||
}
|
||||
assert is_account_only_allowed_in_condition(
|
||||
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
|
||||
)
|
||||
|
||||
@@ -7,9 +7,10 @@ from moto import mock_s3
|
||||
|
||||
from prowler.config.config import csv_file_suffix
|
||||
from prowler.providers.aws.lib.s3.s3 import get_s3_object_path, send_to_s3_bucket
|
||||
|
||||
AWS_ACCOUNT_ID = "123456789012"
|
||||
AWS_REGION = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
ACTUAL_DIRECTORY = Path(path.dirname(path.realpath(__file__)))
|
||||
FIXTURES_DIR_NAME = "fixtures"
|
||||
@@ -27,8 +28,10 @@ class TestS3:
|
||||
audit_info = MagicMock()
|
||||
|
||||
# Create mock session
|
||||
audit_info.audit_session = boto3.session.Session(region_name=AWS_REGION)
|
||||
audit_info.audited_account = AWS_ACCOUNT_ID
|
||||
audit_info.audit_session = boto3.session.Session(
|
||||
region_name=AWS_REGION_EU_WEST_1
|
||||
)
|
||||
audit_info.audited_account = AWS_ACCOUNT_NUMBER
|
||||
|
||||
# Create mock bucket
|
||||
client = audit_info.audit_session.client("s3")
|
||||
@@ -66,8 +69,10 @@ class TestS3:
|
||||
audit_info = MagicMock()
|
||||
|
||||
# Create mock session
|
||||
audit_info.audit_session = boto3.session.Session(region_name=AWS_REGION)
|
||||
audit_info.audited_account = AWS_ACCOUNT_ID
|
||||
audit_info.audit_session = boto3.session.Session(
|
||||
region_name=AWS_REGION_EU_WEST_1
|
||||
)
|
||||
audit_info.audited_account = AWS_ACCOUNT_NUMBER
|
||||
|
||||
# Create mock bucket
|
||||
client = audit_info.audit_session.client("s3")
|
||||
|
||||
@@ -6,7 +6,6 @@ from mock import MagicMock, patch
|
||||
|
||||
from prowler.config.config import prowler_version, timestamp_utc
|
||||
from prowler.lib.check.models import Check_Report, load_check_metadata
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
|
||||
# from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.lib.security_hub.security_hub import (
|
||||
@@ -14,11 +13,13 @@ from prowler.providers.aws.lib.security_hub.security_hub import (
|
||||
prepare_security_hub_findings,
|
||||
verify_security_hub_integration_enabled_per_region,
|
||||
)
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_ID = "123456789012"
|
||||
AWS_REGION_1 = "eu-west-1"
|
||||
AWS_REGION_2 = "eu-west-2"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_COMMERCIAL_PARTITION,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_EU_WEST_2,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
# Mocking Security Hub Get Findings
|
||||
make_api_call = botocore.client.BaseClient._make_api_call
|
||||
@@ -32,7 +33,7 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
}
|
||||
if operation_name == "DescribeHub":
|
||||
return {
|
||||
"HubArn": f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:hub/default",
|
||||
"HubArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:hub/default",
|
||||
"SubscribedAt": "2023-02-07T09:45:43.742Z",
|
||||
"AutoEnableControls": True,
|
||||
"ControlFindingGenerator": "STANDARD_CONTROL",
|
||||
@@ -41,7 +42,7 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
if operation_name == "ListEnabledProductsForImport":
|
||||
return {
|
||||
"ProductSubscriptions": [
|
||||
f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:product-subscription/prowler/prowler",
|
||||
f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:product-subscription/prowler/prowler",
|
||||
]
|
||||
}
|
||||
|
||||
@@ -49,32 +50,6 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
|
||||
|
||||
class Test_SecurityHub:
|
||||
def set_mocked_audit_info(self):
|
||||
return AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=None,
|
||||
audited_account=AWS_ACCOUNT_ID,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
|
||||
audited_identity_arn="test-arn",
|
||||
audited_user_id="test",
|
||||
audited_partition="aws",
|
||||
profile="default",
|
||||
profile_region="eu-west-1",
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["eu-west-2", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
def generate_finding(self, status, region):
|
||||
finding = Check_Report(
|
||||
load_check_metadata(
|
||||
@@ -104,14 +79,18 @@ class Test_SecurityHub:
|
||||
|
||||
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
|
||||
def test_verify_security_hub_integration_enabled_per_region(self):
|
||||
session = self.set_mocked_session(AWS_REGION_1)
|
||||
assert verify_security_hub_integration_enabled_per_region(AWS_REGION_1, session)
|
||||
session = self.set_mocked_session(AWS_REGION_EU_WEST_1)
|
||||
assert verify_security_hub_integration_enabled_per_region(
|
||||
AWS_COMMERCIAL_PARTITION, AWS_REGION_EU_WEST_1, session, AWS_ACCOUNT_NUMBER
|
||||
)
|
||||
|
||||
def test_prepare_security_hub_findings_enabled_region_not_quiet(self):
|
||||
enabled_regions = [AWS_REGION_1]
|
||||
enabled_regions = [AWS_REGION_EU_WEST_1]
|
||||
output_options = self.set_mocked_output_options(is_quiet=False)
|
||||
findings = [self.generate_finding("PASS", AWS_REGION_1)]
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
|
||||
audit_info = set_mocked_aws_audit_info(
|
||||
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
|
||||
)
|
||||
|
||||
assert prepare_security_hub_findings(
|
||||
findings,
|
||||
@@ -119,11 +98,11 @@ class Test_SecurityHub:
|
||||
output_options,
|
||||
enabled_regions,
|
||||
) == {
|
||||
AWS_REGION_1: [
|
||||
AWS_REGION_EU_WEST_1: [
|
||||
{
|
||||
"SchemaVersion": "2018-10-08",
|
||||
"Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_ID}-{AWS_REGION_1}-ee26b0dd4",
|
||||
"ProductArn": f"arn:aws:securityhub:{AWS_REGION_1}::product/prowler/prowler",
|
||||
"Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4",
|
||||
"ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler",
|
||||
"RecordState": "ACTIVE",
|
||||
"ProductFields": {
|
||||
"ProviderName": "Prowler",
|
||||
@@ -131,7 +110,7 @@ class Test_SecurityHub:
|
||||
"ProwlerResourceName": "test",
|
||||
},
|
||||
"GeneratorId": "prowler-iam_user_accesskey_unused",
|
||||
"AwsAccountId": f"{AWS_ACCOUNT_ID}",
|
||||
"AwsAccountId": f"{AWS_ACCOUNT_NUMBER}",
|
||||
"Types": ["Software and Configuration Checks"],
|
||||
"FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
@@ -144,7 +123,7 @@ class Test_SecurityHub:
|
||||
"Type": "AwsIamAccessAnalyzer",
|
||||
"Id": "test",
|
||||
"Partition": "aws",
|
||||
"Region": f"{AWS_REGION_1}",
|
||||
"Region": f"{AWS_REGION_EU_WEST_1}",
|
||||
}
|
||||
],
|
||||
"Compliance": {
|
||||
@@ -160,55 +139,117 @@ class Test_SecurityHub:
|
||||
},
|
||||
}
|
||||
],
|
||||
AWS_REGION_2: [],
|
||||
}
|
||||
|
||||
def test_prepare_security_hub_findings_quiet_INFO_finding(self):
|
||||
enabled_regions = [AWS_REGION_1]
|
||||
enabled_regions = [AWS_REGION_EU_WEST_1]
|
||||
output_options = self.set_mocked_output_options(is_quiet=False)
|
||||
findings = [self.generate_finding("INFO", AWS_REGION_1)]
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
findings = [self.generate_finding("INFO", AWS_REGION_EU_WEST_1)]
|
||||
audit_info = set_mocked_aws_audit_info(
|
||||
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
|
||||
)
|
||||
|
||||
assert prepare_security_hub_findings(
|
||||
findings,
|
||||
audit_info,
|
||||
output_options,
|
||||
enabled_regions,
|
||||
) == {AWS_REGION_1: [], AWS_REGION_2: []}
|
||||
) == {AWS_REGION_EU_WEST_1: []}
|
||||
|
||||
def test_prepare_security_hub_findings_disabled_region(self):
|
||||
enabled_regions = [AWS_REGION_1]
|
||||
enabled_regions = [AWS_REGION_EU_WEST_1]
|
||||
output_options = self.set_mocked_output_options(is_quiet=False)
|
||||
findings = [self.generate_finding("PASS", AWS_REGION_2)]
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_2)]
|
||||
audit_info = set_mocked_aws_audit_info(
|
||||
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
|
||||
)
|
||||
|
||||
assert prepare_security_hub_findings(
|
||||
findings,
|
||||
audit_info,
|
||||
output_options,
|
||||
enabled_regions,
|
||||
) == {AWS_REGION_1: [], AWS_REGION_2: []}
|
||||
) == {AWS_REGION_EU_WEST_1: []}
|
||||
|
||||
def test_prepare_security_hub_findings_quiet(self):
|
||||
enabled_regions = [AWS_REGION_1]
|
||||
enabled_regions = [AWS_REGION_EU_WEST_1]
|
||||
output_options = self.set_mocked_output_options(is_quiet=True)
|
||||
findings = [self.generate_finding("PASS", AWS_REGION_1)]
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
|
||||
audit_info = set_mocked_aws_audit_info(
|
||||
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
|
||||
)
|
||||
|
||||
assert prepare_security_hub_findings(
|
||||
findings,
|
||||
audit_info,
|
||||
output_options,
|
||||
enabled_regions,
|
||||
) == {AWS_REGION_1: [], AWS_REGION_2: []}
|
||||
) == {AWS_REGION_EU_WEST_1: []}
|
||||
|
||||
def test_prepare_security_hub_findings_no_audited_regions(self):
|
||||
enabled_regions = [AWS_REGION_EU_WEST_1]
|
||||
output_options = self.set_mocked_output_options(is_quiet=False)
|
||||
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
|
||||
assert prepare_security_hub_findings(
|
||||
findings,
|
||||
audit_info,
|
||||
output_options,
|
||||
enabled_regions,
|
||||
) == {
|
||||
AWS_REGION_EU_WEST_1: [
|
||||
{
|
||||
"SchemaVersion": "2018-10-08",
|
||||
"Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4",
|
||||
"ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler",
|
||||
"RecordState": "ACTIVE",
|
||||
"ProductFields": {
|
||||
"ProviderName": "Prowler",
|
||||
"ProviderVersion": prowler_version,
|
||||
"ProwlerResourceName": "test",
|
||||
},
|
||||
"GeneratorId": "prowler-iam_user_accesskey_unused",
|
||||
"AwsAccountId": f"{AWS_ACCOUNT_NUMBER}",
|
||||
"Types": ["Software and Configuration Checks"],
|
||||
"FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"CreatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
"Severity": {"Label": "LOW"},
|
||||
"Title": "Ensure Access Keys unused are disabled",
|
||||
"Description": "test",
|
||||
"Resources": [
|
||||
{
|
||||
"Type": "AwsIamAccessAnalyzer",
|
||||
"Id": "test",
|
||||
"Partition": "aws",
|
||||
"Region": f"{AWS_REGION_EU_WEST_1}",
|
||||
}
|
||||
],
|
||||
"Compliance": {
|
||||
"Status": "PASSED",
|
||||
"RelatedRequirements": [],
|
||||
"AssociatedStandards": [],
|
||||
},
|
||||
"Remediation": {
|
||||
"Recommendation": {
|
||||
"Text": "Run sudo yum update and cross your fingers and toes.",
|
||||
"Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html",
|
||||
}
|
||||
},
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
|
||||
def test_batch_send_to_security_hub_one_finding(self):
|
||||
enabled_regions = [AWS_REGION_1]
|
||||
enabled_regions = [AWS_REGION_EU_WEST_1]
|
||||
output_options = self.set_mocked_output_options(is_quiet=False)
|
||||
findings = [self.generate_finding("PASS", AWS_REGION_1)]
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
session = self.set_mocked_session(AWS_REGION_1)
|
||||
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
|
||||
audit_info = set_mocked_aws_audit_info(
|
||||
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
|
||||
)
|
||||
session = self.set_mocked_session(AWS_REGION_EU_WEST_1)
|
||||
|
||||
security_hub_findings = prepare_security_hub_findings(
|
||||
findings,
|
||||
|
||||
@@ -1,20 +1,21 @@
|
||||
from boto3 import session
|
||||
from mock import patch
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.lib.service.service import AWSService
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
AWS_PARTITION = "aws"
|
||||
AWS_REGION = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_ARN,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_COMMERCIAL_PARTITION,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
def mock_generate_regional_clients(service, audit_info, _):
|
||||
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
|
||||
regional_client.region = AWS_REGION
|
||||
return {AWS_REGION: regional_client}
|
||||
regional_client = audit_info.audit_session.client(
|
||||
service, region_name=AWS_REGION_US_EAST_1
|
||||
)
|
||||
regional_client.region = AWS_REGION_US_EAST_1
|
||||
return {AWS_REGION_US_EAST_1: regional_client}
|
||||
|
||||
|
||||
@patch(
|
||||
@@ -22,50 +23,19 @@ def mock_generate_regional_clients(service, audit_info, _):
|
||||
new=mock_generate_regional_clients,
|
||||
)
|
||||
class Test_AWSService:
|
||||
# Mocked Audit Info
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=AWS_ACCOUNT_ARN,
|
||||
audited_user_id=None,
|
||||
audited_partition=AWS_PARTITION,
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=[],
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
def test_AWSService_init(self):
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
service = AWSService("s3", audit_info)
|
||||
|
||||
assert service.audit_info == audit_info
|
||||
assert service.audited_account == AWS_ACCOUNT_NUMBER
|
||||
assert service.audited_account_arn == AWS_ACCOUNT_ARN
|
||||
assert service.audited_partition == AWS_PARTITION
|
||||
assert service.audited_partition == AWS_COMMERCIAL_PARTITION
|
||||
assert service.audit_resources == []
|
||||
assert service.audited_checks == []
|
||||
assert service.session == audit_info.audit_session
|
||||
assert service.service == "s3"
|
||||
assert len(service.regional_clients) == 1
|
||||
assert service.regional_clients[AWS_REGION].__class__.__name__ == "S3"
|
||||
assert service.region == AWS_REGION
|
||||
assert service.regional_clients[AWS_REGION_US_EAST_1].__class__.__name__ == "S3"
|
||||
assert service.region == AWS_REGION_US_EAST_1
|
||||
assert service.client.__class__.__name__ == "S3"
|
||||
|
||||
@@ -3,13 +3,15 @@ from unittest import mock
|
||||
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_service import (
|
||||
Analyzer,
|
||||
)
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_ARN,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_EU_WEST_2,
|
||||
)
|
||||
|
||||
AWS_REGION_1 = "eu-west-1"
|
||||
AWS_REGION_2 = "eu-west-2"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
ACCESS_ANALYZER_NAME = "test-analyzer"
|
||||
ACCESS_ANALYZER_ARN = f"arn:aws:access-analyzer:{AWS_REGION_2}:{AWS_ACCOUNT_NUMBER}:analyzer/{ACCESS_ANALYZER_NAME}"
|
||||
ACCESS_ANALYZER_ARN = f"arn:aws:access-analyzer:{AWS_REGION_EU_WEST_2}:{AWS_ACCOUNT_NUMBER}:analyzer/{ACCESS_ANALYZER_NAME}"
|
||||
|
||||
|
||||
class Test_accessanalyzer_enabled:
|
||||
@@ -33,6 +35,7 @@ class Test_accessanalyzer_enabled:
|
||||
def test_one_analyzer_not_available(self):
|
||||
# Include analyzers to check
|
||||
accessanalyzer_client = mock.MagicMock
|
||||
accessanalyzer_client.region = AWS_REGION_EU_WEST_1
|
||||
accessanalyzer_client.analyzers = [
|
||||
Analyzer(
|
||||
arn=AWS_ACCOUNT_ARN,
|
||||
@@ -40,7 +43,7 @@ class Test_accessanalyzer_enabled:
|
||||
status="NOT_AVAILABLE",
|
||||
tags=[],
|
||||
type="",
|
||||
region=AWS_REGION_1,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
]
|
||||
with mock.patch(
|
||||
@@ -62,11 +65,14 @@ class Test_accessanalyzer_enabled:
|
||||
)
|
||||
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
|
||||
assert result[0].resource_arn == AWS_ACCOUNT_ARN
|
||||
assert result[0].region == AWS_REGION_1
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
def test_two_analyzers(self):
|
||||
def test_one_analyzer_not_available_allowlisted(self):
|
||||
# Include analyzers to check
|
||||
accessanalyzer_client = mock.MagicMock
|
||||
accessanalyzer_client.region = AWS_REGION_EU_WEST_2
|
||||
accessanalyzer_client.audit_config = {"allowlist_non_default_regions": True}
|
||||
accessanalyzer_client.analyzers = [
|
||||
Analyzer(
|
||||
arn=AWS_ACCOUNT_ARN,
|
||||
@@ -74,7 +80,42 @@ class Test_accessanalyzer_enabled:
|
||||
status="NOT_AVAILABLE",
|
||||
tags=[],
|
||||
type="",
|
||||
region=AWS_REGION_1,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
]
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.AccessAnalyzer",
|
||||
accessanalyzer_client,
|
||||
):
|
||||
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_enabled.accessanalyzer_enabled import (
|
||||
accessanalyzer_enabled,
|
||||
)
|
||||
|
||||
check = accessanalyzer_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "WARNING"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"IAM Access Analyzer in account {AWS_ACCOUNT_NUMBER} is not enabled."
|
||||
)
|
||||
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
|
||||
assert result[0].resource_arn == AWS_ACCOUNT_ARN
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
def test_two_analyzers(self):
|
||||
accessanalyzer_client = mock.MagicMock
|
||||
accessanalyzer_client.region = AWS_REGION_EU_WEST_1
|
||||
accessanalyzer_client.analyzers = [
|
||||
Analyzer(
|
||||
arn=AWS_ACCOUNT_ARN,
|
||||
name=AWS_ACCOUNT_NUMBER,
|
||||
status="NOT_AVAILABLE",
|
||||
tags=[],
|
||||
type="",
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
),
|
||||
Analyzer(
|
||||
arn=ACCESS_ANALYZER_ARN,
|
||||
@@ -82,7 +123,7 @@ class Test_accessanalyzer_enabled:
|
||||
status="ACTIVE",
|
||||
tags=[],
|
||||
type="",
|
||||
region=AWS_REGION_2,
|
||||
region=AWS_REGION_EU_WEST_2,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -109,7 +150,7 @@ class Test_accessanalyzer_enabled:
|
||||
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
|
||||
assert result[0].resource_arn == AWS_ACCOUNT_ARN
|
||||
assert result[0].resource_tags == []
|
||||
assert result[0].region == AWS_REGION_1
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
|
||||
assert result[1].status == "PASS"
|
||||
assert (
|
||||
@@ -119,7 +160,7 @@ class Test_accessanalyzer_enabled:
|
||||
assert result[1].resource_id == ACCESS_ANALYZER_NAME
|
||||
assert result[1].resource_arn == ACCESS_ANALYZER_ARN
|
||||
assert result[1].resource_tags == []
|
||||
assert result[1].region == AWS_REGION_2
|
||||
assert result[1].region == AWS_REGION_EU_WEST_2
|
||||
|
||||
def test_one_active_analyzer(self):
|
||||
accessanalyzer_client = mock.MagicMock
|
||||
@@ -130,7 +171,7 @@ class Test_accessanalyzer_enabled:
|
||||
status="ACTIVE",
|
||||
tags=[],
|
||||
type="",
|
||||
region=AWS_REGION_2,
|
||||
region=AWS_REGION_EU_WEST_2,
|
||||
)
|
||||
]
|
||||
|
||||
@@ -156,4 +197,4 @@ class Test_accessanalyzer_enabled:
|
||||
assert result[0].resource_id == ACCESS_ANALYZER_NAME
|
||||
assert result[0].resource_arn == ACCESS_ANALYZER_ARN
|
||||
assert result[0].resource_tags == []
|
||||
assert result[0].region == AWS_REGION_2
|
||||
assert result[0].region == AWS_REGION_EU_WEST_2
|
||||
|
||||
@@ -4,13 +4,15 @@ from prowler.providers.aws.services.accessanalyzer.accessanalyzer_service import
|
||||
Analyzer,
|
||||
Finding,
|
||||
)
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_ARN,
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_EU_WEST_2,
|
||||
)
|
||||
|
||||
AWS_REGION_1 = "eu-west-1"
|
||||
AWS_REGION_2 = "eu-west-2"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
ACCESS_ANALYZER_NAME = "test-analyzer"
|
||||
ACCESS_ANALYZER_ARN = f"arn:aws:access-analyzer:{AWS_REGION_2}:{AWS_ACCOUNT_NUMBER}:analyzer/{ACCESS_ANALYZER_NAME}"
|
||||
ACCESS_ANALYZER_ARN = f"arn:aws:access-analyzer:{AWS_REGION_EU_WEST_2}:{AWS_ACCOUNT_NUMBER}:analyzer/{ACCESS_ANALYZER_NAME}"
|
||||
|
||||
|
||||
class Test_accessanalyzer_enabled_without_findings:
|
||||
@@ -42,7 +44,7 @@ class Test_accessanalyzer_enabled_without_findings:
|
||||
tags=[],
|
||||
type="",
|
||||
fidings=[],
|
||||
region=AWS_REGION_1,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
]
|
||||
with mock.patch(
|
||||
@@ -68,7 +70,7 @@ class Test_accessanalyzer_enabled_without_findings:
|
||||
tags=[],
|
||||
fidings=[],
|
||||
type="",
|
||||
region=AWS_REGION_1,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
),
|
||||
Analyzer(
|
||||
arn=ACCESS_ANALYZER_ARN,
|
||||
@@ -86,7 +88,7 @@ class Test_accessanalyzer_enabled_without_findings:
|
||||
],
|
||||
tags=[],
|
||||
type="",
|
||||
region=AWS_REGION_2,
|
||||
region=AWS_REGION_EU_WEST_2,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -112,7 +114,7 @@ class Test_accessanalyzer_enabled_without_findings:
|
||||
)
|
||||
assert result[0].resource_id == ACCESS_ANALYZER_NAME
|
||||
assert result[0].resource_arn == ACCESS_ANALYZER_ARN
|
||||
assert result[0].region == AWS_REGION_2
|
||||
assert result[0].region == AWS_REGION_EU_WEST_2
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
def test_one_active_analyzer_without_findings(self):
|
||||
@@ -125,7 +127,7 @@ class Test_accessanalyzer_enabled_without_findings:
|
||||
tags=[],
|
||||
fidings=[],
|
||||
type="",
|
||||
region=AWS_REGION_2,
|
||||
region=AWS_REGION_EU_WEST_2,
|
||||
)
|
||||
]
|
||||
|
||||
@@ -149,7 +151,7 @@ class Test_accessanalyzer_enabled_without_findings:
|
||||
)
|
||||
assert result[0].resource_id == ACCESS_ANALYZER_NAME
|
||||
assert result[0].resource_arn == ACCESS_ANALYZER_ARN
|
||||
assert result[0].region == AWS_REGION_2
|
||||
assert result[0].region == AWS_REGION_EU_WEST_2
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
def test_one_active_analyzer_not_active_without_findings(self):
|
||||
@@ -162,7 +164,7 @@ class Test_accessanalyzer_enabled_without_findings:
|
||||
tags=[],
|
||||
fidings=[],
|
||||
type="",
|
||||
region=AWS_REGION_1,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
),
|
||||
]
|
||||
# Patch AccessAnalyzer Client
|
||||
@@ -195,7 +197,7 @@ class Test_accessanalyzer_enabled_without_findings:
|
||||
],
|
||||
tags=[],
|
||||
type="",
|
||||
region=AWS_REGION_1,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -220,5 +222,5 @@ class Test_accessanalyzer_enabled_without_findings:
|
||||
)
|
||||
assert result[0].resource_id == ACCESS_ANALYZER_NAME
|
||||
assert result[0].resource_arn == ACCESS_ANALYZER_ARN
|
||||
assert result[0].region == AWS_REGION_1
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
@@ -1,19 +1,15 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
import botocore
|
||||
from boto3 import session
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_service import (
|
||||
AccessAnalyzer,
|
||||
)
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
# Mock Test Region
|
||||
AWS_REGION = "eu-west-1"
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
# Mocking Access Analyzer Calls
|
||||
make_api_call = botocore.client.BaseClient._make_api_call
|
||||
@@ -59,9 +55,11 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
|
||||
|
||||
def mock_generate_regional_clients(service, audit_info, _):
|
||||
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
|
||||
regional_client.region = AWS_REGION
|
||||
return {AWS_REGION: regional_client}
|
||||
regional_client = audit_info.audit_session.client(
|
||||
service, region_name=AWS_REGION_EU_WEST_1
|
||||
)
|
||||
regional_client.region = AWS_REGION_EU_WEST_1
|
||||
return {AWS_REGION_EU_WEST_1: regional_client}
|
||||
|
||||
|
||||
# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
|
||||
@@ -71,66 +69,46 @@ def mock_generate_regional_clients(service, audit_info, _):
|
||||
new=mock_generate_regional_clients,
|
||||
)
|
||||
class Test_AccessAnalyzer_Service:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
# Test AccessAnalyzer Client
|
||||
def test__get_client__(self):
|
||||
access_analyzer = AccessAnalyzer(self.set_mocked_audit_info())
|
||||
access_analyzer = AccessAnalyzer(
|
||||
set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1])
|
||||
)
|
||||
assert (
|
||||
access_analyzer.regional_clients[AWS_REGION].__class__.__name__
|
||||
access_analyzer.regional_clients[AWS_REGION_EU_WEST_1].__class__.__name__
|
||||
== "AccessAnalyzer"
|
||||
)
|
||||
|
||||
# Test AccessAnalyzer Session
|
||||
def test__get_session__(self):
|
||||
access_analyzer = AccessAnalyzer(self.set_mocked_audit_info())
|
||||
access_analyzer = AccessAnalyzer(
|
||||
set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1])
|
||||
)
|
||||
assert access_analyzer.session.__class__.__name__ == "Session"
|
||||
|
||||
# Test AccessAnalyzer Service
|
||||
def test__get_service__(self):
|
||||
access_analyzer = AccessAnalyzer(self.set_mocked_audit_info())
|
||||
access_analyzer = AccessAnalyzer(
|
||||
set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1])
|
||||
)
|
||||
assert access_analyzer.service == "accessanalyzer"
|
||||
|
||||
def test__list_analyzers__(self):
|
||||
access_analyzer = AccessAnalyzer(self.set_mocked_audit_info())
|
||||
access_analyzer = AccessAnalyzer(
|
||||
set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1])
|
||||
)
|
||||
assert len(access_analyzer.analyzers) == 1
|
||||
assert access_analyzer.analyzers[0].arn == "ARN"
|
||||
assert access_analyzer.analyzers[0].name == "Test Analyzer"
|
||||
assert access_analyzer.analyzers[0].status == "ACTIVE"
|
||||
assert access_analyzer.analyzers[0].tags == [{"test": "test"}]
|
||||
assert access_analyzer.analyzers[0].type == "ACCOUNT"
|
||||
assert access_analyzer.analyzers[0].region == AWS_REGION
|
||||
assert access_analyzer.analyzers[0].region == AWS_REGION_EU_WEST_1
|
||||
|
||||
def test__list_findings__(self):
|
||||
access_analyzer = AccessAnalyzer(self.set_mocked_audit_info())
|
||||
access_analyzer = AccessAnalyzer(
|
||||
set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1])
|
||||
)
|
||||
assert len(access_analyzer.analyzers) == 1
|
||||
assert len(access_analyzer.analyzers[0].findings) == 1
|
||||
assert access_analyzer.analyzers[0].findings[0].status == "ARCHIVED"
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
from unittest import mock
|
||||
|
||||
from prowler.providers.aws.services.account.account_service import Contact
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
AWS_REGION = "us-east-1"
|
||||
|
||||
|
||||
class Test_account_maintain_different_contact_details_to_security_billing_and_operations:
|
||||
def test_contacts_not_configured_or_equal(self):
|
||||
account_client = mock.MagicMock
|
||||
account_client.region = AWS_REGION
|
||||
account_client.region = AWS_REGION_EU_WEST_1
|
||||
account_client.audited_account = AWS_ACCOUNT_NUMBER
|
||||
account_client.audited_account_arn = AWS_ACCOUNT_ARN
|
||||
|
||||
@@ -49,13 +51,13 @@ class Test_account_maintain_different_contact_details_to_security_billing_and_op
|
||||
result[0].status_extended
|
||||
== "SECURITY, BILLING and OPERATIONS contacts not found or they are not different between each other and between ROOT contact."
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
|
||||
assert result[0].resource_arn == AWS_ACCOUNT_ARN
|
||||
|
||||
def test_contacts_diffent(self):
|
||||
account_client = mock.MagicMock
|
||||
account_client.region = AWS_REGION
|
||||
account_client.region = AWS_REGION_EU_WEST_1
|
||||
account_client.audited_account = AWS_ACCOUNT_NUMBER
|
||||
account_client.audited_account_arn = AWS_ACCOUNT_ARN
|
||||
|
||||
@@ -98,6 +100,6 @@ class Test_account_maintain_different_contact_details_to_security_billing_and_op
|
||||
result[0].status_extended
|
||||
== "SECURITY, BILLING and OPERATIONS contacts found and they are different between each other and between ROOT contact."
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
|
||||
assert result[0].resource_arn == AWS_ACCOUNT_ARN
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
import botocore
|
||||
from boto3 import session
|
||||
from mock import patch
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.account.account_service import Account, Contact
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
AWS_REGION = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
# Mocking Access Analyzer Calls
|
||||
make_api_call = botocore.client.BaseClient._make_api_call
|
||||
@@ -56,65 +53,34 @@ def mock_make_api_call(self, operation_name, kwargs):
|
||||
# Patch every AWS call using Boto3
|
||||
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
|
||||
class Test_Account_Service:
|
||||
# Mocked Audit Info
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=AWS_ACCOUNT_ARN,
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
# Test Account Service
|
||||
def test_service(self):
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
account = Account(audit_info)
|
||||
assert account.service == "account"
|
||||
|
||||
# Test Account Client
|
||||
def test_client(self):
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
account = Account(audit_info)
|
||||
assert account.client.__class__.__name__ == "Account"
|
||||
|
||||
# Test Account Session
|
||||
def test__get_session__(self):
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
account = Account(audit_info)
|
||||
assert account.session.__class__.__name__ == "Session"
|
||||
|
||||
# Test Account Session
|
||||
def test_audited_account(self):
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
account = Account(audit_info)
|
||||
assert account.audited_account == AWS_ACCOUNT_NUMBER
|
||||
|
||||
# Test Account Get Account Contacts
|
||||
def test_get_account_contacts(self):
|
||||
# Account client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
account = Account(audit_info)
|
||||
assert account.number_of_contacts == 4
|
||||
assert account.contact_base == Contact(
|
||||
|
||||
@@ -2,9 +2,11 @@ import uuid
|
||||
from unittest import mock
|
||||
|
||||
from prowler.providers.aws.services.acm.acm_service import Certificate
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
DAYS_TO_EXPIRE_THRESHOLD = 7
|
||||
|
||||
|
||||
@@ -29,7 +31,7 @@ class Test_acm_certificates_expiration_check:
|
||||
|
||||
def test_acm_certificate_expirated(self):
|
||||
certificate_id = str(uuid.uuid4())
|
||||
certificate_arn = f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{certificate_id}"
|
||||
certificate_arn = f"arn:aws:acm:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:certificate/{certificate_id}"
|
||||
certificate_name = "test-certificate.com"
|
||||
certificate_type = "AMAZON_ISSUED"
|
||||
|
||||
@@ -42,7 +44,7 @@ class Test_acm_certificates_expiration_check:
|
||||
type=certificate_type,
|
||||
expiration_days=5,
|
||||
transparency_logging=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
]
|
||||
|
||||
@@ -66,12 +68,12 @@ class Test_acm_certificates_expiration_check:
|
||||
)
|
||||
assert result[0].resource_id == certificate_id
|
||||
assert result[0].resource_arn == certificate_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
def test_acm_certificate_not_expirated(self):
|
||||
certificate_id = str(uuid.uuid4())
|
||||
certificate_arn = f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{certificate_id}"
|
||||
certificate_arn = f"arn:aws:acm:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:certificate/{certificate_id}"
|
||||
certificate_name = "test-certificate.com"
|
||||
certificate_type = "AMAZON_ISSUED"
|
||||
expiration_days = 365
|
||||
@@ -85,7 +87,7 @@ class Test_acm_certificates_expiration_check:
|
||||
type=certificate_type,
|
||||
expiration_days=expiration_days,
|
||||
transparency_logging=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
]
|
||||
|
||||
@@ -109,5 +111,5 @@ class Test_acm_certificates_expiration_check:
|
||||
)
|
||||
assert result[0].resource_id == certificate_id
|
||||
assert result[0].resource_arn == certificate_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
@@ -2,9 +2,10 @@ import uuid
|
||||
from unittest import mock
|
||||
|
||||
from prowler.providers.aws.services.acm.acm_service import Certificate
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
|
||||
class Test_acm_certificates_transparency_logs_enabled:
|
||||
@@ -28,7 +29,7 @@ class Test_acm_certificates_transparency_logs_enabled:
|
||||
|
||||
def test_acm_certificate_with_logging(self):
|
||||
certificate_id = str(uuid.uuid4())
|
||||
certificate_arn = f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{certificate_id}"
|
||||
certificate_arn = f"arn:aws:acm:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:certificate/{certificate_id}"
|
||||
certificate_name = "test-certificate.com"
|
||||
certificate_type = "AMAZON_ISSUED"
|
||||
|
||||
@@ -41,7 +42,7 @@ class Test_acm_certificates_transparency_logs_enabled:
|
||||
type=certificate_type,
|
||||
expiration_days=365,
|
||||
transparency_logging=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
]
|
||||
|
||||
@@ -65,12 +66,12 @@ class Test_acm_certificates_transparency_logs_enabled:
|
||||
)
|
||||
assert result[0].resource_id == certificate_id
|
||||
assert result[0].resource_arn == certificate_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
def test_acm_certificate_without_logging(self):
|
||||
certificate_id = str(uuid.uuid4())
|
||||
certificate_arn = f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{certificate_id}"
|
||||
certificate_arn = f"arn:aws:acm:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:certificate/{certificate_id}"
|
||||
certificate_name = "test-certificate.com"
|
||||
certificate_type = "AMAZON_ISSUED"
|
||||
|
||||
@@ -83,7 +84,7 @@ class Test_acm_certificates_transparency_logs_enabled:
|
||||
type=certificate_type,
|
||||
expiration_days=365,
|
||||
transparency_logging=False,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
]
|
||||
|
||||
@@ -107,5 +108,5 @@ class Test_acm_certificates_transparency_logs_enabled:
|
||||
)
|
||||
assert result[0].resource_id == certificate_id
|
||||
assert result[0].resource_arn == certificate_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
@@ -2,26 +2,20 @@ import uuid
|
||||
from datetime import datetime
|
||||
|
||||
import botocore
|
||||
from boto3 import session
|
||||
from freezegun import freeze_time
|
||||
from mock import patch
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.acm.acm_service import ACM
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
# from moto import mock_acm
|
||||
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_REGION = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
# Mocking Access Analyzer Calls
|
||||
make_api_call = botocore.client.BaseClient._make_api_call
|
||||
|
||||
certificate_arn = (
|
||||
f"arn:aws:acm:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:certificate/{str(uuid.uuid4())}"
|
||||
)
|
||||
certificate_arn = f"arn:aws:acm:{AWS_REGION_US_EAST_1}:{AWS_ACCOUNT_NUMBER}:certificate/{str(uuid.uuid4())}"
|
||||
certificate_name = "test-certificate.com"
|
||||
certificate_type = "AMAZON_ISSUED"
|
||||
|
||||
@@ -81,9 +75,11 @@ def mock_make_api_call(self, operation_name, kwargs):
|
||||
|
||||
# Mock generate_regional_clients()
|
||||
def mock_generate_regional_clients(service, audit_info, _):
|
||||
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
|
||||
regional_client.region = AWS_REGION
|
||||
return {AWS_REGION: regional_client}
|
||||
regional_client = audit_info.audit_session.client(
|
||||
service, region_name=AWS_REGION_US_EAST_1
|
||||
)
|
||||
regional_client.region = AWS_REGION_US_EAST_1
|
||||
return {AWS_REGION_US_EAST_1: regional_client}
|
||||
|
||||
|
||||
# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
|
||||
@@ -96,42 +92,11 @@ def mock_generate_regional_clients(service, audit_info, _):
|
||||
@freeze_time("2023-01-01")
|
||||
# FIXME: Pending Moto PR to update ACM responses
|
||||
class Test_ACM_Service:
|
||||
# Mocked Audit Info
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
# Test ACM Service
|
||||
# @mock_acm
|
||||
def test_service(self):
|
||||
# ACM client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
acm = ACM(audit_info)
|
||||
assert acm.service == "acm"
|
||||
|
||||
@@ -139,7 +104,7 @@ class Test_ACM_Service:
|
||||
# @mock_acm
|
||||
def test_client(self):
|
||||
# ACM client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
acm = ACM(audit_info)
|
||||
for regional_client in acm.regional_clients.values():
|
||||
assert regional_client.__class__.__name__ == "ACM"
|
||||
@@ -148,7 +113,7 @@ class Test_ACM_Service:
|
||||
# @mock_acm
|
||||
def test__get_session__(self):
|
||||
# ACM client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
acm = ACM(audit_info)
|
||||
assert acm.session.__class__.__name__ == "Session"
|
||||
|
||||
@@ -156,7 +121,7 @@ class Test_ACM_Service:
|
||||
# @mock_acm
|
||||
def test_audited_account(self):
|
||||
# ACM client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
acm = ACM(audit_info)
|
||||
assert acm.audited_account == AWS_ACCOUNT_NUMBER
|
||||
|
||||
@@ -164,14 +129,14 @@ class Test_ACM_Service:
|
||||
# @mock_acm
|
||||
def test__list_and_describe_certificates__(self):
|
||||
# Generate ACM Client
|
||||
# acm_client = client("acm", region_name=AWS_REGION)
|
||||
# acm_client = client("acm", region_name=AWS_REGION_EU_WEST_1)
|
||||
# Request ACM certificate
|
||||
# certificate = acm_client.request_certificate(
|
||||
# DomainName="test.com",
|
||||
# )
|
||||
|
||||
# ACM client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
acm = ACM(audit_info)
|
||||
assert len(acm.certificates) == 1
|
||||
assert acm.certificates[0].arn == certificate_arn
|
||||
@@ -179,20 +144,20 @@ class Test_ACM_Service:
|
||||
assert acm.certificates[0].type == certificate_type
|
||||
assert acm.certificates[0].expiration_days == 365
|
||||
assert acm.certificates[0].transparency_logging is False
|
||||
assert acm.certificates[0].region == AWS_REGION
|
||||
assert acm.certificates[0].region == AWS_REGION_US_EAST_1
|
||||
|
||||
# Test ACM List Tags
|
||||
# @mock_acm
|
||||
def test__list_tags_for_certificate__(self):
|
||||
# Generate ACM Client
|
||||
# acm_client = client("acm", region_name=AWS_REGION)
|
||||
# acm_client = client("acm", region_name=AWS_REGION_EU_WEST_1)
|
||||
# Request ACM certificate
|
||||
# certificate = acm_client.request_certificate(
|
||||
# DomainName="test.com",
|
||||
# )
|
||||
|
||||
# ACM client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info()
|
||||
acm = ACM(audit_info)
|
||||
assert len(acm.certificates) == 1
|
||||
assert acm.certificates[0].tags == [
|
||||
|
||||
@@ -1,55 +1,26 @@
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_apigateway, mock_iam, mock_lambda
|
||||
from moto.core import DEFAULT_ACCOUNT_ID as ACCOUNT_ID
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
class Test_apigateway_restapi_authorizers_enabled:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_no_rest_apis(self):
|
||||
from prowler.providers.aws.services.apigateway.apigateway_service import (
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -73,8 +44,8 @@ class Test_apigateway_restapi_authorizers_enabled:
|
||||
@mock_lambda
|
||||
def test_apigateway_one_rest_api_with_lambda_authorizer(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
lambda_client = client("lambda", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
lambda_client = client("lambda", region_name=AWS_REGION_US_EAST_1)
|
||||
iam_client = client("iam")
|
||||
# Create APIGateway Rest API
|
||||
role_arn = iam_client.create_role(
|
||||
@@ -103,7 +74,9 @@ class Test_apigateway_restapi_authorizers_enabled:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -129,15 +102,15 @@ class Test_apigateway_restapi_authorizers_enabled:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [{}]
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_rest_api_without_lambda_authorizer(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Rest API
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -146,7 +119,9 @@ class Test_apigateway_restapi_authorizers_enabled:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -172,7 +147,7 @@ class Test_apigateway_restapi_authorizers_enabled:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [{}]
|
||||
|
||||
@@ -1,52 +1,21 @@
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_apigateway
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.apigateway.apigateway_service import Stage
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
class Test_apigateway_restapi_client_certificate_enabled:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_no_stages(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Rest API
|
||||
apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -55,7 +24,9 @@ class Test_apigateway_restapi_client_certificate_enabled:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -77,7 +48,7 @@ class Test_apigateway_restapi_client_certificate_enabled:
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_stage_without_certificate(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Deployment Stage
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -113,7 +84,9 @@ class Test_apigateway_restapi_client_certificate_enabled:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -139,15 +112,15 @@ class Test_apigateway_restapi_client_certificate_enabled:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}/stages/test"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}/stages/test"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [None]
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_stage_with_certificate(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Deployment Stage
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -156,7 +129,9 @@ class Test_apigateway_restapi_client_certificate_enabled:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -173,7 +148,7 @@ class Test_apigateway_restapi_client_certificate_enabled:
|
||||
service_client.rest_apis[0].stages.append(
|
||||
Stage(
|
||||
name="test",
|
||||
arn=f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/test-rest-api/stages/test",
|
||||
arn=f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/test-rest-api/stages/test",
|
||||
logging=True,
|
||||
client_certificate=True,
|
||||
waf=True,
|
||||
@@ -192,7 +167,7 @@ class Test_apigateway_restapi_client_certificate_enabled:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/test-rest-api/stages/test"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/test-rest-api/stages/test"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
@@ -1,54 +1,25 @@
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_apigateway
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
class Test_apigateway_restapi_public:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_no_rest_apis(self):
|
||||
from prowler.providers.aws.services.apigateway.apigateway_service import (
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -70,7 +41,7 @@ class Test_apigateway_restapi_public:
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_private_rest_api(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Deployment Stage
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -84,7 +55,9 @@ class Test_apigateway_restapi_public:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -110,15 +83,15 @@ class Test_apigateway_restapi_public:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [{}]
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_public_rest_api(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Deployment Stage
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -132,7 +105,9 @@ class Test_apigateway_restapi_public:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -158,7 +133,7 @@ class Test_apigateway_restapi_public:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [{}]
|
||||
|
||||
@@ -1,56 +1,27 @@
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_apigateway
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
API_GW_NAME = "test-rest-api"
|
||||
|
||||
|
||||
class Test_apigateway_restapi_public_with_authorizer:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_no_rest_apis(self):
|
||||
from prowler.providers.aws.services.apigateway.apigateway_service import (
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -72,7 +43,7 @@ class Test_apigateway_restapi_public_with_authorizer:
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_public_rest_api_without_authorizer(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Deployment Stage
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name=API_GW_NAME,
|
||||
@@ -86,7 +57,9 @@ class Test_apigateway_restapi_public_with_authorizer:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -112,15 +85,15 @@ class Test_apigateway_restapi_public_with_authorizer:
|
||||
assert result[0].resource_id == API_GW_NAME
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [{}]
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_public_rest_api_with_authorizer(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Deployment Stage
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -137,7 +110,9 @@ class Test_apigateway_restapi_public_with_authorizer:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -163,7 +138,7 @@ class Test_apigateway_restapi_public_with_authorizer:
|
||||
assert result[0].resource_id == API_GW_NAME
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [{}]
|
||||
|
||||
@@ -1,54 +1,25 @@
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_apigateway
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
class Test_apigateway_restapi_logging_enabled:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_no_rest_apis(self):
|
||||
from prowler.providers.aws.services.apigateway.apigateway_service import (
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -70,7 +41,7 @@ class Test_apigateway_restapi_logging_enabled:
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_rest_api_with_logging(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
)
|
||||
@@ -116,7 +87,9 @@ class Test_apigateway_restapi_logging_enabled:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -142,15 +115,15 @@ class Test_apigateway_restapi_logging_enabled:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}/stages/test"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}/stages/test"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [None]
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_rest_api_without_logging(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Rest API
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -187,7 +160,9 @@ class Test_apigateway_restapi_logging_enabled:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -213,7 +188,7 @@ class Test_apigateway_restapi_logging_enabled:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}/stages/test"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}/stages/test"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [None]
|
||||
|
||||
@@ -1,51 +1,20 @@
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_apigateway
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.apigateway.apigateway_service import APIGateway
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_REGION = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
class Test_APIGateway_Service:
|
||||
# Mocked Audit Info
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
# Test APIGateway Service
|
||||
@mock_apigateway
|
||||
def test_service(self):
|
||||
# APIGateway client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigateway = APIGateway(audit_info)
|
||||
assert apigateway.service == "apigateway"
|
||||
|
||||
@@ -53,7 +22,7 @@ class Test_APIGateway_Service:
|
||||
@mock_apigateway
|
||||
def test_client(self):
|
||||
# APIGateway client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigateway = APIGateway(audit_info)
|
||||
for regional_client in apigateway.regional_clients.values():
|
||||
assert regional_client.__class__.__name__ == "APIGateway"
|
||||
@@ -62,7 +31,7 @@ class Test_APIGateway_Service:
|
||||
@mock_apigateway
|
||||
def test__get_session__(self):
|
||||
# APIGateway client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigateway = APIGateway(audit_info)
|
||||
assert apigateway.session.__class__.__name__ == "Session"
|
||||
|
||||
@@ -70,7 +39,7 @@ class Test_APIGateway_Service:
|
||||
@mock_apigateway
|
||||
def test_audited_account(self):
|
||||
# APIGateway client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigateway = APIGateway(audit_info)
|
||||
assert apigateway.audited_account == AWS_ACCOUNT_NUMBER
|
||||
|
||||
@@ -78,13 +47,13 @@ class Test_APIGateway_Service:
|
||||
@mock_apigateway
|
||||
def test__get_rest_apis__(self):
|
||||
# Generate APIGateway Client
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Rest API
|
||||
apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
)
|
||||
# APIGateway client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigateway = APIGateway(audit_info)
|
||||
assert len(apigateway.rest_apis) == len(
|
||||
apigateway_client.get_rest_apis()["items"]
|
||||
@@ -94,7 +63,7 @@ class Test_APIGateway_Service:
|
||||
@mock_apigateway
|
||||
def test__get_authorizers__(self):
|
||||
# Generate APIGateway Client
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Rest API
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -106,7 +75,7 @@ class Test_APIGateway_Service:
|
||||
type="TOKEN",
|
||||
)
|
||||
# APIGateway client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigateway = APIGateway(audit_info)
|
||||
assert apigateway.rest_apis[0].authorizer is True
|
||||
|
||||
@@ -114,7 +83,7 @@ class Test_APIGateway_Service:
|
||||
@mock_apigateway
|
||||
def test__get_rest_api__(self):
|
||||
# Generate APIGateway Client
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create private APIGateway Rest API
|
||||
apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -122,7 +91,7 @@ class Test_APIGateway_Service:
|
||||
tags={"test": "test"},
|
||||
)
|
||||
# APIGateway client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigateway = APIGateway(audit_info)
|
||||
assert apigateway.rest_apis[0].public_endpoint is False
|
||||
assert apigateway.rest_apis[0].tags == [{"test": "test"}]
|
||||
@@ -131,7 +100,7 @@ class Test_APIGateway_Service:
|
||||
@mock_apigateway
|
||||
def test__get_stages__(self):
|
||||
# Generate APIGateway Client
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Rest API and a deployment stage
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -174,6 +143,6 @@ class Test_APIGateway_Service:
|
||||
},
|
||||
],
|
||||
)
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigateway = APIGateway(audit_info)
|
||||
assert apigateway.rest_apis[0].stages[0].logging is True
|
||||
|
||||
@@ -1,54 +1,25 @@
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_apigateway, mock_wafv2
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
class Test_apigateway_restapi_waf_acl_attached:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_no_rest_apis(self):
|
||||
from prowler.providers.aws.services.apigateway.apigateway_service import (
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -71,8 +42,8 @@ class Test_apigateway_restapi_waf_acl_attached:
|
||||
@mock_wafv2
|
||||
def test_apigateway_one_rest_api_with_waf(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
waf_client = client("wafv2", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
waf_client = client("wafv2", region_name=AWS_REGION_US_EAST_1)
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
)
|
||||
@@ -122,7 +93,9 @@ class Test_apigateway_restapi_waf_acl_attached:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -148,15 +121,15 @@ class Test_apigateway_restapi_waf_acl_attached:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}/stages/test"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}/stages/test"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [None]
|
||||
|
||||
@mock_apigateway
|
||||
def test_apigateway_one_rest_api_without_waf(self):
|
||||
# Create APIGateway Mocked Resources
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION)
|
||||
apigateway_client = client("apigateway", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create APIGateway Rest API
|
||||
rest_api = apigateway_client.create_rest_api(
|
||||
name="test-rest-api",
|
||||
@@ -193,7 +166,9 @@ class Test_apigateway_restapi_waf_acl_attached:
|
||||
APIGateway,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -219,7 +194,7 @@ class Test_apigateway_restapi_waf_acl_attached:
|
||||
assert result[0].resource_id == "test-rest-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION}::/restapis/{rest_api['id']}/stages/test"
|
||||
== f"arn:{current_audit_info.audited_partition}:apigateway:{AWS_REGION_US_EAST_1}::/restapis/{rest_api['id']}/stages/test"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [None]
|
||||
|
||||
@@ -1,15 +1,14 @@
|
||||
from unittest import mock
|
||||
|
||||
import botocore
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from mock import patch
|
||||
from moto import mock_apigatewayv2
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
# Mocking ApiGatewayV2 Calls
|
||||
make_api_call = botocore.client.BaseClient._make_api_call
|
||||
@@ -40,44 +39,15 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
|
||||
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
|
||||
class Test_apigatewayv2_api_access_logging_enabled:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_apigatewayv2
|
||||
def test_apigateway_no_apis(self):
|
||||
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_service import (
|
||||
ApiGatewayV2,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -99,7 +69,7 @@ class Test_apigatewayv2_api_access_logging_enabled:
|
||||
@mock_apigatewayv2
|
||||
def test_apigateway_one_api_with_logging_in_stage(self):
|
||||
# Create ApiGatewayV2 Mocked Resources
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create ApiGatewayV2 API
|
||||
api = apigatewayv2_client.create_api(Name="test-api", ProtocolType="HTTP")
|
||||
# Get stages mock with stage with logging
|
||||
@@ -107,7 +77,9 @@ class Test_apigatewayv2_api_access_logging_enabled:
|
||||
ApiGatewayV2,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -124,8 +96,8 @@ class Test_apigatewayv2_api_access_logging_enabled:
|
||||
check = apigatewayv2_api_access_logging_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert result[0].status == "PASS"
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"API Gateway V2 test-api ID {api['ApiId']} in stage test-stage has access logging enabled."
|
||||
@@ -134,7 +106,7 @@ class Test_apigatewayv2_api_access_logging_enabled:
|
||||
assert result[0].resource_id == "test-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:aws:apigateway:{AWS_REGION}::apis/{api['ApiId']}"
|
||||
== f"arn:aws:apigateway:{AWS_REGION_US_EAST_1}::apis/{api['ApiId']}"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [{}]
|
||||
|
||||
@@ -1,15 +1,14 @@
|
||||
from unittest import mock
|
||||
|
||||
import botocore
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from mock import patch
|
||||
from moto import mock_apigatewayv2
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
# Mocking ApiGatewayV2 Calls
|
||||
make_api_call = botocore.client.BaseClient._make_api_call
|
||||
@@ -40,44 +39,15 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
|
||||
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
|
||||
class Test_apigatewayv2_api_authorizers_enabled:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_apigatewayv2
|
||||
def test_apigateway_no_apis(self):
|
||||
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_service import (
|
||||
ApiGatewayV2,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -99,7 +69,7 @@ class Test_apigatewayv2_api_authorizers_enabled:
|
||||
@mock_apigatewayv2
|
||||
def test_apigateway_one_api_with_authorizer(self):
|
||||
# Create ApiGatewayV2 Mocked Resources
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create ApiGatewayV2 API
|
||||
api = apigatewayv2_client.create_api(Name="test-api", ProtocolType="HTTP")
|
||||
apigatewayv2_client.create_authorizer(
|
||||
@@ -113,7 +83,9 @@ class Test_apigatewayv2_api_authorizers_enabled:
|
||||
ApiGatewayV2,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = current_audit_info = set_mocked_aws_audit_info(
|
||||
[AWS_REGION_US_EAST_1]
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -130,8 +102,8 @@ class Test_apigatewayv2_api_authorizers_enabled:
|
||||
check = apigatewayv2_api_authorizers_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert result[0].status == "PASS"
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"API Gateway V2 test-api ID {api['ApiId']} has an authorizer configured."
|
||||
@@ -139,7 +111,7 @@ class Test_apigatewayv2_api_authorizers_enabled:
|
||||
assert result[0].resource_id == "test-api"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:aws:apigateway:{AWS_REGION}::apis/{api['ApiId']}"
|
||||
== f"arn:aws:apigateway:{AWS_REGION_US_EAST_1}::apis/{api['ApiId']}"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == [{}]
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
import botocore
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from mock import patch
|
||||
from moto import mock_apigatewayv2
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.apigatewayv2.apigatewayv2_service import (
|
||||
ApiGatewayV2,
|
||||
)
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_REGION = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
# Mocking ApiGatewayV2 Calls
|
||||
make_api_call = botocore.client.BaseClient._make_api_call
|
||||
@@ -41,42 +41,11 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
|
||||
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
|
||||
class Test_ApiGatewayV2_Service:
|
||||
# Mocked Audit Info
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
# Test ApiGatewayV2 Service
|
||||
@mock_apigatewayv2
|
||||
def test_service(self):
|
||||
# ApiGatewayV2 client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigatewayv2 = ApiGatewayV2(audit_info)
|
||||
assert apigatewayv2.service == "apigatewayv2"
|
||||
|
||||
@@ -84,7 +53,7 @@ class Test_ApiGatewayV2_Service:
|
||||
@mock_apigatewayv2
|
||||
def test_client(self):
|
||||
# ApiGatewayV2 client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigatewayv2 = ApiGatewayV2(audit_info)
|
||||
for regional_client in apigatewayv2.regional_clients.values():
|
||||
assert regional_client.__class__.__name__ == "ApiGatewayV2"
|
||||
@@ -93,7 +62,7 @@ class Test_ApiGatewayV2_Service:
|
||||
@mock_apigatewayv2
|
||||
def test__get_session__(self):
|
||||
# ApiGatewayV2 client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigatewayv2 = ApiGatewayV2(audit_info)
|
||||
assert apigatewayv2.session.__class__.__name__ == "Session"
|
||||
|
||||
@@ -101,7 +70,7 @@ class Test_ApiGatewayV2_Service:
|
||||
@mock_apigatewayv2
|
||||
def test_audited_account(self):
|
||||
# ApiGatewayV2 client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigatewayv2 = ApiGatewayV2(audit_info)
|
||||
assert apigatewayv2.audited_account == AWS_ACCOUNT_NUMBER
|
||||
|
||||
@@ -109,13 +78,13 @@ class Test_ApiGatewayV2_Service:
|
||||
@mock_apigatewayv2
|
||||
def test__get_apis__(self):
|
||||
# Generate ApiGatewayV2 Client
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create ApiGatewayV2 API
|
||||
apigatewayv2_client.create_api(
|
||||
Name="test-api", ProtocolType="HTTP", Tags={"test": "test"}
|
||||
)
|
||||
# ApiGatewayV2 client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigatewayv2 = ApiGatewayV2(audit_info)
|
||||
assert len(apigatewayv2.apis) == len(apigatewayv2_client.get_apis()["Items"])
|
||||
assert apigatewayv2.apis[0].tags == [{"test": "test"}]
|
||||
@@ -124,7 +93,7 @@ class Test_ApiGatewayV2_Service:
|
||||
@mock_apigatewayv2
|
||||
def test__get_authorizers__(self):
|
||||
# Generate ApiGatewayV2 Client
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create ApiGatewayV2 Rest API
|
||||
api = apigatewayv2_client.create_api(Name="test-api", ProtocolType="HTTP")
|
||||
# Create authorizer
|
||||
@@ -136,7 +105,7 @@ class Test_ApiGatewayV2_Service:
|
||||
AuthorizerPayloadFormatVersion="2.0",
|
||||
)
|
||||
# ApiGatewayV2 client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigatewayv2 = ApiGatewayV2(audit_info)
|
||||
assert apigatewayv2.apis[0].authorizer is True
|
||||
|
||||
@@ -144,10 +113,10 @@ class Test_ApiGatewayV2_Service:
|
||||
@mock_apigatewayv2
|
||||
def test__get_stages__(self):
|
||||
# Generate ApiGatewayV2 Client
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION)
|
||||
apigatewayv2_client = client("apigatewayv2", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create ApiGatewayV2 Rest API and a deployment stage
|
||||
apigatewayv2_client.create_api(Name="test-api", ProtocolType="HTTP")
|
||||
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
apigatewayv2 = ApiGatewayV2(audit_info)
|
||||
assert apigatewayv2.apis[0].stages[0].logging is True
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
from unittest import mock
|
||||
|
||||
from prowler.providers.aws.services.appstream.appstream_service import Fleet
|
||||
|
||||
# Mock Test Region
|
||||
AWS_REGION = "eu-west-1"
|
||||
from tests.providers.aws.audit_info_utils import AWS_REGION_EU_WEST_1
|
||||
|
||||
|
||||
class Test_appstream_fleet_default_internet_access_disabled:
|
||||
@@ -34,7 +32,7 @@ class Test_appstream_fleet_default_internet_access_disabled:
|
||||
disconnect_timeout_in_seconds=900,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
@@ -72,7 +70,7 @@ class Test_appstream_fleet_default_internet_access_disabled:
|
||||
disconnect_timeout_in_seconds=900,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=False,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
@@ -110,7 +108,7 @@ class Test_appstream_fleet_default_internet_access_disabled:
|
||||
disconnect_timeout_in_seconds=900,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
fleet2 = Fleet(
|
||||
arn="arn",
|
||||
@@ -119,7 +117,7 @@ class Test_appstream_fleet_default_internet_access_disabled:
|
||||
disconnect_timeout_in_seconds=900,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=False,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
from unittest import mock
|
||||
|
||||
from prowler.providers.aws.services.appstream.appstream_service import Fleet
|
||||
|
||||
# Mock Test Region
|
||||
AWS_REGION = "eu-west-1"
|
||||
from tests.providers.aws.audit_info_utils import AWS_REGION_EU_WEST_1
|
||||
|
||||
|
||||
class Test_appstream_fleet_maximum_session_duration:
|
||||
@@ -35,7 +33,7 @@ class Test_appstream_fleet_maximum_session_duration:
|
||||
disconnect_timeout_in_seconds=900,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
@@ -76,7 +74,7 @@ class Test_appstream_fleet_maximum_session_duration:
|
||||
disconnect_timeout_in_seconds=900,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
@@ -119,7 +117,7 @@ class Test_appstream_fleet_maximum_session_duration:
|
||||
disconnect_timeout_in_seconds=900,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
fleet2 = Fleet(
|
||||
arn="arn",
|
||||
@@ -129,7 +127,7 @@ class Test_appstream_fleet_maximum_session_duration:
|
||||
disconnect_timeout_in_seconds=900,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=False,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
from unittest import mock
|
||||
|
||||
from prowler.providers.aws.services.appstream.appstream_service import Fleet
|
||||
|
||||
# Mock Test Region
|
||||
AWS_REGION = "eu-west-1"
|
||||
from tests.providers.aws.audit_info_utils import AWS_REGION_EU_WEST_1
|
||||
|
||||
|
||||
class Test_appstream_fleet_session_disconnect_timeout:
|
||||
@@ -35,7 +33,7 @@ class Test_appstream_fleet_session_disconnect_timeout:
|
||||
disconnect_timeout_in_seconds=1 * 60 * 60,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
@@ -75,7 +73,7 @@ class Test_appstream_fleet_session_disconnect_timeout:
|
||||
disconnect_timeout_in_seconds=4 * 60,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
@@ -118,7 +116,7 @@ class Test_appstream_fleet_session_disconnect_timeout:
|
||||
disconnect_timeout_in_seconds=1 * 60 * 60,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
fleet2 = Fleet(
|
||||
arn="arn",
|
||||
@@ -128,7 +126,7 @@ class Test_appstream_fleet_session_disconnect_timeout:
|
||||
disconnect_timeout_in_seconds=3 * 60,
|
||||
idle_disconnect_timeout_in_seconds=900,
|
||||
enable_default_internet_access=False,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
from unittest import mock
|
||||
|
||||
from prowler.providers.aws.services.appstream.appstream_service import Fleet
|
||||
|
||||
# Mock Test Region
|
||||
AWS_REGION = "eu-west-1"
|
||||
from tests.providers.aws.audit_info_utils import AWS_REGION_EU_WEST_1
|
||||
|
||||
|
||||
class Test_appstream_fleet_session_idle_disconnect_timeout:
|
||||
@@ -35,7 +33,7 @@ class Test_appstream_fleet_session_idle_disconnect_timeout:
|
||||
# 15 minutes
|
||||
idle_disconnect_timeout_in_seconds=15 * 60,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
@@ -76,7 +74,7 @@ class Test_appstream_fleet_session_idle_disconnect_timeout:
|
||||
# 8 minutes
|
||||
idle_disconnect_timeout_in_seconds=8 * 60,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
@@ -119,7 +117,7 @@ class Test_appstream_fleet_session_idle_disconnect_timeout:
|
||||
# 5 minutes
|
||||
idle_disconnect_timeout_in_seconds=5 * 60,
|
||||
enable_default_internet_access=True,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
fleet2 = Fleet(
|
||||
arn="arn",
|
||||
@@ -129,7 +127,7 @@ class Test_appstream_fleet_session_idle_disconnect_timeout:
|
||||
# 45 minutes
|
||||
idle_disconnect_timeout_in_seconds=45 * 60,
|
||||
enable_default_internet_access=False,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
appstream_client.fleets.append(fleet1)
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
from unittest.mock import patch
|
||||
|
||||
import botocore
|
||||
from boto3 import session
|
||||
from moto.core import DEFAULT_ACCOUNT_ID
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.appstream.appstream_service import AppStream
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
# Mock Test Region
|
||||
AWS_REGION = "eu-west-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
# Mocking Access Analyzer Calls
|
||||
make_api_call = botocore.client.BaseClient._make_api_call
|
||||
@@ -28,7 +26,7 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
return {
|
||||
"Fleets": [
|
||||
{
|
||||
"Arn": f"arn:aws:appstream:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-0",
|
||||
"Arn": f"arn:aws:appstream:{AWS_REGION_EU_WEST_1}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-0",
|
||||
"Name": "test-prowler3-0",
|
||||
"MaxUserDurationInSeconds": 100,
|
||||
"DisconnectTimeoutInSeconds": 900,
|
||||
@@ -36,7 +34,7 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
"EnableDefaultInternetAccess": False,
|
||||
},
|
||||
{
|
||||
"Arn": f"arn:aws:appstream:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-1",
|
||||
"Arn": f"arn:aws:appstream:{AWS_REGION_EU_WEST_1}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-1",
|
||||
"Name": "test-prowler3-1",
|
||||
"MaxUserDurationInSeconds": 57600,
|
||||
"DisconnectTimeoutInSeconds": 900,
|
||||
@@ -52,9 +50,11 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
|
||||
# Mock generate_regional_clients()
|
||||
def mock_generate_regional_clients(service, audit_info, _):
|
||||
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
|
||||
regional_client.region = AWS_REGION
|
||||
return {AWS_REGION: regional_client}
|
||||
regional_client = audit_info.audit_session.client(
|
||||
service, region_name=AWS_REGION_EU_WEST_1
|
||||
)
|
||||
regional_client.region = AWS_REGION_EU_WEST_1
|
||||
return {AWS_REGION_EU_WEST_1: regional_client}
|
||||
|
||||
|
||||
# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
|
||||
@@ -64,82 +64,54 @@ def mock_generate_regional_clients(service, audit_info, _):
|
||||
new=mock_generate_regional_clients,
|
||||
)
|
||||
class Test_AppStream_Service:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
# Test AppStream Client
|
||||
def test__get_client__(self):
|
||||
appstream = AppStream(self.set_mocked_audit_info())
|
||||
assert appstream.regional_clients[AWS_REGION].__class__.__name__ == "AppStream"
|
||||
appstream = AppStream(set_mocked_aws_audit_info([AWS_REGION_US_EAST_1]))
|
||||
assert (
|
||||
appstream.regional_clients[AWS_REGION_EU_WEST_1].__class__.__name__
|
||||
== "AppStream"
|
||||
)
|
||||
|
||||
# Test AppStream Session
|
||||
def test__get_session__(self):
|
||||
appstream = AppStream(self.set_mocked_audit_info())
|
||||
appstream = AppStream(set_mocked_aws_audit_info([AWS_REGION_US_EAST_1]))
|
||||
assert appstream.session.__class__.__name__ == "Session"
|
||||
|
||||
# Test AppStream Session
|
||||
def test__get_service__(self):
|
||||
appstream = AppStream(self.set_mocked_audit_info())
|
||||
appstream = AppStream(set_mocked_aws_audit_info([AWS_REGION_US_EAST_1]))
|
||||
assert appstream.service == "appstream"
|
||||
|
||||
def test__describe_fleets__(self):
|
||||
# Set partition for the service
|
||||
appstream = AppStream(self.set_mocked_audit_info())
|
||||
appstream = AppStream(set_mocked_aws_audit_info([AWS_REGION_US_EAST_1]))
|
||||
assert len(appstream.fleets) == 2
|
||||
|
||||
assert (
|
||||
appstream.fleets[0].arn
|
||||
== f"arn:aws:appstream:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-0"
|
||||
== f"arn:aws:appstream:{AWS_REGION_EU_WEST_1}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-0"
|
||||
)
|
||||
assert appstream.fleets[0].name == "test-prowler3-0"
|
||||
assert appstream.fleets[0].max_user_duration_in_seconds == 100
|
||||
assert appstream.fleets[0].disconnect_timeout_in_seconds == 900
|
||||
assert appstream.fleets[0].idle_disconnect_timeout_in_seconds == 900
|
||||
assert appstream.fleets[0].enable_default_internet_access is False
|
||||
assert appstream.fleets[0].region == AWS_REGION
|
||||
assert appstream.fleets[0].region == AWS_REGION_EU_WEST_1
|
||||
|
||||
assert (
|
||||
appstream.fleets[1].arn
|
||||
== f"arn:aws:appstream:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-1"
|
||||
== f"arn:aws:appstream:{AWS_REGION_EU_WEST_1}:{DEFAULT_ACCOUNT_ID}:fleet/test-prowler3-1"
|
||||
)
|
||||
assert appstream.fleets[1].name == "test-prowler3-1"
|
||||
assert appstream.fleets[1].max_user_duration_in_seconds == 57600
|
||||
assert appstream.fleets[1].disconnect_timeout_in_seconds == 900
|
||||
assert appstream.fleets[1].idle_disconnect_timeout_in_seconds == 900
|
||||
assert appstream.fleets[1].enable_default_internet_access is True
|
||||
assert appstream.fleets[1].region == AWS_REGION
|
||||
assert appstream.fleets[1].region == AWS_REGION_EU_WEST_1
|
||||
|
||||
def test__list_tags_for_resource__(self):
|
||||
# Set partition for the service
|
||||
appstream = AppStream(self.set_mocked_audit_info())
|
||||
appstream = AppStream(set_mocked_aws_audit_info([AWS_REGION_US_EAST_1]))
|
||||
assert len(appstream.fleets) == 2
|
||||
|
||||
assert appstream.fleets[0].tags == [{"test": "test"}]
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
from boto3 import session
|
||||
from botocore.client import BaseClient
|
||||
from mock import patch
|
||||
from moto import mock_athena
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.athena.athena_service import Athena
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_REGION = "eu-west-1"
|
||||
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
# Mocking Access Analyzer Calls
|
||||
make_api_call = BaseClient._make_api_call
|
||||
@@ -43,9 +40,11 @@ def mock_make_api_call(self, operation_name, kwarg):
|
||||
|
||||
# Mock generate_regional_clients()
|
||||
def mock_generate_regional_clients(service, audit_info, _):
|
||||
regional_client = audit_info.audit_session.client(service, region_name=AWS_REGION)
|
||||
regional_client.region = AWS_REGION
|
||||
return {AWS_REGION: regional_client}
|
||||
regional_client = audit_info.audit_session.client(
|
||||
service, region_name=AWS_REGION_EU_WEST_1
|
||||
)
|
||||
regional_client.region = AWS_REGION_EU_WEST_1
|
||||
return {AWS_REGION_EU_WEST_1: regional_client}
|
||||
|
||||
|
||||
# Patch every AWS call using Boto3 and generate_regional_clients to have 1 client
|
||||
@@ -54,49 +53,18 @@ def mock_generate_regional_clients(service, audit_info, _):
|
||||
new=mock_generate_regional_clients,
|
||||
)
|
||||
class Test_Athena_Service:
|
||||
# Mocked Audit Info
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
# Test Athena Get Workgrups
|
||||
@mock_athena
|
||||
def test__get_workgroups__not_encrypted(self):
|
||||
default_workgroup_name = "primary"
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
workgroup_arn = f"arn:{audit_info.audited_partition}:athena:{AWS_REGION}:{audit_info.audited_account}:workgroup/{default_workgroup_name}"
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1])
|
||||
workgroup_arn = f"arn:{audit_info.audited_partition}:athena:{AWS_REGION_EU_WEST_1}:{audit_info.audited_account}:workgroup/{default_workgroup_name}"
|
||||
athena = Athena(audit_info)
|
||||
assert len(athena.workgroups) == 1
|
||||
assert athena.workgroups[workgroup_arn]
|
||||
assert athena.workgroups[workgroup_arn].arn == workgroup_arn
|
||||
assert athena.workgroups[workgroup_arn].name == default_workgroup_name
|
||||
assert athena.workgroups[workgroup_arn].region == AWS_REGION
|
||||
assert athena.workgroups[workgroup_arn].region == AWS_REGION_EU_WEST_1
|
||||
assert athena.workgroups[workgroup_arn].tags == []
|
||||
assert (
|
||||
athena.workgroups[workgroup_arn].encryption_configuration.encrypted is False
|
||||
@@ -113,12 +81,12 @@ class Test_Athena_Service:
|
||||
@mock_athena
|
||||
def test__get_workgroups__encrypted(self):
|
||||
default_workgroup_name = "primary"
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1])
|
||||
|
||||
# Athena client
|
||||
# This API call is not implemented by Moto
|
||||
# athena_client = audit_info.audit_session.client(
|
||||
# "athena", region_name=AWS_REGION
|
||||
# "athena", region_name=AWS_REGION_EU_WEST_1
|
||||
# )
|
||||
# athena_client.update_work_group(
|
||||
# WorkGroup=default_workgroup_name,
|
||||
@@ -129,13 +97,13 @@ class Test_Athena_Service:
|
||||
# },
|
||||
# )
|
||||
|
||||
workgroup_arn = f"arn:{audit_info.audited_partition}:athena:{AWS_REGION}:{audit_info.audited_account}:workgroup/{default_workgroup_name}"
|
||||
workgroup_arn = f"arn:{audit_info.audited_partition}:athena:{AWS_REGION_EU_WEST_1}:{audit_info.audited_account}:workgroup/{default_workgroup_name}"
|
||||
athena = Athena(audit_info)
|
||||
assert len(athena.workgroups) == 1
|
||||
assert athena.workgroups[workgroup_arn]
|
||||
assert athena.workgroups[workgroup_arn].arn == workgroup_arn
|
||||
assert athena.workgroups[workgroup_arn].name == default_workgroup_name
|
||||
assert athena.workgroups[workgroup_arn].region == AWS_REGION
|
||||
assert athena.workgroups[workgroup_arn].region == AWS_REGION_EU_WEST_1
|
||||
assert athena.workgroups[workgroup_arn].tags == []
|
||||
assert (
|
||||
athena.workgroups[workgroup_arn].encryption_configuration.encrypted is True
|
||||
|
||||
@@ -1,56 +1,25 @@
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import session
|
||||
from mock import patch
|
||||
from moto import mock_athena
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
from tests.providers.aws.services.athena.athena_service_test import mock_make_api_call
|
||||
|
||||
AWS_REGION = "eu-west-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
ATHENA_PRIMARY_WORKGROUP = "primary"
|
||||
ATHENA_PRIMARY_WORKGROUP_ARN = f"arn:aws:athena:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:workgroup/{ATHENA_PRIMARY_WORKGROUP}"
|
||||
ATHENA_PRIMARY_WORKGROUP_ARN = f"arn:aws:athena:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:workgroup/{ATHENA_PRIMARY_WORKGROUP}"
|
||||
|
||||
|
||||
class Test_athena_workgroup_encryption:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=[AWS_REGION],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_athena
|
||||
def test_primary_workgroup_not_encrypted(self):
|
||||
from prowler.providers.aws.services.athena.athena_service import Athena
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -74,14 +43,14 @@ class Test_athena_workgroup_encryption:
|
||||
)
|
||||
assert result[0].resource_id == ATHENA_PRIMARY_WORKGROUP
|
||||
assert result[0].resource_arn == ATHENA_PRIMARY_WORKGROUP_ARN
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
@mock_athena
|
||||
def test_primary_workgroup_not_encrypted_ignoring(self):
|
||||
from prowler.providers.aws.services.athena.athena_service import Athena
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1])
|
||||
current_audit_info.ignore_unused_services = True
|
||||
|
||||
with mock.patch(
|
||||
@@ -106,7 +75,7 @@ class Test_athena_workgroup_encryption:
|
||||
def test_primary_workgroup_encrypted(self):
|
||||
from prowler.providers.aws.services.athena.athena_service import Athena
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -130,5 +99,5 @@ class Test_athena_workgroup_encryption:
|
||||
)
|
||||
assert result[0].resource_id == ATHENA_PRIMARY_WORKGROUP
|
||||
assert result[0].resource_arn == ATHENA_PRIMARY_WORKGROUP_ARN
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
@@ -1,56 +1,25 @@
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import session
|
||||
from mock import patch
|
||||
from moto import mock_athena
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
from tests.providers.aws.services.athena.athena_service_test import mock_make_api_call
|
||||
|
||||
AWS_REGION = "eu-west-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
ATHENA_PRIMARY_WORKGROUP = "primary"
|
||||
ATHENA_PRIMARY_WORKGROUP_ARN = f"arn:aws:athena:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:workgroup/{ATHENA_PRIMARY_WORKGROUP}"
|
||||
ATHENA_PRIMARY_WORKGROUP_ARN = f"arn:aws:athena:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:workgroup/{ATHENA_PRIMARY_WORKGROUP}"
|
||||
|
||||
|
||||
class Test_athena_workgroup_enforce_configuration:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=[AWS_REGION],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_athena
|
||||
def test_primary_workgroup_configuration_not_enforced(self):
|
||||
from prowler.providers.aws.services.athena.athena_service import Athena
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -74,14 +43,14 @@ class Test_athena_workgroup_enforce_configuration:
|
||||
)
|
||||
assert result[0].resource_id == ATHENA_PRIMARY_WORKGROUP
|
||||
assert result[0].resource_arn == ATHENA_PRIMARY_WORKGROUP_ARN
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
@mock_athena
|
||||
def test_primary_workgroup_configuration_not_enforced_ignoring(self):
|
||||
from prowler.providers.aws.services.athena.athena_service import Athena
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1])
|
||||
current_audit_info.ignore_unused_services = True
|
||||
|
||||
with mock.patch(
|
||||
@@ -106,7 +75,7 @@ class Test_athena_workgroup_enforce_configuration:
|
||||
def test_primary_workgroup_configuration_enforced(self):
|
||||
from prowler.providers.aws.services.athena.athena_service import Athena
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_EU_WEST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -130,5 +99,5 @@ class Test_athena_workgroup_enforce_configuration:
|
||||
)
|
||||
assert result[0].resource_id == ATHENA_PRIMARY_WORKGROUP
|
||||
assert result[0].resource_arn == ATHENA_PRIMARY_WORKGROUP_ARN
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
@@ -2,61 +2,29 @@ from os import path
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_autoscaling
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
ACTUAL_DIRECTORY = Path(path.dirname(path.realpath(__file__)))
|
||||
FIXTURES_DIR_NAME = "fixtures"
|
||||
|
||||
|
||||
class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_autoscaling
|
||||
def test_no_autoscaling(self):
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.launch_configurations = []
|
||||
|
||||
from prowler.providers.aws.services.autoscaling.autoscaling_service import (
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -79,7 +47,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
def test_one_autoscaling_with_no_secrets(self):
|
||||
# Include launch_configurations to check
|
||||
launch_configuration_name = "tester"
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName=launch_configuration_name,
|
||||
ImageId="ami-12c6146b",
|
||||
@@ -96,7 +64,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -120,13 +88,13 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
)
|
||||
assert result[0].resource_id == launch_configuration_name
|
||||
assert result[0].resource_arn == launch_configuration_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
|
||||
@mock_autoscaling
|
||||
def test_one_autoscaling_with_secrets(self):
|
||||
# Include launch_configurations to check
|
||||
launch_configuration_name = "tester"
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName=launch_configuration_name,
|
||||
ImageId="ami-12c6146b",
|
||||
@@ -143,7 +111,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -167,7 +135,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
)
|
||||
assert result[0].resource_id == launch_configuration_name
|
||||
assert result[0].resource_arn == launch_configuration_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
|
||||
@mock_autoscaling
|
||||
def test_one_autoscaling_file_with_secrets(self):
|
||||
@@ -178,7 +146,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
)
|
||||
secrets = f.read()
|
||||
launch_configuration_name = "tester"
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName="tester",
|
||||
ImageId="ami-12c6146b",
|
||||
@@ -195,7 +163,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -219,13 +187,13 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
)
|
||||
assert result[0].resource_id == launch_configuration_name
|
||||
assert result[0].resource_arn == launch_configuration_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
|
||||
@mock_autoscaling
|
||||
def test_one_launch_configurations_without_user_data(self):
|
||||
# Include launch_configurations to check
|
||||
launch_configuration_name = "tester"
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName=launch_configuration_name,
|
||||
ImageId="ami-12c6146b",
|
||||
@@ -241,7 +209,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -265,7 +233,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
)
|
||||
assert result[0].resource_id == launch_configuration_name
|
||||
assert result[0].resource_arn == launch_configuration_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
|
||||
@mock_autoscaling
|
||||
def test_one_autoscaling_file_with_secrets_gzip(self):
|
||||
@@ -277,7 +245,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
|
||||
secrets = f.read()
|
||||
launch_configuration_name = "tester"
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName="tester",
|
||||
ImageId="ami-12c6146b",
|
||||
@@ -294,7 +262,7 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -318,4 +286,4 @@ class Test_autoscaling_find_secrets_ec2_launch_configuration:
|
||||
)
|
||||
assert result[0].resource_id == launch_configuration_name
|
||||
assert result[0].resource_arn == launch_configuration_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
|
||||
@@ -1,57 +1,25 @@
|
||||
from unittest import mock
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_autoscaling
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_REGION = "us-east-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
class Test_autoscaling_group_multiple_az:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
|
||||
@mock_autoscaling
|
||||
def test_no_autoscaling(self):
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.groups = []
|
||||
|
||||
from prowler.providers.aws.services.autoscaling.autoscaling_service import (
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -72,7 +40,7 @@ class Test_autoscaling_group_multiple_az:
|
||||
|
||||
@mock_autoscaling
|
||||
def test_groups_with_multi_az(self):
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName="test",
|
||||
ImageId="ami-12c6146b",
|
||||
@@ -98,7 +66,7 @@ class Test_autoscaling_group_multiple_az:
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -123,12 +91,12 @@ class Test_autoscaling_group_multiple_az:
|
||||
)
|
||||
assert result[0].resource_id == autoscaling_group_name
|
||||
assert result[0].resource_arn == autoscaling_group_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_US_EAST_1
|
||||
assert result[0].resource_tags == []
|
||||
|
||||
@mock_autoscaling
|
||||
def test_groups_with_single_az(self):
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName="test",
|
||||
ImageId="ami-12c6146b",
|
||||
@@ -154,7 +122,7 @@ class Test_autoscaling_group_multiple_az:
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -183,7 +151,7 @@ class Test_autoscaling_group_multiple_az:
|
||||
|
||||
@mock_autoscaling
|
||||
def test_groups_witd_and_without(self):
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName="test",
|
||||
ImageId="ami-12c6146b",
|
||||
@@ -221,7 +189,7 @@ class Test_autoscaling_group_multiple_az:
|
||||
AutoScaling,
|
||||
)
|
||||
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
current_audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
@@ -248,7 +216,7 @@ class Test_autoscaling_group_multiple_az:
|
||||
)
|
||||
assert check.resource_arn == autoscaling_group_arn_1
|
||||
assert check.resource_tags == []
|
||||
assert check.region == AWS_REGION
|
||||
assert check.region == AWS_REGION_US_EAST_1
|
||||
if check.resource_id == autoscaling_group_name_2:
|
||||
assert check.status == "FAIL"
|
||||
assert (
|
||||
@@ -257,4 +225,4 @@ class Test_autoscaling_group_multiple_az:
|
||||
)
|
||||
assert check.resource_tags == []
|
||||
assert check.resource_arn == autoscaling_group_arn_2
|
||||
assert check.region == AWS_REGION
|
||||
assert check.region == AWS_REGION_US_EAST_1
|
||||
|
||||
@@ -1,53 +1,22 @@
|
||||
from base64 import b64decode
|
||||
|
||||
from boto3 import client, session
|
||||
from boto3 import client
|
||||
from moto import mock_autoscaling
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.autoscaling.autoscaling_service import AutoScaling
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
AWS_REGION = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
class Test_AutoScaling_Service:
|
||||
# Mocked Audit Info
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
# Test AutoScaling Service
|
||||
@mock_autoscaling
|
||||
def test_service(self):
|
||||
# AutoScaling client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
autoscaling = AutoScaling(audit_info)
|
||||
assert autoscaling.service == "autoscaling"
|
||||
|
||||
@@ -55,7 +24,7 @@ class Test_AutoScaling_Service:
|
||||
@mock_autoscaling
|
||||
def test_client(self):
|
||||
# AutoScaling client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
autoscaling = AutoScaling(audit_info)
|
||||
for regional_client in autoscaling.regional_clients.values():
|
||||
assert regional_client.__class__.__name__ == "AutoScaling"
|
||||
@@ -64,7 +33,7 @@ class Test_AutoScaling_Service:
|
||||
@mock_autoscaling
|
||||
def test__get_session__(self):
|
||||
# AutoScaling client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
autoscaling = AutoScaling(audit_info)
|
||||
assert autoscaling.session.__class__.__name__ == "Session"
|
||||
|
||||
@@ -72,7 +41,7 @@ class Test_AutoScaling_Service:
|
||||
@mock_autoscaling
|
||||
def test_audited_account(self):
|
||||
# AutoScaling client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
autoscaling = AutoScaling(audit_info)
|
||||
assert autoscaling.audited_account == AWS_ACCOUNT_NUMBER
|
||||
|
||||
@@ -80,7 +49,7 @@ class Test_AutoScaling_Service:
|
||||
@mock_autoscaling
|
||||
def test__describe_launch_configurations__(self):
|
||||
# Generate AutoScaling Client
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
# Create AutoScaling API
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName="tester1",
|
||||
@@ -98,7 +67,7 @@ class Test_AutoScaling_Service:
|
||||
SecurityGroups=["default", "default2"],
|
||||
)
|
||||
# AutoScaling client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
autoscaling = AutoScaling(audit_info)
|
||||
assert len(autoscaling.launch_configurations) == 2
|
||||
assert autoscaling.launch_configurations[0].name == "tester1"
|
||||
@@ -114,7 +83,7 @@ class Test_AutoScaling_Service:
|
||||
@mock_autoscaling
|
||||
def test__describe_auto_scaling_groups__(self):
|
||||
# Generate AutoScaling Client
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION)
|
||||
autoscaling_client = client("autoscaling", region_name=AWS_REGION_US_EAST_1)
|
||||
autoscaling_client.create_launch_configuration(
|
||||
LaunchConfigurationName="test",
|
||||
ImageId="ami-12c6146b",
|
||||
@@ -138,14 +107,14 @@ class Test_AutoScaling_Service:
|
||||
)
|
||||
|
||||
# AutoScaling client for this test class
|
||||
audit_info = self.set_mocked_audit_info()
|
||||
audit_info = set_mocked_aws_audit_info([AWS_REGION_US_EAST_1])
|
||||
autoscaling = AutoScaling(audit_info)
|
||||
print("asg", asg)
|
||||
assert len(autoscaling.groups) == 1
|
||||
# create_auto_scaling_group doesn't return the ARN, can't check it
|
||||
# assert autoscaling.groups[0].arn ==
|
||||
assert autoscaling.groups[0].name == "my-autoscaling-group"
|
||||
assert autoscaling.groups[0].region == AWS_REGION
|
||||
assert autoscaling.groups[0].region == AWS_REGION_US_EAST_1
|
||||
assert autoscaling.groups[0].availability_zones == ["us-east-1a", "us-east-1b"]
|
||||
assert autoscaling.groups[0].tags == [
|
||||
{
|
||||
|
||||
@@ -6,17 +6,16 @@ from re import search
|
||||
from unittest.mock import patch
|
||||
|
||||
import mock
|
||||
from boto3 import client, resource, session
|
||||
from boto3 import client, resource
|
||||
from moto import mock_iam, mock_lambda, mock_s3
|
||||
from moto.core import DEFAULT_ACCOUNT_ID
|
||||
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.aws.services.awslambda.awslambda_service import AuthType, Lambda
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
# Mock Test Region
|
||||
AWS_REGION = "eu-west-1"
|
||||
AWS_REGION_NORTH_VIRGINIA = "us-east-1"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_REGION_EU_WEST_1,
|
||||
AWS_REGION_US_EAST_1,
|
||||
set_mocked_aws_audit_info,
|
||||
)
|
||||
|
||||
|
||||
def create_zip_file(code: str = "") -> io.BytesIO:
|
||||
@@ -49,16 +48,16 @@ def mock_request_get(_):
|
||||
# Mock generate_regional_clients()
|
||||
def mock_generate_regional_clients(service, audit_info, _):
|
||||
regional_client_eu_west_1 = audit_info.audit_session.client(
|
||||
service, region_name=AWS_REGION
|
||||
service, region_name=AWS_REGION_EU_WEST_1
|
||||
)
|
||||
regional_client_us_east_1 = audit_info.audit_session.client(
|
||||
service, region_name=AWS_REGION_NORTH_VIRGINIA
|
||||
service, region_name=AWS_REGION_US_EAST_1
|
||||
)
|
||||
regional_client_eu_west_1.region = AWS_REGION
|
||||
regional_client_us_east_1.region = AWS_REGION_NORTH_VIRGINIA
|
||||
regional_client_eu_west_1.region = AWS_REGION_EU_WEST_1
|
||||
regional_client_us_east_1.region = AWS_REGION_US_EAST_1
|
||||
return {
|
||||
AWS_REGION: regional_client_eu_west_1,
|
||||
AWS_REGION_NORTH_VIRGINIA: regional_client_us_east_1,
|
||||
AWS_REGION_EU_WEST_1: regional_client_eu_west_1,
|
||||
AWS_REGION_US_EAST_1: regional_client_us_east_1,
|
||||
}
|
||||
|
||||
|
||||
@@ -67,49 +66,22 @@ def mock_generate_regional_clients(service, audit_info, _):
|
||||
new=mock_generate_regional_clients,
|
||||
)
|
||||
class Test_Lambda_Service:
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audited_account=DEFAULT_ACCOUNT_ID,
|
||||
audited_account_arn=f"arn:aws:iam::{DEFAULT_ACCOUNT_ID}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=None,
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=["awslambda_function_no_secrets_in_code"],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
return audit_info
|
||||
|
||||
# Test Lambda Client
|
||||
def test__get_client__(self):
|
||||
awslambda = Lambda(self.set_mocked_audit_info())
|
||||
assert awslambda.regional_clients[AWS_REGION].__class__.__name__ == "Lambda"
|
||||
awslambda = Lambda(set_mocked_aws_audit_info([AWS_REGION_US_EAST_1]))
|
||||
assert (
|
||||
awslambda.regional_clients[AWS_REGION_EU_WEST_1].__class__.__name__
|
||||
== "Lambda"
|
||||
)
|
||||
|
||||
# Test Lambda Session
|
||||
def test__get_session__(self):
|
||||
awslambda = Lambda(self.set_mocked_audit_info())
|
||||
awslambda = Lambda(set_mocked_aws_audit_info([AWS_REGION_US_EAST_1]))
|
||||
assert awslambda.session.__class__.__name__ == "Session"
|
||||
|
||||
# Test Lambda Service
|
||||
def test__get_service__(self):
|
||||
awslambda = Lambda(self.set_mocked_audit_info())
|
||||
awslambda = Lambda(set_mocked_aws_audit_info([AWS_REGION_US_EAST_1]))
|
||||
assert awslambda.service == "lambda"
|
||||
|
||||
@mock_lambda
|
||||
@@ -117,20 +89,20 @@ class Test_Lambda_Service:
|
||||
@mock_s3
|
||||
def test__list_functions__(self):
|
||||
# Create IAM Lambda Role
|
||||
iam_client = client("iam", region_name=AWS_REGION)
|
||||
iam_client = client("iam", region_name=AWS_REGION_EU_WEST_1)
|
||||
iam_role = iam_client.create_role(
|
||||
RoleName="test-lambda-role",
|
||||
AssumeRolePolicyDocument="test-policy",
|
||||
Path="/",
|
||||
)["Role"]["Arn"]
|
||||
# Create S3 Bucket
|
||||
s3_client = resource("s3", region_name=AWS_REGION)
|
||||
s3_client = resource("s3", region_name=AWS_REGION_EU_WEST_1)
|
||||
s3_client.create_bucket(
|
||||
Bucket="test-bucket",
|
||||
CreateBucketConfiguration={"LocationConstraint": AWS_REGION},
|
||||
CreateBucketConfiguration={"LocationConstraint": AWS_REGION_EU_WEST_1},
|
||||
)
|
||||
# Create Test Lambda 1
|
||||
lambda_client = client("lambda", region_name=AWS_REGION)
|
||||
lambda_client = client("lambda", region_name=AWS_REGION_EU_WEST_1)
|
||||
lambda_name = "test-lambda"
|
||||
resp = lambda_client.create_function(
|
||||
FunctionName=lambda_name,
|
||||
@@ -160,7 +132,7 @@ class Test_Lambda_Service:
|
||||
"Action": "lambda:GetFunction",
|
||||
"Principal": "*",
|
||||
"Effect": "Allow",
|
||||
"Resource": f"arn:aws:lambda:{AWS_REGION}:{DEFAULT_ACCOUNT_ID}:function:{lambda_name}",
|
||||
"Resource": f"arn:aws:lambda:{AWS_REGION_EU_WEST_1}:{DEFAULT_ACCOUNT_ID}:function:{lambda_name}",
|
||||
"Sid": "test",
|
||||
}
|
||||
],
|
||||
@@ -194,7 +166,7 @@ class Test_Lambda_Service:
|
||||
)
|
||||
|
||||
# Create Test Lambda 2 (with the same attributes but different region)
|
||||
lambda_client_2 = client("lambda", region_name=AWS_REGION_NORTH_VIRGINIA)
|
||||
lambda_client_2 = client("lambda", region_name=AWS_REGION_US_EAST_1)
|
||||
lambda_name = "test-lambda"
|
||||
resp_2 = lambda_client_2.create_function(
|
||||
FunctionName=lambda_name,
|
||||
@@ -220,7 +192,12 @@ class Test_Lambda_Service:
|
||||
"prowler.providers.aws.services.awslambda.awslambda_service.requests.get",
|
||||
new=mock_request_get,
|
||||
):
|
||||
awslambda = Lambda(self.set_mocked_audit_info())
|
||||
awslambda = Lambda(
|
||||
set_mocked_aws_audit_info(
|
||||
audited_regions=[AWS_REGION_US_EAST_1],
|
||||
expected_checks=["awslambda_function_no_secrets_in_code"],
|
||||
)
|
||||
)
|
||||
assert awslambda.functions
|
||||
assert len(awslambda.functions) == 2
|
||||
# Lambda 1
|
||||
@@ -230,12 +207,12 @@ class Test_Lambda_Service:
|
||||
assert awslambda.functions[lambda_arn_1].environment == {
|
||||
"db-password": "test-password"
|
||||
}
|
||||
assert awslambda.functions[lambda_arn_1].region == AWS_REGION
|
||||
assert awslambda.functions[lambda_arn_1].region == AWS_REGION_EU_WEST_1
|
||||
assert awslambda.functions[lambda_arn_1].policy == lambda_policy
|
||||
|
||||
assert awslambda.functions[lambda_arn_1].code
|
||||
assert search(
|
||||
f"s3://awslambda-{AWS_REGION}-tasks.s3-{AWS_REGION}.amazonaws.com",
|
||||
f"s3://awslambda-{AWS_REGION_EU_WEST_1}-tasks.s3-{AWS_REGION_EU_WEST_1}.amazonaws.com",
|
||||
awslambda.functions[lambda_arn_1].code.location,
|
||||
)
|
||||
|
||||
@@ -280,7 +257,7 @@ class Test_Lambda_Service:
|
||||
assert awslambda.functions[lambda_arn_2].environment == {
|
||||
"db-password": "test-password"
|
||||
}
|
||||
assert awslambda.functions[lambda_arn_2].region == AWS_REGION_NORTH_VIRGINIA
|
||||
assert awslambda.functions[lambda_arn_2].region == AWS_REGION_US_EAST_1
|
||||
# Emtpy policy
|
||||
assert awslambda.functions[lambda_arn_2].policy == {
|
||||
"Id": "default",
|
||||
@@ -290,6 +267,6 @@ class Test_Lambda_Service:
|
||||
|
||||
assert awslambda.functions[lambda_arn_2].code
|
||||
assert search(
|
||||
f"s3://awslambda-{AWS_REGION_NORTH_VIRGINIA}-tasks.s3-{AWS_REGION_NORTH_VIRGINIA}.amazonaws.com",
|
||||
f"s3://awslambda-{AWS_REGION_US_EAST_1}-tasks.s3-{AWS_REGION_US_EAST_1}.amazonaws.com",
|
||||
awslambda.functions[lambda_arn_2].code.location,
|
||||
)
|
||||
|
||||
@@ -3,9 +3,10 @@ from unittest import mock
|
||||
from uuid import uuid4
|
||||
|
||||
from prowler.providers.aws.services.backup.backup_service import BackupPlan
|
||||
|
||||
AWS_REGION = "eu-west-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
|
||||
class Test_backup_plans_exist:
|
||||
@@ -13,7 +14,7 @@ class Test_backup_plans_exist:
|
||||
backup_client = mock.MagicMock
|
||||
backup_client.audited_account = AWS_ACCOUNT_NUMBER
|
||||
backup_client.audited_account_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
backup_client.region = AWS_REGION
|
||||
backup_client.region = AWS_REGION_EU_WEST_1
|
||||
backup_client.backup_plans = []
|
||||
backup_client.backup_vaults = ["vault"]
|
||||
with mock.patch(
|
||||
@@ -33,13 +34,13 @@ class Test_backup_plans_exist:
|
||||
assert result[0].status_extended == "No Backup Plan exist."
|
||||
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
|
||||
assert result[0].resource_arn == f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
|
||||
def test_no_backup_plans_not_vaults(self):
|
||||
backup_client = mock.MagicMock
|
||||
backup_client.audited_account = AWS_ACCOUNT_NUMBER
|
||||
backup_client.audited_account_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
backup_client.region = AWS_REGION
|
||||
backup_client.region = AWS_REGION_EU_WEST_1
|
||||
backup_client.backup_plans = []
|
||||
backup_client.backup_vaults = []
|
||||
with mock.patch(
|
||||
@@ -60,16 +61,14 @@ class Test_backup_plans_exist:
|
||||
backup_client = mock.MagicMock
|
||||
backup_client.audited_account = AWS_ACCOUNT_NUMBER
|
||||
backup_client.audited_account_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
backup_client.region = AWS_REGION
|
||||
backup_client.region = AWS_REGION_EU_WEST_1
|
||||
backup_plan_id = str(uuid4()).upper()
|
||||
backup_plan_arn = (
|
||||
f"arn:aws:backup:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:plan:{backup_plan_id}"
|
||||
)
|
||||
backup_plan_arn = f"arn:aws:backup:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:plan:{backup_plan_id}"
|
||||
backup_client.backup_plans = [
|
||||
BackupPlan(
|
||||
arn=backup_plan_arn,
|
||||
id=backup_plan_id,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
name="MyBackupPlan",
|
||||
version_id="version_id",
|
||||
last_execution_date=datetime(2015, 1, 1),
|
||||
@@ -97,6 +96,6 @@ class Test_backup_plans_exist:
|
||||
assert result[0].resource_id == "MyBackupPlan"
|
||||
assert (
|
||||
result[0].resource_arn
|
||||
== f"arn:aws:backup:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:plan:{backup_plan_id}"
|
||||
== f"arn:aws:backup:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:plan:{backup_plan_id}"
|
||||
)
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
|
||||
@@ -6,15 +6,16 @@ from prowler.providers.aws.services.backup.backup_service import (
|
||||
BackupPlan,
|
||||
BackupReportPlan,
|
||||
)
|
||||
|
||||
AWS_REGION = "eu-west-1"
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
from tests.providers.aws.audit_info_utils import (
|
||||
AWS_ACCOUNT_NUMBER,
|
||||
AWS_REGION_EU_WEST_1,
|
||||
)
|
||||
|
||||
|
||||
class Test_backup_reportplans_exist:
|
||||
def test_no_backup_plans(self):
|
||||
backup_client = mock.MagicMock
|
||||
backup_client.region = AWS_REGION
|
||||
backup_client.region = AWS_REGION_EU_WEST_1
|
||||
backup_client.backup_plans = []
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.services.backup.backup_service.Backup",
|
||||
@@ -34,16 +35,14 @@ class Test_backup_reportplans_exist:
|
||||
backup_client = mock.MagicMock
|
||||
backup_client.audited_account = AWS_ACCOUNT_NUMBER
|
||||
backup_client.audited_account_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
backup_client.region = AWS_REGION
|
||||
backup_client.region = AWS_REGION_EU_WEST_1
|
||||
backup_plan_id = str(uuid4()).upper()
|
||||
backup_plan_arn = (
|
||||
f"arn:aws:backup:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:plan:{backup_plan_id}"
|
||||
)
|
||||
backup_plan_arn = f"arn:aws:backup:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:plan:{backup_plan_id}"
|
||||
backup_client.backup_plans = [
|
||||
BackupPlan(
|
||||
arn=backup_plan_arn,
|
||||
id=backup_plan_arn,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
name="MyBackupPlan",
|
||||
version_id="version_id",
|
||||
last_execution_date=datetime(2015, 1, 1),
|
||||
@@ -68,22 +67,20 @@ class Test_backup_reportplans_exist:
|
||||
assert result[0].status_extended == "No Backup Report Plan exist."
|
||||
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
|
||||
assert result[0].resource_arn == f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
|
||||
def test_one_backup_report_plan(self):
|
||||
backup_client = mock.MagicMock
|
||||
backup_client.audited_account = AWS_ACCOUNT_NUMBER
|
||||
backup_client.audited_account_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
|
||||
backup_client.region = AWS_REGION
|
||||
backup_client.region = AWS_REGION_EU_WEST_1
|
||||
backup_plan_id = str(uuid4()).upper()
|
||||
backup_plan_arn = (
|
||||
f"arn:aws:backup:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:plan:{backup_plan_id}"
|
||||
)
|
||||
backup_plan_arn = f"arn:aws:backup:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:plan:{backup_plan_id}"
|
||||
backup_client.backup_plans = [
|
||||
BackupPlan(
|
||||
arn=backup_plan_arn,
|
||||
id=backup_plan_id,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
name="MyBackupPlan",
|
||||
version_id="version_id",
|
||||
last_execution_date=datetime(2015, 1, 1),
|
||||
@@ -91,11 +88,11 @@ class Test_backup_reportplans_exist:
|
||||
)
|
||||
]
|
||||
backup_report_plan_id = str(uuid4()).upper()
|
||||
backup_report_plan_arn = f"arn:aws:backup:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:report-plan:MyBackupReportPlan-{backup_report_plan_id}"
|
||||
backup_report_plan_arn = f"arn:aws:backup:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:report-plan:MyBackupReportPlan-{backup_report_plan_id}"
|
||||
backup_client.backup_report_plans = [
|
||||
BackupReportPlan(
|
||||
arn=backup_report_plan_arn,
|
||||
region=AWS_REGION,
|
||||
region=AWS_REGION_EU_WEST_1,
|
||||
name="MyBackupReportPlan",
|
||||
last_attempted_execution_date=datetime(2015, 1, 1),
|
||||
last_successful_execution_date=datetime(2015, 1, 1),
|
||||
@@ -122,4 +119,4 @@ class Test_backup_reportplans_exist:
|
||||
)
|
||||
assert result[0].resource_id == "MyBackupReportPlan"
|
||||
assert result[0].resource_arn == backup_report_plan_arn
|
||||
assert result[0].region == AWS_REGION
|
||||
assert result[0].region == AWS_REGION_EU_WEST_1
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user