From 856afb3966f526964367bf014ff82b9420f33178 Mon Sep 17 00:00:00 2001 From: Nacho Rivera Date: Mon, 27 Nov 2023 13:58:45 +0100 Subject: [PATCH 01/10] chore(update): rebase from master (#3067) Signed-off-by: dependabot[bot] Signed-off-by: r3drun3 Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: John Mastron <14130495+mtronrd@users.noreply.github.com> Co-authored-by: John Mastron Co-authored-by: Sergio Garcia Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com> Co-authored-by: sergargar Co-authored-by: Pepe Fagoaga Co-authored-by: github-actions Co-authored-by: simone ragonesi <102741679+R3DRUN3@users.noreply.github.com> Co-authored-by: Johnny Lu Co-authored-by: Vajrala Venkateswarlu <59252985+venkyvajrala@users.noreply.github.com> Co-authored-by: Ignacio Dominguez --- README.md | 6 +- docs/getting-started/requirements.md | 6 +- docs/tutorials/azure/use-non-default-cloud.md | 16 + docs/tutorials/custom-checks-metadata.md | 43 ++ docs/tutorials/gcp/authentication.md | 6 +- mkdocs.yml | 2 + poetry.lock | 493 ++++++++++++++++-- prowler/__main__.py | 32 +- prowler/config/config.py | 6 +- prowler/config/config.yaml | 2 +- .../custom_checks_metadata_example.yaml | 15 + prowler/lib/check/check.py | 9 + prowler/lib/check/checks_loader.py | 4 + prowler/lib/check/custom_checks_metadata.py | 77 +++ prowler/lib/cli/parser.py | 15 +- prowler/lib/outputs/file_descriptors.py | 37 +- prowler/lib/outputs/html.py | 7 +- prowler/lib/outputs/json.py | 10 +- .../providers/aws/aws_regions_by_service.json | 160 +++++- .../providers/aws/lib/allowlist/allowlist.py | 186 ++++--- .../providers/aws/lib/arguments/arguments.py | 1 + .../policy_condition_parser.py | 7 +- .../aws/lib/security_hub/security_hub.py | 11 +- .../accessanalyzer_enabled.py | 26 +- ...ges_external_public_publishing_disabled.py | 2 +- .../codeartifact/codeartifact_service.py | 43 +- .../ec2_securitygroup_not_used.py | 10 +- 
.../providers/aws/services/ec2/ec2_service.py | 8 +- .../guardduty_centrally_managed.py | 2 +- .../guardduty_no_high_severity_findings.py | 2 +- .../providers/aws/services/iam/iam_service.py | 144 +++-- .../rds_instance_deprecated_engine_version.py | 9 +- .../providers/aws/services/sqs/sqs_service.py | 78 ++- prowler/providers/azure/azure_provider.py | 26 +- .../azure/lib/arguments/arguments.py | 27 + .../azure/lib/audit_info/audit_info.py | 2 + .../providers/azure/lib/audit_info/models.py | 17 +- .../providers/azure/lib/exception/__init__.py | 0 .../azure/lib/exception/exception.py | 11 + .../providers/azure/lib/regions/__init__.py | 0 .../providers/azure/lib/regions/regions.py | 38 ++ .../providers/azure/lib/service/service.py | 16 +- prowler/providers/common/audit_info.py | 24 +- prowler/providers/common/clean.py | 32 ++ prowler/providers/common/outputs.py | 15 + prowler/providers/gcp/gcp_provider.py | 15 - prowler/providers/gcp/lib/service/service.py | 20 +- pyproject.toml | 20 +- tests/config/config_test.py | 2 +- .../lib/check/custom_checks_metadata_test.py | 164 ++++++ .../custom_checks_metadata_example.yaml | 15 + ...tom_checks_metadata_example_not_valid.yaml | 5 + tests/lib/cli/parser_test.py | 52 ++ tests/lib/outputs/slack_test.py | 2 + tests/providers/aws/audit_info_utils.py | 2 + .../aws/lib/allowlist/allowlist_test.py | 370 ++++++++----- .../policy_condition_parser_test.py | 72 +++ .../aws/lib/security_hub/security_hub_test.py | 163 +++--- .../accessanalyzer_enabled_test.py | 39 ++ ...xternal_public_publishing_disabled_test.py | 10 +- .../ec2_securitygroup_not_used_test.py | 85 +++ .../guardduty_centrally_managed_test.py | 25 + ...uardduty_no_high_severity_findings_test.py | 23 + ...sqs_queues_not_publicly_accessible_test.py | 66 +-- ...ues_server_side_encryption_enabled_test.py | 25 +- .../aws/services/sqs/sqs_service_test.py | 16 + .../azure/lib/regions/regions_test.py | 50 ++ tests/providers/common/audit_info_test.py | 105 +++- 
tests/providers/common/clean_test.py | 87 ++++ tests/providers/common/common_outputs_test.py | 18 +- 70 files changed, 2582 insertions(+), 552 deletions(-) create mode 100644 docs/tutorials/azure/use-non-default-cloud.md create mode 100644 docs/tutorials/custom-checks-metadata.md create mode 100644 prowler/config/custom_checks_metadata_example.yaml create mode 100644 prowler/lib/check/custom_checks_metadata.py create mode 100644 prowler/providers/azure/lib/exception/__init__.py create mode 100644 prowler/providers/azure/lib/exception/exception.py create mode 100644 prowler/providers/azure/lib/regions/__init__.py create mode 100644 prowler/providers/azure/lib/regions/regions.py create mode 100644 prowler/providers/common/clean.py create mode 100644 tests/lib/check/custom_checks_metadata_test.py create mode 100644 tests/lib/check/fixtures/custom_checks_metadata_example.yaml create mode 100644 tests/lib/check/fixtures/custom_checks_metadata_example_not_valid.yaml create mode 100644 tests/providers/azure/lib/regions/regions_test.py create mode 100644 tests/providers/common/clean_test.py diff --git a/README.md b/README.md index 3dd8df5dc2..82ab6fe4ee 100644 --- a/README.md +++ b/README.md @@ -178,11 +178,7 @@ Prowler will follow the same credentials search as [Google authentication librar 2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal) 3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa) -Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials: - - - Viewer - - Security Reviewer - - Stackdriver Account Viewer +Those credentials must be associated to a user or service account with proper permissions to do all checks. 
To make sure, add the `Viewer` role to the member associated with the credentials. > By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned. diff --git a/docs/getting-started/requirements.md b/docs/getting-started/requirements.md index 999f2fa886..d758cb7211 100644 --- a/docs/getting-started/requirements.md +++ b/docs/getting-started/requirements.md @@ -97,10 +97,6 @@ Prowler will follow the same credentials search as [Google authentication librar 2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal) 3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa) -Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials: - - - Viewer - - Security Reviewer - - Stackdriver Account Viewer +Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials. > By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned. diff --git a/docs/tutorials/azure/use-non-default-cloud.md b/docs/tutorials/azure/use-non-default-cloud.md new file mode 100644 index 0000000000..50e9422433 --- /dev/null +++ b/docs/tutorials/azure/use-non-default-cloud.md @@ -0,0 +1,16 @@ +# Use non default Azure regions + +Microsoft provides clouds for compliance with regional laws, which are available for your use. +By default, Prowler uses `AzureCloud` cloud which is the comercial one. (you can list all the available with `az cloud list --output table`). 
+ +At the time of writing this documentation the available Azure Clouds from different regions are the following: +- AzureCloud +- AzureChinaCloud +- AzureUSGovernment +- AzureGermanCloud + +If you want to change the default one you must include the flag `--azure-region`, i.e.: + +```console +prowler azure --az-cli-auth --azure-region AzureChinaCloud +``` diff --git a/docs/tutorials/custom-checks-metadata.md b/docs/tutorials/custom-checks-metadata.md new file mode 100644 index 0000000000..6a32238d06 --- /dev/null +++ b/docs/tutorials/custom-checks-metadata.md @@ -0,0 +1,43 @@ +# Custom Checks Metadata + +In certain organizations, the severity of specific checks might differ from the default values defined in the check's metadata. For instance, while `s3_bucket_level_public_access_block` could be deemed `critical` for some organizations, others might assign a different severity level. + +The custom metadata option offers a means to override default metadata set by Prowler + +You can utilize `--custom-checks-metadata-file` followed by the path to your custom checks metadata YAML file. + +## Available Fields + +The list of supported check's metadata fields that can be override are listed as follows: + +- Severity + +## File Syntax + +This feature is available for all the providers supported in Prowler since the metadata format is common between all the providers. 
The following is the YAML format for the custom checks metadata file: +```yaml title="custom_checks_metadata.yaml" +CustomChecksMetadata: + aws: + Checks: + s3_bucket_level_public_access_block: + Severity: high + s3_bucket_no_mfa_delete: + Severity: high + azure: + Checks: + storage_infrastructure_encryption_is_enabled: + Severity: medium + gcp: + Checks: + compute_instance_public_ip: + Severity: critical +``` + +## Usage + +Executing the following command will assess all checks and generate a report while overriding the metadata for those checks: +```sh +prowler --custom-checks-metadata-file +``` + +This customization feature enables organizations to tailor the severity of specific checks based on their unique requirements, providing greater flexibility in security assessment and reporting. diff --git a/docs/tutorials/gcp/authentication.md b/docs/tutorials/gcp/authentication.md index f4f46d08dd..35977dab5d 100644 --- a/docs/tutorials/gcp/authentication.md +++ b/docs/tutorials/gcp/authentication.md @@ -22,8 +22,4 @@ Prowler will follow the same credentials search as [Google authentication librar 2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal) 3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa) -Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials: - - - Viewer - - Security Reviewer - - Stackdriver Account Viewer +Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials. 
diff --git a/mkdocs.yml b/mkdocs.yml index d06c747a17..86761e4f68 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -38,6 +38,7 @@ nav: - Logging: tutorials/logging.md - Allowlist: tutorials/allowlist.md - Check Aliases: tutorials/check-aliases.md + - Custom Metadata: tutorials/custom-checks-metadata.md - Ignore Unused Services: tutorials/ignore-unused-services.md - Pentesting: tutorials/pentesting.md - Developer Guide: developer-guide/introduction.md @@ -56,6 +57,7 @@ nav: - Boto3 Configuration: tutorials/aws/boto3-configuration.md - Azure: - Authentication: tutorials/azure/authentication.md + - Non default clouds: tutorials/azure/use-non-default-cloud.md - Subscriptions: tutorials/azure/subscriptions.md - Google Cloud: - Authentication: tutorials/gcp/authentication.md diff --git a/poetry.lock b/poetry.lock index 0a53bee52e..667e7f6659 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,15 +11,32 @@ files = [ {file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"}, ] +[[package]] +name = "adal" +version = "1.2.7" +description = "Note: This library is already replaced by MSAL Python, available here: https://pypi.org/project/msal/ .ADAL Python remains available here as a legacy. The ADAL for Python library makes it easy for python application to authenticate to Azure Active Directory (AAD) in order to access AAD protected web resources." 
+optional = false +python-versions = "*" +files = [ + {file = "adal-1.2.7-py2.py3-none-any.whl", hash = "sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d"}, + {file = "adal-1.2.7.tar.gz", hash = "sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1"}, +] + +[package.dependencies] +cryptography = ">=1.1.0" +PyJWT = ">=1.0.0,<3" +python-dateutil = ">=2.1.0,<3" +requests = ">=2.0.0,<3" + [[package]] name = "alive-progress" -version = "3.1.4" +version = "3.1.5" description = "A new kind of Progress Bar, with real-time throughput, ETA, and very cool animations!" optional = false python-versions = ">=3.7, <4" files = [ - {file = "alive-progress-3.1.4.tar.gz", hash = "sha256:74a95d8d0d42bc99d3a3725dbd06ebb852245f1b64e301a7c375b92b22663f7b"}, - {file = "alive_progress-3.1.4-py3-none-any.whl", hash = "sha256:c80ad87ce9c1054b01135a87fae69ecebbfc2107497ae87cbe6aec7e534903db"}, + {file = "alive-progress-3.1.5.tar.gz", hash = "sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98"}, + {file = "alive_progress-3.1.5-py3-none-any.whl", hash = "sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5"}, ] [package.dependencies] @@ -58,6 +75,41 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +[[package]] +name = "aws-sam-translator" +version = "1.80.0" +description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" +optional = false +python-versions = ">=3.7, <=4.0, !=4.0" +files = [ + {file = "aws-sam-translator-1.80.0.tar.gz", hash = "sha256:36afb8b802af0180a35efa68a8ab19d5d929d0a6a649a0101e8a4f8e1f05681f"}, + {file = "aws_sam_translator-1.80.0-py3-none-any.whl", hash = 
"sha256:f00215f9314cef1bbbdbd7520e3b0c75a76b88bdc3f0dedb6a2c69a12e904b12"}, +] + +[package.dependencies] +boto3 = ">=1.19.5,<2.dev0" +jsonschema = ">=3.2,<5" +pydantic = ">=1.8,<3" +typing-extensions = ">=4.4,<5" + +[package.extras] +dev = ["black (==23.3.0)", "boto3 (>=1.23,<2)", "boto3-stubs[appconfig,serverlessrepo] (>=1.19.5,<2.dev0)", "coverage (>=5.3,<8)", "dateparser (>=1.1,<2.0)", "importlib-metadata", "mypy (>=1.3.0,<1.4.0)", "parameterized (>=0.7,<1.0)", "pytest (>=6.2,<8)", "pytest-cov (>=2.10,<5)", "pytest-env (>=0.6,<1)", "pytest-rerunfailures (>=9.1,<12)", "pytest-xdist (>=2.5,<4)", "pyyaml (>=6.0,<7.0)", "requests (>=2.28,<3.0)", "ruamel.yaml (==0.17.21)", "ruff (==0.0.284)", "tenacity (>=8.0,<9.0)", "types-PyYAML (>=6.0,<7.0)", "types-jsonschema (>=3.2,<4.0)"] + +[[package]] +name = "aws-xray-sdk" +version = "2.12.1" +description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "aws-xray-sdk-2.12.1.tar.gz", hash = "sha256:0bbfdbc773cfef4061062ac940b85e408297a2242f120bcdfee2593209b1e432"}, + {file = "aws_xray_sdk-2.12.1-py2.py3-none-any.whl", hash = "sha256:f6803832dc08d18cc265e2327a69bfa9ee41c121fac195edc9745d04b7a566c3"}, +] + +[package.dependencies] +botocore = ">=1.11.3" +wrapt = "*" + [[package]] name = "awsipranges" version = "0.3.3" @@ -212,13 +264,13 @@ msrest = ">=0.7.1" [[package]] name = "azure-storage-blob" -version = "12.18.3" +version = "12.19.0" description = "Microsoft Azure Blob Storage Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-storage-blob-12.18.3.tar.gz", hash = "sha256:d8ced0deee3367fa3d4f3d1a03cd9edadf4440c0a371f503d623fa6c807554ee"}, - {file = "azure_storage_blob-12.18.3-py3-none-any.whl", hash = "sha256:c278dde2ac41857a68d615c9f2b36d894ba877a7e84d62795603c7e79d0bb5e9"}, + {file = "azure-storage-blob-12.19.0.tar.gz", hash = "sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897"}, + {file = "azure_storage_blob-12.19.0-py3-none-any.whl", hash = "sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b"}, ] [package.dependencies] @@ -435,6 +487,29 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "cfn-lint" +version = "0.83.3" +description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" +optional = false +python-versions = ">=3.7, <=4.0, !=4.0" +files = [ + {file = "cfn-lint-0.83.3.tar.gz", hash = "sha256:cb1b5da6f3f15742f07f89006b9cc6ca459745f350196b559688ac0982111c5f"}, + {file = "cfn_lint-0.83.3-py3-none-any.whl", hash = "sha256:7acb5c40b6ae454006bfa19d586c67d0c4ed9a6dbb344fd470bc773981a0642a"}, +] + +[package.dependencies] +aws-sam-translator = ">=1.79.0" +jschema-to-python = ">=1.2.3,<1.3.0" +jsonpatch = "*" +jsonschema = ">=3.0,<5" +junit-xml = ">=1.9,<2.0" +networkx = ">=2.4,<4" +pyyaml = 
">5.4" +regex = ">=2021.7.1" +sarif-om = ">=1.0.4,<1.1.0" +sympy = ">=1.0.0" + [[package]] name = "charset-normalizer" version = "3.1.0" @@ -757,6 +832,24 @@ toml = "*" conda = ["pyyaml"] pipenv = ["pipenv"] +[[package]] +name = "ecdsa" +version = "0.18.0" +description = "ECDSA cryptographic signature library (pure python)" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, + {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, +] + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + [[package]] name = "exceptiongroup" version = "1.1.1" @@ -902,13 +995,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"] [[package]] name = "google-api-python-client" -version = "2.105.0" +version = "2.108.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.105.0.tar.gz", hash = "sha256:0a8b32cfc2d9b3c1868ae6faef7ee1ab9c89a6cec30be709ea9c97f9a3e5902d"}, - {file = "google_api_python_client-2.105.0-py2.py3-none-any.whl", hash = "sha256:571ce7c41e53415e385aab5a955725f71780550683ffcb71596f5809677d40b7"}, + {file = "google-api-python-client-2.108.0.tar.gz", hash = "sha256:6396efca83185fb205c0abdbc1c2ee57b40475578c6af37f6d0e30a639aade99"}, + {file = "google_api_python_client-2.108.0-py2.py3-none-any.whl", hash = "sha256:9d1327213e388943ebcd7db5ce6e7f47987a7e6874e3e1f6116010eea4a0e75d"}, ] [package.dependencies] @@ -987,6 +1080,17 @@ files = [ [package.extras] test = ["pytest", "sphinx", "sphinx-autobuild", "twine", "wheel"] +[[package]] +name = "graphql-core" +version = "3.2.3" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." 
+optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "graphql-core-3.2.3.tar.gz", hash = "sha256:06d2aad0ac723e35b1cb47885d3e5c45e956a53bc1b209a9fc5369007fe46676"}, + {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, +] + [[package]] name = "httplib2" version = "0.22.0" @@ -1101,6 +1205,74 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "jschema-to-python" +version = "1.2.3" +description = "Generate source code for Python classes from a JSON schema." +optional = false +python-versions = ">= 2.7" +files = [ + {file = "jschema_to_python-1.2.3-py3-none-any.whl", hash = "sha256:8a703ca7604d42d74b2815eecf99a33359a8dccbb80806cce386d5e2dd992b05"}, + {file = "jschema_to_python-1.2.3.tar.gz", hash = "sha256:76ff14fe5d304708ccad1284e4b11f96a658949a31ee7faed9e0995279549b91"}, +] + +[package.dependencies] +attrs = "*" +jsonpickle = "*" +pbr = "*" + +[[package]] +name = "jsondiff" +version = "2.0.0" +description = "Diff JSON and JSON-like structures in Python" +optional = false +python-versions = "*" +files = [ + {file = "jsondiff-2.0.0-py3-none-any.whl", hash = "sha256:689841d66273fc88fc79f7d33f4c074774f4f214b6466e3aff0e5adaf889d1e0"}, + {file = "jsondiff-2.0.0.tar.gz", hash = "sha256:2795844ef075ec8a2b8d385c4d59f5ea48b08e7180fce3cb2787be0db00b1fb4"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] 
+name = "jsonpickle" +version = "3.0.2" +description = "Python library for serializing any arbitrary object graph into JSON" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpickle-3.0.2-py3-none-any.whl", hash = "sha256:4a8442d97ca3f77978afa58068768dba7bff2dbabe79a9647bc3cdafd4ef019f"}, + {file = "jsonpickle-3.0.2.tar.gz", hash = "sha256:e37abba4bfb3ca4a4647d28bb9f4706436f7b46c8a8333b4a718abafa8e46b37"}, +] + +[package.extras] +docs = ["jaraco.packaging (>=3.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["ecdsa", "feedparser", "gmpy2", "numpy", "pandas", "pymongo", "pytest (>=3.5,!=3.7.3)", "pytest-black-multipy", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-flake8 (>=1.1.1)", "scikit-learn", "sqlalchemy"] +testing-libs = ["simplejson", "ujson"] + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + [[package]] name = "jsonschema" version = "4.18.0" @@ -1153,6 +1325,20 @@ files = [ [package.dependencies] referencing = ">=0.28.0" +[[package]] +name = "junit-xml" +version = "1.9" +description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins" +optional = false +python-versions = "*" +files = [ + {file = "junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f"}, + {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, +] + +[package.dependencies] +six = "*" + [[package]] name = "lazy-object-proxy" version = "1.9.0" @@ -1365,13 +1551,13 @@ 
min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp [[package]] name = "mkdocs-material" -version = "9.4.7" +version = "9.4.10" description = "Documentation that simply works" optional = true python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.4.7-py3-none-any.whl", hash = "sha256:4d698d52bb6a6a3c452ab854481c4cdb68453a0420956a6aee2de55fe15fe610"}, - {file = "mkdocs_material-9.4.7.tar.gz", hash = "sha256:e704e001c9ef17291e1d3462c202425217601653e18f68f85d28eff4690e662b"}, + {file = "mkdocs_material-9.4.10-py3-none-any.whl", hash = "sha256:207c4ebc07faebb220437d2c626edb0c9760c82ccfc484500bd3eb30dfce988c"}, + {file = "mkdocs_material-9.4.10.tar.gz", hash = "sha256:421adedaeaa461dcaf55b8d406673934ade3d4f05ed9819e4cc7b4ee1d646a62"}, ] [package.dependencies] @@ -1421,53 +1607,84 @@ test = ["pytest", "pytest-cov"] [[package]] name = "moto" -version = "4.2.7" +version = "4.2.9" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "moto-4.2.7-py2.py3-none-any.whl", hash = "sha256:3e0ef388900448485cd6eff18e9f7fcaa6cf4560b6fb536ba2e2e1278a5ecc59"}, - {file = "moto-4.2.7.tar.gz", hash = "sha256:1298006aaa6996b886658eb194cac0e3a5679c9fcce6cb13e741ccc5a7247abb"}, + {file = "moto-4.2.9-py2.py3-none-any.whl", hash = "sha256:c85289d13d15d5274d0a643381af1f1b03d7ee88f0943c9d2d6c28e6177a298a"}, + {file = "moto-4.2.9.tar.gz", hash = "sha256:24de81eeaa450a20b57c5cdf9a757ea5216bddc7db798e335d2de1f2376bf324"}, ] [package.dependencies] +aws-xray-sdk = {version = ">=0.93,<0.96 || >0.96", optional = true, markers = "extra == \"all\""} boto3 = ">=1.9.201" botocore = ">=1.12.201" +cfn-lint = {version = ">=0.40.0", optional = true, markers = "extra == \"all\""} cryptography = ">=3.3.1" +docker = {version = ">=3.0.0", optional = true, markers = "extra == \"all\""} +ecdsa = {version = "!=0.15", optional = true, markers = "extra == \"all\""} +graphql-core = {version = "*", optional = true, markers = "extra == \"all\""} Jinja2 = 
">=2.10.1" +jsondiff = {version = ">=1.1.2", optional = true, markers = "extra == \"all\""} +multipart = {version = "*", optional = true, markers = "extra == \"all\""} +openapi-spec-validator = {version = ">=0.5.0", optional = true, markers = "extra == \"all\""} +py-partiql-parser = {version = "0.4.2", optional = true, markers = "extra == \"all\""} +pyparsing = {version = ">=3.0.7", optional = true, markers = "extra == \"all\""} python-dateutil = ">=2.1,<3.0.0" +python-jose = {version = ">=3.1.0,<4.0.0", extras = ["cryptography"], optional = true, markers = "extra == \"all\""} +PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"all\""} requests = ">=2.5" responses = ">=0.13.0" +setuptools = {version = "*", optional = true, markers = "extra == \"all\""} +sshpubkeys = {version = ">=3.1.0", optional = true, markers = "extra == \"all\""} werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] apigatewayv2 = ["PyYAML (>=5.1)"] appsync = ["graphql-core"] awslambda = ["docker (>=3.0.0)"] batch = ["docker (>=3.0.0)"] -cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker 
(>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] ds = ["sshpubkeys (>=3.1.0)"] -dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.1)"] -dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.1)"] +dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.2)"] +dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.2)"] ebs = ["sshpubkeys (>=3.1.0)"] ec2 = ["sshpubkeys (>=3.1.0)"] efs = ["sshpubkeys (>=3.1.0)"] eks = ["sshpubkeys (>=3.1.0)"] glue = ["pyparsing (>=3.0.7)"] iotdata = ["jsondiff (>=1.1.2)"] -proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] -resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"] +proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator 
(>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"] route53resolver = ["sshpubkeys (>=3.1.0)"] -s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.4.1)"] -s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.4.1)"] -server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] +s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.4.2)"] +s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.4.2)"] +server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"] ssm = ["PyYAML (>=5.1)"] xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = 
"sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4)"] +tests = ["pytest (>=4.6)"] + [[package]] name = "msal" version = "1.24.1" @@ -1537,6 +1754,33 @@ requests-oauthlib = ">=0.5.0" [package.extras] async = ["aiodns", "aiohttp (>=3.0)"] +[[package]] +name = "msrestazure" +version = "0.6.4" +description = "AutoRest swagger generator Python client runtime. Azure-specific module." +optional = false +python-versions = "*" +files = [ + {file = "msrestazure-0.6.4-py2.py3-none-any.whl", hash = "sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9"}, + {file = "msrestazure-0.6.4.tar.gz", hash = "sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189"}, +] + +[package.dependencies] +adal = ">=0.6.0,<2.0.0" +msrest = ">=0.6.0,<2.0.0" +six = "*" + +[[package]] +name = "multipart" +version = "0.2.4" +description = "Parser for multipart/form-data." 
+optional = false +python-versions = "*" +files = [ + {file = "multipart-0.2.4-py3-none-any.whl", hash = "sha256:5aec990820b8a9e94f9c164fbeb58cf118cfbde2854865b67a9a730edd1fb9d1"}, + {file = "multipart-0.2.4.tar.gz", hash = "sha256:06ba205360bc7096fefe618e4f1e9b2cdb890b4f2157053a81f386912a2522cb"}, +] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1548,6 +1792,24 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "networkx" +version = "3.2.1" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.9" +files = [ + {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, + {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, +] + +[package.extras] +default = ["matplotlib (>=3.5)", "numpy (>=1.22)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] +developer = ["changelist (==0.4)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] +doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] + [[package]] name = "oauthlib" version = "3.2.2" @@ -1725,6 +1987,20 @@ files = [ {file = "protobuf-4.23.0.tar.gz", hash = "sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5"}, ] +[[package]] +name = "py-partiql-parser" +version = "0.4.2" +description = "Pure Python PartiQL Parser" +optional = false +python-versions = "*" +files = [ + {file = "py-partiql-parser-0.4.2.tar.gz", hash = "sha256:9c99d545be7897c6bfa97a107f6cfbcd92e359d394e4f3b95430e6409e8dd1e1"}, + {file = 
"py_partiql_parser-0.4.2-py3-none-any.whl", hash = "sha256:f3f34de8dddf65ed2d47b4263560bbf97be1ecc6bd5c61da039ede90f26a10ce"}, +] + +[package.extras] +dev = ["black (==22.6.0)", "flake8", "mypy (==0.971)", "pytest"] + [[package]] name = "pyasn1" version = "0.5.0" @@ -1885,8 +2161,7 @@ astroid = ">=3.0.1,<=3.1.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, ] isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.8" @@ -1988,13 +2263,13 @@ pytest = "*" [[package]] name = "pytest-xdist" -version = "3.3.1" +version = "3.4.0" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, - {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, + {file = "pytest-xdist-3.4.0.tar.gz", hash = "sha256:3a94a931dd9e268e0b871a877d09fe2efb6175c2c23d60d56a6001359002b832"}, + {file = "pytest_xdist-3.4.0-py3-none-any.whl", hash = "sha256:e513118bf787677a427e025606f55e95937565e06dfaac8d87f55301e57ae607"}, ] [package.dependencies] @@ -2020,6 +2295,28 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +optional = false +python-versions = "*" +files = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = 
"sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""} +ecdsa = "!=0.15" +pyasn1 = "*" +rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] + [[package]] name = "pywin32" version = "306" @@ -2125,7 +2422,7 @@ rpds-py = ">=0.7.0" name = "regex" version = "2023.8.8" description = "Alternative regular expression module, to replace re." -optional = true +optional = false python-versions = ">=3.6" files = [ {file = "regex-2023.8.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88900f521c645f784260a8d346e12a1590f79e96403971241e64c3a265c8ecdb"}, @@ -2548,6 +2845,21 @@ setuptools = ">=19.3" github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] gitlab = ["python-gitlab (>=1.3.0)"] +[[package]] +name = "sarif-om" +version = "1.0.4" +description = "Classes implementing the SARIF 2.1.0 object model." 
+optional = false +python-versions = ">= 2.7" +files = [ + {file = "sarif_om-1.0.4-py3-none-any.whl", hash = "sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911"}, + {file = "sarif_om-1.0.4.tar.gz", hash = "sha256:cd5f416b3083e00d402a92e449a7ff67af46f11241073eea0461802a3b5aef98"}, +] + +[package.dependencies] +attrs = "*" +pbr = "*" + [[package]] name = "schema" version = "0.7.5" @@ -2609,13 +2921,13 @@ files = [ [[package]] name = "slack-sdk" -version = "3.23.0" +version = "3.24.0" description = "The Slack API Platform SDK for Python" optional = false python-versions = ">=3.6.0" files = [ - {file = "slack_sdk-3.23.0-py2.py3-none-any.whl", hash = "sha256:2a8513505cced20ceee22b5b49c11d9545caa6234b56bf0ad47133ea5b357d10"}, - {file = "slack_sdk-3.23.0.tar.gz", hash = "sha256:9d6ebc4ff74e7983e1b27dbdb0f2bb6fc3c2a2451694686eaa2be23bbb085a73"}, + {file = "slack_sdk-3.24.0-py2.py3-none-any.whl", hash = "sha256:cae64f0177a53d34cca59cc691d4535edd18929843a936b97cea421db9e4fbfe"}, + {file = "slack_sdk-3.24.0.tar.gz", hash = "sha256:741ea5381e65f4407d24ed81203912cbd6bfe807a6704b1d3c5ad346c86000b6"}, ] [package.extras] @@ -2633,6 +2945,24 @@ files = [ {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] +[[package]] +name = "sshpubkeys" +version = "3.3.1" +description = "SSH public key parser" +optional = false +python-versions = ">=3" +files = [ + {file = "sshpubkeys-3.3.1-py2.py3-none-any.whl", hash = "sha256:946f76b8fe86704b0e7c56a00d80294e39bc2305999844f079a217885060b1ac"}, + {file = "sshpubkeys-3.3.1.tar.gz", hash = "sha256:3020ed4f8c846849299370fbe98ff4157b0ccc1accec105e07cfa9ae4bb55064"}, +] + +[package.dependencies] +cryptography = ">=2.1.4" +ecdsa = ">=0.13" + +[package.extras] +dev = ["twine", "wheel", "yapf"] + [[package]] name = "stevedore" version = "5.0.0" @@ -2647,6 +2977,20 @@ files = [ [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" +[[package]] +name = "sympy" +version = 
"1.12" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, + {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, +] + +[package.dependencies] +mpmath = ">=0.19" + [[package]] name = "tabulate" version = "0.9.0" @@ -2846,6 +3190,85 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog (>=2.3)"] +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = 
"wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + [[package]] name = "xlsxwriter" version = "3.1.0" @@ -2888,5 +3311,5 @@ docs = ["mkdocs", "mkdocs-material"] [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = 
"6ed432b0310655c247da3b4f542b9410842fb46de838408b99b6e61fb367cf38" +python-versions = ">=3.9,<3.12" +content-hash = "653c89aa68d9924b75c01a3dd894fdffc57fb899d46dcc23728e0543a3fc24e9" diff --git a/prowler/__main__.py b/prowler/__main__.py index 7cb07e3624..a6151dec98 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -26,6 +26,10 @@ from prowler.lib.check.check import ( ) from prowler.lib.check.checks_loader import load_checks_to_execute from prowler.lib.check.compliance import update_checks_metadata_with_compliance +from prowler.lib.check.custom_checks_metadata import ( + parse_custom_checks_metadata_file, + update_checks_metadata, +) from prowler.lib.cli.parser import ProwlerArgumentParser from prowler.lib.logger import logger, set_logging_config from prowler.lib.outputs.compliance import display_compliance_table @@ -47,6 +51,7 @@ from prowler.providers.common.audit_info import ( set_provider_audit_info, set_provider_execution_parameters, ) +from prowler.providers.common.clean import clean_provider_local_output_directories from prowler.providers.common.outputs import set_provider_output_options from prowler.providers.common.quick_inventory import run_provider_quick_inventory @@ -67,6 +72,7 @@ def prowler(): checks_folder = args.checks_folder severities = args.severity compliance_framework = args.compliance + custom_checks_metadata_file = args.custom_checks_metadata_file if not args.no_banner: print_banner(args) @@ -96,9 +102,19 @@ def prowler(): bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider) # Complete checks metadata with the compliance framework specification - update_checks_metadata_with_compliance( + bulk_checks_metadata = update_checks_metadata_with_compliance( bulk_compliance_frameworks, bulk_checks_metadata ) + # Update checks metadata if the --custom-checks-metadata-file is present + custom_checks_metadata = None + if custom_checks_metadata_file: + custom_checks_metadata = parse_custom_checks_metadata_file( + provider, 
custom_checks_metadata_file + ) + bulk_checks_metadata = update_checks_metadata( + bulk_checks_metadata, custom_checks_metadata + ) + if args.list_compliance: print_compliance_frameworks(bulk_compliance_frameworks) sys.exit() @@ -174,7 +190,11 @@ def prowler(): findings = [] if len(checks_to_execute): findings = execute_checks( - checks_to_execute, provider, audit_info, audit_output_options + checks_to_execute, + provider, + audit_info, + audit_output_options, + custom_checks_metadata, ) else: logger.error( @@ -246,7 +266,10 @@ def prowler(): for region in security_hub_regions: # Save the regions where AWS Security Hub is enabled if verify_security_hub_integration_enabled_per_region( - region, audit_info.audit_session + audit_info.audited_partition, + region, + audit_info.audit_session, + audit_info.audited_account, ): aws_security_enabled_regions.append(region) @@ -301,6 +324,9 @@ def prowler(): if checks_folder: remove_custom_checks_module(checks_folder, provider) + # clean local directories + clean_provider_local_output_directories(args) + # If there are failed findings exit code 3, except if -z is input if not args.ignore_exit_code_3 and stats["total_fail"] > 0: sys.exit(3) diff --git a/prowler/config/config.py b/prowler/config/config.py index 2038761182..859907b186 100644 --- a/prowler/config/config.py +++ b/prowler/config/config.py @@ -11,7 +11,7 @@ from prowler.lib.logger import logger timestamp = datetime.today() timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc) -prowler_version = "3.11.0" +prowler_version = "3.11.3" html_logo_url = "https://github.com/prowler-cloud/prowler/" html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png" square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png" @@ -70,7 +70,9 @@ def check_current_version(): if latest_version != prowler_version: return f"{prowler_version_string} 
(latest is {latest_version}, upgrade for the latest features)" else: - return f"{prowler_version_string} (it is the latest version, yay!)" + return ( + f"{prowler_version_string} (You are running the latest version, yay!)" + ) except requests.RequestException: return f"{prowler_version_string}" except Exception: diff --git a/prowler/config/config.yaml b/prowler/config/config.yaml index 89568e777b..86a03b5a3b 100644 --- a/prowler/config/config.yaml +++ b/prowler/config/config.yaml @@ -2,7 +2,7 @@ aws: # AWS Global Configuration - # aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for GuardDuty, SecurityHub, DRS and Config + # aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config allowlist_non_default_regions: False # If you want to allowlist/mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w allowlist.yaml`: # Allowlist: diff --git a/prowler/config/custom_checks_metadata_example.yaml b/prowler/config/custom_checks_metadata_example.yaml new file mode 100644 index 0000000000..ed11065333 --- /dev/null +++ b/prowler/config/custom_checks_metadata_example.yaml @@ -0,0 +1,15 @@ +CustomChecksMetadata: + aws: + Checks: + s3_bucket_level_public_access_block: + Severity: high + s3_bucket_no_mfa_delete: + Severity: high + azure: + Checks: + storage_infrastructure_encryption_is_enabled: + Severity: medium + gcp: + Checks: + compute_instance_public_ip: + Severity: critical diff --git a/prowler/lib/check/check.py b/prowler/lib/check/check.py index deabd70dc9..ce5e23cf9d 100644 --- a/prowler/lib/check/check.py +++ b/prowler/lib/check/check.py @@ -16,6 +16,7 @@ from colorama import Fore, Style import prowler from prowler.config.config import orange_color from prowler.lib.check.compliance_models import load_compliance_framework +from 
prowler.lib.check.custom_checks_metadata import update_check_metadata from prowler.lib.check.models import Check, load_check_metadata from prowler.lib.logger import logger from prowler.lib.outputs.outputs import report @@ -416,6 +417,7 @@ def execute_checks( provider: str, audit_info: Any, audit_output_options: Provider_Output_Options, + custom_checks_metadata: Any, ) -> list: # List to store all the check's findings all_findings = [] @@ -461,6 +463,7 @@ def execute_checks( audit_info, services_executed, checks_executed, + custom_checks_metadata, ) all_findings.extend(check_findings) @@ -506,6 +509,7 @@ def execute_checks( audit_info, services_executed, checks_executed, + custom_checks_metadata, ) all_findings.extend(check_findings) @@ -531,6 +535,7 @@ def execute( audit_info: Any, services_executed: set, checks_executed: set, + custom_checks_metadata: Any, ): # Import check module check_module_path = ( @@ -541,6 +546,10 @@ def execute( check_to_execute = getattr(lib, check_name) c = check_to_execute() + # Update check metadata to reflect that in the outputs + if custom_checks_metadata and custom_checks_metadata["Checks"].get(c.CheckID): + c = update_check_metadata(c, custom_checks_metadata["Checks"][c.CheckID]) + # Run check check_findings = run_check(c, audit_output_options) diff --git a/prowler/lib/check/checks_loader.py b/prowler/lib/check/checks_loader.py index 6a0a92db50..c82a601e1f 100644 --- a/prowler/lib/check/checks_loader.py +++ b/prowler/lib/check/checks_loader.py @@ -36,6 +36,10 @@ def load_checks_to_execute( # Check check's severity if bulk_checks_metadata[check].Severity in severities: checks_to_execute.add(check) + if service_list: + checks_to_execute = ( + recover_checks_from_service(service_list, provider) & checks_to_execute + ) # Handle if there are checks passed using -C/--checks-file elif checks_file: diff --git a/prowler/lib/check/custom_checks_metadata.py b/prowler/lib/check/custom_checks_metadata.py new file mode 100644 index 
0000000000..87acb97ab5 --- /dev/null +++ b/prowler/lib/check/custom_checks_metadata.py @@ -0,0 +1,77 @@ +import sys + +import yaml +from jsonschema import validate + +from prowler.lib.logger import logger + +valid_severities = ["critical", "high", "medium", "low", "informational"] +custom_checks_metadata_schema = { + "type": "object", + "properties": { + "Checks": { + "type": "object", + "patternProperties": { + ".*": { + "type": "object", + "properties": { + "Severity": { + "type": "string", + "enum": valid_severities, + } + }, + "required": ["Severity"], + "additionalProperties": False, + } + }, + "additionalProperties": False, + } + }, + "required": ["Checks"], + "additionalProperties": False, +} + + +def parse_custom_checks_metadata_file(provider: str, parse_custom_checks_metadata_file): + """parse_custom_checks_metadata_file returns the custom_checks_metadata object if it is valid, otherwise aborts the execution returning the ValidationError.""" + try: + with open(parse_custom_checks_metadata_file) as f: + custom_checks_metadata = yaml.safe_load(f)["CustomChecksMetadata"][provider] + validate(custom_checks_metadata, schema=custom_checks_metadata_schema) + return custom_checks_metadata + except Exception as error: + logger.critical( + f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" + ) + sys.exit(1) + + +def update_checks_metadata(bulk_checks_metadata, custom_checks_metadata): + """update_checks_metadata returns the bulk_checks_metadata with the check's metadata updated based on the custom_checks_metadata provided.""" + try: + # Update checks metadata from CustomChecksMetadata file + for check, custom_metadata in custom_checks_metadata["Checks"].items(): + check_metadata = bulk_checks_metadata.get(check) + if check_metadata: + bulk_checks_metadata[check] = update_check_metadata( + check_metadata, custom_metadata + ) + return bulk_checks_metadata + except Exception as error: + logger.critical( + f"{error.__class__.__name__} -- 
{error}[{error.__traceback__.tb_lineno}]" + ) + sys.exit(1) + + +def update_check_metadata(check_metadata, custom_metadata): + """update_check_metadata updates the check_metadata fields present in the custom_metadata and returns the updated version of the check_metadata. If some field is not present or valid the check_metadata is returned with the original fields.""" + try: + if custom_metadata: + for attribute in custom_metadata: + try: + setattr(check_metadata, attribute, custom_metadata[attribute]) + except ValueError: + pass + finally: + return check_metadata diff --git a/prowler/lib/cli/parser.py b/prowler/lib/cli/parser.py index 28f1e59a53..8db0782174 100644 --- a/prowler/lib/cli/parser.py +++ b/prowler/lib/cli/parser.py @@ -49,6 +49,7 @@ Detailed documentation at https://docs.prowler.cloud self.__init_exclude_checks_parser__() self.__init_list_checks_parser__() self.__init_config_parser__() + self.__init_custom_checks_metadata_parser__() # Init Providers Arguments init_providers_parser(self) @@ -220,7 +221,7 @@ Detailed documentation at https://docs.prowler.cloud group.add_argument( "-s", "--services", nargs="+", help="List of services to be executed." ) - group.add_argument( + common_checks_parser.add_argument( "--severity", nargs="+", help="List of severities to be executed [informational, low, medium, high, critical]", @@ -286,3 +287,15 @@ Detailed documentation at https://docs.prowler.cloud default=default_config_file_path, help="Set configuration file path", ) + + def __init_custom_checks_metadata_parser__(self): + # CustomChecksMetadata + custom_checks_metadata_subparser = ( + self.common_providers_parser.add_argument_group("Custom Checks Metadata") + ) + custom_checks_metadata_subparser.add_argument( + "--custom-checks-metadata-file", + nargs="?", + default=None, + help="Path for the custom checks metadata YAML file. See example prowler/config/custom_checks_metadata_example.yaml for reference and format. 
See more in https://docs.prowler.cloud/en/latest/tutorials/custom-checks-metadata/", + ) diff --git a/prowler/lib/outputs/file_descriptors.py b/prowler/lib/outputs/file_descriptors.py index a2339e1257..9b5def4d22 100644 --- a/prowler/lib/outputs/file_descriptors.py +++ b/prowler/lib/outputs/file_descriptors.py @@ -12,8 +12,6 @@ from prowler.config.config import ( from prowler.lib.logger import logger from prowler.lib.outputs.html import add_html_header from prowler.lib.outputs.models import ( - Aws_Check_Output_CSV, - Azure_Check_Output_CSV, Check_Output_CSV_AWS_CIS, Check_Output_CSV_AWS_ISO27001_2013, Check_Output_CSV_AWS_Well_Architected, @@ -21,19 +19,18 @@ from prowler.lib.outputs.models import ( Check_Output_CSV_GCP_CIS, Check_Output_CSV_Generic_Compliance, Check_Output_MITRE_ATTACK, - Gcp_Check_Output_CSV, generate_csv_fields, ) from prowler.lib.utils.utils import file_exists, open_file from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info -from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info +from prowler.providers.common.outputs import get_provider_output_model from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info def initialize_file_descriptor( filename: str, output_mode: str, - audit_info: AWS_Audit_Info, + audit_info: Any, format: Any = None, ) -> TextIOWrapper: """Open/Create the output file. 
If needed include headers or the required format""" @@ -75,27 +72,15 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit for output_mode in output_modes: if output_mode == "csv": filename = f"{output_directory}/{output_filename}{csv_file_suffix}" - if isinstance(audit_info, AWS_Audit_Info): - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - audit_info, - Aws_Check_Output_CSV, - ) - if isinstance(audit_info, Azure_Audit_Info): - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - audit_info, - Azure_Check_Output_CSV, - ) - if isinstance(audit_info, GCP_Audit_Info): - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - audit_info, - Gcp_Check_Output_CSV, - ) + output_model = get_provider_output_model( + audit_info.__class__.__name__ + ) + file_descriptor = initialize_file_descriptor( + filename, + output_mode, + audit_info, + output_model, + ) file_descriptors.update({output_mode: file_descriptor}) elif output_mode == "json": diff --git a/prowler/lib/outputs/html.py b/prowler/lib/outputs/html.py index 6e381bda52..820681b094 100644 --- a/prowler/lib/outputs/html.py +++ b/prowler/lib/outputs/html.py @@ -338,8 +338,9 @@ def add_html_footer(output_filename, output_directory): def get_aws_html_assessment_summary(audit_info): try: if isinstance(audit_info, AWS_Audit_Info): - if not audit_info.profile: - audit_info.profile = "ENV" + profile = ( + audit_info.profile if audit_info.profile is not None else "default" + ) if isinstance(audit_info.audited_regions, list): audited_regions = " ".join(audit_info.audited_regions) elif not audit_info.audited_regions: @@ -361,7 +362,7 @@ def get_aws_html_assessment_summary(audit_info):
  • AWS-CLI Profile: """ - + audit_info.profile + + profile + """
  • diff --git a/prowler/lib/outputs/json.py b/prowler/lib/outputs/json.py index f43360ff48..6f1403cf05 100644 --- a/prowler/lib/outputs/json.py +++ b/prowler/lib/outputs/json.py @@ -31,6 +31,7 @@ from prowler.lib.outputs.models import ( unroll_dict_to_list, ) from prowler.lib.utils.utils import hash_sha512, open_file, outputs_unix_timestamp +from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info def fill_json_asff(finding_output, audit_info, finding, output_options): @@ -155,6 +156,11 @@ def fill_json_ocsf(audit_info, finding, output_options) -> Check_Output_JSON_OCS aws_org_uid = "" account = None org = None + profile = "" + if isinstance(audit_info, AWS_Audit_Info): + profile = ( + audit_info.profile if audit_info.profile is not None else "default" + ) if ( hasattr(audit_info, "organizations_metadata") and audit_info.organizations_metadata @@ -249,9 +255,7 @@ def fill_json_ocsf(audit_info, finding, output_options) -> Check_Output_JSON_OCS original_time=outputs_unix_timestamp( output_options.unix_timestamp, timestamp ), - profiles=[audit_info.profile] - if hasattr(audit_info, "organizations_metadata") - else [], + profiles=[profile], ) compliance = Compliance_OCSF( status=generate_json_ocsf_status(finding.status), diff --git a/prowler/providers/aws/aws_regions_by_service.json b/prowler/providers/aws/aws_regions_by_service.json index 68abb57346..96b19504e2 100644 --- a/prowler/providers/aws/aws_regions_by_service.json +++ b/prowler/providers/aws/aws_regions_by_service.json @@ -498,17 +498,6 @@ ] } }, - "appfabric": { - "regions": { - "aws": [ - "ap-northeast-1", - "eu-west-1", - "us-east-1" - ], - "aws-cn": [], - "aws-us-gov": [] - } - }, "appflow": { "regions": { "aws": [ @@ -674,10 +663,13 @@ "regions": { "aws": [ "ap-northeast-1", + "ap-south-1", "ap-southeast-1", "ap-southeast-2", "eu-central-1", "eu-west-1", + "eu-west-2", + "eu-west-3", "us-east-1", "us-east-2", "us-west-2" @@ -805,7 +797,10 @@ "cn-north-1", "cn-northwest-1" ], - 
"aws-us-gov": [] + "aws-us-gov": [ + "us-gov-east-1", + "us-gov-west-1" + ] } }, "artifact": { @@ -1013,6 +1008,17 @@ ] } }, + "aws-appfabric": { + "regions": { + "aws": [ + "ap-northeast-1", + "eu-west-1", + "us-east-1" + ], + "aws-cn": [], + "aws-us-gov": [] + } + }, "awshealthdashboard": { "regions": { "aws": [ @@ -2068,17 +2074,24 @@ "ap-east-1", "ap-northeast-1", "ap-northeast-2", + "ap-northeast-3", "ap-south-1", + "ap-south-2", "ap-southeast-1", "ap-southeast-2", + "ap-southeast-3", + "ap-southeast-4", "ca-central-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", + "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", + "il-central-1", + "me-central-1", "me-south-1", "sa-east-1", "us-east-1", @@ -2299,15 +2312,22 @@ "ap-northeast-2", "ap-northeast-3", "ap-south-1", + "ap-south-2", "ap-southeast-1", "ap-southeast-2", + "ap-southeast-3", + "ap-southeast-4", "ca-central-1", "eu-central-1", + "eu-central-2", "eu-north-1", "eu-south-1", + "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", + "il-central-1", + "me-central-1", "me-south-1", "sa-east-1", "us-east-1", @@ -2467,6 +2487,7 @@ "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", + "ap-southeast-4", "ca-central-1", "eu-central-1", "eu-central-2", @@ -2941,6 +2962,7 @@ "cn-northwest-1" ], "aws-us-gov": [ + "us-gov-east-1", "us-gov-west-1" ] } @@ -3615,6 +3637,7 @@ "ap-south-1", "ap-southeast-1", "ap-southeast-2", + "ap-southeast-3", "ca-central-1", "eu-central-1", "eu-north-1", @@ -3622,6 +3645,7 @@ "eu-west-1", "eu-west-2", "eu-west-3", + "me-central-1", "me-south-1", "sa-east-1", "us-east-1", @@ -3642,15 +3666,19 @@ "emr-serverless": { "regions": { "aws": [ + "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", + "ap-northeast-3", "ap-south-1", "ap-southeast-1", "ap-southeast-2", + "ap-southeast-3", "ca-central-1", "eu-central-1", "eu-north-1", + "eu-south-1", "eu-west-1", "eu-west-2", "eu-west-3", @@ -4336,16 +4364,6 @@ "aws-us-gov": [] } }, - "gamesparks": { - "regions": { - 
"aws": [ - "ap-northeast-1", - "us-east-1" - ], - "aws-cn": [], - "aws-us-gov": [] - } - }, "glacier": { "regions": { "aws": [ @@ -5605,6 +5623,44 @@ ] } }, + "launch-wizard": { + "regions": { + "aws": [ + "af-south-1", + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-south-1", + "ap-south-2", + "ap-southeast-1", + "ap-southeast-2", + "ap-southeast-3", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-south-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-central-1", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2" + ], + "aws-cn": [ + "cn-north-1", + "cn-northwest-1" + ], + "aws-us-gov": [ + "us-gov-east-1", + "us-gov-west-1" + ] + } + }, "launchwizard": { "regions": { "aws": [ @@ -5718,6 +5774,7 @@ "eu-central-2", "eu-north-1", "eu-south-1", + "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", @@ -5801,6 +5858,7 @@ "eu-central-2", "eu-north-1", "eu-south-1", + "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", @@ -6062,6 +6120,15 @@ ] } }, + "managedblockchain-query": { + "regions": { + "aws": [ + "us-east-1" + ], + "aws-cn": [], + "aws-us-gov": [] + } + }, "managedservices": { "regions": { "aws": [ @@ -6388,11 +6455,18 @@ "aws": [ "af-south-1", "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", "ap-south-1", "ap-southeast-1", "ap-southeast-2", + "ap-southeast-4", + "ca-central-1", "eu-central-1", + "eu-north-1", "eu-west-1", + "eu-west-3", + "sa-east-1", "us-east-1", "us-east-2", "us-west-2" @@ -6703,6 +6777,7 @@ "eu-west-1", "eu-west-2", "eu-west-3", + "il-central-1", "me-central-1", "me-south-1", "sa-east-1", @@ -7079,8 +7154,11 @@ "regions": { "aws": [ "ap-northeast-1", + "ap-northeast-2", + "ap-south-1", "ap-southeast-1", "ap-southeast-2", + "ca-central-1", "eu-central-1", "eu-west-1", "eu-west-2", @@ -7152,6 +7230,41 @@ "aws-us-gov": [] } }, + "pca-connector-ad": { + "regions": { + "aws": [ + "af-south-1", + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", 
+ "ap-northeast-3", + "ap-south-1", + "ap-south-2", + "ap-southeast-1", + "ap-southeast-2", + "ap-southeast-3", + "ap-southeast-4", + "ca-central-1", + "eu-central-1", + "eu-central-2", + "eu-north-1", + "eu-south-1", + "eu-south-2", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-central-1", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2" + ], + "aws-cn": [], + "aws-us-gov": [] + } + }, "personalize": { "regions": { "aws": [ @@ -8801,6 +8914,7 @@ "eu-west-1", "eu-west-2", "eu-west-3", + "il-central-1", "me-central-1", "me-south-1", "sa-east-1", @@ -9928,6 +10042,7 @@ "ap-northeast-2", "ap-northeast-3", "ap-south-1", + "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-4", @@ -10344,6 +10459,7 @@ "eu-central-1", "eu-west-1", "eu-west-2", + "il-central-1", "sa-east-1", "us-east-1", "us-west-2" diff --git a/prowler/providers/aws/lib/allowlist/allowlist.py b/prowler/providers/aws/lib/allowlist/allowlist.py index b63340f70e..6789ff9e92 100644 --- a/prowler/providers/aws/lib/allowlist/allowlist.py +++ b/prowler/providers/aws/lib/allowlist/allowlist.py @@ -135,7 +135,12 @@ def allowlist_findings( def is_allowlisted( - allowlist: dict, audited_account: str, check: str, region: str, resource: str, tags + allowlist: dict, + audited_account: str, + check: str, + finding_region: str, + finding_resource: str, + finding_tags, ): try: allowlisted_checks = {} @@ -150,15 +155,15 @@ def is_allowlisted( if "*" in allowlist["Accounts"]: checks_multi_account = allowlist["Accounts"]["*"]["Checks"] allowlisted_checks.update(checks_multi_account) + # Test if it is allowlisted if is_allowlisted_in_check( allowlisted_checks, audited_account, - audited_account, check, - region, - resource, - tags, + finding_region, + finding_resource, + finding_tags, ): is_finding_allowlisted = True @@ -171,23 +176,29 @@ def is_allowlisted( def is_allowlisted_in_check( - allowlisted_checks, audited_account, account, check, region, resource, tags + 
allowlisted_checks, + audited_account, + check, + finding_region, + finding_resource, + finding_tags, ): try: # Default value is not allowlisted is_check_allowlisted = False + for allowlisted_check, allowlisted_check_info in allowlisted_checks.items(): # map lambda to awslambda allowlisted_check = re.sub("^lambda", "awslambda", allowlisted_check) - # extract the exceptions + + # Check if the finding is excepted exceptions = allowlisted_check_info.get("Exceptions") - # Check if there are exceptions if is_excepted( exceptions, audited_account, - region, - resource, - tags, + finding_region, + finding_resource, + finding_tags, ): # Break loop and return default value since is excepted break @@ -201,13 +212,27 @@ def is_allowlisted_in_check( or check == allowlisted_check or re.search(allowlisted_check, check) ): - if is_allowlisted_in_region( - allowlisted_regions, - allowlisted_resources, - allowlisted_tags, - region, - resource, - tags, + allowlisted_in_check = True + allowlisted_in_region = is_allowlisted_in_region( + allowlisted_regions, finding_region + ) + allowlisted_in_resource = is_allowlisted_in_resource( + allowlisted_resources, finding_resource + ) + allowlisted_in_tags = is_allowlisted_in_tags( + allowlisted_tags, finding_tags + ) + + # For a finding to be allowlisted requires the following set to True: + # - allowlisted_in_check -> True + # - allowlisted_in_region -> True + # - allowlisted_in_tags -> True or allowlisted_in_resource -> True + # - excepted -> False + + if ( + allowlisted_in_check + and allowlisted_in_region + and (allowlisted_in_tags or allowlisted_in_resource) ): is_check_allowlisted = True @@ -220,25 +245,11 @@ def is_allowlisted_in_check( def is_allowlisted_in_region( - allowlist_regions, allowlist_resources, allowlisted_tags, region, resource, tags + allowlisted_regions, + finding_region, ): try: - # By default is not allowlisted - is_region_allowlisted = False - # If there is a *, it affects to all regions - if "*" in allowlist_regions 
or region in allowlist_regions: - for elem in allowlist_resources: - if is_allowlisted_in_tags( - allowlisted_tags, - elem, - resource, - tags, - ): - is_region_allowlisted = True - # if we find the element there is no point in continuing with the loop - break - - return is_region_allowlisted + return __is_item_matched__(allowlisted_regions, finding_region) except Exception as error: logger.critical( f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" @@ -246,25 +257,9 @@ def is_allowlisted_in_region( sys.exit(1) -def is_allowlisted_in_tags(allowlisted_tags, elem, resource, tags): +def is_allowlisted_in_tags(allowlisted_tags, finding_tags): try: - # By default is not allowlisted - is_tag_allowlisted = False - # Check if it is an * - if elem == "*": - elem = ".*" - # Check if there are allowlisted tags - if allowlisted_tags: - for allowlisted_tag in allowlisted_tags: - if re.search(allowlisted_tag, tags): - is_tag_allowlisted = True - break - - else: - if re.search(elem, resource): - is_tag_allowlisted = True - - return is_tag_allowlisted + return __is_item_matched__(allowlisted_tags, finding_tags) except Exception as error: logger.critical( f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" @@ -272,7 +267,25 @@ def is_allowlisted_in_tags(allowlisted_tags, elem, resource, tags): sys.exit(1) -def is_excepted(exceptions, audited_account, region, resource, tags): +def is_allowlisted_in_resource(allowlisted_resources, finding_resource): + try: + return __is_item_matched__(allowlisted_resources, finding_resource) + + except Exception as error: + logger.critical( + f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" + ) + sys.exit(1) + + +def is_excepted( + exceptions, + audited_account, + finding_region, + finding_resource, + finding_tags, +): + """is_excepted returns True if the account, region, resource and tags are excepted""" try: excepted = False is_account_excepted = False @@ -281,39 +294,50 @@ def 
is_excepted(exceptions, audited_account, region, resource, tags): is_tag_excepted = False if exceptions: excepted_accounts = exceptions.get("Accounts", []) + is_account_excepted = __is_item_matched__( + excepted_accounts, audited_account + ) + excepted_regions = exceptions.get("Regions", []) + is_region_excepted = __is_item_matched__(excepted_regions, finding_region) + excepted_resources = exceptions.get("Resources", []) + is_resource_excepted = __is_item_matched__( + excepted_resources, finding_resource + ) + excepted_tags = exceptions.get("Tags", []) - if exceptions: - if audited_account in excepted_accounts: - is_account_excepted = True - if region in excepted_regions: - is_region_excepted = True - for excepted_resource in excepted_resources: - if re.search(excepted_resource, resource): - is_resource_excepted = True - for tag in excepted_tags: - if tag in tags: - is_tag_excepted = True - if ( - ( - (excepted_accounts and is_account_excepted) - or not excepted_accounts - ) - and ( - (excepted_regions and is_region_excepted) - or not excepted_regions - ) - and ( - (excepted_resources and is_resource_excepted) - or not excepted_resources - ) - and ((excepted_tags and is_tag_excepted) or not excepted_tags) - ): - excepted = True + is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags) + + if ( + is_account_excepted + and is_region_excepted + and is_resource_excepted + and is_tag_excepted + ): + excepted = True return excepted except Exception as error: logger.critical( f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" ) sys.exit(1) + + +def __is_item_matched__(matched_items, finding_items): + """__is_item_matched__ return True if any of the matched_items are present in the finding_items, otherwise returns False.""" + try: + is_item_matched = False + if matched_items and (finding_items or finding_items == ""): + for item in matched_items: + if item == "*": + item = ".*" + if re.search(item, finding_items): + is_item_matched = True 
+ break + return is_item_matched + except Exception as error: + logger.critical( + f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" + ) + sys.exit(1) diff --git a/prowler/providers/aws/lib/arguments/arguments.py b/prowler/providers/aws/lib/arguments/arguments.py index 9ffe46a2fb..96fab145db 100644 --- a/prowler/providers/aws/lib/arguments/arguments.py +++ b/prowler/providers/aws/lib/arguments/arguments.py @@ -126,6 +126,7 @@ def init_parser(self): default=None, help="Path for allowlist yaml file. See example prowler/config/aws_allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/", ) + # Based Scans aws_based_scans_subparser = aws_parser.add_argument_group("AWS Based Scans") aws_based_scans_parser = aws_based_scans_subparser.add_mutually_exclusive_group() diff --git a/prowler/providers/aws/lib/policy_condition_parser/policy_condition_parser.py b/prowler/providers/aws/lib/policy_condition_parser/policy_condition_parser.py index 99a404069f..020aedc505 100644 --- a/prowler/providers/aws/lib/policy_condition_parser/policy_condition_parser.py +++ b/prowler/providers/aws/lib/policy_condition_parser/policy_condition_parser.py @@ -56,12 +56,15 @@ def is_account_only_allowed_in_condition( ): # if there is an arn/account without the source account -> we do not consider it safe # here by default we assume is true and look for false entries - is_condition_valid = True + is_condition_key_restrictive = True for item in condition_statement[condition_operator][value]: if source_account not in item: - is_condition_valid = False + is_condition_key_restrictive = False break + if is_condition_key_restrictive: + is_condition_valid = True + # value is a string elif isinstance( condition_statement[condition_operator][value], diff --git a/prowler/providers/aws/lib/security_hub/security_hub.py 
b/prowler/providers/aws/lib/security_hub/security_hub.py index 4b1553127e..c8aa716843 100644 --- a/prowler/providers/aws/lib/security_hub/security_hub.py +++ b/prowler/providers/aws/lib/security_hub/security_hub.py @@ -14,9 +14,11 @@ def prepare_security_hub_findings( findings: [], audit_info: AWS_Audit_Info, output_options, enabled_regions: [] ) -> dict: security_hub_findings_per_region = {} - # Create a key per region - for region in audit_info.audited_regions: + + # Create a key per audited region + for region in enabled_regions: security_hub_findings_per_region[region] = [] + for finding in findings: # We don't send the INFO findings to AWS Security Hub if finding.status == "INFO": @@ -47,8 +49,10 @@ def prepare_security_hub_findings( def verify_security_hub_integration_enabled_per_region( + partition: str, region: str, session: session.Session, + aws_account_number: str, ) -> bool: f"""verify_security_hub_integration_enabled returns True if the {SECURITY_HUB_INTEGRATION_NAME} is enabled for the given region. 
Otherwise returns false.""" prowler_integration_enabled = False @@ -62,7 +66,8 @@ def verify_security_hub_integration_enabled_per_region( security_hub_client.describe_hub() # Check if Prowler integration is enabled in Security Hub - if "prowler/prowler" not in str( + security_hub_prowler_integration_arn = f"arn:{partition}:securityhub:{region}:{aws_account_number}:product-subscription/{SECURITY_HUB_INTEGRATION_NAME}" + if security_hub_prowler_integration_arn not in str( security_hub_client.list_enabled_products_for_import() ): logger.error( diff --git a/prowler/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled.py b/prowler/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled.py index 608cfe1889..2b51630b13 100644 --- a/prowler/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled.py +++ b/prowler/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled.py @@ -19,17 +19,23 @@ class accessanalyzer_enabled(Check): f"IAM Access Analyzer {analyzer.name} is enabled." ) - elif analyzer.status == "NOT_AVAILABLE": - report.status = "FAIL" - report.status_extended = ( - f"IAM Access Analyzer in account {analyzer.name} is not enabled." - ) - else: - report.status = "FAIL" - report.status_extended = ( - f"IAM Access Analyzer {analyzer.name} is not active." - ) + if analyzer.status == "NOT_AVAILABLE": + report.status = "FAIL" + report.status_extended = f"IAM Access Analyzer in account {analyzer.name} is not enabled." + + else: + report.status = "FAIL" + report.status_extended = ( + f"IAM Access Analyzer {analyzer.name} is not active." 
+ ) + if ( + accessanalyzer_client.audit_config.get( + "allowlist_non_default_regions", False + ) + and not analyzer.region == accessanalyzer_client.region + ): + report.status = "WARNING" findings.append(report) diff --git a/prowler/providers/aws/services/codeartifact/codeartifact_packages_external_public_publishing_disabled/codeartifact_packages_external_public_publishing_disabled.py b/prowler/providers/aws/services/codeartifact/codeartifact_packages_external_public_publishing_disabled/codeartifact_packages_external_public_publishing_disabled.py index aa7f3d6b27..db5d85dece 100644 --- a/prowler/providers/aws/services/codeartifact/codeartifact_packages_external_public_publishing_disabled/codeartifact_packages_external_public_publishing_disabled.py +++ b/prowler/providers/aws/services/codeartifact/codeartifact_packages_external_public_publishing_disabled/codeartifact_packages_external_public_publishing_disabled.py @@ -16,7 +16,7 @@ class codeartifact_packages_external_public_publishing_disabled(Check): report = Check_Report_AWS(self.metadata()) report.region = repository.region report.resource_id = package.name - report.resource_arn = repository.arn + report.resource_arn = f"{repository.arn}/{package.namespace + ':' if package.namespace else ''}{package.name}" report.resource_tags = repository.tags if package.latest_version.origin.origin_type in ( diff --git a/prowler/providers/aws/services/codeartifact/codeartifact_service.py b/prowler/providers/aws/services/codeartifact/codeartifact_service.py index 8581e3f10c..d0d6a65701 100644 --- a/prowler/providers/aws/services/codeartifact/codeartifact_service.py +++ b/prowler/providers/aws/services/codeartifact/codeartifact_service.py @@ -63,7 +63,7 @@ class CodeArtifact(AWSService): list_packages_parameters = { "domain": self.repositories[repository].domain_name, "domainOwner": self.repositories[repository].domain_owner, - "repository": repository, + "repository": self.repositories[repository].name, } packages = [] for 
page in list_packages_paginator.paginate( @@ -83,18 +83,37 @@ class CodeArtifact(AWSService): ] ) # Get Latest Package Version - latest_version_information = ( - regional_client.list_package_versions( - domain=self.repositories[repository].domain_name, - domainOwner=self.repositories[ - repository - ].domain_owner, - repository=repository, - format=package_format, - package=package_name, - sortBy="PUBLISHED_TIME", + if package_namespace: + latest_version_information = ( + regional_client.list_package_versions( + domain=self.repositories[ + repository + ].domain_name, + domainOwner=self.repositories[ + repository + ].domain_owner, + repository=self.repositories[repository].name, + format=package_format, + namespace=package_namespace, + package=package_name, + sortBy="PUBLISHED_TIME", + ) + ) + else: + latest_version_information = ( + regional_client.list_package_versions( + domain=self.repositories[ + repository + ].domain_name, + domainOwner=self.repositories[ + repository + ].domain_owner, + repository=self.repositories[repository].name, + format=package_format, + package=package_name, + sortBy="PUBLISHED_TIME", + ) ) - ) latest_version = "" latest_origin_type = "UNKNOWN" latest_status = "Published" diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_not_used/ec2_securitygroup_not_used.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_not_used/ec2_securitygroup_not_used.py index d7d9daa810..8a232b33be 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_not_used/ec2_securitygroup_not_used.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_not_used/ec2_securitygroup_not_used.py @@ -18,10 +18,18 @@ class ec2_securitygroup_not_used(Check): report.status = "PASS" report.status_extended = f"Security group {security_group.name} ({security_group.id}) it is being used." 
sg_in_lambda = False + sg_associated = False for function in awslambda_client.functions.values(): if security_group.id in function.security_groups: sg_in_lambda = True - if len(security_group.network_interfaces) == 0 and not sg_in_lambda: + for sg in ec2_client.security_groups: + if security_group.id in sg.associated_sgs: + sg_associated = True + if ( + len(security_group.network_interfaces) == 0 + and not sg_in_lambda + and not sg_associated + ): report.status = "FAIL" report.status_extended = f"Security group {security_group.name} ({security_group.id}) it is not being used." diff --git a/prowler/providers/aws/services/ec2/ec2_service.py b/prowler/providers/aws/services/ec2/ec2_service.py index b57a90a7d6..4838f04115 100644 --- a/prowler/providers/aws/services/ec2/ec2_service.py +++ b/prowler/providers/aws/services/ec2/ec2_service.py @@ -117,6 +117,7 @@ class EC2(AWSService): if not self.audit_resources or ( is_resource_filtered(arn, self.audit_resources) ): + associated_sgs = [] # check if sg has public access to all ports all_public_ports = False for ingress_rule in sg["IpPermissions"]: @@ -128,7 +129,10 @@ class EC2(AWSService): in self.audited_checks ): all_public_ports = True - break + # check associated security groups + for sg_group in ingress_rule.get("UserIdGroupPairs", []): + if sg_group.get("GroupId"): + associated_sgs.append(sg_group["GroupId"]) self.security_groups.append( SecurityGroup( name=sg["GroupName"], @@ -138,6 +142,7 @@ class EC2(AWSService): ingress_rules=sg["IpPermissions"], egress_rules=sg["IpPermissionsEgress"], public_ports=all_public_ports, + associated_sgs=associated_sgs, vpc_id=sg["VpcId"], tags=sg.get("Tags"), ) @@ -464,6 +469,7 @@ class SecurityGroup(BaseModel): id: str vpc_id: str public_ports: bool + associated_sgs: list network_interfaces: list[str] = [] ingress_rules: list[dict] egress_rules: list[dict] diff --git a/prowler/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed.py 
b/prowler/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed.py index eb9666fd73..fb6aa64c26 100644 --- a/prowler/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed.py +++ b/prowler/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed.py @@ -6,7 +6,7 @@ class guardduty_centrally_managed(Check): def execute(self): findings = [] for detector in guardduty_client.detectors: - if detector.id: + if detector.id and detector.enabled_in_account: report = Check_Report_AWS(self.metadata()) report.region = detector.region report.resource_id = detector.id diff --git a/prowler/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings.py b/prowler/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings.py index d4ae724299..ad38fd96e6 100644 --- a/prowler/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings.py +++ b/prowler/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings.py @@ -6,7 +6,7 @@ class guardduty_no_high_severity_findings(Check): def execute(self): findings = [] for detector in guardduty_client.detectors: - if detector.id: + if detector.id and detector.enabled_in_account: report = Check_Report_AWS(self.metadata()) report.region = detector.region report.resource_id = detector.id diff --git a/prowler/providers/aws/services/iam/iam_service.py b/prowler/providers/aws/services/iam/iam_service.py index 7bb79988f5..ca31b14ac5 100644 --- a/prowler/providers/aws/services/iam/iam_service.py +++ b/prowler/providers/aws/services/iam/iam_service.py @@ -139,7 +139,10 @@ class IAM(AWSService): logger.warning( f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) - + else: + logger.error( + f"{self.region} -- 
{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" @@ -208,14 +211,24 @@ class IAM(AWSService): reuse_prevention=reuse_prevention, hard_expiry=hard_expiry, ) - except Exception as error: - if "NoSuchEntity" in str(error): + + except ClientError as error: + if error.response["Error"]["Code"] == "NoSuchEntity": # Password policy does not exist stored_password_policy = None + logger.warning( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) else: logger.error( f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) + + except Exception as error: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + finally: return stored_password_policy @@ -268,17 +281,22 @@ class IAM(AWSService): logger.info("IAM - List Attached Group Policies...") try: for group in self.groups: - list_attached_group_policies_paginator = self.client.get_paginator( - "list_attached_group_policies" - ) - attached_group_policies = [] - for page in list_attached_group_policies_paginator.paginate( - GroupName=group.name - ): - for attached_group_policy in page["AttachedPolicies"]: - attached_group_policies.append(attached_group_policy) + try: + list_attached_group_policies_paginator = self.client.get_paginator( + "list_attached_group_policies" + ) + attached_group_policies = [] + for page in list_attached_group_policies_paginator.paginate( + GroupName=group.name + ): + for attached_group_policy in page["AttachedPolicies"]: + attached_group_policies.append(attached_group_policy) - group.attached_policies = attached_group_policies + group.attached_policies = attached_group_policies + except Exception as error: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) 
except Exception as error: logger.error( f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" @@ -337,18 +355,33 @@ class IAM(AWSService): logger.info("IAM - List Attached User Policies...") try: for user in self.users: - attached_user_policies = [] - get_user_attached_policies_paginator = self.client.get_paginator( - "list_attached_user_policies" - ) - for page in get_user_attached_policies_paginator.paginate( - UserName=user.name - ): - for policy in page["AttachedPolicies"]: - attached_user_policies.append(policy) + try: + attached_user_policies = [] + get_user_attached_policies_paginator = self.client.get_paginator( + "list_attached_user_policies" + ) + for page in get_user_attached_policies_paginator.paginate( + UserName=user.name + ): + for policy in page["AttachedPolicies"]: + attached_user_policies.append(policy) - user.attached_policies = attached_user_policies + user.attached_policies = attached_user_policies + except ClientError as error: + if error.response["Error"]["Code"] == "NoSuchEntity": + logger.warning( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + else: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + except Exception as error: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" @@ -371,10 +404,19 @@ class IAM(AWSService): role.attached_policies = attached_role_policies except ClientError as error: - if error.response["Error"]["Code"] == "NoSuchEntityException": + if error.response["Error"]["Code"] == "NoSuchEntity": logger.warning( f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) + else: + logger.error( + f"{self.region} -- 
{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + except Exception as error: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( @@ -639,8 +681,16 @@ class IAM(AWSService): response = self.client.list_role_tags(RoleName=role.name)["Tags"] role.tags = response except ClientError as error: - if error.response["Error"]["Code"] == "NoSuchEntityException": + if error.response["Error"]["Code"] == "NoSuchEntity": role.tags = [] + else: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + except Exception as error: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( @@ -653,8 +703,12 @@ class IAM(AWSService): response = self.client.list_user_tags(UserName=user.name)["Tags"] user.tags = response except ClientError as error: - if error.response["Error"]["Code"] == "NoSuchEntityException": + if error.response["Error"]["Code"] == "NoSuchEntity": user.tags = [] + else: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( @@ -664,13 +718,22 @@ class IAM(AWSService): try: for policy in self.policies: try: - response = self.client.list_policy_tags(PolicyArn=policy.arn)[ - "Tags" - ] - policy.tags = response + if policy.type != "Inline": + response = self.client.list_policy_tags(PolicyArn=policy.arn)[ + "Tags" + ] + policy.tags = response except ClientError as error: - if error.response["Error"]["Code"] == "NoSuchEntityException": + if error.response["Error"]["Code"] == "NoSuchEntity": policy.tags = [] + else: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + except Exception as error: + logger.error( + f"{self.region} -- 
{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( @@ -697,9 +760,19 @@ class IAM(AWSService): ] except ClientError as error: + if error.response["Error"]["Code"] == "NoSuchEntity": + logger.warning( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + else: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + except Exception as error: logger.error( f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) + except Exception as error: logger.error( f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" @@ -717,6 +790,15 @@ class IAM(AWSService): "AccessKeyMetadata" ] except ClientError as error: + if error.response["Error"]["Code"] == "NoSuchEntity": + logger.warning( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + else: + logger.error( + f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + except Exception as error: logger.error( f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) diff --git a/prowler/providers/aws/services/rds/rds_instance_deprecated_engine_version/rds_instance_deprecated_engine_version.py b/prowler/providers/aws/services/rds/rds_instance_deprecated_engine_version/rds_instance_deprecated_engine_version.py index c2e32ef112..6bf0ea1b42 100644 --- a/prowler/providers/aws/services/rds/rds_instance_deprecated_engine_version/rds_instance_deprecated_engine_version.py +++ b/prowler/providers/aws/services/rds/rds_instance_deprecated_engine_version/rds_instance_deprecated_engine_version.py @@ -13,9 +13,14 @@ class rds_instance_deprecated_engine_version(Check): report.resource_arn = db_instance.arn report.resource_tags = db_instance.tags report.status_extended = f"RDS instance {db_instance.id} is 
using a deprecated engine {db_instance.engine} with version {db_instance.engine_version}." - if ( - db_instance.engine_version + hasattr( + rds_client.db_engines.get(db_instance.region, {}).get( + db_instance.engine, {} + ), + "engine_versions", + ) + and db_instance.engine_version in rds_client.db_engines[db_instance.region][ db_instance.engine ].engine_versions diff --git a/prowler/providers/aws/services/sqs/sqs_service.py b/prowler/providers/aws/services/sqs/sqs_service.py index b04c666a0e..bdc3e6a92d 100644 --- a/prowler/providers/aws/services/sqs/sqs_service.py +++ b/prowler/providers/aws/services/sqs/sqs_service.py @@ -16,23 +16,30 @@ class SQS(AWSService): super().__init__(__class__.__name__, audit_info) self.queues = [] self.__threading_call__(self.__list_queues__) - self.__get_queue_attributes__(self.regional_clients) + self.__get_queue_attributes__() self.__list_queue_tags__() def __list_queues__(self, regional_client): logger.info("SQS - describing queues...") try: list_queues_paginator = regional_client.get_paginator("list_queues") - for page in list_queues_paginator.paginate(): + # The SQS API uses nonstandard pagination + # you must specify a PageSize if there are more than 1000 queues + for page in list_queues_paginator.paginate( + PaginationConfig={"PageSize": 1000} + ): if "QueueUrls" in page: for queue in page["QueueUrls"]: - arn = f"arn:{self.audited_partition}:sqs:{regional_client.region}:{self.audited_account}:{queue}" + # the queue name is the last path segment of the url + queue_name = queue.split("/")[-1] + arn = f"arn:{self.audited_partition}:sqs:{regional_client.region}:{self.audited_account}:{queue_name}" if not self.audit_resources or ( is_resource_filtered(arn, self.audit_resources) ): self.queues.append( Queue( arn=arn, + name=queue_name, id=queue, region=regional_client.region, ) @@ -42,28 +49,46 @@ class SQS(AWSService): f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) - def 
__get_queue_attributes__(self, regional_clients): + def __get_queue_attributes__(self): try: logger.info("SQS - describing queue attributes...") for queue in self.queues: - regional_client = regional_clients[queue.region] - queue_attributes = regional_client.get_queue_attributes( - QueueUrl=queue.id, AttributeNames=["All"] - ) - if "Attributes" in queue_attributes: - if "Policy" in queue_attributes["Attributes"]: - queue.policy = loads(queue_attributes["Attributes"]["Policy"]) - if "KmsMasterKeyId" in queue_attributes["Attributes"]: - queue.kms_key_id = queue_attributes["Attributes"][ - "KmsMasterKeyId" - ] - if "SqsManagedSseEnabled" in queue_attributes["Attributes"]: - if ( - queue_attributes["Attributes"]["SqsManagedSseEnabled"] - == "true" - ): - queue.kms_key_id = "SqsManagedSseEnabled" - + try: + regional_client = self.regional_clients[queue.region] + queue_attributes = regional_client.get_queue_attributes( + QueueUrl=queue.id, AttributeNames=["All"] + ) + if "Attributes" in queue_attributes: + if "Policy" in queue_attributes["Attributes"]: + queue.policy = loads( + queue_attributes["Attributes"]["Policy"] + ) + if "KmsMasterKeyId" in queue_attributes["Attributes"]: + queue.kms_key_id = queue_attributes["Attributes"][ + "KmsMasterKeyId" + ] + if "SqsManagedSseEnabled" in queue_attributes["Attributes"]: + if ( + queue_attributes["Attributes"]["SqsManagedSseEnabled"] + == "true" + ): + queue.kms_key_id = "SqsManagedSseEnabled" + except ClientError as error: + if ( + error.response["Error"]["Code"] + == "AWS.SimpleQueueService.NonExistentQueue" + ): + logger.warning( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + else: + logger.error( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + except Exception as error: + logger.error( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except 
Exception as error: logger.error( f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" @@ -87,6 +112,14 @@ class SQS(AWSService): logger.warning( f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) + else: + logger.error( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + except Exception as error: + logger.error( + f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) except Exception as error: logger.error( @@ -96,6 +129,7 @@ class SQS(AWSService): class Queue(BaseModel): id: str + name: str arn: str region: str policy: dict = None diff --git a/prowler/providers/azure/azure_provider.py b/prowler/providers/azure/azure_provider.py index 2f054ec99e..52f5459a50 100644 --- a/prowler/providers/azure/azure_provider.py +++ b/prowler/providers/azure/azure_provider.py @@ -7,6 +7,7 @@ from msgraph.core import GraphClient from prowler.lib.logger import logger from prowler.providers.azure.lib.audit_info.models import Azure_Identity_Info +from prowler.providers.azure.lib.regions.regions import get_regions_config class Azure_Provider: @@ -18,12 +19,14 @@ class Azure_Provider: managed_entity_auth: bool, subscription_ids: list, tenant_id: str, + region: str, ): logger.info("Instantiating Azure Provider ...") - self.credentials = self.__set_credentials__( + self.region_config = self.__get_region_config__(region) + self.credentials = self.__get_credentials__( az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id ) - self.identity = self.__set_identity_info__( + self.identity = self.__get_identity_info__( self.credentials, az_cli_auth, sp_env_auth, @@ -32,7 +35,10 @@ class Azure_Provider: subscription_ids, ) - def __set_credentials__( + def __get_region_config__(self, region): + return get_regions_config(region) + + def __get_credentials__( self, az_cli_auth, 
sp_env_auth, browser_auth, managed_entity_auth, tenant_id ): # Browser auth creds cannot be set with DefaultAzureCredentials() @@ -52,6 +58,8 @@ class Azure_Provider: exclude_shared_token_cache_credential=True, # Azure Auth using PowerShell is not supported exclude_powershell_credential=True, + # set Authority of a Microsoft Entra endpoint + authority=self.region_config["authority"], ) except Exception as error: logger.critical("Failed to retrieve azure credentials") @@ -61,7 +69,6 @@ class Azure_Provider: sys.exit(1) else: try: - print(tenant_id) credentials = InteractiveBrowserCredential(tenant_id=tenant_id) except Exception as error: logger.critical("Failed to retrieve azure credentials") @@ -83,7 +90,7 @@ class Azure_Provider: ) sys.exit(1) - def __set_identity_info__( + def __get_identity_info__( self, credentials, az_cli_auth, @@ -153,7 +160,11 @@ class Azure_Provider: logger.info( "Trying to subscriptions and tenant ids to populate identity structure ..." ) - subscriptions_client = SubscriptionClient(credential=credentials) + subscriptions_client = SubscriptionClient( + credential=credentials, + base_url=self.region_config["base_url"], + credential_scopes=self.region_config["credential_scopes"], + ) if not subscription_ids: logger.info("Scanning all the Azure subscriptions...") for subscription in subscriptions_client.subscriptions.list(): @@ -195,3 +206,6 @@ class Azure_Provider: def get_identity(self): return self.identity + + def get_region_config(self): + return self.region_config diff --git a/prowler/providers/azure/lib/arguments/arguments.py b/prowler/providers/azure/lib/arguments/arguments.py index 5c1316e672..30cf7a33d7 100644 --- a/prowler/providers/azure/lib/arguments/arguments.py +++ b/prowler/providers/azure/lib/arguments/arguments.py @@ -1,3 +1,6 @@ +from argparse import ArgumentTypeError + + def init_parser(self): """Init the Azure Provider CLI parser""" azure_parser = self.subparsers.add_parser( @@ -40,3 +43,27 @@ def init_parser(self): 
default=None, help="Azure Tenant ID to be used with --browser-auth option", ) + # Regions + azure_regions_subparser = azure_parser.add_argument_group("Regions") + azure_regions_subparser.add_argument( + "--azure-region", + nargs="?", + default="AzureCloud", + type=validate_azure_region, + help="Azure region from `az cloud list --output table`, by default AzureCloud", + ) + + +def validate_azure_region(region): + """validate_azure_region validates if the region passed as argument is valid""" + regions_allowed = [ + "AzureChinaCloud", + "AzureUSGovernment", + "AzureGermanCloud", + "AzureCloud", + ] + if region not in regions_allowed: + raise ArgumentTypeError( + f"Region {region} not allowed, allowed regions are {' '.join(regions_allowed)}" + ) + return region diff --git a/prowler/providers/azure/lib/audit_info/audit_info.py b/prowler/providers/azure/lib/audit_info/audit_info.py index 62144444db..098f63c77e 100644 --- a/prowler/providers/azure/lib/audit_info/audit_info.py +++ b/prowler/providers/azure/lib/audit_info/audit_info.py @@ -1,6 +1,7 @@ from prowler.providers.azure.lib.audit_info.models import ( Azure_Audit_Info, Azure_Identity_Info, + Azure_Region_Config, ) azure_audit_info = Azure_Audit_Info( @@ -9,4 +10,5 @@ azure_audit_info = Azure_Audit_Info( audit_resources=None, audit_metadata=None, audit_config=None, + azure_region_config=Azure_Region_Config(), ) diff --git a/prowler/providers/azure/lib/audit_info/models.py b/prowler/providers/azure/lib/audit_info/models.py index dcbe1adec0..978ec34259 100644 --- a/prowler/providers/azure/lib/audit_info/models.py +++ b/prowler/providers/azure/lib/audit_info/models.py @@ -13,6 +13,13 @@ class Azure_Identity_Info(BaseModel): subscriptions: dict = {} +class Azure_Region_Config(BaseModel): + name: str = "" + authority: str = None + base_url: str = "" + credential_scopes: list = [] + + @dataclass class Azure_Audit_Info: credentials: DefaultAzureCredential @@ -20,12 +27,20 @@ class Azure_Audit_Info: audit_resources: 
Optional[Any] audit_metadata: Optional[Any] audit_config: dict + azure_region_config: Azure_Region_Config def __init__( - self, credentials, identity, audit_metadata, audit_resources, audit_config + self, + credentials, + identity, + audit_metadata, + audit_resources, + audit_config, + azure_region_config, ): self.credentials = credentials self.identity = identity self.audit_metadata = audit_metadata self.audit_resources = audit_resources self.audit_config = audit_config + self.azure_region_config = azure_region_config diff --git a/prowler/providers/azure/lib/exception/__init__.py b/prowler/providers/azure/lib/exception/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/azure/lib/exception/exception.py b/prowler/providers/azure/lib/exception/exception.py new file mode 100644 index 0000000000..51fe95d001 --- /dev/null +++ b/prowler/providers/azure/lib/exception/exception.py @@ -0,0 +1,11 @@ +class AzureException(Exception): + """ + Exception raised when dealing with Azure Provider/Azure audit info instance + + Attributes: + message -- message to be displayed + """ + + def __init__(self, message): + self.message = message + super().__init__(self.message) diff --git a/prowler/providers/azure/lib/regions/__init__.py b/prowler/providers/azure/lib/regions/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/azure/lib/regions/regions.py b/prowler/providers/azure/lib/regions/regions.py new file mode 100644 index 0000000000..a9d29d7eca --- /dev/null +++ b/prowler/providers/azure/lib/regions/regions.py @@ -0,0 +1,38 @@ +from azure.identity import AzureAuthorityHosts +from msrestazure.azure_cloud import ( + AZURE_CHINA_CLOUD, + AZURE_GERMAN_CLOUD, + AZURE_US_GOV_CLOUD, +) + + +def get_regions_config(region): + allowed_regions = { + "AzureCloud": { + "authority": None, + "base_url": "https://management.azure.com", + "credential_scopes": ["https://management.azure.com/.default"], + }, + 
"AzureChinaCloud": { + "authority": AzureAuthorityHosts.AZURE_CHINA, + "base_url": AZURE_CHINA_CLOUD.endpoints.resource_manager, + "credential_scopes": [ + AZURE_CHINA_CLOUD.endpoints.resource_manager + "/.default" + ], + }, + "AzureUSGovernment": { + "authority": AzureAuthorityHosts.AZURE_GOVERNMENT, + "base_url": AZURE_US_GOV_CLOUD.endpoints.resource_manager, + "credential_scopes": [ + AZURE_US_GOV_CLOUD.endpoints.resource_manager + "/.default" + ], + }, + "AzureGermanCloud": { + "authority": AzureAuthorityHosts.AZURE_GERMANY, + "base_url": AZURE_GERMAN_CLOUD.endpoints.resource_manager, + "credential_scopes": [ + AZURE_GERMAN_CLOUD.endpoints.resource_manager + "/.default" + ], + }, + } + return allowed_regions[region] diff --git a/prowler/providers/azure/lib/service/service.py b/prowler/providers/azure/lib/service/service.py index f45747c50d..305f25898e 100644 --- a/prowler/providers/azure/lib/service/service.py +++ b/prowler/providers/azure/lib/service/service.py @@ -9,17 +9,27 @@ class AzureService: audit_info: Azure_Audit_Info, ): self.clients = self.__set_clients__( - audit_info.identity.subscriptions, audit_info.credentials, service + audit_info.identity.subscriptions, + audit_info.credentials, + service, + audit_info.azure_region_config, ) self.subscriptions = audit_info.identity.subscriptions - def __set_clients__(self, subscriptions, credentials, service): + def __set_clients__(self, subscriptions, credentials, service, region_config): clients = {} try: for display_name, id in subscriptions.items(): clients.update( - {display_name: service(credential=credentials, subscription_id=id)} + { + display_name: service( + credential=credentials, + subscription_id=id, + base_url=region_config.base_url, + credential_scopes=region_config.credential_scopes, + ) + } ) except Exception as error: logger.error( diff --git a/prowler/providers/common/audit_info.py b/prowler/providers/common/audit_info.py index d2ee136183..ec8b302b5c 100644 --- 
a/prowler/providers/common/audit_info.py +++ b/prowler/providers/common/audit_info.py @@ -26,7 +26,11 @@ from prowler.providers.aws.lib.resource_api_tagging.resource_api_tagging import ) from prowler.providers.azure.azure_provider import Azure_Provider from prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info -from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info +from prowler.providers.azure.lib.audit_info.models import ( + Azure_Audit_Info, + Azure_Region_Config, +) +from prowler.providers.azure.lib.exception.exception import AzureException from prowler.providers.gcp.gcp_provider import GCP_Provider from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info @@ -63,7 +67,7 @@ GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: {Fore.Y report = f""" This report is being generated using the identity below: -Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL} +Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL} Azure Region: {Fore.YELLOW}[{audit_info.azure_region_config.name}]{Style.RESET_ALL} Azure Subscriptions: {Fore.YELLOW}{printed_subscriptions}{Style.RESET_ALL} Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RESET_ALL} Azure Identity ID: {Fore.YELLOW}[{audit_info.identity.identity_id}]{Style.RESET_ALL} """ @@ -282,17 +286,21 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE browser_auth = arguments.get("browser_auth") managed_entity_auth = arguments.get("managed_entity_auth") tenant_id = arguments.get("tenant_id") + + logger.info("Checking if region is different than default one") + region = 
arguments.get("azure_region") + if ( not az_cli_auth and not sp_env_auth and not browser_auth and not managed_entity_auth ): - raise Exception( + raise AzureException( "Azure provider requires at least one authentication method set: [--az-cli-auth | --sp-env-auth | --browser-auth | --managed-identity-auth]" ) if (not browser_auth and tenant_id) or (browser_auth and not tenant_id): - raise Exception( + raise AzureException( "Azure Tenant ID (--tenant-id) is required only for browser authentication mode" ) @@ -303,9 +311,17 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE managed_entity_auth, subscription_ids, tenant_id, + region, ) azure_audit_info.credentials = azure_provider.get_credentials() azure_audit_info.identity = azure_provider.get_identity() + region_config = azure_provider.get_region_config() + azure_audit_info.azure_region_config = Azure_Region_Config( + name=region, + authority=region_config["authority"], + base_url=region_config["base_url"], + credential_scopes=region_config["credential_scopes"], + ) if not arguments.get("only_logs"): self.print_azure_credentials(azure_audit_info) diff --git a/prowler/providers/common/clean.py b/prowler/providers/common/clean.py new file mode 100644 index 0000000000..d06e1e6941 --- /dev/null +++ b/prowler/providers/common/clean.py @@ -0,0 +1,32 @@ +import importlib +import sys +from shutil import rmtree + +from prowler.config.config import default_output_directory +from prowler.lib.logger import logger + + +def clean_provider_local_output_directories(args): + """ + clean_provider_local_output_directories cleans deletes local custom dirs when output is sent to remote provider storage + """ + try: + # import provider cleaning function + provider_clean_function = f"clean_{args.provider}_local_output_directories" + getattr(importlib.import_module(__name__), provider_clean_function)(args) + except AttributeError as attribute_exception: + logger.info( + f"Cleaning local output directories 
not initialized for provider {args.provider}: {attribute_exception}" + ) + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + sys.exit(1) + + +def clean_aws_local_output_directories(args): + """clean_aws_provider_local_output_directories deletes local custom dirs when output is sent to remote provider storage for aws provider""" + if args.output_bucket or args.output_bucket_no_assume: + if args.output_directory != default_output_directory: + rmtree(args.output_directory) diff --git a/prowler/providers/common/outputs.py b/prowler/providers/common/outputs.py index 1360764505..58567df1e8 100644 --- a/prowler/providers/common/outputs.py +++ b/prowler/providers/common/outputs.py @@ -29,6 +29,21 @@ def set_provider_output_options( return provider_output_options +def get_provider_output_model(audit_info_class_name): + """ + get_provider_output_model returns the model _Check_Output_CSV for each provider + """ + # from AWS_Audit_Info -> AWS -> aws -> Aws + output_provider = audit_info_class_name.split("_", 1)[0].lower().capitalize() + output_provider_model_name = f"{output_provider}_Check_Output_CSV" + output_provider_models_path = "prowler.lib.outputs.models" + output_provider_model = getattr( + importlib.import_module(output_provider_models_path), output_provider_model_name + ) + + return output_provider_model + + @dataclass class Provider_Output_Options: is_quiet: bool diff --git a/prowler/providers/gcp/gcp_provider.py b/prowler/providers/gcp/gcp_provider.py index fe8ae67da0..7f5b700f99 100644 --- a/prowler/providers/gcp/gcp_provider.py +++ b/prowler/providers/gcp/gcp_provider.py @@ -3,10 +3,8 @@ import sys from google import auth from googleapiclient import discovery -from googleapiclient.discovery import Resource from prowler.lib.logger import logger -from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info class GCP_Provider: @@ -92,16 +90,3 @@ class GCP_Provider: 
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) return [] - - -def generate_client( - service: str, - api_version: str, - audit_info: GCP_Audit_Info, -) -> Resource: - try: - return discovery.build(service, api_version, credentials=audit_info.credentials) - except Exception as error: - logger.error( - f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) diff --git a/prowler/providers/gcp/lib/service/service.py b/prowler/providers/gcp/lib/service/service.py index 7b8b44e1c7..30bcc5a2e7 100644 --- a/prowler/providers/gcp/lib/service/service.py +++ b/prowler/providers/gcp/lib/service/service.py @@ -3,10 +3,11 @@ import threading import google_auth_httplib2 import httplib2 from colorama import Fore, Style +from google.oauth2.credentials import Credentials from googleapiclient import discovery +from googleapiclient.discovery import Resource from prowler.lib.logger import logger -from prowler.providers.gcp.gcp_provider import generate_client from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info @@ -25,7 +26,9 @@ class GCPService: self.api_version = api_version self.default_project_id = audit_info.default_project_id self.region = region - self.client = generate_client(service, api_version, audit_info) + self.client = self.__generate_client__( + service, api_version, audit_info.credentials + ) # Only project ids that have their API enabled will be scanned self.project_ids = self.__is_api_active__(audit_info.project_ids) @@ -66,3 +69,16 @@ class GCPService: f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" ) return project_ids + + def __generate_client__( + self, + service: str, + api_version: str, + credentials: Credentials, + ) -> Resource: + try: + return discovery.build(service, api_version, credentials=credentials) + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) diff --git a/pyproject.toml b/pyproject.toml 
index 003906fb4a..99a7318c81 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,10 +22,10 @@ packages = [ {include = "prowler"} ] readme = "README.md" -version = "3.11.0" +version = "3.11.3" [tool.poetry.dependencies] -alive-progress = "3.1.4" +alive-progress = "3.1.5" awsipranges = "0.3.3" azure-identity = "1.15.0" azure-mgmt-authorization = "4.0.0" @@ -33,21 +33,23 @@ azure-mgmt-security = "5.0.0" azure-mgmt-sql = "3.0.1" azure-mgmt-storage = "21.1.0" azure-mgmt-subscription = "3.1.1" -azure-storage-blob = "12.18.3" +azure-storage-blob = "12.19.0" boto3 = "1.26.165" botocore = "1.29.165" colorama = "0.4.6" detect-secrets = "1.4.0" -google-api-python-client = "2.105.0" +google-api-python-client = "2.108.0" google-auth-httplib2 = "^0.1.0" +jsonschema = "4.18.0" mkdocs = {version = "1.5.3", optional = true} -mkdocs-material = {version = "9.4.7", optional = true} +mkdocs-material = {version = "9.4.10", optional = true} msgraph-core = "0.2.2" +msrestazure = "^0.6.4" pydantic = "1.10.13" -python = "^3.9" +python = ">=3.9,<3.12" schema = "0.7.5" shodan = "1.30.1" -slack-sdk = "3.23.0" +slack-sdk = "3.24.0" tabulate = "0.9.0" [tool.poetry.extras] @@ -61,13 +63,13 @@ docker = "6.1.3" flake8 = "6.1.0" freezegun = "1.2.2" mock = "5.1.0" -moto = "4.2.7" +moto = {extras = ["all"], version = "4.2.9"} openapi-spec-validator = "0.7.1" pylint = "3.0.2" pytest = "7.4.3" pytest-cov = "4.1.0" pytest-randomly = "3.15.0" -pytest-xdist = "3.3.1" +pytest-xdist = "3.4.0" safety = "2.3.5" vulture = "2.10" diff --git a/tests/config/config_test.py b/tests/config/config_test.py index ab116d35e9..7f2c0d5652 100644 --- a/tests/config/config_test.py +++ b/tests/config/config_test.py @@ -63,7 +63,7 @@ class Test_Config: def test_check_current_version_with_latest(self): assert ( check_current_version() - == f"Prowler {MOCK_PROWLER_VERSION} (it is the latest version, yay!)" + == f"Prowler {MOCK_PROWLER_VERSION} (You are running the latest version, yay!)" ) @mock.patch( diff --git 
a/tests/lib/check/custom_checks_metadata_test.py b/tests/lib/check/custom_checks_metadata_test.py new file mode 100644 index 0000000000..c7f12459d9 --- /dev/null +++ b/tests/lib/check/custom_checks_metadata_test.py @@ -0,0 +1,164 @@ +import logging +import os + +import pytest + +from prowler.lib.check.custom_checks_metadata import ( + parse_custom_checks_metadata_file, + update_check_metadata, + update_checks_metadata, +) +from prowler.lib.check.models import ( + Check_Metadata_Model, + Code, + Recommendation, + Remediation, +) + +CUSTOM_CHECKS_METADATA_FIXTURE_FILE = f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/custom_checks_metadata_example.yaml" +CUSTOM_CHECKS_METADATA_FIXTURE_FILE_NOT_VALID = f"{os.path.dirname(os.path.realpath(__file__))}/fixtures/custom_checks_metadata_example_not_valid.yaml" + +AWS_PROVIDER = "aws" +AZURE_PROVIDER = "azure" +GCP_PROVIDER = "gcp" + +S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME = "s3_bucket_level_public_access_block" +S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY = "medium" + + +class TestCustomChecksMetadata: + def get_custom_check_metadata(self): + return Check_Metadata_Model( + Provider="aws", + CheckID=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME, + CheckTitle="Check S3 Bucket Level Public Access Block.", + CheckType=["Data Protection"], + CheckAliases=[], + ServiceName="s3", + SubServiceName="", + ResourceIdTemplate="arn:partition:s3:::bucket_name", + Severity=S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY, + ResourceType="AwsS3Bucket", + Description="Check S3 Bucket Level Public Access Block.", + Risk="Public access policies may be applied to sensitive data buckets.", + RelatedUrl="https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html", + Remediation=Remediation( + Code=Code( + NativeIaC="", + Terraform="https://docs.bridgecrew.io/docs/bc_aws_s3_20#terraform", + CLI="aws s3api put-public-access-block --region --public-access-block-configuration 
BlockPublicAcls=true,IgnorePublicAcls=true,BlockPublicPolicy=true,RestrictPublicBuckets=true --bucket ", + Other="https://github.com/cloudmatos/matos/tree/master/remediations/aws/s3/s3/block-public-access", + ), + Recommendation=Recommendation( + Text="You can enable Public Access Block at the bucket level to prevent the exposure of your data stored in S3.", + Url="https://docs.aws.amazon.com/AmazonS3/latest/userguide/access-control-block-public-access.html", + ), + ), + Categories=[], + DependsOn=[], + RelatedTo=[], + Notes="", + Compliance=[], + ) + + def test_parse_custom_checks_metadata_file_for_aws(self): + assert parse_custom_checks_metadata_file( + AWS_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE + ) == { + "Checks": { + "s3_bucket_level_public_access_block": {"Severity": "high"}, + "s3_bucket_no_mfa_delete": {"Severity": "high"}, + } + } + + def test_parse_custom_checks_metadata_file_for_azure(self): + assert parse_custom_checks_metadata_file( + AZURE_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE + ) == {"Checks": {"sqlserver_auditing_enabled": {"Severity": "high"}}} + + def test_parse_custom_checks_metadata_file_for_gcp(self): + assert parse_custom_checks_metadata_file( + GCP_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE + ) == {"Checks": {"bigquery_dataset_cmk_encryption": {"Severity": "low"}}} + + def test_parse_custom_checks_metadata_file_for_aws_validation_error(self, caplog): + caplog.set_level(logging.CRITICAL) + + with pytest.raises(SystemExit) as error: + parse_custom_checks_metadata_file( + AWS_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE_NOT_VALID + ) + assert error.type == SystemExit + assert error.value.code == 1 + assert "'Checks' is a required property" in caplog.text + + def test_update_checks_metadata(self): + updated_severity = "high" + bulk_checks_metadata = { + S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata(), + } + custom_checks_metadata = { + "Checks": { + S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: { + 
"Severity": updated_severity + }, + } + } + + bulk_checks_metadata_updated = update_checks_metadata( + bulk_checks_metadata, custom_checks_metadata + ).get(S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME) + + assert bulk_checks_metadata_updated.Severity == updated_severity + + def test_update_checks_metadata_not_present_field(self): + bulk_checks_metadata = { + S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: self.get_custom_check_metadata(), + } + custom_checks_metadata = { + "Checks": { + S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME: { + "RandomField": "random_value" + }, + } + } + + bulk_checks_metadata_updated = update_checks_metadata( + bulk_checks_metadata, custom_checks_metadata + ).get(S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME) + + assert ( + bulk_checks_metadata_updated.Severity + == S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY + ) + + def test_update_check_metadata(self): + updated_severity = "high" + custom_checks_metadata = {"Severity": updated_severity} + + check_metadata_updated = update_check_metadata( + self.get_custom_check_metadata(), custom_checks_metadata + ) + assert check_metadata_updated.Severity == updated_severity + + def test_update_check_metadata_not_present_field(self): + custom_checks_metadata = {"RandomField": "random_value"} + + check_metadata_updated = update_check_metadata( + self.get_custom_check_metadata(), custom_checks_metadata + ) + assert ( + check_metadata_updated.Severity + == S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY + ) + + def test_update_check_metadata_none_custom_metadata(self): + custom_checks_metadata = None + + check_metadata_updated = update_check_metadata( + self.get_custom_check_metadata(), custom_checks_metadata + ) + assert ( + check_metadata_updated.Severity + == S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY + ) diff --git a/tests/lib/check/fixtures/custom_checks_metadata_example.yaml b/tests/lib/check/fixtures/custom_checks_metadata_example.yaml new file mode 100644 index 0000000000..744051e4b2 --- /dev/null +++ 
b/tests/lib/check/fixtures/custom_checks_metadata_example.yaml @@ -0,0 +1,15 @@ +CustomChecksMetadata: + aws: + Checks: + s3_bucket_level_public_access_block: + Severity: high + s3_bucket_no_mfa_delete: + Severity: high + azure: + Checks: + sqlserver_auditing_enabled: + Severity: high + gcp: + Checks: + bigquery_dataset_cmk_encryption: + Severity: low diff --git a/tests/lib/check/fixtures/custom_checks_metadata_example_not_valid.yaml b/tests/lib/check/fixtures/custom_checks_metadata_example_not_valid.yaml new file mode 100644 index 0000000000..a5f7b06cbc --- /dev/null +++ b/tests/lib/check/fixtures/custom_checks_metadata_example_not_valid.yaml @@ -0,0 +1,5 @@ +CustomChecksMetadata: + aws: + Check: + s3_bucket_level_public_access_block: + Severity: high diff --git a/tests/lib/cli/parser_test.py b/tests/lib/cli/parser_test.py index e09f366d12..a307c541f6 100644 --- a/tests/lib/cli/parser_test.py +++ b/tests/lib/cli/parser_test.py @@ -1,9 +1,11 @@ import uuid +from argparse import ArgumentTypeError import pytest from mock import patch from prowler.lib.cli.parser import ProwlerArgumentParser +from prowler.providers.azure.lib.arguments.arguments import validate_azure_region prowler_command = "prowler" @@ -502,6 +504,18 @@ class Test_Parser: assert service_1 in parsed.services assert service_2 in parsed.services + def test_checks_parser_services_with_severity(self): + argument1 = "--services" + service_1 = "iam" + argument2 = "--severity" + severity = "low" + command = [prowler_command, argument1, service_1, argument2, severity] + parsed = self.parser.parse(command) + assert len(parsed.services) == 1 + assert service_1 in parsed.services + assert len(parsed.severity) == 1 + assert severity in parsed.severity + def test_checks_parser_informational_severity(self): argument = "--severity" severity = "informational" @@ -1038,6 +1052,14 @@ class Test_Parser: assert parsed.subscription_ids[0] == subscription_1 assert parsed.subscription_ids[1] == subscription_2 + def 
test_parser_azure_region(self): + argument = "--azure-region" + region = "AzureChinaCloud" + command = [prowler_command, "azure", argument, region] + parsed = self.parser.parse(command) + assert parsed.provider == "azure" + assert parsed.azure_region == region + # Test AWS flags with Azure provider def test_parser_azure_with_aws_flag(self, capsys): command = [prowler_command, "azure", "-p"] @@ -1080,3 +1102,33 @@ class Test_Parser: assert len(parsed.project_ids) == 2 assert parsed.project_ids[0] == project_1 assert parsed.project_ids[1] == project_2 + + def test_validate_azure_region_valid_regions(self): + expected_regions = [ + "AzureChinaCloud", + "AzureUSGovernment", + "AzureGermanCloud", + "AzureCloud", + ] + input_regions = [ + "AzureChinaCloud", + "AzureUSGovernment", + "AzureGermanCloud", + "AzureCloud", + ] + for region in input_regions: + assert validate_azure_region(region) in expected_regions + + def test_validate_azure_region_invalid_regions(self): + expected_regions = [ + "AzureChinaCloud", + "AzureUSGovernment", + "AzureGermanCloud", + "AzureCloud", + ] + invalid_region = "non-valid-region" + with pytest.raises( + ArgumentTypeError, + match=f"Region {invalid_region} not allowed, allowed regions are {' '.join(expected_regions)}", + ): + validate_azure_region(invalid_region) diff --git a/tests/lib/outputs/slack_test.py b/tests/lib/outputs/slack_test.py index b66093567e..02e572ea6f 100644 --- a/tests/lib/outputs/slack_test.py +++ b/tests/lib/outputs/slack_test.py @@ -11,6 +11,7 @@ from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info from prowler.providers.azure.lib.audit_info.models import ( Azure_Audit_Info, Azure_Identity_Info, + Azure_Region_Config, ) from prowler.providers.common.models import Audit_Metadata from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info @@ -76,6 +77,7 @@ class Test_Slack_Integration: audit_resources=None, audit_metadata=None, audit_config=None, + azure_region_config=Azure_Region_Config(), ) 
assert create_message_identity("aws", aws_audit_info) == ( f"AWS Account *{aws_audit_info.audited_account}*", diff --git a/tests/providers/aws/audit_info_utils.py b/tests/providers/aws/audit_info_utils.py index d2598a8c4a..596c3809de 100644 --- a/tests/providers/aws/audit_info_utils.py +++ b/tests/providers/aws/audit_info_utils.py @@ -5,9 +5,11 @@ from prowler.providers.common.models import Audit_Metadata AWS_REGION_US_EAST_1 = "us-east-1" AWS_REGION_EU_WEST_1 = "eu-west-1" +AWS_REGION_EU_WEST_2 = "eu-west-2" AWS_PARTITION = "aws" AWS_ACCOUNT_NUMBER = "123456789012" AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root" +AWS_COMMERCIAL_PARTITION = "aws" # Mocked AWS Audit Info diff --git a/tests/providers/aws/lib/allowlist/allowlist_test.py b/tests/providers/aws/lib/allowlist/allowlist_test.py index 4bd624be39..d54c19c881 100644 --- a/tests/providers/aws/lib/allowlist/allowlist_test.py +++ b/tests/providers/aws/lib/allowlist/allowlist_test.py @@ -8,15 +8,18 @@ from prowler.providers.aws.lib.allowlist.allowlist import ( is_allowlisted, is_allowlisted_in_check, is_allowlisted_in_region, + is_allowlisted_in_resource, is_allowlisted_in_tags, is_excepted, parse_allowlist_file, ) from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info from prowler.providers.common.models import Audit_Metadata - -AWS_ACCOUNT_NUMBER = "123456789012" -AWS_REGION = "us-east-1" +from tests.providers.aws.audit_info_utils import ( + AWS_ACCOUNT_NUMBER, + AWS_REGION_EU_WEST_1, + AWS_REGION_US_EAST_1, +) class Test_Allowlist: @@ -56,7 +59,7 @@ class Test_Allowlist: def test_s3_allowlist(self): audit_info = self.set_mocked_audit_info() # Create bucket and upload allowlist yaml - s3_resource = resource("s3", region_name=AWS_REGION) + s3_resource = resource("s3", region_name=AWS_REGION_US_EAST_1) s3_resource.create_bucket(Bucket="test-allowlist") s3_resource.Object("test-allowlist", "allowlist.yaml").put( Body=open( @@ -75,7 +78,7 @@ class Test_Allowlist: def 
test_dynamo_allowlist(self): audit_info = self.set_mocked_audit_info() # Create table and put item - dynamodb_resource = resource("dynamodb", region_name=AWS_REGION) + dynamodb_resource = resource("dynamodb", region_name=AWS_REGION_US_EAST_1) table_name = "test-allowlist" params = { "TableName": table_name, @@ -97,7 +100,7 @@ class Test_Allowlist: Item={ "Accounts": "*", "Checks": "iam_user_hardware_mfa_enabled", - "Regions": ["eu-west-1", AWS_REGION], + "Regions": [AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1], "Resources": ["keyword"], } ) @@ -107,7 +110,7 @@ class Test_Allowlist: in parse_allowlist_file( audit_info, "arn:aws:dynamodb:" - + AWS_REGION + + AWS_REGION_US_EAST_1 + ":" + str(AWS_ACCOUNT_NUMBER) + ":table/" @@ -119,7 +122,7 @@ class Test_Allowlist: def test_dynamo_allowlist_with_tags(self): audit_info = self.set_mocked_audit_info() # Create table and put item - dynamodb_resource = resource("dynamodb", region_name=AWS_REGION) + dynamodb_resource = resource("dynamodb", region_name=AWS_REGION_US_EAST_1) table_name = "test-allowlist" params = { "TableName": table_name, @@ -152,7 +155,7 @@ class Test_Allowlist: in parse_allowlist_file( audit_info, "arn:aws:dynamodb:" - + AWS_REGION + + AWS_REGION_US_EAST_1 + ":" + str(AWS_ACCOUNT_NUMBER) + ":table/" @@ -169,7 +172,7 @@ class Test_Allowlist: "*": { "Checks": { "check_test": { - "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["prowler", "^test", "prowler-pro"], } } @@ -183,7 +186,7 @@ class Test_Allowlist: finding_1.check_metadata = MagicMock finding_1.check_metadata.CheckID = "check_test" finding_1.status = "FAIL" - finding_1.region = AWS_REGION + finding_1.region = AWS_REGION_US_EAST_1 finding_1.resource_id = "prowler" finding_1.resource_tags = [] @@ -195,6 +198,66 @@ class Test_Allowlist: assert len(allowlisted_findings) == 1 assert allowlisted_findings[0].status == "WARNING" + def test_is_allowlisted_with_everything_excepted(self): + allowlist 
= { + "Accounts": { + "*": { + "Checks": { + "athena_*": { + "Regions": "*", + "Resources": "*", + "Tags": "*", + "Exceptions": { + "Accounts": ["*"], + "Regions": ["*"], + "Resources": ["*"], + "Tags": ["*"], + }, + } + } + } + } + } + + assert not is_allowlisted( + allowlist, + AWS_ACCOUNT_NUMBER, + "athena_1", + AWS_REGION_US_EAST_1, + "prowler", + "", + ) + + def test_is_allowlisted_with_default_allowlist(self): + allowlist = { + "Accounts": { + "*": { + "Checks": { + "*": { + "Tags": ["*"], + "Regions": ["*"], + "Resources": ["*"], + "Exceptions": { + "Tags": [], + "Regions": [], + "Accounts": [], + "Resources": [], + }, + } + } + } + } + } + + assert is_allowlisted( + allowlist, + AWS_ACCOUNT_NUMBER, + "athena_1", + AWS_REGION_US_EAST_1, + "prowler", + "", + ) + def test_is_allowlisted(self): # Allowlist example allowlist = { @@ -202,7 +265,7 @@ class Test_Allowlist: "*": { "Checks": { "check_test": { - "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["prowler", "^test", "prowler-pro"], } } @@ -211,22 +274,37 @@ class Test_Allowlist: } assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", "" - ) - - assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", "" - ) - - assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "prowler", + "", ) assert is_allowlisted( allowlist, AWS_ACCOUNT_NUMBER, "check_test", - AWS_REGION, + AWS_REGION_US_EAST_1, + "prowler-test", + "", + ) + + assert is_allowlisted( + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "test-prowler", + "", + ) + + assert is_allowlisted( + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, "prowler-pro-test", "", ) @@ -244,7 +322,7 @@ class Test_Allowlist: "*": { "Checks": { "check_test": { 
- "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": [".*"], } } @@ -253,15 +331,30 @@ class Test_Allowlist: } assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "prowler", + "", ) assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "prowler-test", + "", ) assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "test-prowler", + "", ) assert not ( @@ -277,7 +370,7 @@ class Test_Allowlist: "*": { "Checks": { "check_test": { - "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } @@ -286,15 +379,30 @@ class Test_Allowlist: } assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "prowler", + "", ) assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "prowler-test", + "", ) assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "test-prowler", + "", ) assert not ( @@ -310,7 +418,7 @@ class Test_Allowlist: "*": { "Checks": { "check_test_2": { - "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } @@ -318,7 +426,7 @@ class Test_Allowlist: AWS_ACCOUNT_NUMBER: { "Checks": { "check_test": { - "Regions": [AWS_REGION], + "Regions": [AWS_REGION_US_EAST_1], 
"Resources": ["*"], } } @@ -327,19 +435,39 @@ class Test_Allowlist: } assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test_2", AWS_REGION, "prowler", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test_2", + AWS_REGION_US_EAST_1, + "prowler", + "", ) assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "prowler", + "", ) assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "prowler-test", + "", ) assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "test-prowler", + "", ) assert not ( @@ -354,7 +482,7 @@ class Test_Allowlist: AWS_ACCOUNT_NUMBER: { "Checks": { "check_test": { - "Regions": [AWS_REGION], + "Regions": [AWS_REGION_US_EAST_1], "Resources": ["prowler"], } } @@ -363,7 +491,12 @@ class Test_Allowlist: } assert is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", "" + allowlist, + AWS_ACCOUNT_NUMBER, + "check_test", + AWS_REGION_US_EAST_1, + "prowler", + "", ) assert not ( @@ -373,47 +506,27 @@ class Test_Allowlist: ) def test_is_allowlisted_in_region(self): - # Allowlist example - allowlisted_regions = [AWS_REGION, "eu-west-1"] - allowlisted_resources = ["*"] + allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1] + finding_region = AWS_REGION_US_EAST_1 - assert is_allowlisted_in_region( - allowlisted_regions, allowlisted_resources, None, AWS_REGION, "prowler", "" - ) + assert is_allowlisted_in_region(allowlisted_regions, finding_region) - assert is_allowlisted_in_region( - allowlisted_regions, - allowlisted_resources, - None, - AWS_REGION, - "prowler-test", - "", - ) + def test_is_allowlisted_in_region_wildcard(self): + 
allowlisted_regions = ["*"] + finding_region = AWS_REGION_US_EAST_1 - assert is_allowlisted_in_region( - allowlisted_regions, - allowlisted_resources, - None, - AWS_REGION, - "test-prowler", - "", - ) + assert is_allowlisted_in_region(allowlisted_regions, finding_region) - assert not ( - is_allowlisted_in_region( - allowlisted_regions, - allowlisted_resources, - None, - "us-east-2", - "test", - "", - ) - ) + def test_is_not_allowlisted_in_region(self): + allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1] + finding_region = "eu-west-2" + + assert not is_allowlisted_in_region(allowlisted_regions, finding_region) def test_is_allowlisted_in_check(self): allowlisted_checks = { "check_test": { - "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } @@ -421,9 +534,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "check_test", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -431,9 +543,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "check_test", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler-test", "", ) @@ -441,9 +552,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "check_test", - AWS_REGION, + AWS_REGION_US_EAST_1, "test-prowler", "", ) @@ -452,7 +562,6 @@ class Test_Allowlist: is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", @@ -464,7 +573,7 @@ class Test_Allowlist: # Allowlist example allowlisted_checks = { "s3_*": { - "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } @@ -472,9 +581,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, 
"s3_bucket_public_access", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -482,9 +590,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "s3_bucket_no_mfa_delete", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler-test", "", ) @@ -492,9 +599,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "s3_bucket_policy_public_write_access", - AWS_REGION, + AWS_REGION_US_EAST_1, "test-prowler", "", ) @@ -503,9 +609,8 @@ class Test_Allowlist: is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "iam_user_hardware_mfa_enabled", - AWS_REGION, + AWS_REGION_US_EAST_1, "test", "", ) @@ -514,7 +619,7 @@ class Test_Allowlist: def test_is_allowlisted_lambda_generic_check(self): allowlisted_checks = { "lambda_*": { - "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } @@ -522,9 +627,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "awslambda_function_invoke_api_operations_cloudtrail_logging_enabled", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -532,9 +636,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "awslambda_function_no_secrets_in_code", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -542,9 +645,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "awslambda_function_no_secrets_in_variables", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -552,9 +654,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "awslambda_function_not_publicly_accessible", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -562,9 +663,8 @@ 
class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "awslambda_function_url_cors_policy", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -572,9 +672,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "awslambda_function_url_public", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -582,9 +681,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "awslambda_function_using_supported_runtimes", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -592,7 +690,7 @@ class Test_Allowlist: def test_is_allowlisted_lambda_concrete_check(self): allowlisted_checks = { "lambda_function_no_secrets_in_variables": { - "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } @@ -600,9 +698,8 @@ class Test_Allowlist: assert is_allowlisted_in_check( allowlisted_checks, AWS_ACCOUNT_NUMBER, - AWS_ACCOUNT_NUMBER, "awslambda_function_no_secrets_in_variables", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "", ) @@ -614,7 +711,7 @@ class Test_Allowlist: "*": { "Checks": { "check_test": { - "Regions": [AWS_REGION, "eu-west-1"], + "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], "Tags": ["environment=dev", "project=.*"], } @@ -627,7 +724,7 @@ class Test_Allowlist: allowlist, AWS_ACCOUNT_NUMBER, "check_test", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler", "environment=dev", ) @@ -636,7 +733,7 @@ class Test_Allowlist: allowlist, AWS_ACCOUNT_NUMBER, "check_test", - AWS_REGION, + AWS_REGION_US_EAST_1, "prowler-test", "environment=dev | project=prowler", ) @@ -654,56 +751,45 @@ class Test_Allowlist: def test_is_allowlisted_in_tags(self): allowlist_tags = ["environment=dev", "project=prowler"] - allowlist_resource = "*" + + assert is_allowlisted_in_tags(allowlist_tags, 
"environment=dev") assert is_allowlisted_in_tags( allowlist_tags, - "*", - "prowler", - "environment=dev", - ) - - assert is_allowlisted_in_tags( - allowlist_tags, - allowlist_resource, - "prowler-test", "environment=dev | project=prowler", ) assert not ( is_allowlisted_in_tags( allowlist_tags, - allowlist_resource, - "test", "environment=pro", ) ) def test_is_allowlisted_in_tags_regex(self): allowlist_tags = ["environment=(dev|test)", ".*=prowler"] - allowlist_resource = "*" assert is_allowlisted_in_tags( allowlist_tags, - allowlist_resource, - "prowler-test", "environment=test | proj=prowler", ) assert is_allowlisted_in_tags( allowlist_tags, - allowlist_resource, - "prowler-test", "env=prod | project=prowler", ) assert not is_allowlisted_in_tags( allowlist_tags, - allowlist_resource, - "prowler-test", "environment=prod | project=myproj", ) + def test_is_allowlisted_in_tags_with_no_tags_in_finding(self): + allowlist_tags = ["environment=(dev|test)", ".*=prowler"] + finding_tags = "" + + assert not is_allowlisted_in_tags(allowlist_tags, finding_tags) + def test_is_excepted(self): # Allowlist example exceptions = { @@ -737,6 +823,28 @@ class Test_Allowlist: "environment=test", ) + def test_is_excepted_all_wildcard(self): + exceptions = { + "Accounts": ["*"], + "Regions": ["*"], + "Resources": ["*"], + "Tags": ["*"], + } + assert is_excepted( + exceptions, AWS_ACCOUNT_NUMBER, "eu-south-2", "test", "environment=test" + ) + assert not is_excepted( + exceptions, AWS_ACCOUNT_NUMBER, "eu-south-2", "test", None + ) + + def test_is_not_excepted(self): + exceptions = { + "Accounts": [AWS_ACCOUNT_NUMBER], + "Regions": ["eu-central-1", "eu-south-3"], + "Resources": ["test"], + "Tags": ["environment=test", "project=.*"], + } + assert not is_excepted( exceptions, AWS_ACCOUNT_NUMBER, @@ -760,3 +868,11 @@ class Test_Allowlist: "test", "environment=pro", ) + + def test_is_allowlisted_in_resource(self): + allowlist_resources = ["prowler", "^test", "prowler-pro"] + + assert 
is_allowlisted_in_resource(allowlist_resources, "prowler") + assert is_allowlisted_in_resource(allowlist_resources, "prowler-test") + assert is_allowlisted_in_resource(allowlist_resources, "test-prowler") + assert not is_allowlisted_in_resource(allowlist_resources, "random") diff --git a/tests/providers/aws/lib/policy_condition_parser/policy_condition_parser_test.py b/tests/providers/aws/lib/policy_condition_parser/policy_condition_parser_test.py index 14d454711d..4e8e71ccec 100644 --- a/tests/providers/aws/lib/policy_condition_parser/policy_condition_parser_test.py +++ b/tests/providers/aws/lib/policy_condition_parser/policy_condition_parser_test.py @@ -1282,3 +1282,75 @@ class Test_policy_condition_parser: assert not is_account_only_allowed_in_condition( condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER ) + + def test_condition_parser_two_lists_unrestrictive(self): + condition_statement = { + "StringLike": { + "AWS:ResourceAccount": [ + TRUSTED_AWS_ACCOUNT_NUMBER, + NON_TRUSTED_AWS_ACCOUNT_NUMBER, + ] + }, + "ArnLike": { + "AWS:SourceArn": [ + f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*", + f"arn:aws:cloudtrail:*:{NON_TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*", + ] + }, + } + assert not is_account_only_allowed_in_condition( + condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER + ) + + def test_condition_parser_two_lists_both_restrictive(self): + condition_statement = { + "StringLike": { + "AWS:ResourceAccount": [ + TRUSTED_AWS_ACCOUNT_NUMBER, + ] + }, + "ArnLike": { + "AWS:SourceArn": [ + f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*", + ] + }, + } + assert is_account_only_allowed_in_condition( + condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER + ) + + def test_condition_parser_two_lists_first_restrictive(self): + condition_statement = { + "StringLike": { + "AWS:ResourceAccount": [ + TRUSTED_AWS_ACCOUNT_NUMBER, + ] + }, + "ArnLike": { + "AWS:SourceArn": [ + f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*", + 
f"arn:aws:cloudtrail:*:{NON_TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*", + ] + }, + } + assert is_account_only_allowed_in_condition( + condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER + ) + + def test_condition_parser_two_lists_second_restrictive(self): + condition_statement = { + "StringLike": { + "AWS:ResourceAccount": [ + TRUSTED_AWS_ACCOUNT_NUMBER, + NON_TRUSTED_AWS_ACCOUNT_NUMBER, + ] + }, + "ArnLike": { + "AWS:SourceArn": [ + f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*", + ] + }, + } + assert is_account_only_allowed_in_condition( + condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER + ) diff --git a/tests/providers/aws/lib/security_hub/security_hub_test.py b/tests/providers/aws/lib/security_hub/security_hub_test.py index 6c81d2e43e..dad3a7103e 100644 --- a/tests/providers/aws/lib/security_hub/security_hub_test.py +++ b/tests/providers/aws/lib/security_hub/security_hub_test.py @@ -6,7 +6,6 @@ from mock import MagicMock, patch from prowler.config.config import prowler_version, timestamp_utc from prowler.lib.check.models import Check_Report, load_check_metadata -from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info # from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info from prowler.providers.aws.lib.security_hub.security_hub import ( @@ -14,11 +13,13 @@ from prowler.providers.aws.lib.security_hub.security_hub import ( prepare_security_hub_findings, verify_security_hub_integration_enabled_per_region, ) -from prowler.providers.common.models import Audit_Metadata - -AWS_ACCOUNT_ID = "123456789012" -AWS_REGION_1 = "eu-west-1" -AWS_REGION_2 = "eu-west-2" +from tests.providers.aws.audit_info_utils import ( + AWS_ACCOUNT_NUMBER, + AWS_COMMERCIAL_PARTITION, + AWS_REGION_EU_WEST_1, + AWS_REGION_EU_WEST_2, + set_mocked_aws_audit_info, +) # Mocking Security Hub Get Findings make_api_call = botocore.client.BaseClient._make_api_call @@ -32,7 +33,7 @@ def mock_make_api_call(self, operation_name, kwarg): } if operation_name == 
"DescribeHub": return { - "HubArn": f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:hub/default", + "HubArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:hub/default", "SubscribedAt": "2023-02-07T09:45:43.742Z", "AutoEnableControls": True, "ControlFindingGenerator": "STANDARD_CONTROL", @@ -41,7 +42,7 @@ def mock_make_api_call(self, operation_name, kwarg): if operation_name == "ListEnabledProductsForImport": return { "ProductSubscriptions": [ - f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:product-subscription/prowler/prowler", + f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:product-subscription/prowler/prowler", ] } @@ -49,32 +50,6 @@ def mock_make_api_call(self, operation_name, kwarg): class Test_SecurityHub: - def set_mocked_audit_info(self): - return AWS_Audit_Info( - session_config=None, - original_session=None, - audit_session=None, - audited_account=AWS_ACCOUNT_ID, - audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root", - audited_identity_arn="test-arn", - audited_user_id="test", - audited_partition="aws", - profile="default", - profile_region="eu-west-1", - credentials=None, - assumed_role_info=None, - audited_regions=["eu-west-2", "eu-west-1"], - organizations_metadata=None, - audit_resources=None, - mfa_enabled=False, - audit_metadata=Audit_Metadata( - services_scanned=0, - expected_checks=[], - completed_checks=0, - audit_progress=0, - ), - ) - def generate_finding(self, status, region): finding = Check_Report( load_check_metadata( @@ -104,14 +79,18 @@ class Test_SecurityHub: @patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call) def test_verify_security_hub_integration_enabled_per_region(self): - session = self.set_mocked_session(AWS_REGION_1) - assert verify_security_hub_integration_enabled_per_region(AWS_REGION_1, session) + session = self.set_mocked_session(AWS_REGION_EU_WEST_1) + assert verify_security_hub_integration_enabled_per_region( + AWS_COMMERCIAL_PARTITION, 
AWS_REGION_EU_WEST_1, session, AWS_ACCOUNT_NUMBER + ) def test_prepare_security_hub_findings_enabled_region_not_quiet(self): - enabled_regions = [AWS_REGION_1] + enabled_regions = [AWS_REGION_EU_WEST_1] output_options = self.set_mocked_output_options(is_quiet=False) - findings = [self.generate_finding("PASS", AWS_REGION_1)] - audit_info = self.set_mocked_audit_info() + findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)] + audit_info = set_mocked_aws_audit_info( + audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2] + ) assert prepare_security_hub_findings( findings, @@ -119,11 +98,11 @@ class Test_SecurityHub: output_options, enabled_regions, ) == { - AWS_REGION_1: [ + AWS_REGION_EU_WEST_1: [ { "SchemaVersion": "2018-10-08", - "Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_ID}-{AWS_REGION_1}-ee26b0dd4", - "ProductArn": f"arn:aws:securityhub:{AWS_REGION_1}::product/prowler/prowler", + "Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4", + "ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler", "RecordState": "ACTIVE", "ProductFields": { "ProviderName": "Prowler", @@ -131,7 +110,7 @@ class Test_SecurityHub: "ProwlerResourceName": "test", }, "GeneratorId": "prowler-iam_user_accesskey_unused", - "AwsAccountId": f"{AWS_ACCOUNT_ID}", + "AwsAccountId": f"{AWS_ACCOUNT_NUMBER}", "Types": ["Software and Configuration Checks"], "FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"), "UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"), @@ -144,7 +123,7 @@ class Test_SecurityHub: "Type": "AwsIamAccessAnalyzer", "Id": "test", "Partition": "aws", - "Region": f"{AWS_REGION_1}", + "Region": f"{AWS_REGION_EU_WEST_1}", } ], "Compliance": { @@ -160,55 +139,117 @@ class Test_SecurityHub: }, } ], - AWS_REGION_2: [], } def test_prepare_security_hub_findings_quiet_INFO_finding(self): - enabled_regions = [AWS_REGION_1] + enabled_regions = [AWS_REGION_EU_WEST_1] 
output_options = self.set_mocked_output_options(is_quiet=False) - findings = [self.generate_finding("INFO", AWS_REGION_1)] - audit_info = self.set_mocked_audit_info() + findings = [self.generate_finding("INFO", AWS_REGION_EU_WEST_1)] + audit_info = set_mocked_aws_audit_info( + audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2] + ) assert prepare_security_hub_findings( findings, audit_info, output_options, enabled_regions, - ) == {AWS_REGION_1: [], AWS_REGION_2: []} + ) == {AWS_REGION_EU_WEST_1: []} def test_prepare_security_hub_findings_disabled_region(self): - enabled_regions = [AWS_REGION_1] + enabled_regions = [AWS_REGION_EU_WEST_1] output_options = self.set_mocked_output_options(is_quiet=False) - findings = [self.generate_finding("PASS", AWS_REGION_2)] - audit_info = self.set_mocked_audit_info() + findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_2)] + audit_info = set_mocked_aws_audit_info( + audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2] + ) assert prepare_security_hub_findings( findings, audit_info, output_options, enabled_regions, - ) == {AWS_REGION_1: [], AWS_REGION_2: []} + ) == {AWS_REGION_EU_WEST_1: []} def test_prepare_security_hub_findings_quiet(self): - enabled_regions = [AWS_REGION_1] + enabled_regions = [AWS_REGION_EU_WEST_1] output_options = self.set_mocked_output_options(is_quiet=True) - findings = [self.generate_finding("PASS", AWS_REGION_1)] - audit_info = self.set_mocked_audit_info() + findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)] + audit_info = set_mocked_aws_audit_info( + audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2] + ) assert prepare_security_hub_findings( findings, audit_info, output_options, enabled_regions, - ) == {AWS_REGION_1: [], AWS_REGION_2: []} + ) == {AWS_REGION_EU_WEST_1: []} + + def test_prepare_security_hub_findings_no_audited_regions(self): + enabled_regions = [AWS_REGION_EU_WEST_1] + output_options = self.set_mocked_output_options(is_quiet=False) + findings 
= [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)] + audit_info = set_mocked_aws_audit_info() + + assert prepare_security_hub_findings( + findings, + audit_info, + output_options, + enabled_regions, + ) == { + AWS_REGION_EU_WEST_1: [ + { + "SchemaVersion": "2018-10-08", + "Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4", + "ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler", + "RecordState": "ACTIVE", + "ProductFields": { + "ProviderName": "Prowler", + "ProviderVersion": prowler_version, + "ProwlerResourceName": "test", + }, + "GeneratorId": "prowler-iam_user_accesskey_unused", + "AwsAccountId": f"{AWS_ACCOUNT_NUMBER}", + "Types": ["Software and Configuration Checks"], + "FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"), + "UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"), + "CreatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"), + "Severity": {"Label": "LOW"}, + "Title": "Ensure Access Keys unused are disabled", + "Description": "test", + "Resources": [ + { + "Type": "AwsIamAccessAnalyzer", + "Id": "test", + "Partition": "aws", + "Region": f"{AWS_REGION_EU_WEST_1}", + } + ], + "Compliance": { + "Status": "PASSED", + "RelatedRequirements": [], + "AssociatedStandards": [], + }, + "Remediation": { + "Recommendation": { + "Text": "Run sudo yum update and cross your fingers and toes.", + "Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html", + } + }, + } + ], + } @patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call) def test_batch_send_to_security_hub_one_finding(self): - enabled_regions = [AWS_REGION_1] + enabled_regions = [AWS_REGION_EU_WEST_1] output_options = self.set_mocked_output_options(is_quiet=False) - findings = [self.generate_finding("PASS", AWS_REGION_1)] - audit_info = self.set_mocked_audit_info() - session = self.set_mocked_session(AWS_REGION_1) + findings = [self.generate_finding("PASS", 
AWS_REGION_EU_WEST_1)] + audit_info = set_mocked_aws_audit_info( + audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2] + ) + session = self.set_mocked_session(AWS_REGION_EU_WEST_1) security_hub_findings = prepare_security_hub_findings( findings, diff --git a/tests/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled_test.py b/tests/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled_test.py index 17f35627f1..3a0178e953 100644 --- a/tests/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled_test.py +++ b/tests/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled_test.py @@ -33,6 +33,7 @@ class Test_accessanalyzer_enabled: def test_one_analyzer_not_available(self): # Include analyzers to check accessanalyzer_client = mock.MagicMock + accessanalyzer_client.region = AWS_REGION_1 accessanalyzer_client.analyzers = [ Analyzer( arn=AWS_ACCOUNT_ARN, @@ -65,8 +66,46 @@ class Test_accessanalyzer_enabled: assert result[0].region == AWS_REGION_1 assert result[0].resource_tags == [] + def test_one_analyzer_not_available_allowlisted(self): + # Include analyzers to check + accessanalyzer_client = mock.MagicMock + accessanalyzer_client.region = AWS_REGION_2 + accessanalyzer_client.audit_config = {"allowlist_non_default_regions": True} + accessanalyzer_client.analyzers = [ + Analyzer( + arn=AWS_ACCOUNT_ARN, + name=AWS_ACCOUNT_NUMBER, + status="NOT_AVAILABLE", + tags=[], + type="", + region=AWS_REGION_1, + ) + ] + with mock.patch( + "prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.AccessAnalyzer", + accessanalyzer_client, + ): + from prowler.providers.aws.services.accessanalyzer.accessanalyzer_enabled.accessanalyzer_enabled import ( + accessanalyzer_enabled, + ) + + check = accessanalyzer_enabled() + result = check.execute() + + assert len(result) == 1 + assert result[0].status == "WARNING" + assert ( + result[0].status_extended + 
== f"IAM Access Analyzer in account {AWS_ACCOUNT_NUMBER} is not enabled." + ) + assert result[0].resource_id == AWS_ACCOUNT_NUMBER + assert result[0].resource_arn == AWS_ACCOUNT_ARN + assert result[0].region == AWS_REGION_1 + assert result[0].resource_tags == [] + def test_two_analyzers(self): accessanalyzer_client = mock.MagicMock + accessanalyzer_client.region = AWS_REGION_1 accessanalyzer_client.analyzers = [ Analyzer( arn=AWS_ACCOUNT_ARN, diff --git a/tests/providers/aws/services/codeartifact/codeartifact_packages_external_public_publishing_disabled/codeartifact_packages_external_public_publishing_disabled_test.py b/tests/providers/aws/services/codeartifact/codeartifact_packages_external_public_publishing_disabled/codeartifact_packages_external_public_publishing_disabled_test.py index bcbe13e743..668f00e0a3 100644 --- a/tests/providers/aws/services/codeartifact/codeartifact_packages_external_public_publishing_disabled/codeartifact_packages_external_public_publishing_disabled_test.py +++ b/tests/providers/aws/services/codeartifact/codeartifact_packages_external_public_publishing_disabled/codeartifact_packages_external_public_publishing_disabled_test.py @@ -110,7 +110,10 @@ class Test_codeartifact_packages_external_public_publishing_disabled: assert len(result) == 1 assert result[0].region == AWS_REGION assert result[0].resource_id == "test-package" - assert result[0].resource_arn == repository_arn + assert ( + result[0].resource_arn + == repository_arn + "/" + package_namespace + ":" + package_name + ) assert result[0].resource_tags == [] assert result[0].status == "FAIL" assert ( @@ -167,7 +170,10 @@ class Test_codeartifact_packages_external_public_publishing_disabled: assert len(result) == 1 assert result[0].region == AWS_REGION assert result[0].resource_id == "test-package" - assert result[0].resource_arn == repository_arn + assert ( + result[0].resource_arn + == repository_arn + "/" + package_namespace + ":" + package_name + ) assert result[0].resource_tags 
== [] assert result[0].status == "PASS" assert ( diff --git a/tests/providers/aws/services/ec2/ec2_securitygroup_not_used/ec2_securitygroup_not_used_test.py b/tests/providers/aws/services/ec2/ec2_securitygroup_not_used/ec2_securitygroup_not_used_test.py index cf216574d3..81b0ea5062 100644 --- a/tests/providers/aws/services/ec2/ec2_securitygroup_not_used/ec2_securitygroup_not_used_test.py +++ b/tests/providers/aws/services/ec2/ec2_securitygroup_not_used/ec2_securitygroup_not_used_test.py @@ -244,3 +244,88 @@ class Test_ec2_securitygroup_not_used: assert result[0].resource_id == sg.id assert result[0].resource_details == sg_name assert result[0].resource_tags == [] + + @mock_ec2 + @mock_lambda + def test_ec2_associated_sg(self): + # Create EC2 Mocked Resources + ec2 = resource("ec2", AWS_REGION_US_EAST_1) + ec2_client = client("ec2", region_name=AWS_REGION_US_EAST_1) + vpc_id = ec2_client.create_vpc(CidrBlock="10.0.0.0/16")["Vpc"]["VpcId"] + sg_name = "test-sg" + sg_name1 = "test-sg1" + sg = ec2.create_security_group( + GroupName=sg_name, Description="test", VpcId=vpc_id + ) + sg1 = ec2.create_security_group( + GroupName=sg_name1, Description="test1", VpcId=vpc_id + ) + + ec2_client.authorize_security_group_ingress( + GroupId=sg.id, + IpPermissions=[ + { + "IpProtocol": "-1", + "UserIdGroupPairs": [ + { + "GroupId": sg1.id, + "Description": "Allow traffic from source SG", + } + ], + } + ], + ) + + from prowler.providers.aws.services.awslambda.awslambda_service import Lambda + from prowler.providers.aws.services.ec2.ec2_service import EC2 + + current_audit_info = set_mocked_aws_audit_info( + audited_regions=["us-east-1", "eu-west-1"] + ) + + with mock.patch( + "prowler.providers.aws.lib.audit_info.audit_info.current_audit_info", + new=current_audit_info, + ), mock.patch( + "prowler.providers.aws.services.ec2.ec2_securitygroup_not_used.ec2_securitygroup_not_used.ec2_client", + new=EC2(current_audit_info), + ), mock.patch( + 
"prowler.providers.aws.services.ec2.ec2_securitygroup_not_used.ec2_securitygroup_not_used.awslambda_client", + new=Lambda(current_audit_info), + ): + # Test Check + from prowler.providers.aws.services.ec2.ec2_securitygroup_not_used.ec2_securitygroup_not_used import ( + ec2_securitygroup_not_used, + ) + + check = ec2_securitygroup_not_used() + result = check.execute() + + # One custom sg + assert len(result) == 2 + assert result[0].status == "FAIL" + assert result[0].region == AWS_REGION_US_EAST_1 + assert ( + result[0].status_extended + == f"Security group {sg_name} ({sg.id}) it is not being used." + ) + assert ( + result[0].resource_arn + == f"arn:{current_audit_info.audited_partition}:ec2:{AWS_REGION_US_EAST_1}:{current_audit_info.audited_account}:security-group/{sg.id}" + ) + assert result[0].resource_id == sg.id + assert result[0].resource_details == sg_name + assert result[0].resource_tags == [] + assert result[1].status == "PASS" + assert result[1].region == AWS_REGION_US_EAST_1 + assert ( + result[1].status_extended + == f"Security group {sg_name1} ({sg1.id}) it is being used." 
+ ) + assert ( + result[1].resource_arn + == f"arn:{current_audit_info.audited_partition}:ec2:{AWS_REGION_US_EAST_1}:{current_audit_info.audited_account}:security-group/{sg1.id}" + ) + assert result[1].resource_id == sg1.id + assert result[1].resource_details == sg_name1 + assert result[1].resource_tags == [] diff --git a/tests/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed_test.py b/tests/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed_test.py index 3f3afa6ed3..19cc667c00 100644 --- a/tests/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed_test.py +++ b/tests/providers/aws/services/guardduty/guardduty_centrally_managed/guardduty_centrally_managed_test.py @@ -62,6 +62,31 @@ class Test_guardduty_centrally_managed: assert result[0].region == AWS_REGION assert result[0].resource_arn == DETECTOR_ARN + def test_not_enabled_account_detector(self): + guardduty_client = mock.MagicMock + guardduty_client.detectors = [] + guardduty_client.detectors.append( + Detector( + id=AWS_ACCOUNT_NUMBER, + region=AWS_REGION, + arn=DETECTOR_ARN, + enabled_in_account=False, + ) + ) + + with mock.patch( + "prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty", + guardduty_client, + ): + # Test Check + from prowler.providers.aws.services.guardduty.guardduty_centrally_managed.guardduty_centrally_managed import ( + guardduty_centrally_managed, + ) + + check = guardduty_centrally_managed() + result = check.execute() + assert len(result) == 0 + def test_detector_centralized_managed(self): guardduty_client = mock.MagicMock guardduty_client.detectors = [] diff --git a/tests/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings_test.py b/tests/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings_test.py index 5f82583f1f..456169324a 100644 --- 
a/tests/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings_test.py +++ b/tests/providers/aws/services/guardduty/guardduty_no_high_severity_findings/guardduty_no_high_severity_findings_test.py @@ -58,6 +58,29 @@ class Test_guardduty_no_high_severity_findings: assert result[0].resource_arn == DETECTOR_ARN assert result[0].region == AWS_REGION + def test_not_enabled_account_detector(self): + guardduty_client = mock.MagicMock + guardduty_client.detectors = [] + guardduty_client.detectors.append( + Detector( + id=AWS_ACCOUNT_NUMBER, + arn=DETECTOR_ARN, + region=AWS_REGION, + enabled_in_account=False, + ) + ) + with mock.patch( + "prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty", + guardduty_client, + ): + from prowler.providers.aws.services.guardduty.guardduty_no_high_severity_findings.guardduty_no_high_severity_findings import ( + guardduty_no_high_severity_findings, + ) + + check = guardduty_no_high_severity_findings() + result = check.execute() + assert len(result) == 0 + def test_high_findings(self): guardduty_client = mock.MagicMock guardduty_client.detectors = [] diff --git a/tests/providers/aws/services/sqs/sqs_queues_not_publicly_accessible/sqs_queues_not_publicly_accessible_test.py b/tests/providers/aws/services/sqs/sqs_queues_not_publicly_accessible/sqs_queues_not_publicly_accessible_test.py index e53778f2e2..cb679d2627 100644 --- a/tests/providers/aws/services/sqs/sqs_queues_not_publicly_accessible/sqs_queues_not_publicly_accessible_test.py +++ b/tests/providers/aws/services/sqs/sqs_queues_not_publicly_accessible/sqs_queues_not_publicly_accessible_test.py @@ -7,8 +7,11 @@ from prowler.providers.aws.services.sqs.sqs_service import Queue AWS_REGION = "eu-west-1" AWS_ACCOUNT_NUMBER = "123456789012" -queue_id = str(uuid4()) -topic_arn = f"arn:aws:sqs:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:{queue_id}" +test_queue_name = str(uuid4()) +test_queue_url = ( + 
f"https://sqs.{AWS_REGION}.amazonaws.com/{AWS_ACCOUNT_NUMBER}/{test_queue_name}" +) +test_queue_arn = f"arn:aws:sqs:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:{test_queue_name}" test_restricted_policy = { "Version": "2012-10-17", @@ -19,7 +22,7 @@ test_restricted_policy = { "Effect": "Allow", "Principal": {"AWS": {AWS_ACCOUNT_NUMBER}}, "Action": "sqs:ReceiveMessage", - "Resource": topic_arn, + "Resource": test_queue_arn, } ], } @@ -33,7 +36,7 @@ test_public_policy = { "Effect": "Allow", "Principal": "*", "Action": "sqs:ReceiveMessage", - "Resource": topic_arn, + "Resource": test_queue_arn, } ], } @@ -47,7 +50,7 @@ test_public_policy_with_condition_same_account_not_valid = { "Effect": "Allow", "Principal": "*", "Action": "sqs:ReceiveMessage", - "Resource": topic_arn, + "Resource": test_queue_arn, "Condition": { "DateGreaterThan": {"aws:CurrentTime": "2009-01-31T12:00Z"}, "DateLessThan": {"aws:CurrentTime": "2009-01-31T15:00Z"}, @@ -65,7 +68,7 @@ test_public_policy_with_condition_same_account = { "Effect": "Allow", "Principal": "*", "Action": "sqs:ReceiveMessage", - "Resource": topic_arn, + "Resource": test_queue_arn, "Condition": { "StringEquals": {"aws:SourceAccount": f"{AWS_ACCOUNT_NUMBER}"} }, @@ -82,7 +85,7 @@ test_public_policy_with_condition_diff_account = { "Effect": "Allow", "Principal": "*", "Action": "sqs:ReceiveMessage", - "Resource": topic_arn, + "Resource": test_queue_arn, "Condition": {"StringEquals": {"aws:SourceAccount": "111122223333"}}, } ], @@ -110,10 +113,11 @@ class Test_sqs_queues_not_publicly_accessible: sqs_client.queues = [] sqs_client.queues.append( Queue( - id=queue_id, + id=test_queue_url, + name=test_queue_name, region=AWS_REGION, policy=test_restricted_policy, - arn="arn_test", + arn=test_queue_arn, ) ) with mock.patch( @@ -129,8 +133,8 @@ class Test_sqs_queues_not_publicly_accessible: assert len(result) == 1 assert result[0].status == "PASS" assert search("is not public", result[0].status_extended) - assert result[0].resource_id == queue_id - 
assert result[0].resource_arn == "arn_test" + assert result[0].resource_id == test_queue_url + assert result[0].resource_arn == test_queue_arn assert result[0].resource_tags == [] assert result[0].region == AWS_REGION @@ -139,10 +143,11 @@ class Test_sqs_queues_not_publicly_accessible: sqs_client.queues = [] sqs_client.queues.append( Queue( - id=queue_id, + id=test_queue_url, + name=test_queue_name, region=AWS_REGION, policy=test_public_policy, - arn="arn_test", + arn=test_queue_arn, ) ) with mock.patch( @@ -161,8 +166,8 @@ class Test_sqs_queues_not_publicly_accessible: "is public because its policy allows public access", result[0].status_extended, ) - assert result[0].resource_id == queue_id - assert result[0].resource_arn == "arn_test" + assert result[0].resource_id == test_queue_url + assert result[0].resource_arn == test_queue_arn assert result[0].resource_tags == [] assert result[0].region == AWS_REGION @@ -172,10 +177,11 @@ class Test_sqs_queues_not_publicly_accessible: sqs_client.audited_account = AWS_ACCOUNT_NUMBER sqs_client.queues.append( Queue( - id=queue_id, + id=test_queue_url, + name=test_queue_name, region=AWS_REGION, policy=test_public_policy_with_condition_same_account_not_valid, - arn="arn_test", + arn=test_queue_arn, ) ) with mock.patch( @@ -194,8 +200,8 @@ class Test_sqs_queues_not_publicly_accessible: "is public because its policy allows public access", result[0].status_extended, ) - assert result[0].resource_id == queue_id - assert result[0].resource_arn == "arn_test" + assert result[0].resource_id == test_queue_url + assert result[0].resource_arn == test_queue_arn assert result[0].resource_tags == [] assert result[0].region == AWS_REGION @@ -205,10 +211,11 @@ class Test_sqs_queues_not_publicly_accessible: sqs_client.audited_account = AWS_ACCOUNT_NUMBER sqs_client.queues.append( Queue( - id=queue_id, + id=test_queue_url, + name=test_queue_name, region=AWS_REGION, policy=test_public_policy_with_condition_same_account, - arn="arn_test", + 
arn=test_queue_arn, ) ) with mock.patch( @@ -225,10 +232,10 @@ class Test_sqs_queues_not_publicly_accessible: assert result[0].status == "PASS" assert ( result[0].status_extended - == f"SQS queue {queue_id} is not public because its policy only allows access from the same account." + == f"SQS queue {test_queue_url} is not public because its policy only allows access from the same account." ) - assert result[0].resource_id == queue_id - assert result[0].resource_arn == "arn_test" + assert result[0].resource_id == test_queue_url + assert result[0].resource_arn == test_queue_arn assert result[0].resource_tags == [] assert result[0].region == AWS_REGION @@ -238,10 +245,11 @@ class Test_sqs_queues_not_publicly_accessible: sqs_client.audited_account = AWS_ACCOUNT_NUMBER sqs_client.queues.append( Queue( - id=queue_id, + id=test_queue_url, + name=test_queue_name, region=AWS_REGION, policy=test_public_policy_with_condition_diff_account, - arn="arn_test", + arn=test_queue_arn, ) ) with mock.patch( @@ -258,9 +266,9 @@ class Test_sqs_queues_not_publicly_accessible: assert result[0].status == "FAIL" assert ( result[0].status_extended - == f"SQS queue {queue_id} is public because its policy allows public access, and the condition does not limit access to resources within the same account." + == f"SQS queue {test_queue_url} is public because its policy allows public access, and the condition does not limit access to resources within the same account." 
) - assert result[0].resource_id == queue_id - assert result[0].resource_arn == "arn_test" + assert result[0].resource_id == test_queue_url + assert result[0].resource_arn == test_queue_arn assert result[0].resource_tags == [] assert result[0].region == AWS_REGION diff --git a/tests/providers/aws/services/sqs/sqs_queues_server_side_encryption_enabled/sqs_queues_server_side_encryption_enabled_test.py b/tests/providers/aws/services/sqs/sqs_queues_server_side_encryption_enabled/sqs_queues_server_side_encryption_enabled_test.py index ff1f880803..7afbb48a78 100644 --- a/tests/providers/aws/services/sqs/sqs_queues_server_side_encryption_enabled/sqs_queues_server_side_encryption_enabled_test.py +++ b/tests/providers/aws/services/sqs/sqs_queues_server_side_encryption_enabled/sqs_queues_server_side_encryption_enabled_test.py @@ -8,8 +8,11 @@ AWS_REGION = "eu-west-1" AWS_ACCOUNT_NUMBER = "123456789012" test_kms_key_id = str(uuid4()) -queue_id = str(uuid4()) -topic_arn = f"arn:aws:sqs:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:{queue_id}" +test_queue_name = str(uuid4()) +test_queue_url = ( + f"https://sqs.{AWS_REGION}.amazonaws.com/{AWS_ACCOUNT_NUMBER}/{test_queue_name}" +) +test_queue_arn = f"arn:aws:sqs:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:{test_queue_name}" class Test_sqs_queues_server_side_encryption_enabled: @@ -33,10 +36,11 @@ class Test_sqs_queues_server_side_encryption_enabled: sqs_client.queues = [] sqs_client.queues.append( Queue( - id=queue_id, + id=test_queue_url, + name=test_queue_name, region=AWS_REGION, kms_key_id=test_kms_key_id, - arn="arn_test", + arn=test_queue_arn, ) ) with mock.patch( @@ -52,17 +56,18 @@ class Test_sqs_queues_server_side_encryption_enabled: assert len(result) == 1 assert result[0].status == "PASS" assert search("is using Server Side Encryption", result[0].status_extended) - assert result[0].resource_id == queue_id - assert result[0].resource_arn == "arn_test" + assert result[0].resource_id == test_queue_url + assert result[0].resource_arn == 
test_queue_arn def test_queues_no_encryption(self): sqs_client = mock.MagicMock sqs_client.queues = [] sqs_client.queues.append( Queue( - id=queue_id, + id=test_queue_url, + name=test_queue_name, region=AWS_REGION, - arn="arn_test", + arn=test_queue_arn, ) ) with mock.patch( @@ -80,5 +85,5 @@ class Test_sqs_queues_server_side_encryption_enabled: assert search( "is not using Server Side Encryption", result[0].status_extended ) - assert result[0].resource_id == queue_id - assert result[0].resource_arn == "arn_test" + assert result[0].resource_id == test_queue_url + assert result[0].resource_arn == test_queue_arn diff --git a/tests/providers/aws/services/sqs/sqs_service_test.py b/tests/providers/aws/services/sqs/sqs_service_test.py index c3a71dc518..72779a56dc 100644 --- a/tests/providers/aws/services/sqs/sqs_service_test.py +++ b/tests/providers/aws/services/sqs/sqs_service_test.py @@ -110,9 +110,25 @@ class Test_SQS_Service: sqs = SQS(audit_info) assert len(sqs.queues) == 1 assert sqs.queues[0].id == queue["QueueUrl"] + assert sqs.queues[0].name == test_queue + assert sqs.queues[0].name == sqs.queues[0].arn.split(":")[-1] + assert sqs.queues[0].name == sqs.queues[0].id.split("/")[-1] + assert sqs.queues[0].arn == test_queue_arn assert sqs.queues[0].region == AWS_REGION assert sqs.queues[0].tags == [{"test": "test"}] + # moto does not properly mock this and is hardcoded to return 1000 queues + # so this test currently always fails + # @mock_sqs + # # Test SQS list queues for over 1000 queues + # def test__list_queues__pagination_over_a_thousand(self): + # sqs_client = client("sqs", region_name=AWS_REGION) + # for i in range(0,1050): + # sqs_client.create_queue(QueueName=f"{test_queue}-{i}", tags={"test": "test"}) + # audit_info = self.set_mocked_audit_info() + # sqs = SQS(audit_info) + # assert len(sqs.queues) > 1000 + @mock_sqs # Test SQS list queues def test__get_queue_attributes__(self): diff --git a/tests/providers/azure/lib/regions/regions_test.py 
b/tests/providers/azure/lib/regions/regions_test.py new file mode 100644 index 0000000000..acf9d2b1f9 --- /dev/null +++ b/tests/providers/azure/lib/regions/regions_test.py @@ -0,0 +1,50 @@ +from azure.identity import AzureAuthorityHosts +from msrestazure.azure_cloud import ( + AZURE_CHINA_CLOUD, + AZURE_GERMAN_CLOUD, + AZURE_US_GOV_CLOUD, +) + +from prowler.providers.azure.lib.regions.regions import get_regions_config + + +class Test_azure_regions: + def test_get_regions_config(self): + allowed_regions = [ + "AzureCloud", + "AzureChinaCloud", + "AzureUSGovernment", + "AzureGermanCloud", + ] + expected_output = { + "AzureCloud": { + "authority": None, + "base_url": "https://management.azure.com", + "credential_scopes": ["https://management.azure.com/.default"], + }, + "AzureChinaCloud": { + "authority": AzureAuthorityHosts.AZURE_CHINA, + "base_url": AZURE_CHINA_CLOUD.endpoints.resource_manager, + "credential_scopes": [ + AZURE_CHINA_CLOUD.endpoints.resource_manager + "/.default" + ], + }, + "AzureUSGovernment": { + "authority": AzureAuthorityHosts.AZURE_GOVERNMENT, + "base_url": AZURE_US_GOV_CLOUD.endpoints.resource_manager, + "credential_scopes": [ + AZURE_US_GOV_CLOUD.endpoints.resource_manager + "/.default" + ], + }, + "AzureGermanCloud": { + "authority": AzureAuthorityHosts.AZURE_GERMANY, + "base_url": AZURE_GERMAN_CLOUD.endpoints.resource_manager, + "credential_scopes": [ + AZURE_GERMAN_CLOUD.endpoints.resource_manager + "/.default" + ], + }, + } + + for region in allowed_regions: + region_config = get_regions_config(region) + assert region_config == expected_output[region] diff --git a/tests/providers/common/audit_info_test.py b/tests/providers/common/audit_info_test.py index d8c9b5d7d5..2f09c986a8 100644 --- a/tests/providers/common/audit_info_test.py +++ b/tests/providers/common/audit_info_test.py @@ -11,7 +11,9 @@ from prowler.providers.azure.azure_provider import Azure_Provider from prowler.providers.azure.lib.audit_info.models import ( Azure_Audit_Info, 
Azure_Identity_Info, + Azure_Region_Config, ) +from prowler.providers.azure.lib.exception.exception import AzureException from prowler.providers.common.audit_info import ( Audit_Info, get_tagged_resources, @@ -31,6 +33,7 @@ mock_azure_audit_info = Azure_Audit_Info( audit_metadata=None, audit_resources=None, audit_config=None, + azure_region_config=Azure_Region_Config(), ) mock_set_audit_info = Audit_Info() @@ -132,8 +135,8 @@ class Test_Set_Audit_Info: "prowler.providers.common.audit_info.azure_audit_info", new=mock_azure_audit_info, ) - @patch.object(Azure_Provider, "__set_credentials__", new=mock_set_azure_credentials) - @patch.object(Azure_Provider, "__set_identity_info__", new=mock_set_identity_info) + @patch.object(Azure_Provider, "__get_credentials__", new=mock_set_azure_credentials) + @patch.object(Azure_Provider, "__get_identity_info__", new=mock_set_identity_info) def test_set_audit_info_azure(self): provider = "azure" arguments = { @@ -150,11 +153,109 @@ class Test_Set_Audit_Info: "browser_auth": None, "managed_entity_auth": None, "config_file": default_config_file_path, + "azure_region": "AzureCloud", } audit_info = set_provider_audit_info(provider, arguments) assert isinstance(audit_info, Azure_Audit_Info) + @patch( + "prowler.providers.common.audit_info.azure_audit_info", + new=mock_azure_audit_info, + ) + @patch.object(Azure_Provider, "__get_credentials__", new=mock_set_azure_credentials) + @patch.object(Azure_Provider, "__get_identity_info__", new=mock_set_identity_info) + def test_set_azure_audit_info_not_auth_methods(self): + arguments = { + "profile": None, + "role": None, + "session_duration": None, + "external_id": None, + "regions": None, + "organizations_role": None, + "subscriptions": None, + # We need to set exactly one auth method + "az_cli_auth": None, + "sp_env_auth": None, + "browser_auth": None, + "managed_entity_auth": None, + "config_file": default_config_file_path, + "azure_region": "AzureCloud", + } + + with 
pytest.raises(AzureException) as exception: + _ = Audit_Info().set_azure_audit_info(arguments) + assert exception.type == AzureException + assert ( + exception.value.args[0] + == "Azure provider requires at least one authentication method set: [--az-cli-auth | --sp-env-auth | --browser-auth | --managed-identity-auth]" + ) + + @patch( + "prowler.providers.common.audit_info.azure_audit_info", + new=mock_azure_audit_info, + ) + @patch.object(Azure_Provider, "__get_credentials__", new=mock_set_azure_credentials) + @patch.object(Azure_Provider, "__get_identity_info__", new=mock_set_identity_info) + def test_set_azure_audit_info_browser_auth_but_not_tenant_id(self): + arguments = { + "profile": None, + "role": None, + "session_duration": None, + "external_id": None, + "regions": None, + "organizations_role": None, + "subscriptions": None, + # We need to set exactly one auth method + "az_cli_auth": None, + "sp_env_auth": None, + "browser_auth": True, + "managed_entity_auth": None, + "config_file": default_config_file_path, + "azure_region": "AzureCloud", + } + + with pytest.raises(AzureException) as exception: + _ = Audit_Info().set_azure_audit_info(arguments) + assert exception.type == AzureException + assert ( + exception.value.args[0] + == "Azure Tenant ID (--tenant-id) is required only for browser authentication mode" + ) + + @patch( + "prowler.providers.common.audit_info.azure_audit_info", + new=mock_azure_audit_info, + ) + @patch.object(Azure_Provider, "__get_credentials__", new=mock_set_azure_credentials) + @patch.object(Azure_Provider, "__get_identity_info__", new=mock_set_identity_info) + def test_set_azure_audit_info_tenant_id_but_no_browser_auth(self): + arguments = { + "profile": None, + "role": None, + "session_duration": None, + "external_id": None, + "regions": None, + "organizations_role": None, + "subscriptions": None, + # We need to set exactly one auth method + "az_cli_auth": True, + "sp_env_auth": None, + "browser_auth": None, + "managed_entity_auth": 
None, + "config_file": default_config_file_path, + "azure_region": "AzureCloud", + "tenant_id": "test-tenant-id", + } + + with pytest.raises(AzureException) as exception: + _ = Audit_Info().set_azure_audit_info(arguments) + assert exception.type == AzureException + assert ( + exception.value.args[0] + == "Azure Tenant ID (--tenant-id) is required only for browser authentication mode" + ) + @patch.object(GCP_Provider, "__set_credentials__", new=mock_set_gcp_credentials) @patch.object(GCP_Provider, "get_project_ids", new=mock_get_project_ids) @patch.object(Audit_Info, "print_gcp_credentials", new=mock_print_audit_credentials) diff --git a/tests/providers/common/clean_test.py b/tests/providers/common/clean_test.py new file mode 100644 index 0000000000..7b58217a77 --- /dev/null +++ b/tests/providers/common/clean_test.py @@ -0,0 +1,87 @@ +import importlib +import logging +import tempfile +from argparse import Namespace +from os import path + +from mock import patch + +from prowler.providers.common.clean import clean_provider_local_output_directories + + +class Test_Common_Clean: + def set_provider_input_args(self, provider): + set_args_function = f"set_{provider}_input_args" + args = getattr( + getattr(importlib.import_module(__name__), __class__.__name__), + set_args_function, + )(self) + return args + + def set_aws_input_args(self): + args = Namespace() + args.provider = "aws" + args.output_bucket = "test-bucket" + args.output_bucket_no_assume = None + return args + + def set_azure_input_args(self): + args = Namespace() + args.provider = "azure" + return args + + def test_clean_provider_local_output_directories_non_initialized(self, caplog): + provider = "azure" + input_args = self.set_provider_input_args(provider) + caplog.set_level(logging.INFO) + clean_provider_local_output_directories(input_args) + assert ( + f"Cleaning local output directories not initialized for provider {provider}:" + in caplog.text + ) + + def 
test_clean_aws_local_output_directories_non_default_dir_output_bucket(self): + provider = "aws" + input_args = self.set_provider_input_args(provider) + with tempfile.TemporaryDirectory() as temp_dir: + input_args.output_directory = temp_dir + clean_provider_local_output_directories(input_args) + assert not path.exists(input_args.output_directory) + + def test_clean_aws_local_output_directories_non_default_dir_output_bucket_no_assume( + self, + ): + provider = "aws" + input_args = self.set_provider_input_args(provider) + input_args.output_bucket = None + input_args.output_bucket_no_assume = "test" + with tempfile.TemporaryDirectory() as temp_dir: + input_args.output_directory = temp_dir + clean_provider_local_output_directories(input_args) + assert not path.exists(input_args.output_directory) + + def test_clean_aws_local_output_directories_default_dir_output_bucket(self): + provider = "aws" + input_args = self.set_provider_input_args(provider) + with tempfile.TemporaryDirectory() as temp_dir: + with patch( + "prowler.providers.common.clean.default_output_directory", new=temp_dir + ): + input_args.output_directory = temp_dir + clean_provider_local_output_directories(input_args) + assert path.exists(input_args.output_directory) + + def test_clean_aws_local_output_directories_default_dir_output_bucket_no_assume( + self, + ): + provider = "aws" + input_args = self.set_provider_input_args(provider) + input_args.output_bucket_no_assume = "test" + input_args.ouput_bucket = None + with tempfile.TemporaryDirectory() as temp_dir: + with patch( + "prowler.providers.common.clean.default_output_directory", new=temp_dir + ): + input_args.output_directory = temp_dir + clean_provider_local_output_directories(input_args) + assert path.exists(input_args.output_directory) diff --git a/tests/providers/common/common_outputs_test.py b/tests/providers/common/common_outputs_test.py index a756b72315..3e24091b65 100644 --- a/tests/providers/common/common_outputs_test.py +++ 
b/tests/providers/common/common_outputs_test.py @@ -9,12 +9,14 @@ from prowler.providers.aws.lib.audit_info.audit_info import AWS_Audit_Info from prowler.providers.azure.lib.audit_info.audit_info import ( Azure_Audit_Info, Azure_Identity_Info, + Azure_Region_Config, ) from prowler.providers.common.models import Audit_Metadata from prowler.providers.common.outputs import ( Aws_Output_Options, Azure_Output_Options, Gcp_Output_Options, + get_provider_output_model, set_provider_output_options, ) from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info @@ -33,6 +35,7 @@ class Test_Common_Output_Options: audit_metadata=None, audit_resources=None, audit_config=None, + azure_region_config=Azure_Region_Config(), ) return audit_info @@ -332,7 +335,7 @@ class Test_Common_Output_Options: AWS Account: {audit_info.audited_account}
  • - AWS-CLI Profile: {audit_info.profile} + AWS-CLI Profile: default
  • Audited Regions: All Regions @@ -391,3 +394,16 @@ class Test_Common_Output_Options: """ ) + + def test_get_provider_output_model(self): + audit_info_class_names = [ + "AWS_Audit_Info", + "GCP_Audit_Info", + "Azure_Audit_Info", + ] + for class_name in audit_info_class_names: + provider_prefix = class_name.split("_", 1)[0].lower().capitalize() + assert ( + get_provider_output_model(class_name).__name__ + == f"{provider_prefix}_Check_Output_CSV" + ) From f7248dfb1c5a2442e3c3d2c83e24eda7f2d9af58 Mon Sep 17 00:00:00 2001 From: Sergio Garcia <38561120+sergargar@users.noreply.github.com> Date: Tue, 12 Dec 2023 16:57:52 +0100 Subject: [PATCH 02/10] feat(mute list): change allowlist to mute list (#3039) Co-authored-by: Nacho Rivera --- .github/workflows/codeql.yml | 4 +- .github/workflows/pull-request.yml | 2 + docs/tutorials/configuration_file.md | 12 +- .../{allowlist-keys.png => mutelist-keys.png} | Bin .../{allowlist-row.png => mutelist-row.png} | Bin docs/tutorials/logging.md | 2 +- docs/tutorials/{allowlist.md => mutelist.md} | 68 ++-- mkdocs.yml | 2 +- prowler/__main__.py | 8 +- .../{aws_allowlist.yaml => aws_mutelist.yaml} | 2 +- ...example.yaml => aws_mutelist_example.yaml} | 4 +- prowler/config/config.yaml | 8 +- prowler/lib/banner.py | 2 +- prowler/lib/check/check.py | 10 +- prowler/lib/outputs/html.py | 2 +- prowler/lib/outputs/json.py | 8 +- prowler/lib/outputs/outputs.py | 4 +- .../providers/aws/lib/arguments/arguments.py | 10 +- .../lib/{allowlist => mutelist}/__init__.py | 0 .../allowlist.py => mutelist/mutelist.py} | 167 ++++---- .../accessanalyzer_enabled.py | 4 +- .../config_recorder_all_regions_enabled.py | 4 +- .../drs/drs_job_exist/drs_job_exist.py | 4 +- .../guardduty_is_enabled.py | 6 +- .../securityhub_enabled.py | 6 +- prowler/providers/common/allowlist.py | 35 -- prowler/providers/common/mutelist.py | 35 ++ prowler/providers/common/outputs.py | 22 +- tests/lib/cli/parser_test.py | 20 +- tests/lib/outputs/outputs_test.py | 10 +- 
.../fixtures/mutelist.yaml} | 13 +- .../mutelist_test.py} | 362 +++++++++--------- .../accessanalyzer_enabled_test.py | 6 +- ...onfig_recorder_all_regions_enabled_test.py | 6 +- .../drs/drs_job_exist/drs_job_exist_test.py | 6 +- .../guardduty_is_enabled_test.py | 6 +- .../securityhub_enabled_test.py | 6 +- tests/providers/common/common_outputs_test.py | 30 +- 38 files changed, 445 insertions(+), 451 deletions(-) rename docs/tutorials/img/{allowlist-keys.png => mutelist-keys.png} (100%) rename docs/tutorials/img/{allowlist-row.png => mutelist-row.png} (100%) rename docs/tutorials/{allowlist.md => mutelist.md} (70%) rename prowler/config/{aws_allowlist.yaml => aws_mutelist.yaml} (99%) rename prowler/config/{aws_allowlist_example.yaml => aws_mutelist_example.yaml} (97%) rename prowler/providers/aws/lib/{allowlist => mutelist}/__init__.py (100%) rename prowler/providers/aws/lib/{allowlist/allowlist.py => mutelist/mutelist.py} (63%) delete mode 100644 prowler/providers/common/allowlist.py create mode 100644 prowler/providers/common/mutelist.py rename tests/providers/aws/lib/{allowlist/fixtures/allowlist.yaml => mutelist/fixtures/mutelist.yaml} (78%) rename tests/providers/aws/lib/{allowlist/allowlist_test.py => mutelist/mutelist_test.py} (71%) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 6a3cddf936..b02607fe38 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -13,10 +13,10 @@ name: "CodeQL" on: push: - branches: [ "master", prowler-2, prowler-3.0-dev ] + branches: [ "master", "prowler-4.0-dev" ] pull_request: # The branches below must be a subset of the branches above - branches: [ "master" ] + branches: [ "master", "prowler-4.0-dev" ] schedule: - cron: '00 12 * * *' diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index 79f8c19ea3..6353157b66 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -4,9 +4,11 @@ on: push: branches: - 
"master" + - "prowler-4.0-dev" pull_request: branches: - "master" + - "prowler-4.0-dev" jobs: build: runs-on: ubuntu-latest diff --git a/docs/tutorials/configuration_file.md b/docs/tutorials/configuration_file.md index bc9f149ba9..6aa8c8f63d 100644 --- a/docs/tutorials/configuration_file.md +++ b/docs/tutorials/configuration_file.md @@ -29,10 +29,10 @@ The following list includes all the AWS checks with configurable variables that | `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings | | `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String | | `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean | -| `config_recorder_all_regions_enabled` | `allowlist_non_default_regions` | Boolean | -| `drs_job_exist` | `allowlist_non_default_regions` | Boolean | -| `guardduty_is_enabled` | `allowlist_non_default_regions` | Boolean | -| `securityhub_enabled` | `allowlist_non_default_regions` | Boolean | +| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean | +| `drs_job_exist` | `mute_non_default_regions` | Boolean | +| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean | +| `securityhub_enabled` | `mute_non_default_regions` | Boolean | ## Azure @@ -50,8 +50,8 @@ The following list includes all the AWS checks with configurable variables that aws: # AWS Global Configuration - # aws.allowlist_non_default_regions --> Allowlist Failed Findings in non-default regions for GuardDuty, SecurityHub, DRS and Config - allowlist_non_default_regions: False + # aws.mute_non_default_regions --> Mute Failed Findings in non-default regions for GuardDuty, SecurityHub, DRS and Config + mute_non_default_regions: False # AWS IAM Configuration # aws.iam_user_accesskey_unused --> CIS recommends 45 days diff --git a/docs/tutorials/img/allowlist-keys.png b/docs/tutorials/img/mutelist-keys.png similarity index 100% 
rename from docs/tutorials/img/allowlist-keys.png rename to docs/tutorials/img/mutelist-keys.png diff --git a/docs/tutorials/img/allowlist-row.png b/docs/tutorials/img/mutelist-row.png similarity index 100% rename from docs/tutorials/img/allowlist-row.png rename to docs/tutorials/img/mutelist-row.png diff --git a/docs/tutorials/logging.md b/docs/tutorials/logging.md index 0d74a9013f..48036c4653 100644 --- a/docs/tutorials/logging.md +++ b/docs/tutorials/logging.md @@ -8,7 +8,7 @@ There are different log levels depending on the logging information that is desi - **DEBUG**: It will show low-level logs from Python. - **INFO**: It will show all the API calls that are being invoked by the provider. -- **WARNING**: It will show all resources that are being **allowlisted**. +- **WARNING**: It will show all resources that are being **muted**. - **ERROR**: It will show any errors, e.g., not authorized actions. - **CRITICAL**: The default log level. If a critical log appears, it will **exit** Prowler’s execution. diff --git a/docs/tutorials/allowlist.md b/docs/tutorials/mutelist.md similarity index 70% rename from docs/tutorials/allowlist.md rename to docs/tutorials/mutelist.md index 394894cca9..b14b265028 100644 --- a/docs/tutorials/allowlist.md +++ b/docs/tutorials/mutelist.md @@ -1,19 +1,19 @@ -# Allowlisting +# Mute Listing Sometimes you may find resources that are intentionally configured in a certain way that may be a bad practice but it is all right with it, for example an AWS S3 Bucket open to the internet hosting a web site, or an AWS Security Group with an open port needed in your use case. -Allowlist option works along with other options and adds a `WARNING` instead of `INFO`, `PASS` or `FAIL` to any output format. +Mute List option works along with other options and adds a `MUTED` instead of `INFO`, `PASS` or `FAIL` to any output format. -You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, but first, let's review the syntax. 
+You can use `-w`/`--mutelist-file` with the path of your mutelist yaml file, but first, let's review the syntax. -## Allowlist Yaml File Syntax +## Mute List Yaml File Syntax ### Account, Check and/or Region can be * to apply for all the cases. ### Resources and tags are lists that can have either Regex or Keywords. ### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together. ### Use an alternation Regex to match one of multiple tags with "ORed" logic. ### For each check you can except Accounts, Regions, Resources and/or Tags. - ########################### ALLOWLIST EXAMPLE ########################### - Allowlist: + ########################### MUTE LIST EXAMPLE ########################### + Mute List: Accounts: "123456789012": Checks: @@ -79,10 +79,10 @@ You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, b Tags: - "environment=prod" # Will ignore every resource except in account 123456789012 except the ones containing the string "test" and tag environment=prod -## Allowlist specific regions -If you want to allowlist/mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w allowlist.yaml`: +## Mute specific regions +If you want to mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w mutelist.yaml`: - Allowlist: + Mute List: Accounts: "*": Checks: @@ -93,50 +93,50 @@ If you want to allowlist/mute failed findings only in specific regions, create a Resources: - "*" -## Default AWS Allowlist -Prowler provides you a Default AWS Allowlist with the AWS Resources that should be allowlisted such as all resources created by AWS Control Tower when setting up a landing zone. 
-You can execute Prowler with this allowlist using the following command: +## Default AWS Mute List +Prowler provides you a Default AWS Mute List with the AWS Resources that should be muted such as all resources created by AWS Control Tower when setting up a landing zone. +You can execute Prowler with this mutelist using the following command: ```sh -prowler aws --allowlist prowler/config/aws_allowlist.yaml +prowler aws --mutelist prowler/config/aws_mutelist.yaml ``` -## Supported Allowlist Locations +## Supported Mute List Locations -The allowlisting flag supports the following locations: +The mutelisting flag supports the following locations: ### Local file -You will need to pass the local path where your Allowlist YAML file is located: +You will need to pass the local path where your Mute List YAML file is located: ``` -prowler -w allowlist.yaml +prowler -w mutelist.yaml ``` ### AWS S3 URI -You will need to pass the S3 URI where your Allowlist YAML file was uploaded to your bucket: +You will need to pass the S3 URI where your Mute List YAML file was uploaded to your bucket: ``` -prowler aws -w s3:////allowlist.yaml +prowler aws -w s3:////mutelist.yaml ``` -> Make sure that the used AWS credentials have s3:GetObject permissions in the S3 path where the allowlist file is located. +> Make sure that the used AWS credentials have s3:GetObject permissions in the S3 path where the mutelist file is located. ### AWS DynamoDB Table ARN -You will need to pass the DynamoDB Allowlist Table ARN: +You will need to pass the DynamoDB Mute List Table ARN: ``` prowler aws -w arn:aws:dynamodb:::table/ ``` 1. The DynamoDB Table must have the following String keys: - + -- The Allowlist Table must have the following columns: - - Accounts (String): This field can contain either an Account ID or an `*` (which applies to all the accounts that use this table as an allowlist). 
+- The Mute List Table must have the following columns: + - Accounts (String): This field can contain either an Account ID or an `*` (which applies to all the accounts that use this table as an mutelist). - Checks (String): This field can contain either a Prowler Check Name or an `*` (which applies to all the scanned checks). - - Regions (List): This field contains a list of regions where this allowlist rule is applied (it can also contains an `*` to apply all scanned regions). - - Resources (List): This field contains a list of regex expressions that applies to the resources that are wanted to be allowlisted. - - Tags (List): -Optional- This field contains a list of tuples in the form of 'key=value' that applies to the resources tags that are wanted to be allowlisted. - - Exceptions (Map): -Optional- This field contains a map of lists of accounts/regions/resources/tags that are wanted to be excepted in the allowlist. + - Regions (List): This field contains a list of regions where this mutelist rule is applied (it can also contains an `*` to apply all scanned regions). + - Resources (List): This field contains a list of regex expressions that applies to the resources that are wanted to be muted. + - Tags (List): -Optional- This field contains a list of tuples in the form of 'key=value' that applies to the resources tags that are wanted to be muted. + - Exceptions (Map): -Optional- This field contains a map of lists of accounts/regions/resources/tags that are wanted to be excepted in the mutelist. 
-The following example will allowlist all resources in all accounts for the EC2 checks in the regions `eu-west-1` and `us-east-1` with the tags `environment=dev` and `environment=prod`, except the resources containing the string `test` in the account `012345678912` and region `eu-west-1` with the tag `environment=prod`: +The following example will mute all resources in all accounts for the EC2 checks in the regions `eu-west-1` and `us-east-1` with the tags `environment=dev` and `environment=prod`, except the resources containing the string `test` in the account `012345678912` and region `eu-west-1` with the tag `environment=prod`: - + > Make sure that the used AWS credentials have `dynamodb:PartiQLSelect` permissions in the table. @@ -151,7 +151,7 @@ prowler aws -w arn:aws:lambda:REGION:ACCOUNT_ID:function:FUNCTION_NAME Make sure that the credentials that Prowler uses can invoke the Lambda Function: ``` -- PolicyName: GetAllowList +- PolicyName: GetMuteList PolicyDocument: Version: '2012-10-17' Statement: @@ -160,14 +160,14 @@ Make sure that the credentials that Prowler uses can invoke the Lambda Function: Resource: arn:aws:lambda:REGION:ACCOUNT_ID:function:FUNCTION_NAME ``` -The Lambda Function can then generate an Allowlist dynamically. Here is the code an example Python Lambda Function that -generates an Allowlist: +The Lambda Function can then generate an Mute List dynamically. 
Here is the code an example Python Lambda Function that +generates an Mute List: ``` def handler(event, context): checks = {} checks["vpc_flow_logs_enabled"] = { "Regions": [ "*" ], "Resources": [ "" ], Optional("Tags"): [ "key:value" ] } - al = { "Allowlist": { "Accounts": { "*": { "Checks": checks } } } } + al = { "Mute List": { "Accounts": { "*": { "Checks": checks } } } } return al ``` diff --git a/mkdocs.yml b/mkdocs.yml index 86761e4f68..ea71d7bd1a 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -36,7 +36,7 @@ nav: - Slack Integration: tutorials/integrations.md - Configuration File: tutorials/configuration_file.md - Logging: tutorials/logging.md - - Allowlist: tutorials/allowlist.md + - Mute List: tutorials/mutelist.md - Check Aliases: tutorials/check-aliases.md - Custom Metadata: tutorials/custom-checks-metadata.md - Ignore Unused Services: tutorials/ignore-unused-services.md diff --git a/prowler/__main__.py b/prowler/__main__.py index a6151dec98..b50ddd5fcb 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -46,12 +46,12 @@ from prowler.providers.aws.lib.security_hub.security_hub import ( resolve_security_hub_previous_findings, verify_security_hub_integration_enabled_per_region, ) -from prowler.providers.common.allowlist import set_provider_allowlist from prowler.providers.common.audit_info import ( set_provider_audit_info, set_provider_execution_parameters, ) from prowler.providers.common.clean import clean_provider_local_output_directories +from prowler.providers.common.mutelist import set_provider_mutelist from prowler.providers.common.outputs import set_provider_output_options from prowler.providers.common.quick_inventory import run_provider_quick_inventory @@ -173,12 +173,12 @@ def prowler(): # Sort final check list checks_to_execute = sorted(checks_to_execute) - # Parse Allowlist - allowlist_file = set_provider_allowlist(provider, audit_info, args) + # Parse Mute List + mutelist_file = set_provider_mutelist(provider, audit_info, args) # Set 
output options based on the selected provider audit_output_options = set_provider_output_options( - provider, args, audit_info, allowlist_file, bulk_checks_metadata + provider, args, audit_info, mutelist_file, bulk_checks_metadata ) # Run the quick inventory for the provider if available diff --git a/prowler/config/aws_allowlist.yaml b/prowler/config/aws_mutelist.yaml similarity index 99% rename from prowler/config/aws_allowlist.yaml rename to prowler/config/aws_mutelist.yaml index 6ba4dc9f62..cc42ec728e 100644 --- a/prowler/config/aws_allowlist.yaml +++ b/prowler/config/aws_mutelist.yaml @@ -1,4 +1,4 @@ -Allowlist: +Mute List: Accounts: "*": ########################### AWS CONTROL TOWER ########################### diff --git a/prowler/config/aws_allowlist_example.yaml b/prowler/config/aws_mutelist_example.yaml similarity index 97% rename from prowler/config/aws_allowlist_example.yaml rename to prowler/config/aws_mutelist_example.yaml index 7f5028c094..64a2493945 100644 --- a/prowler/config/aws_allowlist_example.yaml +++ b/prowler/config/aws_mutelist_example.yaml @@ -3,8 +3,8 @@ ### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together. ### Use an alternation Regex to match one of multiple tags with "ORed" logic. ### For each check you can except Accounts, Regions, Resources and/or Tags. 
-########################### ALLOWLIST EXAMPLE ########################### -Allowlist: +########################### MUTE LIST EXAMPLE ########################### +Mute List: Accounts: "123456789012": Checks: diff --git a/prowler/config/config.yaml b/prowler/config/config.yaml index 86a03b5a3b..d9cf87c787 100644 --- a/prowler/config/config.yaml +++ b/prowler/config/config.yaml @@ -2,10 +2,10 @@ aws: # AWS Global Configuration - # aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config - allowlist_non_default_regions: False - # If you want to allowlist/mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w allowlist.yaml`: - # Allowlist: + # aws.mute_non_default_regions --> Set to True to mute failed findings in non-default regions for GuardDuty, SecurityHub, DRS and Config + mute_non_default_regions: False + # If you want to mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w mutelist.yaml`: + # Mute List: # Accounts: # "*": # Checks: diff --git a/prowler/lib/banner.py b/prowler/lib/banner.py index 867bfd663e..f99f092404 100644 --- a/prowler/lib/banner.py +++ b/prowler/lib/banner.py @@ -21,7 +21,7 @@ def print_banner(args): Color code for results: - {Fore.YELLOW}INFO (Information){Style.RESET_ALL} - {Fore.GREEN}PASS (Recommended value){Style.RESET_ALL} -- {orange_color}WARNING (Ignored by allowlist){Style.RESET_ALL} +- {orange_color}MUTED (Muted by muted list){Style.RESET_ALL} - {Fore.RED}FAIL (Fix required){Style.RESET_ALL} """ ) diff --git a/prowler/lib/check/check.py b/prowler/lib/check/check.py index ce5e23cf9d..d8e91f18c1 100644 --- a/prowler/lib/check/check.py +++ b/prowler/lib/check/check.py @@ -21,7 +21,7 @@ from prowler.lib.check.models import Check, load_check_metadata from prowler.lib.logger import logger from 
prowler.lib.outputs.outputs import report from prowler.lib.utils.utils import open_file, parse_json_file -from prowler.providers.aws.lib.allowlist.allowlist import allowlist_findings +from prowler.providers.aws.lib.mutelist.mutelist import mutelist_findings from prowler.providers.common.models import Audit_Metadata from prowler.providers.common.outputs import Provider_Output_Options @@ -560,10 +560,10 @@ def execute( audit_info.audit_metadata, services_executed, checks_executed ) - # Allowlist findings - if audit_output_options.allowlist_file: - check_findings = allowlist_findings( - audit_output_options.allowlist_file, + # Mute List findings + if audit_output_options.mutelist_file: + check_findings = mutelist_findings( + audit_output_options.mutelist_file, audit_info.audited_account, check_findings, ) diff --git a/prowler/lib/outputs/html.py b/prowler/lib/outputs/html.py index 820681b094..5a7f1cf2fc 100644 --- a/prowler/lib/outputs/html.py +++ b/prowler/lib/outputs/html.py @@ -173,7 +173,7 @@ def fill_html(file_descriptor, finding, output_options): row_class = "table-info" elif finding.status == "FAIL": row_class = "table-danger" - elif finding.status == "WARNING": + elif finding.status == "MUTED": row_class = "table-warning" file_descriptor.write( f""" diff --git a/prowler/lib/outputs/json.py b/prowler/lib/outputs/json.py index 6f1403cf05..7a5ab71d49 100644 --- a/prowler/lib/outputs/json.py +++ b/prowler/lib/outputs/json.py @@ -116,8 +116,8 @@ def generate_json_asff_status(status: str) -> str: json_asff_status = "PASSED" elif status == "FAIL": json_asff_status = "FAILED" - elif status == "WARNING": - json_asff_status = "WARNING" + elif status == "MUTED": + json_asff_status = "MUTED" else: json_asff_status = "NOT_AVAILABLE" @@ -293,7 +293,7 @@ def generate_json_ocsf_status(status: str): json_ocsf_status = "Success" elif status == "FAIL": json_ocsf_status = "Failure" - elif status == "WARNING": + elif status == "MUTED": json_ocsf_status = "Other" else: 
json_ocsf_status = "Unknown" @@ -307,7 +307,7 @@ def generate_json_ocsf_status_id(status: str): json_ocsf_status_id = 1 elif status == "FAIL": json_ocsf_status_id = 2 - elif status == "WARNING": + elif status == "MUTED": json_ocsf_status_id = 99 else: json_ocsf_status_id = 0 diff --git a/prowler/lib/outputs/outputs.py b/prowler/lib/outputs/outputs.py index b81861674c..288a58152c 100644 --- a/prowler/lib/outputs/outputs.py +++ b/prowler/lib/outputs/outputs.py @@ -165,12 +165,12 @@ def set_report_color(status: str) -> str: color = Fore.RED elif status == "ERROR": color = Fore.BLACK - elif status == "WARNING": + elif status == "MUTED": color = orange_color elif status == "INFO": color = Fore.YELLOW else: - raise Exception("Invalid Report Status. Must be PASS, FAIL, ERROR or WARNING") + raise Exception("Invalid Report Status. Must be PASS, FAIL, ERROR or MUTED") return color diff --git a/prowler/providers/aws/lib/arguments/arguments.py b/prowler/providers/aws/lib/arguments/arguments.py index 96fab145db..3abd739d5d 100644 --- a/prowler/providers/aws/lib/arguments/arguments.py +++ b/prowler/providers/aws/lib/arguments/arguments.py @@ -117,14 +117,14 @@ def init_parser(self): default=None, help="Shodan API key used by check ec2_elastic_ip_shodan.", ) - # Allowlist - allowlist_subparser = aws_parser.add_argument_group("Allowlist") - allowlist_subparser.add_argument( + # Mute List + mutelist_subparser = aws_parser.add_argument_group("Mute List") + mutelist_subparser.add_argument( "-w", - "--allowlist-file", + "--mutelist-file", nargs="?", default=None, - help="Path for allowlist yaml file. See example prowler/config/aws_allowlist.yaml for reference and format. It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/allowlist/", + help="Path for mutelist yaml file. See example prowler/config/aws_mutelist.yaml for reference and format. 
It also accepts AWS DynamoDB Table or Lambda ARNs or S3 URIs, see more in https://docs.prowler.cloud/en/latest/tutorials/mutelist/", ) # Based Scans diff --git a/prowler/providers/aws/lib/allowlist/__init__.py b/prowler/providers/aws/lib/mutelist/__init__.py similarity index 100% rename from prowler/providers/aws/lib/allowlist/__init__.py rename to prowler/providers/aws/lib/mutelist/__init__.py diff --git a/prowler/providers/aws/lib/allowlist/allowlist.py b/prowler/providers/aws/lib/mutelist/mutelist.py similarity index 63% rename from prowler/providers/aws/lib/allowlist/allowlist.py rename to prowler/providers/aws/lib/mutelist/mutelist.py index 6789ff9e92..da255f9b86 100644 --- a/prowler/providers/aws/lib/allowlist/allowlist.py +++ b/prowler/providers/aws/lib/mutelist/mutelist.py @@ -9,7 +9,7 @@ from schema import Optional, Schema from prowler.lib.logger import logger from prowler.lib.outputs.models import unroll_tags -allowlist_schema = Schema( +mutelist_schema = Schema( { "Accounts": { str: { @@ -32,38 +32,38 @@ allowlist_schema = Schema( ) -def parse_allowlist_file(audit_info, allowlist_file): +def parse_mutelist_file(audit_info, mutelist_file): try: # Check if file is a S3 URI - if re.search("^s3://([^/]+)/(.*?([^/]+))$", allowlist_file): - bucket = allowlist_file.split("/")[2] - key = ("/").join(allowlist_file.split("/")[3:]) + if re.search("^s3://([^/]+)/(.*?([^/]+))$", mutelist_file): + bucket = mutelist_file.split("/")[2] + key = ("/").join(mutelist_file.split("/")[3:]) s3_client = audit_info.audit_session.client("s3") - allowlist = yaml.safe_load( + mutelist = yaml.safe_load( s3_client.get_object(Bucket=bucket, Key=key)["Body"] - )["Allowlist"] + )["Mute List"] # Check if file is a Lambda Function ARN - elif re.search(r"^arn:(\w+):lambda:", allowlist_file): - lambda_region = allowlist_file.split(":")[3] + elif re.search(r"^arn:(\w+):lambda:", mutelist_file): + lambda_region = mutelist_file.split(":")[3] lambda_client = audit_info.audit_session.client( 
"lambda", region_name=lambda_region ) lambda_response = lambda_client.invoke( - FunctionName=allowlist_file, InvocationType="RequestResponse" + FunctionName=mutelist_file, InvocationType="RequestResponse" ) lambda_payload = lambda_response["Payload"].read() - allowlist = yaml.safe_load(lambda_payload)["Allowlist"] + mutelist = yaml.safe_load(lambda_payload)["Mute List"] # Check if file is a DynamoDB ARN elif re.search( r"^arn:aws(-cn|-us-gov)?:dynamodb:[a-z]{2}-[a-z-]+-[1-9]{1}:[0-9]{12}:table\/[a-zA-Z0-9._-]+$", - allowlist_file, + mutelist_file, ): - allowlist = {"Accounts": {}} - table_region = allowlist_file.split(":")[3] + mutelist = {"Accounts": {}} + table_region = mutelist_file.split(":")[3] dynamodb_resource = audit_info.audit_session.resource( "dynamodb", region_name=table_region ) - dynamo_table = dynamodb_resource.Table(allowlist_file.split("/")[1]) + dynamo_table = dynamodb_resource.Table(mutelist_file.split("/")[1]) response = dynamo_table.scan( FilterExpression=Attr("Accounts").is_in( [audit_info.audited_account, "*"] @@ -80,8 +80,8 @@ def parse_allowlist_file(audit_info, allowlist_file): ) dynamodb_items.update(response["Items"]) for item in dynamodb_items: - # Create allowlist for every item - allowlist["Accounts"][item["Accounts"]] = { + # Create mutelist for every item + mutelist["Accounts"][item["Accounts"]] = { "Checks": { item["Checks"]: { "Regions": item["Regions"], @@ -90,24 +90,24 @@ def parse_allowlist_file(audit_info, allowlist_file): } } if "Tags" in item: - allowlist["Accounts"][item["Accounts"]]["Checks"][item["Checks"]][ + mutelist["Accounts"][item["Accounts"]]["Checks"][item["Checks"]][ "Tags" ] = item["Tags"] if "Exceptions" in item: - allowlist["Accounts"][item["Accounts"]]["Checks"][item["Checks"]][ + mutelist["Accounts"][item["Accounts"]]["Checks"][item["Checks"]][ "Exceptions" ] = item["Exceptions"] else: - with open(allowlist_file) as f: - allowlist = yaml.safe_load(f)["Allowlist"] + with open(mutelist_file) as f: + mutelist = 
yaml.safe_load(f)["Mute List"] try: - allowlist_schema.validate(allowlist) + mutelist_schema.validate(mutelist) except Exception as error: logger.critical( - f"{error.__class__.__name__} -- Allowlist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]" + f"{error.__class__.__name__} -- Mute List YAML is malformed - {error}[{error.__traceback__.tb_lineno}]" ) sys.exit(1) - return allowlist + return mutelist except Exception as error: logger.critical( f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" @@ -115,27 +115,27 @@ def parse_allowlist_file(audit_info, allowlist_file): sys.exit(1) -def allowlist_findings( - allowlist: dict, +def mutelist_findings( + mutelist: dict, audited_account: str, check_findings: [Any], ): - # Check if finding is allowlisted + # Check if finding is muted for finding in check_findings: - if is_allowlisted( - allowlist, + if is_muted( + mutelist, audited_account, finding.check_metadata.CheckID, finding.region, finding.resource_id, unroll_tags(finding.resource_tags), ): - finding.status = "WARNING" + finding.status = "MUTED" return check_findings -def is_allowlisted( - allowlist: dict, +def is_muted( + mutelist: dict, audited_account: str, check: str, finding_region: str, @@ -143,31 +143,30 @@ def is_allowlisted( finding_tags, ): try: - allowlisted_checks = {} - # By default is not allowlisted - is_finding_allowlisted = False - # First set account key from allowlist dict - if audited_account in allowlist["Accounts"]: - allowlisted_checks = allowlist["Accounts"][audited_account]["Checks"] + muted_checks = {} + # By default is not muted + is_finding_muted = False + # First set account key from mutelist dict + if audited_account in mutelist["Accounts"]: + muted_checks = mutelist["Accounts"][audited_account]["Checks"] # If there is a *, it affects to all accounts # This cannot be elif since in the case of * and single accounts we - # want to merge allowlisted checks from * to the other accounts check list - if 
"*" in allowlist["Accounts"]: - checks_multi_account = allowlist["Accounts"]["*"]["Checks"] - allowlisted_checks.update(checks_multi_account) - - # Test if it is allowlisted - if is_allowlisted_in_check( - allowlisted_checks, + # want to merge muted checks from * to the other accounts check list + if "*" in mutelist["Accounts"]: + checks_multi_account = mutelist["Accounts"]["*"]["Checks"] + muted_checks.update(checks_multi_account) + # Test if it is muted + if is_muted_in_check( + muted_checks, audited_account, check, finding_region, finding_resource, finding_tags, ): - is_finding_allowlisted = True + is_finding_muted = True - return is_finding_allowlisted + return is_finding_muted except Exception as error: logger.critical( f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" @@ -175,8 +174,8 @@ def is_allowlisted( sys.exit(1) -def is_allowlisted_in_check( - allowlisted_checks, +def is_muted_in_check( + muted_checks, audited_account, check, finding_region, @@ -184,15 +183,15 @@ def is_allowlisted_in_check( finding_tags, ): try: - # Default value is not allowlisted - is_check_allowlisted = False + # Default value is not muted + is_check_muted = False - for allowlisted_check, allowlisted_check_info in allowlisted_checks.items(): + for muted_check, muted_check_info in muted_checks.items(): # map lambda to awslambda - allowlisted_check = re.sub("^lambda", "awslambda", allowlisted_check) + muted_check = re.sub("^lambda", "awslambda", muted_check) # Check if the finding is excepted - exceptions = allowlisted_check_info.get("Exceptions") + exceptions = muted_check_info.get("Exceptions") if is_excepted( exceptions, audited_account, @@ -203,40 +202,36 @@ def is_allowlisted_in_check( # Break loop and return default value since is excepted break - allowlisted_regions = allowlisted_check_info.get("Regions") - allowlisted_resources = allowlisted_check_info.get("Resources") - allowlisted_tags = allowlisted_check_info.get("Tags") + muted_regions = 
muted_check_info.get("Regions") + muted_resources = muted_check_info.get("Resources") + muted_tags = muted_check_info.get("Tags") # If there is a *, it affects to all checks if ( - "*" == allowlisted_check - or check == allowlisted_check - or re.search(allowlisted_check, check) + "*" == muted_check + or check == muted_check + or re.search(muted_check, check) ): - allowlisted_in_check = True - allowlisted_in_region = is_allowlisted_in_region( - allowlisted_regions, finding_region - ) - allowlisted_in_resource = is_allowlisted_in_resource( - allowlisted_resources, finding_resource - ) - allowlisted_in_tags = is_allowlisted_in_tags( - allowlisted_tags, finding_tags + muted_in_check = True + muted_in_region = is_muted_in_region(muted_regions, finding_region) + muted_in_resource = is_muted_in_resource( + muted_resources, finding_resource ) + muted_in_tags = is_muted_in_tags(muted_tags, finding_tags) - # For a finding to be allowlisted requires the following set to True: - # - allowlisted_in_check -> True - # - allowlisted_in_region -> True - # - allowlisted_in_tags -> True or allowlisted_in_resource -> True + # For a finding to be muted requires the following set to True: + # - muted_in_check -> True + # - muted_in_region -> True + # - muted_in_tags -> True or muted_in_resource -> True # - excepted -> False if ( - allowlisted_in_check - and allowlisted_in_region - and (allowlisted_in_tags or allowlisted_in_resource) + muted_in_check + and muted_in_region + and (muted_in_tags or muted_in_resource) ): - is_check_allowlisted = True + is_check_muted = True - return is_check_allowlisted + return is_check_muted except Exception as error: logger.critical( f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" @@ -244,12 +239,12 @@ def is_allowlisted_in_check( sys.exit(1) -def is_allowlisted_in_region( - allowlisted_regions, +def is_muted_in_region( + mutelist_regions, finding_region, ): try: - return __is_item_matched__(allowlisted_regions, finding_region) + 
return __is_item_matched__(mutelist_regions, finding_region) except Exception as error: logger.critical( f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" @@ -257,9 +252,9 @@ def is_allowlisted_in_region( sys.exit(1) -def is_allowlisted_in_tags(allowlisted_tags, finding_tags): +def is_muted_in_tags(muted_tags, finding_tags): try: - return __is_item_matched__(allowlisted_tags, finding_tags) + return __is_item_matched__(muted_tags, finding_tags) except Exception as error: logger.critical( f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]" @@ -267,9 +262,9 @@ def is_allowlisted_in_tags(allowlisted_tags, finding_tags): sys.exit(1) -def is_allowlisted_in_resource(allowlisted_resources, finding_resource): +def is_muted_in_resource(muted_resources, finding_resource): try: - return __is_item_matched__(allowlisted_resources, finding_resource) + return __is_item_matched__(muted_resources, finding_resource) except Exception as error: logger.critical( diff --git a/prowler/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled.py b/prowler/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled.py index 2b51630b13..75060b5a61 100644 --- a/prowler/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled.py +++ b/prowler/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled.py @@ -31,11 +31,11 @@ class accessanalyzer_enabled(Check): ) if ( accessanalyzer_client.audit_config.get( - "allowlist_non_default_regions", False + "mute_non_default_regions", False ) and not analyzer.region == accessanalyzer_client.region ): - report.status = "WARNING" + report.status = "MUTED" findings.append(report) diff --git a/prowler/providers/aws/services/config/config_recorder_all_regions_enabled/config_recorder_all_regions_enabled.py 
b/prowler/providers/aws/services/config/config_recorder_all_regions_enabled/config_recorder_all_regions_enabled.py index 5e4b86a55a..b0782a2356 100644 --- a/prowler/providers/aws/services/config/config_recorder_all_regions_enabled/config_recorder_all_regions_enabled.py +++ b/prowler/providers/aws/services/config/config_recorder_all_regions_enabled/config_recorder_all_regions_enabled.py @@ -36,10 +36,10 @@ class config_recorder_all_regions_enabled(Check): f"AWS Config recorder {recorder.name} is disabled." ) if report.status == "FAIL" and ( - config_client.audit_config.get("allowlist_non_default_regions", False) + config_client.audit_config.get("mute_non_default_regions", False) and not recorder.region == config_client.region ): - report.status = "WARNING" + report.status = "MUTED" findings.append(report) diff --git a/prowler/providers/aws/services/drs/drs_job_exist/drs_job_exist.py b/prowler/providers/aws/services/drs/drs_job_exist/drs_job_exist.py index 9a65dea759..565e8b285d 100644 --- a/prowler/providers/aws/services/drs/drs_job_exist/drs_job_exist.py +++ b/prowler/providers/aws/services/drs/drs_job_exist/drs_job_exist.py @@ -20,10 +20,10 @@ class drs_job_exist(Check): report.status_extended = "DRS is enabled for this region with jobs." 
if report.status == "FAIL" and ( - drs_client.audit_config.get("allowlist_non_default_regions", False) + drs_client.audit_config.get("mute_non_default_regions", False) and not drs.region == drs_client.region ): - report.status = "WARNING" + report.status = "MUTED" findings.append(report) diff --git a/prowler/providers/aws/services/guardduty/guardduty_is_enabled/guardduty_is_enabled.py b/prowler/providers/aws/services/guardduty/guardduty_is_enabled/guardduty_is_enabled.py index 0d81e170b2..6aec8a90d5 100644 --- a/prowler/providers/aws/services/guardduty/guardduty_is_enabled/guardduty_is_enabled.py +++ b/prowler/providers/aws/services/guardduty/guardduty_is_enabled/guardduty_is_enabled.py @@ -29,12 +29,10 @@ class guardduty_is_enabled(Check): ) if report.status == "FAIL" and ( - guardduty_client.audit_config.get( - "allowlist_non_default_regions", False - ) + guardduty_client.audit_config.get("mute_non_default_regions", False) and not detector.region == guardduty_client.region ): - report.status = "WARNING" + report.status = "MUTED" findings.append(report) diff --git a/prowler/providers/aws/services/securityhub/securityhub_enabled/securityhub_enabled.py b/prowler/providers/aws/services/securityhub/securityhub_enabled/securityhub_enabled.py index 62023ebb9a..390b4f781b 100644 --- a/prowler/providers/aws/services/securityhub/securityhub_enabled/securityhub_enabled.py +++ b/prowler/providers/aws/services/securityhub/securityhub_enabled/securityhub_enabled.py @@ -26,12 +26,10 @@ class securityhub_enabled(Check): report.status_extended = "Security Hub is not enabled." 
if report.status == "FAIL" and ( - securityhub_client.audit_config.get( - "allowlist_non_default_regions", False - ) + securityhub_client.audit_config.get("mute_non_default_regions", False) and not securityhub.region == securityhub_client.region ): - report.status = "WARNING" + report.status = "MUTED" findings.append(report) diff --git a/prowler/providers/common/allowlist.py b/prowler/providers/common/allowlist.py deleted file mode 100644 index 0529ec8348..0000000000 --- a/prowler/providers/common/allowlist.py +++ /dev/null @@ -1,35 +0,0 @@ -import importlib -import sys - -from prowler.lib.logger import logger -from prowler.providers.aws.lib.allowlist.allowlist import parse_allowlist_file - - -def set_provider_allowlist(provider, audit_info, args): - """ - set_provider_allowlist configures the allowlist based on the selected provider. - """ - try: - # Check if the provider arguments has the allowlist_file - if hasattr(args, "allowlist_file"): - # Dynamically get the Provider allowlist handler - provider_allowlist_function = f"set_{provider}_allowlist" - allowlist_file = getattr( - importlib.import_module(__name__), provider_allowlist_function - )(audit_info, args.allowlist_file) - - return allowlist_file - except Exception as error: - logger.critical( - f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) - sys.exit(1) - - -def set_aws_allowlist(audit_info, allowlist_file): - # Parse content from Allowlist file and get it, if necessary, from S3 - if allowlist_file: - allowlist_file = parse_allowlist_file(audit_info, allowlist_file) - else: - allowlist_file = None - return allowlist_file diff --git a/prowler/providers/common/mutelist.py b/prowler/providers/common/mutelist.py new file mode 100644 index 0000000000..969db52cc4 --- /dev/null +++ b/prowler/providers/common/mutelist.py @@ -0,0 +1,35 @@ +import importlib +import sys + +from prowler.lib.logger import logger +from prowler.providers.aws.lib.mutelist.mutelist import parse_mutelist_file + 
+ +def set_provider_mutelist(provider, audit_info, args): + """ + set_provider_mutelist configures the mutelist based on the selected provider. + """ + try: + # Check if the provider arguments has the mutelist_file + if hasattr(args, "mutelist_file"): + # Dynamically get the Provider mutelist handler + provider_mutelist_function = f"set_{provider}_mutelist" + mutelist_file = getattr( + importlib.import_module(__name__), provider_mutelist_function + )(audit_info, args.mutelist_file) + + return mutelist_file + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + sys.exit(1) + + +def set_aws_mutelist(audit_info, mutelist_file): + # Parse content from Mute List file and get it, if necessary, from S3 + if mutelist_file: + mutelist_file = parse_mutelist_file(audit_info, mutelist_file) + else: + mutelist_file = None + return mutelist_file diff --git a/prowler/providers/common/outputs.py b/prowler/providers/common/outputs.py index 58567df1e8..3716df30cc 100644 --- a/prowler/providers/common/outputs.py +++ b/prowler/providers/common/outputs.py @@ -9,7 +9,7 @@ from prowler.lib.logger import logger def set_provider_output_options( - provider: str, arguments, audit_info, allowlist_file, bulk_checks_metadata + provider: str, arguments, audit_info, mutelist_file, bulk_checks_metadata ): """ set_provider_output_options configures automatically the outputs based on the selected provider and returns the Provider_Output_Options object. 
@@ -19,7 +19,7 @@ def set_provider_output_options( provider_output_class = f"{provider.capitalize()}_Output_Options" provider_output_options = getattr( importlib.import_module(__name__), provider_output_class - )(arguments, audit_info, allowlist_file, bulk_checks_metadata) + )(arguments, audit_info, mutelist_file, bulk_checks_metadata) except Exception as error: logger.critical( f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" @@ -49,20 +49,20 @@ class Provider_Output_Options: is_quiet: bool output_modes: list output_directory: str - allowlist_file: str + mutelist_file: str bulk_checks_metadata: dict verbose: str output_filename: str only_logs: bool unix_timestamp: bool - def __init__(self, arguments, allowlist_file, bulk_checks_metadata): + def __init__(self, arguments, mutelist_file, bulk_checks_metadata): self.is_quiet = arguments.quiet self.output_modes = arguments.output_modes self.output_directory = arguments.output_directory self.verbose = arguments.verbose self.bulk_checks_metadata = bulk_checks_metadata - self.allowlist_file = allowlist_file + self.mutelist_file = mutelist_file self.only_logs = arguments.only_logs self.unix_timestamp = arguments.unix_timestamp # Check output directory, if it is not created -> create it @@ -73,9 +73,9 @@ class Provider_Output_Options: class Azure_Output_Options(Provider_Output_Options): - def __init__(self, arguments, audit_info, allowlist_file, bulk_checks_metadata): + def __init__(self, arguments, audit_info, mutelist_file, bulk_checks_metadata): # First call Provider_Output_Options init - super().__init__(arguments, allowlist_file, bulk_checks_metadata) + super().__init__(arguments, mutelist_file, bulk_checks_metadata) # Check if custom output filename was input, if not, set the default if ( @@ -94,9 +94,9 @@ class Azure_Output_Options(Provider_Output_Options): class Gcp_Output_Options(Provider_Output_Options): - def __init__(self, arguments, audit_info, allowlist_file, bulk_checks_metadata): + def 
__init__(self, arguments, audit_info, mutelist_file, bulk_checks_metadata): # First call Provider_Output_Options init - super().__init__(arguments, allowlist_file, bulk_checks_metadata) + super().__init__(arguments, mutelist_file, bulk_checks_metadata) # Check if custom output filename was input, if not, set the default if ( @@ -111,9 +111,9 @@ class Gcp_Output_Options(Provider_Output_Options): class Aws_Output_Options(Provider_Output_Options): security_hub_enabled: bool - def __init__(self, arguments, audit_info, allowlist_file, bulk_checks_metadata): + def __init__(self, arguments, audit_info, mutelist_file, bulk_checks_metadata): # First call Provider_Output_Options init - super().__init__(arguments, allowlist_file, bulk_checks_metadata) + super().__init__(arguments, mutelist_file, bulk_checks_metadata) # Confire Shodan API if arguments.shodan: diff --git a/tests/lib/cli/parser_test.py b/tests/lib/cli/parser_test.py index a307c541f6..b0e375c702 100644 --- a/tests/lib/cli/parser_test.py +++ b/tests/lib/cli/parser_test.py @@ -75,7 +75,7 @@ class Test_Parser: assert not parsed.output_bucket assert not parsed.output_bucket_no_assume assert not parsed.shodan - assert not parsed.allowlist_file + assert not parsed.mutelist_file assert not parsed.resource_tags assert not parsed.ignore_unused_services @@ -936,19 +936,19 @@ class Test_Parser: parsed = self.parser.parse(command) assert parsed.shodan == shodan_api_key - def test_aws_parser_allowlist_short(self): + def test_aws_parser_mutelist_short(self): argument = "-w" - allowlist_file = "allowlist.txt" - command = [prowler_command, argument, allowlist_file] + mutelist_file = "mutelist.txt" + command = [prowler_command, argument, mutelist_file] parsed = self.parser.parse(command) - assert parsed.allowlist_file == allowlist_file + assert parsed.mutelist_file == mutelist_file - def test_aws_parser_allowlist_long(self): - argument = "--allowlist-file" - allowlist_file = "allowlist.txt" - command = [prowler_command, argument, 
allowlist_file] + def test_aws_parser_mutelist_long(self): + argument = "--mutelist-file" + mutelist_file = "mutelist.txt" + command = [prowler_command, argument, mutelist_file] parsed = self.parser.parse(command) - assert parsed.allowlist_file == allowlist_file + assert parsed.mutelist_file == mutelist_file def test_aws_parser_resource_tags(self): argument = "--resource-tags" diff --git a/tests/lib/outputs/outputs_test.py b/tests/lib/outputs/outputs_test.py index 70b07da0da..97f9aca566 100644 --- a/tests/lib/outputs/outputs_test.py +++ b/tests/lib/outputs/outputs_test.py @@ -165,7 +165,7 @@ class Test_Outputs: remove(expected[index][output_mode].name) def test_set_report_color(self): - test_status = ["PASS", "FAIL", "ERROR", "WARNING"] + test_status = ["PASS", "FAIL", "ERROR", "MUTED"] test_colors = [Fore.GREEN, Fore.RED, Fore.BLACK, orange_color] for status in test_status: @@ -177,7 +177,7 @@ class Test_Outputs: with pytest.raises(Exception) as exc: set_report_color(test_status) - assert "Invalid Report Status. Must be PASS, FAIL, ERROR or WARNING" in str( + assert "Invalid Report Status. 
Must be PASS, FAIL, ERROR or MUTED" in str( exc.value ) assert exc.type == Exception @@ -1347,7 +1347,7 @@ class Test_Outputs: def test_generate_json_asff_status(self): assert generate_json_asff_status("PASS") == "PASSED" assert generate_json_asff_status("FAIL") == "FAILED" - assert generate_json_asff_status("WARNING") == "WARNING" + assert generate_json_asff_status("MUTED") == "MUTED" assert generate_json_asff_status("SOMETHING ELSE") == "NOT_AVAILABLE" def test_generate_json_asff_resource_tags(self): @@ -1364,13 +1364,13 @@ class Test_Outputs: def test_generate_json_ocsf_status(self): assert generate_json_ocsf_status("PASS") == "Success" assert generate_json_ocsf_status("FAIL") == "Failure" - assert generate_json_ocsf_status("WARNING") == "Other" + assert generate_json_ocsf_status("MUTED") == "Other" assert generate_json_ocsf_status("SOMETHING ELSE") == "Unknown" def test_generate_json_ocsf_status_id(self): assert generate_json_ocsf_status_id("PASS") == 1 assert generate_json_ocsf_status_id("FAIL") == 2 - assert generate_json_ocsf_status_id("WARNING") == 99 + assert generate_json_ocsf_status_id("MUTED") == 99 assert generate_json_ocsf_status_id("SOMETHING ELSE") == 0 def test_generate_json_ocsf_severity_id(self): diff --git a/tests/providers/aws/lib/allowlist/fixtures/allowlist.yaml b/tests/providers/aws/lib/mutelist/fixtures/mutelist.yaml similarity index 78% rename from tests/providers/aws/lib/allowlist/fixtures/allowlist.yaml rename to tests/providers/aws/lib/mutelist/fixtures/mutelist.yaml index 02d9667d7c..70fcb29aaa 100644 --- a/tests/providers/aws/lib/allowlist/fixtures/allowlist.yaml +++ b/tests/providers/aws/lib/mutelist/fixtures/mutelist.yaml @@ -1,7 +1,10 @@ -### Account, Check and/or Region can be * to apply for all the cases -### Resources is a list that can have either Regex or Keywords: -########################### ALLOWLIST EXAMPLE ########################### -Allowlist: +### Account, Check and/or Region can be * to apply for all the cases. 
+### Resources and tags are lists that can have either Regex or Keywords. +### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together. +### Use an alternation Regex to match one of multiple tags with "ORed" logic. +### For each check you can except Accounts, Regions, Resources and/or Tags. +########################### MUTE LIST EXAMPLE ########################### +Mute List: Accounts: "123456789012": Checks: @@ -29,7 +32,7 @@ Allowlist: - ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions # EXAMPLE: CONTROL TOWER (to migrate) -# When using Control Tower, guardrails prevent access to certain protected resources. The allowlist +# When using Control Tower, guardrails prevent access to certain protected resources. The mutelist # below ensures that warnings instead of errors are reported for the affected resources. #extra734:aws-controltower-logs-[[:digit:]]+-[[:alpha:]\-]+ #extra734:aws-controltower-s3-access-logs-[[:digit:]]+-[[:alpha:]\-]+ diff --git a/tests/providers/aws/lib/allowlist/allowlist_test.py b/tests/providers/aws/lib/mutelist/mutelist_test.py similarity index 71% rename from tests/providers/aws/lib/allowlist/allowlist_test.py rename to tests/providers/aws/lib/mutelist/mutelist_test.py index d54c19c881..b4697c44de 100644 --- a/tests/providers/aws/lib/allowlist/allowlist_test.py +++ b/tests/providers/aws/lib/mutelist/mutelist_test.py @@ -3,17 +3,17 @@ from boto3 import resource, session from mock import MagicMock from moto import mock_dynamodb, mock_s3 -from prowler.providers.aws.lib.allowlist.allowlist import ( - allowlist_findings, - is_allowlisted, - is_allowlisted_in_check, - is_allowlisted_in_region, - is_allowlisted_in_resource, - is_allowlisted_in_tags, - is_excepted, - parse_allowlist_file, -) from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info +from prowler.providers.aws.lib.mutelist.mutelist import ( + is_excepted, + is_muted, + 
is_muted_in_check, + is_muted_in_region, + is_muted_in_resource, + is_muted_in_tags, + mutelist_findings, + parse_mutelist_file, +) from prowler.providers.common.models import Audit_Metadata from tests.providers.aws.audit_info_utils import ( AWS_ACCOUNT_NUMBER, @@ -22,7 +22,7 @@ from tests.providers.aws.audit_info_utils import ( ) -class Test_Allowlist: +class Test_Mutelist: # Mocked Audit Info def set_mocked_audit_info(self): audit_info = AWS_Audit_Info( @@ -54,32 +54,32 @@ class Test_Allowlist: ) return audit_info - # Test S3 allowlist + # Test S3 mutelist @mock_s3 - def test_s3_allowlist(self): + def test_s3_mutelist(self): audit_info = self.set_mocked_audit_info() - # Create bucket and upload allowlist yaml + # Create bucket and upload mutelist yaml s3_resource = resource("s3", region_name=AWS_REGION_US_EAST_1) - s3_resource.create_bucket(Bucket="test-allowlist") - s3_resource.Object("test-allowlist", "allowlist.yaml").put( + s3_resource.create_bucket(Bucket="test-mutelist") + s3_resource.Object("test-mutelist", "mutelist.yaml").put( Body=open( - "tests/providers/aws/lib/allowlist/fixtures/allowlist.yaml", + "tests/providers/aws/lib/mutelist/fixtures/mutelist.yaml", "rb", ) ) - with open("tests/providers/aws/lib/allowlist/fixtures/allowlist.yaml") as f: - assert yaml.safe_load(f)["Allowlist"] == parse_allowlist_file( - audit_info, "s3://test-allowlist/allowlist.yaml" + with open("tests/providers/aws/lib/mutelist/fixtures/mutelist.yaml") as f: + assert yaml.safe_load(f)["Mute List"] == parse_mutelist_file( + audit_info, "s3://test-mutelist/mutelist.yaml" ) - # Test DynamoDB allowlist + # Test DynamoDB mutelist @mock_dynamodb - def test_dynamo_allowlist(self): + def test_dynamo_mutelist(self): audit_info = self.set_mocked_audit_info() # Create table and put item dynamodb_resource = resource("dynamodb", region_name=AWS_REGION_US_EAST_1) - table_name = "test-allowlist" + table_name = "test-mutelist" params = { "TableName": table_name, "KeySchema": [ @@ -107,7 
+107,7 @@ class Test_Allowlist: assert ( "keyword" - in parse_allowlist_file( + in parse_mutelist_file( audit_info, "arn:aws:dynamodb:" + AWS_REGION_US_EAST_1 @@ -119,11 +119,11 @@ class Test_Allowlist: ) @mock_dynamodb - def test_dynamo_allowlist_with_tags(self): + def test_dynamo_mutelist_with_tags(self): audit_info = self.set_mocked_audit_info() # Create table and put item dynamodb_resource = resource("dynamodb", region_name=AWS_REGION_US_EAST_1) - table_name = "test-allowlist" + table_name = "test-mutelist" params = { "TableName": table_name, "KeySchema": [ @@ -152,7 +152,7 @@ class Test_Allowlist: assert ( "environment=dev" - in parse_allowlist_file( + in parse_mutelist_file( audit_info, "arn:aws:dynamodb:" + AWS_REGION_US_EAST_1 @@ -163,11 +163,11 @@ class Test_Allowlist: )["Accounts"]["*"]["Checks"]["*"]["Tags"] ) - # Allowlist tests + # Mutelist tests - def test_allowlist_findings(self): - # Allowlist example - allowlist = { + def test_mutelist_findings(self): + # Mutelist example + mutelist = { "Accounts": { "*": { "Checks": { @@ -192,14 +192,12 @@ class Test_Allowlist: check_findings.append(finding_1) - allowlisted_findings = allowlist_findings( - allowlist, AWS_ACCOUNT_NUMBER, check_findings - ) - assert len(allowlisted_findings) == 1 - assert allowlisted_findings[0].status == "WARNING" + muted_findings = mutelist_findings(mutelist, AWS_ACCOUNT_NUMBER, check_findings) + assert len(muted_findings) == 1 + assert muted_findings[0].status == "MUTED" - def test_is_allowlisted_with_everything_excepted(self): - allowlist = { + def test_is_muted_with_everything_excepted(self): + mutelist = { "Accounts": { "*": { "Checks": { @@ -219,8 +217,8 @@ class Test_Allowlist: } } - assert not is_allowlisted( - allowlist, + assert not is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "athena_1", AWS_REGION_US_EAST_1, @@ -228,8 +226,8 @@ class Test_Allowlist: "", ) - def test_is_allowlisted_with_default_allowlist(self): - allowlist = { + def 
test_is_muted_with_default_mutelist(self): + mutelist = { "Accounts": { "*": { "Checks": { @@ -249,8 +247,8 @@ class Test_Allowlist: } } - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "athena_1", AWS_REGION_US_EAST_1, @@ -258,9 +256,9 @@ class Test_Allowlist: "", ) - def test_is_allowlisted(self): - # Allowlist example - allowlist = { + def test_is_muted(self): + # Mutelist example + mutelist = { "Accounts": { "*": { "Checks": { @@ -273,8 +271,8 @@ class Test_Allowlist: } } - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -282,8 +280,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -291,8 +289,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -300,8 +298,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -310,14 +308,14 @@ class Test_Allowlist: ) assert not ( - is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" + is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" ) ) - def test_is_allowlisted_wildcard(self): - # Allowlist example - allowlist = { + def test_is_muted_wildcard(self): + # Mutelist example + mutelist = { "Accounts": { "*": { "Checks": { @@ -330,8 +328,8 @@ class Test_Allowlist: } } - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -339,8 +337,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -348,8 +346,8 @@ class Test_Allowlist: "", ) - assert 
is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -358,14 +356,14 @@ class Test_Allowlist: ) assert not ( - is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" + is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" ) ) - def test_is_allowlisted_asterisk(self): - # Allowlist example - allowlist = { + def test_is_muted_asterisk(self): + # Mutelist example + mutelist = { "Accounts": { "*": { "Checks": { @@ -378,8 +376,8 @@ class Test_Allowlist: } } - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -387,8 +385,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -396,8 +394,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -406,14 +404,14 @@ class Test_Allowlist: ) assert not ( - is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" + is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" ) ) - def test_is_allowlisted_all_and_single_account(self): - # Allowlist example - allowlist = { + def test_is_muted_all_and_single_account(self): + # Mutelist example + mutelist = { "Accounts": { "*": { "Checks": { @@ -434,8 +432,8 @@ class Test_Allowlist: } } - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test_2", AWS_REGION_US_EAST_1, @@ -443,8 +441,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -452,8 +450,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", 
AWS_REGION_US_EAST_1, @@ -461,8 +459,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -471,13 +469,13 @@ class Test_Allowlist: ) assert not ( - is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" + is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" ) ) - def test_is_allowlisted_single_account(self): - allowlist = { + def test_is_muted_single_account(self): + mutelist = { "Accounts": { AWS_ACCOUNT_NUMBER: { "Checks": { @@ -490,8 +488,8 @@ class Test_Allowlist: } } - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -500,39 +498,39 @@ class Test_Allowlist: ) assert not ( - is_allowlisted( - allowlist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" + is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", "test", "" ) ) - def test_is_allowlisted_in_region(self): - allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1] + def test_is_muted_in_region(self): + muted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1] finding_region = AWS_REGION_US_EAST_1 - assert is_allowlisted_in_region(allowlisted_regions, finding_region) + assert is_muted_in_region(muted_regions, finding_region) - def test_is_allowlisted_in_region_wildcard(self): - allowlisted_regions = ["*"] + def test_is_muted_in_region_wildcard(self): + muted_regions = ["*"] finding_region = AWS_REGION_US_EAST_1 - assert is_allowlisted_in_region(allowlisted_regions, finding_region) + assert is_muted_in_region(muted_regions, finding_region) - def test_is_not_allowlisted_in_region(self): - allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1] + def test_is_not_muted_in_region(self): + muted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1] finding_region = "eu-west-2" - assert not 
is_allowlisted_in_region(allowlisted_regions, finding_region) + assert not is_muted_in_region(muted_regions, finding_region) - def test_is_allowlisted_in_check(self): - allowlisted_checks = { + def test_is_muted_in_check(self): + muted_checks = { "check_test": { "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -540,8 +538,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -549,8 +547,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -559,8 +557,8 @@ class Test_Allowlist: ) assert not ( - is_allowlisted_in_check( - allowlisted_checks, + is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", @@ -569,17 +567,17 @@ class Test_Allowlist: ) ) - def test_is_allowlisted_in_check_regex(self): - # Allowlist example - allowlisted_checks = { + def test_is_muted_in_check_regex(self): + # Mutelist example + muted_checks = { "s3_*": { "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "s3_bucket_public_access", AWS_REGION_US_EAST_1, @@ -587,8 +585,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "s3_bucket_no_mfa_delete", AWS_REGION_US_EAST_1, @@ -596,8 +594,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, 
"s3_bucket_policy_public_write_access", AWS_REGION_US_EAST_1, @@ -606,8 +604,8 @@ class Test_Allowlist: ) assert not ( - is_allowlisted_in_check( - allowlisted_checks, + is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "iam_user_hardware_mfa_enabled", AWS_REGION_US_EAST_1, @@ -616,16 +614,16 @@ class Test_Allowlist: ) ) - def test_is_allowlisted_lambda_generic_check(self): - allowlisted_checks = { + def test_is_muted_lambda_generic_check(self): + muted_checks = { "lambda_*": { "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "awslambda_function_invoke_api_operations_cloudtrail_logging_enabled", AWS_REGION_US_EAST_1, @@ -633,8 +631,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "awslambda_function_no_secrets_in_code", AWS_REGION_US_EAST_1, @@ -642,8 +640,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "awslambda_function_no_secrets_in_variables", AWS_REGION_US_EAST_1, @@ -651,8 +649,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "awslambda_function_not_publicly_accessible", AWS_REGION_US_EAST_1, @@ -660,8 +658,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "awslambda_function_url_cors_policy", AWS_REGION_US_EAST_1, @@ -669,8 +667,8 @@ class Test_Allowlist: "", ) - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "awslambda_function_url_public", AWS_REGION_US_EAST_1, @@ -678,8 +676,8 @@ class Test_Allowlist: "", ) - assert 
is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "awslambda_function_using_supported_runtimes", AWS_REGION_US_EAST_1, @@ -687,16 +685,16 @@ class Test_Allowlist: "", ) - def test_is_allowlisted_lambda_concrete_check(self): - allowlisted_checks = { + def test_is_muted_lambda_concrete_check(self): + muted_checks = { "lambda_function_no_secrets_in_variables": { "Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1], "Resources": ["*"], } } - assert is_allowlisted_in_check( - allowlisted_checks, + assert is_muted_in_check( + muted_checks, AWS_ACCOUNT_NUMBER, "awslambda_function_no_secrets_in_variables", AWS_REGION_US_EAST_1, @@ -704,9 +702,9 @@ class Test_Allowlist: "", ) - def test_is_allowlisted_tags(self): - # Allowlist example - allowlist = { + def test_is_muted_tags(self): + # Mutelist example + mutelist = { "Accounts": { "*": { "Checks": { @@ -720,8 +718,8 @@ class Test_Allowlist: } } - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -729,8 +727,8 @@ class Test_Allowlist: "environment=dev", ) - assert is_allowlisted( - allowlist, + assert is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION_US_EAST_1, @@ -739,8 +737,8 @@ class Test_Allowlist: ) assert not ( - is_allowlisted( - allowlist, + is_muted( + mutelist, AWS_ACCOUNT_NUMBER, "check_test", "us-east-2", @@ -749,49 +747,49 @@ class Test_Allowlist: ) ) - def test_is_allowlisted_in_tags(self): - allowlist_tags = ["environment=dev", "project=prowler"] + def test_is_muted_in_tags(self): + mutelist_tags = ["environment=dev", "project=prowler"] - assert is_allowlisted_in_tags(allowlist_tags, "environment=dev") + assert is_muted_in_tags(mutelist_tags, "environment=dev") - assert is_allowlisted_in_tags( - allowlist_tags, + assert is_muted_in_tags( + mutelist_tags, "environment=dev | project=prowler", ) assert not ( - is_allowlisted_in_tags( - allowlist_tags, + 
is_muted_in_tags( + mutelist_tags, "environment=pro", ) ) - def test_is_allowlisted_in_tags_regex(self): - allowlist_tags = ["environment=(dev|test)", ".*=prowler"] + def test_is_muted_in_tags_regex(self): + mutelist_tags = ["environment=(dev|test)", ".*=prowler"] - assert is_allowlisted_in_tags( - allowlist_tags, + assert is_muted_in_tags( + mutelist_tags, "environment=test | proj=prowler", ) - assert is_allowlisted_in_tags( - allowlist_tags, + assert is_muted_in_tags( + mutelist_tags, "env=prod | project=prowler", ) - assert not is_allowlisted_in_tags( - allowlist_tags, + assert not is_muted_in_tags( + mutelist_tags, "environment=prod | project=myproj", ) - def test_is_allowlisted_in_tags_with_no_tags_in_finding(self): - allowlist_tags = ["environment=(dev|test)", ".*=prowler"] + def test_is_muted_in_tags_with_no_tags_in_finding(self): + mutelist_tags = ["environment=(dev|test)", ".*=prowler"] finding_tags = "" - assert not is_allowlisted_in_tags(allowlist_tags, finding_tags) + assert not is_muted_in_tags(mutelist_tags, finding_tags) def test_is_excepted(self): - # Allowlist example + # Mutelist example exceptions = { "Accounts": [AWS_ACCOUNT_NUMBER], "Regions": ["eu-central-1", "eu-south-3"], @@ -869,10 +867,10 @@ class Test_Allowlist: "environment=pro", ) - def test_is_allowlisted_in_resource(self): - allowlist_resources = ["prowler", "^test", "prowler-pro"] + def test_is_muted_in_resource(self): + mutelist_resources = ["prowler", "^test", "prowler-pro"] - assert is_allowlisted_in_resource(allowlist_resources, "prowler") - assert is_allowlisted_in_resource(allowlist_resources, "prowler-test") - assert is_allowlisted_in_resource(allowlist_resources, "test-prowler") - assert not is_allowlisted_in_resource(allowlist_resources, "random") + assert is_muted_in_resource(mutelist_resources, "prowler") + assert is_muted_in_resource(mutelist_resources, "prowler-test") + assert is_muted_in_resource(mutelist_resources, "test-prowler") + assert not 
is_muted_in_resource(mutelist_resources, "random") diff --git a/tests/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled_test.py b/tests/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled_test.py index 3a0178e953..eb663d8ba7 100644 --- a/tests/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled_test.py +++ b/tests/providers/aws/services/accessanalyzer/accessanalyzer_enabled/accessanalyzer_enabled_test.py @@ -66,11 +66,11 @@ class Test_accessanalyzer_enabled: assert result[0].region == AWS_REGION_1 assert result[0].resource_tags == [] - def test_one_analyzer_not_available_allowlisted(self): + def test_one_analyzer_not_available_muted(self): # Include analyzers to check accessanalyzer_client = mock.MagicMock accessanalyzer_client.region = AWS_REGION_2 - accessanalyzer_client.audit_config = {"allowlist_non_default_regions": True} + accessanalyzer_client.audit_config = {"mute_non_default_regions": True} accessanalyzer_client.analyzers = [ Analyzer( arn=AWS_ACCOUNT_ARN, @@ -93,7 +93,7 @@ class Test_accessanalyzer_enabled: result = check.execute() assert len(result) == 1 - assert result[0].status == "WARNING" + assert result[0].status == "MUTED" assert ( result[0].status_extended == f"IAM Access Analyzer in account {AWS_ACCOUNT_NUMBER} is not enabled." 
diff --git a/tests/providers/aws/services/config/config_recorder_all_regions_enabled/config_recorder_all_regions_enabled_test.py b/tests/providers/aws/services/config/config_recorder_all_regions_enabled/config_recorder_all_regions_enabled_test.py index 653f42441e..fdd41bf0f9 100644 --- a/tests/providers/aws/services/config/config_recorder_all_regions_enabled/config_recorder_all_regions_enabled_test.py +++ b/tests/providers/aws/services/config/config_recorder_all_regions_enabled/config_recorder_all_regions_enabled_test.py @@ -162,7 +162,7 @@ class Test_config_recorder_all_regions_enabled: assert recorder.region == AWS_REGION @mock_config - def test_config_one_recorder_disabled_allowlisted(self): + def test_config_one_recorder_disabled_muted(self): # Create Config Mocked Resources config_client = client("config", region_name=AWS_REGION) # Create Config Recorder @@ -174,7 +174,7 @@ class Test_config_recorder_all_regions_enabled: current_audit_info = self.set_mocked_audit_info() current_audit_info.profile_region = "eu-south-2" current_audit_info.audited_regions = ["eu-south-2", AWS_REGION] - current_audit_info.audit_config = {"allowlist_non_default_regions": True} + current_audit_info.audit_config = {"mute_non_default_regions": True} with mock.patch( "prowler.providers.aws.lib.audit_info.audit_info.current_audit_info", @@ -194,7 +194,7 @@ class Test_config_recorder_all_regions_enabled: # Search for the recorder just created for recorder in result: if recorder.region == AWS_REGION: - assert recorder.status == "WARNING" + assert recorder.status == "MUTED" assert ( recorder.status_extended == f"AWS Config recorder {AWS_ACCOUNT_NUMBER} is disabled." 
diff --git a/tests/providers/aws/services/drs/drs_job_exist/drs_job_exist_test.py b/tests/providers/aws/services/drs/drs_job_exist/drs_job_exist_test.py index 4fad6112cc..1df944baae 100644 --- a/tests/providers/aws/services/drs/drs_job_exist/drs_job_exist_test.py +++ b/tests/providers/aws/services/drs/drs_job_exist/drs_job_exist_test.py @@ -120,9 +120,9 @@ class Test_drs_job_exist: assert result[0].region == AWS_REGION assert result[0].resource_tags == [] - def test_drs_disabled_allowlisted(self): + def test_drs_disabled_muted(self): drs_client = mock.MagicMock - drs_client.audit_config = {"allowlist_non_default_regions": True} + drs_client.audit_config = {"mute_non_default_regions": True} drs_client.audited_account = AWS_ACCOUNT_NUMBER drs_client.audited_account_arn = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root" drs_client.region = "eu-west-2" @@ -147,7 +147,7 @@ class Test_drs_job_exist: result = check.execute() assert len(result) == 1 - assert result[0].status == "WARNING" + assert result[0].status == "MUTED" assert result[0].status_extended == "DRS is not enabled for this region." 
assert result[0].resource_id == AWS_ACCOUNT_NUMBER assert result[0].resource_arn == f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root" diff --git a/tests/providers/aws/services/guardduty/guardduty_is_enabled/guardduty_is_enabled_test.py b/tests/providers/aws/services/guardduty/guardduty_is_enabled/guardduty_is_enabled_test.py index 1718947bf8..7696804669 100644 --- a/tests/providers/aws/services/guardduty/guardduty_is_enabled/guardduty_is_enabled_test.py +++ b/tests/providers/aws/services/guardduty/guardduty_is_enabled/guardduty_is_enabled_test.py @@ -136,9 +136,9 @@ class Test_: assert result[0].resource_arn == DETECTOR_ARN assert result[0].region == AWS_REGION - def test_guardduty_not_configured_allowlisted(self): + def test_guardduty_not_configured_muted(self): guardduty_client = mock.MagicMock - guardduty_client.audit_config = {"allowlist_non_default_regions": True} + guardduty_client.audit_config = {"mute_non_default_regions": True} guardduty_client.region = "eu-south-2" guardduty_client.detectors = [] guardduty_client.detectors.append( @@ -159,7 +159,7 @@ class Test_: check = guardduty_is_enabled() result = check.execute() assert len(result) == 1 - assert result[0].status == "WARNING" + assert result[0].status == "MUTED" assert ( result[0].status_extended == f"GuardDuty detector {DETECTOR_ID} not configured." 
diff --git a/tests/providers/aws/services/securityhub/securityhub_enabled/securityhub_enabled_test.py b/tests/providers/aws/services/securityhub/securityhub_enabled/securityhub_enabled_test.py index 977ab1a48f..b1566e8dee 100644 --- a/tests/providers/aws/services/securityhub/securityhub_enabled/securityhub_enabled_test.py +++ b/tests/providers/aws/services/securityhub/securityhub_enabled/securityhub_enabled_test.py @@ -150,9 +150,9 @@ class Test_securityhub_enabled: ) assert result[0].region == AWS_REGION - def test_securityhub_hub_active_without_integrations_or_standards_allowlisted(self): + def test_securityhub_hub_active_without_integrations_or_standards_muted(self): securityhub_client = mock.MagicMock - securityhub_client.audit_config = {"allowlist_non_default_regions": True} + securityhub_client.audit_config = {"mute_non_default_regions": True} securityhub_client.region = AWS_REGION securityhub_client.securityhubs = [ SecurityHubHub( @@ -176,7 +176,7 @@ class Test_securityhub_enabled: check = securityhub_enabled() result = check.execute() - assert result[0].status == "WARNING" + assert result[0].status == "MUTED" assert ( result[0].status_extended == "Security Hub is enabled but without any standard or integration." 
diff --git a/tests/providers/common/common_outputs_test.py b/tests/providers/common/common_outputs_test.py index 3e24091b65..5acbddc1f8 100644 --- a/tests/providers/common/common_outputs_test.py +++ b/tests/providers/common/common_outputs_test.py @@ -98,17 +98,17 @@ class Test_Common_Output_Options: arguments.unix_timestamp = False audit_info = self.set_mocked_aws_audit_info() - allowlist_file = "" + mutelist_file = "" bulk_checks_metadata = {} output_options = set_provider_output_options( - provider, arguments, audit_info, allowlist_file, bulk_checks_metadata + provider, arguments, audit_info, mutelist_file, bulk_checks_metadata ) assert isinstance(output_options, Aws_Output_Options) assert output_options.security_hub_enabled assert output_options.is_quiet assert output_options.output_modes == ["html", "csv", "json", "json-asff"] assert output_options.output_directory == arguments.output_directory - assert output_options.allowlist_file == "" + assert output_options.mutelist_file == "" assert output_options.bulk_checks_metadata == {} assert output_options.verbose assert output_options.output_filename == arguments.output_filename @@ -130,16 +130,16 @@ class Test_Common_Output_Options: arguments.unix_timestamp = False audit_info = self.set_mocked_gcp_audit_info() - allowlist_file = "" + mutelist_file = "" bulk_checks_metadata = {} output_options = set_provider_output_options( - provider, arguments, audit_info, allowlist_file, bulk_checks_metadata + provider, arguments, audit_info, mutelist_file, bulk_checks_metadata ) assert isinstance(output_options, Gcp_Output_Options) assert output_options.is_quiet assert output_options.output_modes == ["html", "csv", "json"] assert output_options.output_directory == arguments.output_directory - assert output_options.allowlist_file == "" + assert output_options.mutelist_file == "" assert output_options.bulk_checks_metadata == {} assert output_options.verbose assert output_options.output_filename == arguments.output_filename @@ 
-164,17 +164,17 @@ class Test_Common_Output_Options: # Mock AWS Audit Info audit_info = self.set_mocked_aws_audit_info() - allowlist_file = "" + mutelist_file = "" bulk_checks_metadata = {} output_options = set_provider_output_options( - provider, arguments, audit_info, allowlist_file, bulk_checks_metadata + provider, arguments, audit_info, mutelist_file, bulk_checks_metadata ) assert isinstance(output_options, Aws_Output_Options) assert output_options.security_hub_enabled assert output_options.is_quiet assert output_options.output_modes == ["html", "csv", "json", "json-asff"] assert output_options.output_directory == arguments.output_directory - assert output_options.allowlist_file == "" + assert output_options.mutelist_file == "" assert output_options.bulk_checks_metadata == {} assert output_options.verbose assert ( @@ -201,10 +201,10 @@ class Test_Common_Output_Options: audit_info = self.set_mocked_azure_audit_info() audit_info.identity.domain = "test-domain" - allowlist_file = "" + mutelist_file = "" bulk_checks_metadata = {} output_options = set_provider_output_options( - provider, arguments, audit_info, allowlist_file, bulk_checks_metadata + provider, arguments, audit_info, mutelist_file, bulk_checks_metadata ) assert isinstance(output_options, Azure_Output_Options) assert output_options.is_quiet @@ -214,7 +214,7 @@ class Test_Common_Output_Options: "json", ] assert output_options.output_directory == arguments.output_directory - assert output_options.allowlist_file == "" + assert output_options.mutelist_file == "" assert output_options.bulk_checks_metadata == {} assert output_options.verbose assert ( @@ -242,10 +242,10 @@ class Test_Common_Output_Options: tenants = ["tenant-1", "tenant-2"] audit_info.identity.tenant_ids = tenants - allowlist_file = "" + mutelist_file = "" bulk_checks_metadata = {} output_options = set_provider_output_options( - provider, arguments, audit_info, allowlist_file, bulk_checks_metadata + provider, arguments, audit_info, 
mutelist_file, bulk_checks_metadata ) assert isinstance(output_options, Azure_Output_Options) assert output_options.is_quiet @@ -255,7 +255,7 @@ class Test_Common_Output_Options: "json", ] assert output_options.output_directory == arguments.output_directory - assert output_options.allowlist_file == "" + assert output_options.mutelist_file == "" assert output_options.bulk_checks_metadata == {} assert output_options.verbose assert ( From 6ee016e577e1d1f117b672f25b0ab4fb3ad3916e Mon Sep 17 00:00:00 2001 From: Sergio Garcia <38561120+sergargar@users.noreply.github.com> Date: Tue, 12 Dec 2023 17:13:50 +0100 Subject: [PATCH 03/10] chore(sts-endpoint): deprecate `--sts-endpoint-region` (#3046) Co-authored-by: Pepe Fagoaga --- docs/tutorials/aws/authentication.md | 4 ---- docs/tutorials/aws/role-assumption.md | 8 -------- prowler/providers/aws/lib/arguments/arguments.py | 6 ------ prowler/providers/aws/lib/credentials/credentials.py | 6 +----- tests/lib/cli/parser_test.py | 7 ------- 5 files changed, 1 insertion(+), 30 deletions(-) diff --git a/docs/tutorials/aws/authentication.md b/docs/tutorials/aws/authentication.md index 7a01e25909..714872b8c4 100644 --- a/docs/tutorials/aws/authentication.md +++ b/docs/tutorials/aws/authentication.md @@ -37,7 +37,3 @@ If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to - ARN of your MFA device - TOTP (Time-Based One-Time Password) - -## STS Endpoint Region - -If you are using Prowler in AWS regions that are not enabled by default you need to use the argument `--sts-endpoint-region` to point the AWS STS API calls `assume-role` and `get-caller-identity` to the non-default region, e.g.: `prowler aws --sts-endpoint-region eu-south-2`. 
diff --git a/docs/tutorials/aws/role-assumption.md b/docs/tutorials/aws/role-assumption.md index 977868c1f6..20ab7ad653 100644 --- a/docs/tutorials/aws/role-assumption.md +++ b/docs/tutorials/aws/role-assumption.md @@ -23,14 +23,6 @@ prowler aws -R arn:aws:iam:::role/ prowler aws -T/--session-duration -I/--external-id -R arn:aws:iam:::role/ ``` -## STS Endpoint Region - -If you are using Prowler in AWS regions that are not enabled by default you need to use the argument `--sts-endpoint-region` to point the AWS STS API calls `assume-role` and `get-caller-identity` to the non-default region, e.g.: `prowler aws --sts-endpoint-region eu-south-2`. - -> Since v3.11.0, Prowler uses a regional token in STS sessions so it can scan all AWS regions without needing the `--sts-endpoint-region` argument. - -> Make sure that you have enabled the AWS Region you want to scan in BOTH AWS Accounts (assumed role account and account from which you assume the role). - ## Role MFA If your IAM Role has MFA configured you can use `--mfa` along with `-R`/`--role ` and Prowler will ask you to input the following values to get a new temporary session for the IAM Role provided: diff --git a/prowler/providers/aws/lib/arguments/arguments.py b/prowler/providers/aws/lib/arguments/arguments.py index 3abd739d5d..67477aca71 100644 --- a/prowler/providers/aws/lib/arguments/arguments.py +++ b/prowler/providers/aws/lib/arguments/arguments.py @@ -26,12 +26,6 @@ def init_parser(self): help="ARN of the role to be assumed", # Pending ARN validation ) - aws_auth_subparser.add_argument( - "--sts-endpoint-region", - nargs="?", - default=None, - help="Specify the AWS STS endpoint region to use. 
Read more at https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp_enable-regions.html", - ) aws_auth_subparser.add_argument( "--mfa", action="store_true", diff --git a/prowler/providers/aws/lib/credentials/credentials.py b/prowler/providers/aws/lib/credentials/credentials.py index 8d7984d855..f0865d8f5d 100644 --- a/prowler/providers/aws/lib/credentials/credentials.py +++ b/prowler/providers/aws/lib/credentials/credentials.py @@ -12,12 +12,8 @@ def validate_aws_credentials( session: session, input_regions: list, sts_endpoint_region: str = None ) -> dict: try: - # For a valid STS GetCallerIdentity we have to use the right AWS Region - # Check if the --sts-endpoint-region is set - if sts_endpoint_region is not None: - aws_region = sts_endpoint_region # If there is no region passed with -f/--region/--filter-region - elif input_regions is None or len(input_regions) == 0: + if input_regions is None or len(input_regions) == 0: # If you have a region configured in your AWS config or credentials file if session.region_name is not None: aws_region = session.region_name diff --git a/tests/lib/cli/parser_test.py b/tests/lib/cli/parser_test.py index b0e375c702..a660f7d13d 100644 --- a/tests/lib/cli/parser_test.py +++ b/tests/lib/cli/parser_test.py @@ -998,13 +998,6 @@ class Test_Parser: parsed = self.parser.parse(command) assert parsed.config_file == config_file - def test_aws_parser_sts_endpoint_region(self): - argument = "--sts-endpoint-region" - sts_endpoint_region = "eu-west-1" - command = [prowler_command, argument, sts_endpoint_region] - parsed = self.parser.parse(command) - assert parsed.sts_endpoint_region == sts_endpoint_region - def test_parser_azure_auth_sp(self): argument = "--sp-env-auth" command = [prowler_command, "azure", argument] From 1fe74937c1f5c786c4c2d191aa9116561a922a64 Mon Sep 17 00:00:00 2001 From: Nacho Rivera Date: Tue, 12 Dec 2023 18:05:17 +0100 Subject: [PATCH 04/10] feat(CloudProvider): introduce global provider Azure&GCP (#3069) 
--- prowler/__main__.py | 2 + prowler/providers/azure/azure_provider.py | 4 +- prowler/providers/azure/azure_provider_new.py | 270 ++++++++++++++++++ .../azure/lib/audit_info/audit_info.py | 8 +- .../providers/azure/lib/audit_info/models.py | 8 +- .../providers/azure/lib/service/service.py | 16 +- .../services/defender/defender_client.py | 4 +- .../azure/services/iam/iam_client.py | 4 +- .../services/sqlserver/sqlserver_client.py | 4 +- .../azure/services/storage/storage_client.py | 4 +- prowler/providers/common/audit_info.py | 18 +- prowler/providers/common/common.py | 25 ++ prowler/providers/common/provider.py | 14 + prowler/providers/gcp/gcp_provider_new.py | 119 ++++++++ prowler/providers/gcp/lib/service/service.py | 14 +- .../gcp/services/apikeys/apikeys_client.py | 4 +- .../gcp/services/bigquery/bigquery_client.py | 4 +- .../cloudresourcemanager_client.py | 4 +- .../gcp/services/cloudsql/cloudsql_client.py | 4 +- .../cloudstorage/cloudstorage_client.py | 4 +- .../gcp/services/compute/compute_client.py | 4 +- .../gcp/services/dataproc/dataproc_client.py | 4 +- .../providers/gcp/services/dns/dns_client.py | 4 +- .../gcp/services/iam/accessapproval_client.py | 4 +- .../services/iam/essentialcontacts_client.py | 4 +- .../providers/gcp/services/iam/iam_client.py | 4 +- .../providers/gcp/services/kms/kms_client.py | 4 +- .../gcp/services/logging/logging_client.py | 4 +- .../services/monitoring/monitoring_client.py | 4 +- .../serviceusage/serviceusage_client.py | 4 +- tests/lib/outputs/slack_test.py | 8 +- tests/providers/common/audit_info_test.py | 10 +- tests/providers/common/common_outputs_test.py | 8 +- 33 files changed, 516 insertions(+), 84 deletions(-) create mode 100644 prowler/providers/azure/azure_provider_new.py create mode 100644 prowler/providers/common/provider.py create mode 100644 prowler/providers/gcp/gcp_provider_new.py diff --git a/prowler/__main__.py b/prowler/__main__.py index b50ddd5fcb..88288d9c43 100644 --- a/prowler/__main__.py +++ 
b/prowler/__main__.py @@ -51,6 +51,7 @@ from prowler.providers.common.audit_info import ( set_provider_execution_parameters, ) from prowler.providers.common.clean import clean_provider_local_output_directories +from prowler.providers.common.common import set_global_provider_object from prowler.providers.common.mutelist import set_provider_mutelist from prowler.providers.common.outputs import set_provider_output_options from prowler.providers.common.quick_inventory import run_provider_quick_inventory @@ -149,6 +150,7 @@ def prowler(): # Set the audit info based on the selected provider audit_info = set_provider_audit_info(provider, args.__dict__) + set_global_provider_object(args) # Import custom checks from folder if checks_folder: diff --git a/prowler/providers/azure/azure_provider.py b/prowler/providers/azure/azure_provider.py index 52f5459a50..df698625a2 100644 --- a/prowler/providers/azure/azure_provider.py +++ b/prowler/providers/azure/azure_provider.py @@ -6,7 +6,7 @@ from azure.mgmt.subscription import SubscriptionClient from msgraph.core import GraphClient from prowler.lib.logger import logger -from prowler.providers.azure.lib.audit_info.models import Azure_Identity_Info +from prowler.providers.azure.lib.audit_info.models import AzureIdentityInfo from prowler.providers.azure.lib.regions.regions import get_regions_config @@ -99,7 +99,7 @@ class Azure_Provider: managed_entity_auth, subscription_ids, ): - identity = Azure_Identity_Info() + identity = AzureIdentityInfo() # If credentials comes from service principal or browser, if the required permissions are assigned # the identity can access AAD and retrieve the tenant domain name. 
diff --git a/prowler/providers/azure/azure_provider_new.py b/prowler/providers/azure/azure_provider_new.py new file mode 100644 index 0000000000..0b45fe7567 --- /dev/null +++ b/prowler/providers/azure/azure_provider_new.py @@ -0,0 +1,270 @@ +import sys +from os import getenv +from typing import Any, Optional + +from azure.identity import DefaultAzureCredential, InteractiveBrowserCredential +from azure.mgmt.subscription import SubscriptionClient +from colorama import Fore, Style +from msgraph.core import GraphClient +from pydantic import BaseModel + +from prowler.lib.logger import logger +from prowler.providers.azure.lib.regions.regions import get_regions_config +from prowler.providers.common.provider import CloudProvider + + +class AzureIdentityInfo(BaseModel): + identity_id: str = "" + identity_type: str = "" + tenant_ids: list[str] = [] + domain: str = "Unknown tenant domain (missing AAD permissions)" + subscriptions: dict = {} + + +class AzureRegionConfig(BaseModel): + name: str = "" + authority: str = None + base_url: str = "" + credential_scopes: list = [] + + +class AzureProvider(CloudProvider): + session: DefaultAzureCredential + identity: AzureIdentityInfo + audit_resources: Optional[Any] + audit_metadata: Optional[Any] + audit_config: dict + region_config: AzureRegionConfig + + def __init__(self, arguments): + logger.info("Setting Azure session ...") + subscription_ids = arguments.subscription_ids + + logger.info("Checking if any credentials mode is set ...") + az_cli_auth = arguments.az_cli_auth + sp_env_auth = arguments.sp_env_auth + browser_auth = arguments.browser_auth + managed_entity_auth = arguments.managed_identity_auth + tenant_id = arguments.tenant_id + + logger.info("Checking if region is different than default one") + region = arguments.azure_region + self.validate_arguments( + az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id + ) + self.region_config = self.setup_region_config(region) + self.session = self.setup_session( + 
az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id + ) + self.identity = self.setup_identity( + az_cli_auth, + sp_env_auth, + browser_auth, + managed_entity_auth, + subscription_ids, + ) + if not arguments.only_logs: + self.print_credentials() + + def validate_arguments( + self, az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id + ): + if ( + not az_cli_auth + and not sp_env_auth + and not browser_auth + and not managed_entity_auth + ): + raise Exception( + "Azure provider requires at least one authentication method set: [--az-cli-auth | --sp-env-auth | --browser-auth | --managed-identity-auth]" + ) + if (not browser_auth and tenant_id) or (browser_auth and not tenant_id): + raise Exception( + "Azure Tenant ID (--tenant-id) is required only for browser authentication mode" + ) + + def setup_region_config(self, region): + config = get_regions_config(region) + return AzureRegionConfig( + name=region, + authority=config["authority"], + base_url=config["base_url"], + credential_scopes=config["credential_scopes"], + ) + + def print_credentials(self): + printed_subscriptions = [] + for key, value in self.identity.subscriptions.items(): + intermediate = key + " : " + value + printed_subscriptions.append(intermediate) + report = f""" +This report is being generated using the identity below: + +Azure Tenant IDs: {Fore.YELLOW}[{" ".join(self.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{self.identity.domain}]{Style.RESET_ALL} Azure Region: {Fore.YELLOW}[{self.region_config.name}]{Style.RESET_ALL} +Azure Subscriptions: {Fore.YELLOW}{printed_subscriptions}{Style.RESET_ALL} +Azure Identity Type: {Fore.YELLOW}[{self.identity.identity_type}]{Style.RESET_ALL} Azure Identity ID: {Fore.YELLOW}[{self.identity.identity_id}]{Style.RESET_ALL} +""" + print(report) + + def setup_session( + self, az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id + ): + # Browser auth creds cannot be set with 
DefaultAzureCredentials() + if not browser_auth: + if sp_env_auth: + self.__check_service_principal_creds_env_vars__() + try: + # Since the input vars come as True when it is wanted to be used, we need to inverse it since + # DefaultAzureCredential sets the auth method excluding the others + credentials = DefaultAzureCredential( + exclude_environment_credential=not sp_env_auth, + exclude_cli_credential=not az_cli_auth, + exclude_managed_identity_credential=not managed_entity_auth, + # Azure Auth using Visual Studio is not supported + exclude_visual_studio_code_credential=True, + # Azure Auth using Shared Token Cache is not supported + exclude_shared_token_cache_credential=True, + # Azure Auth using PowerShell is not supported + exclude_powershell_credential=True, + # set Authority of a Microsoft Entra endpoint + authority=self.region_config.authority, + ) + except Exception as error: + logger.critical("Failed to retrieve azure credentials") + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}" + ) + sys.exit(1) + else: + try: + credentials = InteractiveBrowserCredential(tenant_id=tenant_id) + except Exception as error: + logger.critical("Failed to retrieve azure credentials") + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}" + ) + sys.exit(1) + + return credentials + + def __check_service_principal_creds_env_vars__(self): + logger.info( + "Azure provider: checking service principal environment variables ..." 
+ ) + for env_var in ["AZURE_CLIENT_ID", "AZURE_TENANT_ID", "AZURE_CLIENT_SECRET"]: + if not getenv(env_var): + logger.critical( + f"Azure provider: Missing environment variable {env_var} needed to autenticate against Azure" + ) + sys.exit(1) + + def setup_identity( + self, + az_cli_auth, + sp_env_auth, + browser_auth, + managed_entity_auth, + subscription_ids, + ): + credentials = self.session + identity = AzureIdentityInfo() + + # If credentials comes from service principal or browser, if the required permissions are assigned + # the identity can access AAD and retrieve the tenant domain name. + # With cli also should be possible but right now it does not work, azure python package issue is coming + # At the time of writting this with az cli creds is not working, despite that is included + if sp_env_auth or browser_auth or az_cli_auth: + # Trying to recover tenant domain info + try: + logger.info( + "Trying to retrieve tenant domain from AAD to populate identity structure ..." + ) + client = GraphClient(credential=credentials) + domain_result = client.get("/domains").json() + if "value" in domain_result: + if "id" in domain_result["value"][0]: + identity.domain = domain_result["value"][0]["id"] + except Exception as error: + logger.error( + "Provided identity does not have permissions to access AAD to retrieve tenant domain" + ) + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}" + ) + # since that exception is not considered as critical, we keep filling another identity fields + if sp_env_auth: + # The id of the sp can be retrieved from environment variables + identity.identity_id = getenv("AZURE_CLIENT_ID") + identity.identity_type = "Service Principal" + # Same here, if user can access AAD, some fields are retrieved if not, default value, for az cli + # should work but it doesn't, pending issue + else: + identity.identity_id = "Unknown user id (Missing AAD permissions)" + identity.identity_type = "User" + try: + 
logger.info( + "Trying to retrieve user information from AAD to populate identity structure ..." + ) + client = GraphClient(credential=credentials) + user_name = client.get("/me").json() + if "userPrincipalName" in user_name: + identity.identity_id = user_name + + except Exception as error: + logger.error( + "Provided identity does not have permissions to access AAD to retrieve user's metadata" + ) + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}" + ) + # Managed identities only can be assigned resource, resource group and subscription scope permissions + elif managed_entity_auth: + identity.identity_id = "Default Managed Identity ID" + identity.identity_type = "Managed Identity" + # Pending extracting info from managed identity + + # once we have populated the id, type, and domain fields, time to retrieve the subscriptions and finally the tenants + try: + logger.info( + "Trying to subscriptions and tenant ids to populate identity structure ..." + ) + subscriptions_client = SubscriptionClient( + credential=credentials, + base_url=self.region_config.base_url, + credential_scopes=self.region_config.credential_scopes, + ) + if not subscription_ids: + logger.info("Scanning all the Azure subscriptions...") + for subscription in subscriptions_client.subscriptions.list(): + identity.subscriptions.update( + {subscription.display_name: subscription.subscription_id} + ) + else: + logger.info("Scanning the subscriptions passed as argument ...") + for id in subscription_ids: + subscription = subscriptions_client.subscriptions.get( + subscription_id=id + ) + identity.subscriptions.update({subscription.display_name: id}) + + # If there are no subscriptions listed -> checks are not going to be run against any resource + if not identity.subscriptions: + logger.critical( + "It was not possible to retrieve any subscriptions, please check your permission assignments" + ) + sys.exit(1) + + tenants = subscriptions_client.tenants.list() + for 
tenant in tenants: + identity.tenant_ids.append(tenant.tenant_id) + # This error is critical, since it implies something is wrong with the credentials provided + except Exception as error: + logger.critical( + "Error with credentials provided getting subscriptions and tenants to scan" + ) + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}" + ) + sys.exit(1) + + return identity diff --git a/prowler/providers/azure/lib/audit_info/audit_info.py b/prowler/providers/azure/lib/audit_info/audit_info.py index 098f63c77e..cb46d8a7b7 100644 --- a/prowler/providers/azure/lib/audit_info/audit_info.py +++ b/prowler/providers/azure/lib/audit_info/audit_info.py @@ -1,14 +1,14 @@ from prowler.providers.azure.lib.audit_info.models import ( Azure_Audit_Info, - Azure_Identity_Info, - Azure_Region_Config, + AzureIdentityInfo, + AzureRegionConfig, ) azure_audit_info = Azure_Audit_Info( credentials=None, - identity=Azure_Identity_Info(), + identity=AzureIdentityInfo(), audit_resources=None, audit_metadata=None, audit_config=None, - azure_region_config=Azure_Region_Config(), + azure_region_config=AzureRegionConfig(), ) diff --git a/prowler/providers/azure/lib/audit_info/models.py b/prowler/providers/azure/lib/audit_info/models.py index 978ec34259..2b1bb274e3 100644 --- a/prowler/providers/azure/lib/audit_info/models.py +++ b/prowler/providers/azure/lib/audit_info/models.py @@ -5,7 +5,7 @@ from azure.identity import DefaultAzureCredential from pydantic import BaseModel -class Azure_Identity_Info(BaseModel): +class AzureIdentityInfo(BaseModel): identity_id: str = "" identity_type: str = "" tenant_ids: list[str] = [] @@ -13,7 +13,7 @@ class Azure_Identity_Info(BaseModel): subscriptions: dict = {} -class Azure_Region_Config(BaseModel): +class AzureRegionConfig(BaseModel): name: str = "" authority: str = None base_url: str = "" @@ -23,11 +23,11 @@ class Azure_Region_Config(BaseModel): @dataclass class Azure_Audit_Info: credentials: 
DefaultAzureCredential - identity: Azure_Identity_Info + identity: AzureIdentityInfo audit_resources: Optional[Any] audit_metadata: Optional[Any] audit_config: dict - azure_region_config: Azure_Region_Config + azure_region_config: AzureRegionConfig def __init__( self, diff --git a/prowler/providers/azure/lib/service/service.py b/prowler/providers/azure/lib/service/service.py index 305f25898e..423df72694 100644 --- a/prowler/providers/azure/lib/service/service.py +++ b/prowler/providers/azure/lib/service/service.py @@ -1,30 +1,30 @@ from prowler.lib.logger import logger -from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info +from prowler.providers.azure.azure_provider_new import AzureProvider class AzureService: def __init__( self, service: str, - audit_info: Azure_Audit_Info, + provider: AzureProvider, ): self.clients = self.__set_clients__( - audit_info.identity.subscriptions, - audit_info.credentials, + provider.identity.subscriptions, + provider.session, service, - audit_info.azure_region_config, + provider.region_config, ) - self.subscriptions = audit_info.identity.subscriptions + self.subscriptions = provider.identity.subscriptions - def __set_clients__(self, subscriptions, credentials, service, region_config): + def __set_clients__(self, subscriptions, session, service, region_config): clients = {} try: for display_name, id in subscriptions.items(): clients.update( { display_name: service( - credential=credentials, + credential=session, subscription_id=id, base_url=region_config.base_url, credential_scopes=region_config.credential_scopes, diff --git a/prowler/providers/azure/services/defender/defender_client.py b/prowler/providers/azure/services/defender/defender_client.py index f2f50f1a8a..9135315775 100644 --- a/prowler/providers/azure/services/defender/defender_client.py +++ b/prowler/providers/azure/services/defender/defender_client.py @@ -1,4 +1,4 @@ -from prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info from 
prowler.providers.azure.services.defender.defender_service import Defender +from prowler.providers.common.common import global_provider -defender_client = Defender(azure_audit_info) +defender_client = Defender(global_provider) diff --git a/prowler/providers/azure/services/iam/iam_client.py b/prowler/providers/azure/services/iam/iam_client.py index b4d7fd42ae..1ef0fd36b7 100644 --- a/prowler/providers/azure/services/iam/iam_client.py +++ b/prowler/providers/azure/services/iam/iam_client.py @@ -1,4 +1,4 @@ -from prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info from prowler.providers.azure.services.iam.iam_service import IAM +from prowler.providers.common.common import global_provider -iam_client = IAM(azure_audit_info) +iam_client = IAM(global_provider) diff --git a/prowler/providers/azure/services/sqlserver/sqlserver_client.py b/prowler/providers/azure/services/sqlserver/sqlserver_client.py index a96e570db6..9132cf97ab 100644 --- a/prowler/providers/azure/services/sqlserver/sqlserver_client.py +++ b/prowler/providers/azure/services/sqlserver/sqlserver_client.py @@ -1,4 +1,4 @@ -from prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info from prowler.providers.azure.services.sqlserver.sqlserver_service import SQLServer +from prowler.providers.common.common import global_provider -sqlserver_client = SQLServer(azure_audit_info) +sqlserver_client = SQLServer(global_provider) diff --git a/prowler/providers/azure/services/storage/storage_client.py b/prowler/providers/azure/services/storage/storage_client.py index 2a18d2cab2..58651e2cb3 100644 --- a/prowler/providers/azure/services/storage/storage_client.py +++ b/prowler/providers/azure/services/storage/storage_client.py @@ -1,4 +1,4 @@ -from prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info from prowler.providers.azure.services.storage.storage_service import Storage +from prowler.providers.common.common import global_provider -storage_client = 
Storage(azure_audit_info) +storage_client = Storage(global_provider) diff --git a/prowler/providers/common/audit_info.py b/prowler/providers/common/audit_info.py index ec8b302b5c..a8ba2a900c 100644 --- a/prowler/providers/common/audit_info.py +++ b/prowler/providers/common/audit_info.py @@ -28,7 +28,7 @@ from prowler.providers.azure.azure_provider import Azure_Provider from prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info from prowler.providers.azure.lib.audit_info.models import ( Azure_Audit_Info, - Azure_Region_Config, + AzureRegionConfig, ) from prowler.providers.azure.lib.exception.exception import AzureException from prowler.providers.gcp.gcp_provider import GCP_Provider @@ -67,7 +67,7 @@ GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: {Fore.Y report = f""" This report is being generated using the identity below: -Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL} Azure Region: {Fore.YELLOW}[{audit_info.azure_region_config.name}]{Style.RESET_ALL} +Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL} Azure Region: {Fore.YELLOW}[{audit_info.azure_region_config.name}]{Style.RESET_ALL} Azure Subscriptions: {Fore.YELLOW}{printed_subscriptions}{Style.RESET_ALL} Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RESET_ALL} Azure Identity ID: {Fore.YELLOW}[{audit_info.identity.identity_id}]{Style.RESET_ALL} """ @@ -316,15 +316,17 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE azure_audit_info.credentials = azure_provider.get_credentials() azure_audit_info.identity = azure_provider.get_identity() region_config = azure_provider.get_region_config() - azure_audit_info.azure_region_config = Azure_Region_Config( + 
azure_audit_info.azure_region_config = AzureRegionConfig( name=region, authority=region_config["authority"], base_url=region_config["base_url"], credential_scopes=region_config["credential_scopes"], ) - if not arguments.get("only_logs"): - self.print_azure_credentials(azure_audit_info) + # TODO - remove it + # this logic is being processed in general provider + # if not arguments.get("only_logs"): + # self.print_azure_credentials(azure_audit_info) return azure_audit_info @@ -349,8 +351,10 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE gcp_audit_info.project_ids, ) = gcp_provider.get_credentials() - if not arguments.get("only_logs"): - self.print_gcp_credentials(gcp_audit_info) + # TODO - remove it + # this logic is being processed in general provider + # if not arguments.get("only_logs"): + # self.print_gcp_credentials(gcp_audit_info) return gcp_audit_info diff --git a/prowler/providers/common/common.py b/prowler/providers/common/common.py index a1a000c8d5..d553de05d0 100644 --- a/prowler/providers/common/common.py +++ b/prowler/providers/common/common.py @@ -1,7 +1,20 @@ from importlib import import_module +from typing import Any providers_prowler_lib_path = "prowler.providers" +# SHARED PROVIDER OBJECT ACROSS ALL PROWLER CODE +global_provider = None + + +def set_provider(provider, arguments) -> Any: + provider_class_name = f"{provider.capitalize()}Provider" + import_module_path = f"prowler.providers.{provider}.azure_provider_testing" + provider_instance = getattr(import_module(import_module_path), provider_class_name)( + arguments + ) + return provider_instance + def get_available_providers() -> list[str]: """get_available_providers returns a list of the available providers""" @@ -12,3 +25,15 @@ def get_available_providers() -> list[str]: if not (provider.startswith("__") or provider.startswith("common")) ] return providers + + +def set_global_provider_object(arguments): + global global_provider + # make here dynamic import + 
common_import_path = ( + f"prowler.providers.{arguments.provider}.{arguments.provider}_provider_new" + ) + provider_class = f"{arguments.provider.capitalize()}Provider" + global_provider = getattr(import_module(common_import_path), provider_class)( + arguments + ) diff --git a/prowler/providers/common/provider.py b/prowler/providers/common/provider.py new file mode 100644 index 0000000000..fd6eec8c4f --- /dev/null +++ b/prowler/providers/common/provider.py @@ -0,0 +1,14 @@ +from abc import ABC, abstractmethod + + +class CloudProvider(ABC): + @abstractmethod + def setup_session(self): + pass + + @abstractmethod + def print_credentials(self): + pass + + def validate_arguments(self): + pass diff --git a/prowler/providers/gcp/gcp_provider_new.py b/prowler/providers/gcp/gcp_provider_new.py new file mode 100644 index 0000000000..9cbd38ae25 --- /dev/null +++ b/prowler/providers/gcp/gcp_provider_new.py @@ -0,0 +1,119 @@ +import os +import sys +from typing import Any, Optional + +from colorama import Fore, Style +from google import auth +from google.oauth2.credentials import Credentials +from googleapiclient import discovery + +from prowler.lib.logger import logger +from prowler.providers.common.provider import CloudProvider + + +class GcpProvider(CloudProvider): + session: Credentials + default_project_id: str + project_ids: list + audit_resources: Optional[Any] + audit_metadata: Optional[Any] + audit_config: Optional[dict] + + def __init__(self, arguments): + logger.info("Instantiating GCP Provider ...") + input_project_ids = arguments.project_ids + credentials_file = arguments.credentials_file + + self.session, self.default_project_id = self.setup_session(credentials_file) + if not self.default_project_id: + logger.critical("No Project ID associated to Google Credentials.") + sys.exit(1) + + self.project_ids = [] + accessible_projects = self.get_project_ids() + if not accessible_projects: + logger.critical("No Project IDs can be accessed via Google Credentials.") + 
sys.exit(1) + + if input_project_ids: + for input_project in input_project_ids: + if input_project in accessible_projects: + self.project_ids.append(input_project) + else: + logger.critical( + f"Project {input_project} cannot be accessed via Google Credentials." + ) + sys.exit(1) + else: + # If no projects were input, all accessible projects are scanned by default + self.project_ids = accessible_projects + + if not arguments.only_logs: + self.print_credentials() + + def setup_session(self, credentials_file): + try: + if credentials_file: + self.__set_gcp_creds_env_var__(credentials_file) + + return auth.default( + scopes=["https://www.googleapis.com/auth/cloud-platform"] + ) + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + sys.exit(1) + + def __set_gcp_creds_env_var__(self, credentials_file): + logger.info( + "GCP provider: Setting GOOGLE_APPLICATION_CREDENTIALS environment variable..." + ) + client_secrets_path = os.path.abspath(credentials_file) + os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = client_secrets_path + + def print_credentials(self): + # Beautify audited profile, set "default" if there is no profile set + try: + getattr(self.session, "_service_account_email") + profile = ( + self.session._service_account_email + if self.session._service_account_email is not None + else "default" + ) + except AttributeError: + profile = "default" + + report = f""" +This report is being generated using credentials below: + +GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: {Fore.YELLOW}[{", ".join(self.project_ids)}]{Style.RESET_ALL} +""" + print(report) + + def get_project_ids(self): + try: + project_ids = [] + + service = discovery.build( + "cloudresourcemanager", "v1", credentials=self.session + ) + + request = service.projects().list() + + while request is not None: + response = request.execute() + + for project in response.get("projects", []): + 
project_ids.append(project["projectId"]) + + request = service.projects().list_next( + previous_request=request, previous_response=response + ) + + return project_ids + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + return [] diff --git a/prowler/providers/gcp/lib/service/service.py b/prowler/providers/gcp/lib/service/service.py index 30bcc5a2e7..c76691b617 100644 --- a/prowler/providers/gcp/lib/service/service.py +++ b/prowler/providers/gcp/lib/service/service.py @@ -8,29 +8,27 @@ from googleapiclient import discovery from googleapiclient.discovery import Resource from prowler.lib.logger import logger -from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info +from prowler.providers.gcp.gcp_provider_new import GcpProvider class GCPService: def __init__( self, service: str, - audit_info: GCP_Audit_Info, + provider: GcpProvider, region="global", api_version="v1", ): # We receive the service using __class__.__name__ or the service name in lowercase # e.g.: APIKeys --> we need a lowercase string, so service.lower() self.service = service.lower() if not service.islower() else service - self.credentials = audit_info.credentials + self.credentials = provider.session self.api_version = api_version - self.default_project_id = audit_info.default_project_id + self.default_project_id = provider.default_project_id self.region = region - self.client = self.__generate_client__( - service, api_version, audit_info.credentials - ) + self.client = self.__generate_client__(service, api_version, self.credentials) # Only project ids that have their API enabled will be scanned - self.project_ids = self.__is_api_active__(audit_info.project_ids) + self.project_ids = self.__is_api_active__(provider.project_ids) def __get_client__(self): return self.client diff --git a/prowler/providers/gcp/services/apikeys/apikeys_client.py b/prowler/providers/gcp/services/apikeys/apikeys_client.py index 
e8b8913dc8..92de567c3a 100644 --- a/prowler/providers/gcp/services/apikeys/apikeys_client.py +++ b/prowler/providers/gcp/services/apikeys/apikeys_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.apikeys.apikeys_service import APIKeys -apikeys_client = APIKeys(gcp_audit_info) +apikeys_client = APIKeys(global_provider) diff --git a/prowler/providers/gcp/services/bigquery/bigquery_client.py b/prowler/providers/gcp/services/bigquery/bigquery_client.py index fca7c124bb..ea788aba97 100644 --- a/prowler/providers/gcp/services/bigquery/bigquery_client.py +++ b/prowler/providers/gcp/services/bigquery/bigquery_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.bigquery.bigquery_service import BigQuery -bigquery_client = BigQuery(gcp_audit_info) +bigquery_client = BigQuery(global_provider) diff --git a/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_client.py b/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_client.py index aaf574fcfa..a9aa26b509 100644 --- a/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_client.py +++ b/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_client.py @@ -1,6 +1,6 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.cloudresourcemanager.cloudresourcemanager_service import ( CloudResourceManager, ) -cloudresourcemanager_client = CloudResourceManager(gcp_audit_info) +cloudresourcemanager_client = CloudResourceManager(global_provider) diff --git a/prowler/providers/gcp/services/cloudsql/cloudsql_client.py 
b/prowler/providers/gcp/services/cloudsql/cloudsql_client.py index fa51c28fac..96169972f7 100644 --- a/prowler/providers/gcp/services/cloudsql/cloudsql_client.py +++ b/prowler/providers/gcp/services/cloudsql/cloudsql_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.cloudsql.cloudsql_service import CloudSQL -cloudsql_client = CloudSQL(gcp_audit_info) +cloudsql_client = CloudSQL(global_provider) diff --git a/prowler/providers/gcp/services/cloudstorage/cloudstorage_client.py b/prowler/providers/gcp/services/cloudstorage/cloudstorage_client.py index aca1c82bb9..7cfa837dc9 100644 --- a/prowler/providers/gcp/services/cloudstorage/cloudstorage_client.py +++ b/prowler/providers/gcp/services/cloudstorage/cloudstorage_client.py @@ -1,6 +1,6 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import ( CloudStorage, ) -cloudstorage_client = CloudStorage(gcp_audit_info) +cloudstorage_client = CloudStorage(global_provider) diff --git a/prowler/providers/gcp/services/compute/compute_client.py b/prowler/providers/gcp/services/compute/compute_client.py index 93354f39f9..3fc985d1de 100644 --- a/prowler/providers/gcp/services/compute/compute_client.py +++ b/prowler/providers/gcp/services/compute/compute_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.compute.compute_service import Compute -compute_client = Compute(gcp_audit_info) +compute_client = Compute(global_provider) diff --git a/prowler/providers/gcp/services/dataproc/dataproc_client.py b/prowler/providers/gcp/services/dataproc/dataproc_client.py index 8f17a630ef..848314f533 
100644 --- a/prowler/providers/gcp/services/dataproc/dataproc_client.py +++ b/prowler/providers/gcp/services/dataproc/dataproc_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.dataproc.dataproc_service import Dataproc -dataproc_client = Dataproc(gcp_audit_info) +dataproc_client = Dataproc(global_provider) diff --git a/prowler/providers/gcp/services/dns/dns_client.py b/prowler/providers/gcp/services/dns/dns_client.py index 0c3a3f4b30..6d290ed84c 100644 --- a/prowler/providers/gcp/services/dns/dns_client.py +++ b/prowler/providers/gcp/services/dns/dns_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.dns.dns_service import DNS -dns_client = DNS(gcp_audit_info) +dns_client = DNS(global_provider) diff --git a/prowler/providers/gcp/services/iam/accessapproval_client.py b/prowler/providers/gcp/services/iam/accessapproval_client.py index f4f201a96e..8e4c0944cf 100644 --- a/prowler/providers/gcp/services/iam/accessapproval_client.py +++ b/prowler/providers/gcp/services/iam/accessapproval_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.iam.iam_service import AccessApproval -accessapproval_client = AccessApproval(gcp_audit_info) +accessapproval_client = AccessApproval(global_provider) diff --git a/prowler/providers/gcp/services/iam/essentialcontacts_client.py b/prowler/providers/gcp/services/iam/essentialcontacts_client.py index 9bb82ed83a..23add3e86c 100644 --- a/prowler/providers/gcp/services/iam/essentialcontacts_client.py +++ b/prowler/providers/gcp/services/iam/essentialcontacts_client.py @@ -1,4 +1,4 @@ -from 
prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.iam.iam_service import EssentialContacts -essentialcontacts_client = EssentialContacts(gcp_audit_info) +essentialcontacts_client = EssentialContacts(global_provider) diff --git a/prowler/providers/gcp/services/iam/iam_client.py b/prowler/providers/gcp/services/iam/iam_client.py index 0752df0d1a..fd36d5bade 100644 --- a/prowler/providers/gcp/services/iam/iam_client.py +++ b/prowler/providers/gcp/services/iam/iam_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.iam.iam_service import IAM -iam_client = IAM(gcp_audit_info) +iam_client = IAM(global_provider) diff --git a/prowler/providers/gcp/services/kms/kms_client.py b/prowler/providers/gcp/services/kms/kms_client.py index cd51f34486..0bc861cbdb 100644 --- a/prowler/providers/gcp/services/kms/kms_client.py +++ b/prowler/providers/gcp/services/kms/kms_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.kms.kms_service import KMS -kms_client = KMS(gcp_audit_info) +kms_client = KMS(global_provider) diff --git a/prowler/providers/gcp/services/logging/logging_client.py b/prowler/providers/gcp/services/logging/logging_client.py index 2eb45eecb9..83dbdf34aa 100644 --- a/prowler/providers/gcp/services/logging/logging_client.py +++ b/prowler/providers/gcp/services/logging/logging_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.logging.logging_service import Logging -logging_client = Logging(gcp_audit_info) +logging_client = 
Logging(global_provider) diff --git a/prowler/providers/gcp/services/monitoring/monitoring_client.py b/prowler/providers/gcp/services/monitoring/monitoring_client.py index e20cd09e19..d30491226a 100644 --- a/prowler/providers/gcp/services/monitoring/monitoring_client.py +++ b/prowler/providers/gcp/services/monitoring/monitoring_client.py @@ -1,4 +1,4 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.monitoring.monitoring_service import Monitoring -monitoring_client = Monitoring(gcp_audit_info) +monitoring_client = Monitoring(global_provider) diff --git a/prowler/providers/gcp/services/serviceusage/serviceusage_client.py b/prowler/providers/gcp/services/serviceusage/serviceusage_client.py index 363dcdf91d..cf146dd336 100644 --- a/prowler/providers/gcp/services/serviceusage/serviceusage_client.py +++ b/prowler/providers/gcp/services/serviceusage/serviceusage_client.py @@ -1,6 +1,6 @@ -from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info +from prowler.providers.common.common import global_provider from prowler.providers.gcp.services.serviceusage.serviceusage_service import ( ServiceUsage, ) -serviceusage_client = ServiceUsage(gcp_audit_info) +serviceusage_client = ServiceUsage(global_provider) diff --git a/tests/lib/outputs/slack_test.py b/tests/lib/outputs/slack_test.py index 02e572ea6f..3a93f5caf3 100644 --- a/tests/lib/outputs/slack_test.py +++ b/tests/lib/outputs/slack_test.py @@ -10,8 +10,8 @@ from prowler.lib.outputs.slack import ( from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info from prowler.providers.azure.lib.audit_info.models import ( Azure_Audit_Info, - Azure_Identity_Info, - Azure_Region_Config, + AzureIdentityInfo, + AzureRegionConfig, ) from prowler.providers.common.models import Audit_Metadata from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info @@ -64,7 +64,7 @@ class 
Test_Slack_Integration: ) azure_audit_info = Azure_Audit_Info( credentials=None, - identity=Azure_Identity_Info( + identity=AzureIdentityInfo( identity_id="", identity_type="", tenant_ids=[], @@ -77,7 +77,7 @@ class Test_Slack_Integration: audit_resources=None, audit_metadata=None, audit_config=None, - azure_region_config=Azure_Region_Config(), + azure_region_config=AzureRegionConfig(), ) assert create_message_identity("aws", aws_audit_info) == ( f"AWS Account *{aws_audit_info.audited_account}*", diff --git a/tests/providers/common/audit_info_test.py b/tests/providers/common/audit_info_test.py index 2f09c986a8..14a9256882 100644 --- a/tests/providers/common/audit_info_test.py +++ b/tests/providers/common/audit_info_test.py @@ -10,8 +10,8 @@ from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Aud from prowler.providers.azure.azure_provider import Azure_Provider from prowler.providers.azure.lib.audit_info.models import ( Azure_Audit_Info, - Azure_Identity_Info, - Azure_Region_Config, + AzureIdentityInfo, + AzureRegionConfig, ) from prowler.providers.azure.lib.exception.exception import AzureException from prowler.providers.common.audit_info import ( @@ -29,11 +29,11 @@ AWS_ACCOUNT_NUMBER = "123456789012" mock_azure_audit_info = Azure_Audit_Info( credentials=None, - identity=Azure_Identity_Info(), + identity=AzureIdentityInfo(), audit_metadata=None, audit_resources=None, audit_config=None, - azure_region_config=Azure_Region_Config(), + azure_region_config=AzureRegionConfig(), ) mock_set_audit_info = Audit_Info() @@ -78,7 +78,7 @@ def mock_print_audit_credentials(*_): def mock_set_identity_info(*_): - return Azure_Identity_Info() + return AzureIdentityInfo() def mock_set_azure_credentials(*_): diff --git a/tests/providers/common/common_outputs_test.py b/tests/providers/common/common_outputs_test.py index 5acbddc1f8..311f0e839f 100644 --- a/tests/providers/common/common_outputs_test.py +++ b/tests/providers/common/common_outputs_test.py @@ -8,8 
+8,8 @@ from prowler.lib.outputs.html import get_assessment_summary from prowler.providers.aws.lib.audit_info.audit_info import AWS_Audit_Info from prowler.providers.azure.lib.audit_info.audit_info import ( Azure_Audit_Info, - Azure_Identity_Info, - Azure_Region_Config, + AzureIdentityInfo, + AzureRegionConfig, ) from prowler.providers.common.models import Audit_Metadata from prowler.providers.common.outputs import ( @@ -31,11 +31,11 @@ class Test_Common_Output_Options: def set_mocked_azure_audit_info(self): audit_info = Azure_Audit_Info( credentials=None, - identity=Azure_Identity_Info(), + identity=AzureIdentityInfo(), audit_metadata=None, audit_resources=None, audit_config=None, - azure_region_config=Azure_Region_Config(), + azure_region_config=AzureRegionConfig(), ) return audit_info From 93a2431211069860983b0a2a51cdd926d7b80c00 Mon Sep 17 00:00:00 2001 From: Sergio Garcia <38561120+sergargar@users.noreply.github.com> Date: Wed, 13 Dec 2023 17:31:39 +0100 Subject: [PATCH 05/10] feat(compliance): execute all compliance by default (#3003) Co-authored-by: Pepe Fagoaga --- docs/tutorials/compliance.md | 31 +- docs/tutorials/img/compliance.png | Bin 0 -> 95639 bytes prowler/__main__.py | 19 +- prowler/config/config.py | 7 +- prowler/lib/outputs/compliance.py | 641 ------------------ prowler/lib/outputs/compliance/__init__.py | 0 .../aws_well_architected_framework.py | 55 ++ prowler/lib/outputs/compliance/cis.py | 36 + prowler/lib/outputs/compliance/cis_aws.py | 34 + prowler/lib/outputs/compliance/cis_gcp.py | 35 + prowler/lib/outputs/compliance/compliance.py | 472 +++++++++++++ .../lib/outputs/compliance/ens_rd2022_aws.py | 45 ++ prowler/lib/outputs/compliance/generic.py | 51 ++ .../outputs/compliance/iso27001_2013_aws.py | 53 ++ .../outputs/compliance/mitre_attack_aws.py | 66 ++ prowler/lib/outputs/csv.py | 10 + prowler/lib/outputs/file_descriptors.py | 45 +- prowler/lib/outputs/models.py | 11 +- prowler/lib/outputs/outputs.py | 37 +- 
prowler/providers/common/outputs.py | 3 + .../lib/outputs/compliance/compliance_test.py | 111 +++ 21 files changed, 1073 insertions(+), 689 deletions(-) create mode 100644 docs/tutorials/img/compliance.png delete mode 100644 prowler/lib/outputs/compliance.py create mode 100644 prowler/lib/outputs/compliance/__init__.py create mode 100644 prowler/lib/outputs/compliance/aws_well_architected_framework.py create mode 100644 prowler/lib/outputs/compliance/cis.py create mode 100644 prowler/lib/outputs/compliance/cis_aws.py create mode 100644 prowler/lib/outputs/compliance/cis_gcp.py create mode 100644 prowler/lib/outputs/compliance/compliance.py create mode 100644 prowler/lib/outputs/compliance/ens_rd2022_aws.py create mode 100644 prowler/lib/outputs/compliance/generic.py create mode 100644 prowler/lib/outputs/compliance/iso27001_2013_aws.py create mode 100644 prowler/lib/outputs/compliance/mitre_attack_aws.py create mode 100644 prowler/lib/outputs/csv.py create mode 100644 tests/lib/outputs/compliance/compliance_test.py diff --git a/docs/tutorials/compliance.md b/docs/tutorials/compliance.md index a85037a0dc..6d39e96e84 100644 --- a/docs/tutorials/compliance.md +++ b/docs/tutorials/compliance.md @@ -1,5 +1,18 @@ # Compliance -Prowler allows you to execute checks based on requirements defined in compliance frameworks. +Prowler allows you to execute checks based on requirements defined in compliance frameworks. By default, it will execute and give you an overview of the status of each compliance framework: + + + +> You can find CSVs containing detailed compliance results inside the compliance folder within Prowler's output folder. + +## Execute Prowler based on Compliance Frameworks +Prowler can analyze your environment based on a specific compliance framework and get more details, to do it, you can use option `--compliance`: +```sh +prowler --compliance +``` +Standard results will be shown and additionally the framework information as the sample below for CIS AWS 1.5. 
For details a CSV file has been generated as well. + + ## List Available Compliance Frameworks In order to see which compliance frameworks are cover by Prowler, you can use option `--list-compliance`: @@ -10,9 +23,12 @@ Currently, the available frameworks are: - `cis_1.4_aws` - `cis_1.5_aws` +- `cis_2.0_aws` +- `cisa_aws` - `ens_rd2022_aws` - `aws_audit_manager_control_tower_guardrails_aws` - `aws_foundational_security_best_practices_aws` +- `aws_well_architected_framework_reliability_pillar_aws` - `aws_well_architected_framework_security_pillar_aws` - `cisa_aws` - `fedramp_low_revision_4_aws` @@ -22,6 +38,9 @@ Currently, the available frameworks are: - `gxp_eu_annex_11_aws` - `gxp_21_cfr_part_11_aws` - `hipaa_aws` +- `iso27001_2013_aws` +- `iso27001_2013_aws` +- `mitre_attack_aws` - `nist_800_53_revision_4_aws` - `nist_800_53_revision_5_aws` - `nist_800_171_revision_2_aws` @@ -38,7 +57,6 @@ prowler --list-compliance-requirements ``` Example for the first requirements of CIS 1.5 for AWS: - ``` Listing CIS 1.5 AWS Compliance Requirements: @@ -71,15 +89,6 @@ Requirement Id: 1.5 ``` -## Execute Prowler based on Compliance Frameworks -As we mentioned, Prowler can be execute to analyse you environment based on a specific compliance framework, to do it, you can use option `--compliance`: -```sh -prowler --compliance -``` -Standard results will be shown and additionally the framework information as the sample below for CIS AWS 1.5. For details a CSV file has been generated as well. - - - ## Create and contribute adding other Security Frameworks This information is part of the Developer Guide and can be found here: https://docs.prowler.cloud/en/latest/tutorials/developer-guide/. 
diff --git a/docs/tutorials/img/compliance.png b/docs/tutorials/img/compliance.png new file mode 100644 index 0000000000000000000000000000000000000000..1492089c86d26394aad475a1a8c77252a4c5ea32 GIT binary patch literal 95639 zcmdq}Wpo_7vH%L35;J4W3}a@7#B3+FV|&aHbIi;fGsoK>sTrj3>wx z0_LwWa^UBm-%s%U2j<^Ts7$E;lz?f@g#J$&s`^jS)F_E#@B`jPO5Gj;0*C6)4JoZi zc@AzSqN%c)gW8wRd<#QpZ5&Lk ztw{gi)zi0jbPyyb|5MT5zkj#W!PNMFYO=Ebi!3mK%zwTxvof(T|6gE6E~fu~!2W#s zH`u@E`ge5#e~|Gh8rfT0IR0rB(8|<7h)v*Mb^J5?e;O(92N$1$sf&??nwY7jk(E6d zREU+06%6@*e)>N_)&B>Si}(Kr@_&8#3sQjjPj~&V?)&%H{3{ncI6?>l%zqEG5JKx~ z3Ooda2!yoQM`aht<22Y*Eb-euXJ;+TmIXOvJVCQFVs(O9wGRZIXif`3k(JWgl zD%`KNT#~pTg~a%;%Na(0S)45rM#|@(7ZQ{*{sQWM=L=wDPVS>4GyjBu|Hq|ZYDn^5 z6=0l8`N-r*ax{xTQU3D+MSK2NwNmXAkbnr{iF2rbqW=jIFLC>SkProv&*3K4h4^3S z{}2HC|49OZZ(OJhaVMvyP>BWbDvzOuUD|yh6Ls7QcrTtGu7n@9G3~>b&Kc{LF^Gs> zR-D&Lv>GN$LFyAp}Ur=cSlm92ze_lmuf>oLTKFvVyJXIF2s|LGL^K3 z5=OO~2>D{-;Q4&R!!JFbvi;I68I9H6G?c?w%7o-oFnm^7jfe4q1@D&agHVVr>2z^nI`cSmYi+MbwfFAYWc9o;`?ST%R<4u^POPZ**n3GlY* zwpDhE<)tRX#aS+3a2o_}mDI&7@2+#P`An{Pzu9c{?pWE2Zu>${BKS7U;SOD*oEuCCVwd5yF!H!5CX zI1+s}?#qs=>LBs$&wpPvxd%LihY%=ILLPy&wjrK0A1TDQH?{UBnQPq84a=fgVf6$*Fx z(}|qxgFHli{nY(o?&Y7U@kZAkZ76BnN%As>mN}ufld)v>J=pII`tYMBp;0N{ZZ&Jt zspU7bL?gqP<@|hvsNi7l=yb|St;R`nm<69KF6-7*iYUo#Kc~6zaMmn8<43z>v^k{* zp-P&VNP53{ZdRMlAWWJ@W8qHTdC;z|3enL)%7b9fos}-953ULwGJNgnQJD2n!MxTR zLC^ws9`j42&Fkgy_9P51A43^`#PEF2(yqzrWcaHB5x>L!`CZ#vOC)VX+uJLD@4jN! 
zl)UhYfD^aslRR;t&da<(RLEkP!a5c=ZjiRA#nkVSSa~7%fheIzI1)JJm}Hnw(I#IG zF^j9cUygJ^b40v$hI^xF{WuA|$%bB!2XaVT06SX2+xZw>FWZQU+fzp=0S*!Qhr1JV zBuHCMrqTO=Sym$%FAEFK5#&YLy-~U6D_K*d%5Oet&T=id~UP z&bg)nG&mSPJ8{l`w*A9i+%EA0>|M6` zTnzsyEiS0S@knt16PjO^O0!O7rQMN=y^4dg1|eBoNRX(dbK-YIO$-j}P{FHUjQ^@8 zR`w(|D3mX5OIEDT$|x^Q<*}{}d)ZcUu+j)96A5x$mDjAP#Hj7({{C$_4&szw(e*%f z9FLtXJe!S8#{CAMUXfk66i$3ne9sjY27Kw*6GT{F5QD@!%Av=<$)}|!%W4)4vTJ6t zYM_EIloD?yu~fb5pN!>qVsL%fd3F&U3hu4L_U4p$`NKq$!GJ>Ts~lwg`+j|B)gg3YFW zdyHrPI4Y%YI4l&q<{S8!Cr)1Qb~~G>TylQd{js;qJ@;}N)de8vW1`eHcBEP+9)@F< zddwAnS7a5-!d3|fb8j+PCL#BW?cI9`N;(FHRDS2v&7QC<;N968iA=5TgTJ!N)bB54 zZLf`3wu5weYS-lQ;%7^B);*!v^^Y{H>_X@5(4XFij!c4Ny-9; zwlDnNBV+im8xOl2fD+B*ph>7V0KYGJ&zki?9OM|Y5aB2I+zDCfF@*>Yr^oo_rRoOH z;z2b4u2}{4b@$qDACHUGD;PE6Zq9VwhFS4aR3c=GCf$)AG#d?7R8+XEb!N)d;>)cw zQluO?pCQk-M{d+x1*rM?xtK~B*x4Ip6BX4=S$=q&HZ;e|6&h2&Y=$ixk03=)b(DiCazUmLa#gBw%VPEyE zTiXV0tn=Fc-jIln!#QFr+#nb^aP_* zd{VZWuWWgHed#;a4cr}KZWFt|9AZ{wMY@-TaMF-wGZ`iH zo@$J5JO7j@VY?lCkw&1xP&J<=x-*g@2?8~ISPxZea$*Y5+Zo*dnj^C<%Nw}Jy<%AZ zb>QOlsTX3im!O4r7dWW$v{p6=9L&y6TC#S2fXeUZ!#kH-)zZWJ+$|R@H5~YOza>!r zsz2^D1AJ%Qyn$bx=3y88Jshl#BZyMm)_snOo`XnwyZl`iO^Ii`>^~-yUrL`&_B`A! 
zHPkm+X&;}|HKqO($9veykeUA-ag`H=G*HYCl-QJMALXEd)i?`kE2rid~@1j3r- zERNDUE#0CZWVZkXlquY!0N3_dNJ~wBi#y_})t)ql72B$>=pb ze83T`?@#&&RUKE*&mSq-lXz5M!CN}5oUbw&`QX|>h!DVfv!7;}cHVH*Q3 zv!c8^-F}p}pRiZWqw3+Xs0@cu!*C3TmE|JM?Xch+H{diPwF9A3VUMA+9|!qtv4$Q; z9n!l##ryf9&zDhby2;skVA*R9?QN9*EI&I4mBefMtK>#Cq`+4|B$;SnCbdj^r*2S; zu4}sY>jN!HzW2ipE|3-~Ne$7DSSvb_S=TBOwl|Ea5l(7qa`N)(3YCxtg5vP{U}}Wz zHPG|c(2(|y2(lxf-83&P@Qgum_Hd>MdX`KwoW-G`^HU$HaCe%wm!6%S>)t4cfU7?J zj);FVnVUJ9`A#um2UkuX1KK;6ebi>e@@dSw@b!@X0ay3h9cfSXpfFPk-i79@09$Qo zH_e%FPRn5bw#5x-dcW1zWYygr0>sZT{Lv7m^t;|cN`7Istqr`Q)-7MGEY_`FnG4>#0zp-MCe@I|qT*Gn)3FR4=xlDABUh#5a8 z?sAx2+Pd{u(0KqZj(VJgJ!f;BwzPh-kz&k+D`G1~EH&_{I?J#bHd1J@hfK|yg^o!; zP(3!r8H!K!;^Se}Sjy3u zI}{QtJQi3aT5Qk+6V5AM8qKIq(YFXOnxA-LuS{*@FOwBsuOaH2f?jy zb{15`XfPM|^2~bNESi_aPrW?dX~t1hsqJ@W0r9i)zm+IY3vqL+_W52l9+mo;=bJH3 zlhv2&<9Ut=-q8}F@&UE9mIr7u81(V>4ca5{)DuBqRd^oQvYTwaeYjwf5y5OS?u1tM zOrtl`fgc|wa0y4q095ySr_k6V5xVf*34|_AYb0`MJyVE1O*3t_*&R|SUhluq^Bro| z*;i$%-qrwJBJJt=;HvHIWo(t5seT=5pWu6PA(tI7j}7T`7FZ8!iGO;*6ZUF0<6-Nz zcb}A|5qvtXy4oF4(ocrmKO_(7>z#~ZG%t9&HHCMc(m;dgw z4i8+&=J+8X(^u48%&`i#nRlDuUllGtdd-*X+4OPNeG)F7ij7WLcDApjRZja9kE~{D zv4Y=KY}afl^tNpK`XG^jtF8RJpHsT$d9|Aa37vhOg{ymUadDq&A9~h2r6yt27ny{} z#9|RLlALAEyMgLw(?iMqO!Cfq+Gt3N$)?veGe{Q%t07eax{_O!tpO;z8DPz12Mi5| zp%Vzyqe0uV;^bY#a49m)jNr69nk`Ss6U;WSkJ74et(KG3tT_X#jdeRI)#@>xcbEgh za6mdXA6Z|S+QVGtN=4!%+S0!i4^slu*z+{@{ITomc@WleHEJC^FBKRoVVbM8DcfGS zB$o+s810swG)eI(F5w>ukX!?+Xr6Knv+&Okl;o*Woixx}St1D!ELN8r-0QYFU=hzv zpgsWDzqS9;#3qWnL*WVaM`ZT{X*KRl~>HlHV7W zB=O{EeWG_os>q~Wwdq0*o*;yLQnc5i z`L$oYqL`Xmh)!3`U)FpXs{TIb`+O+n@BIA!Qv9R zWZiNzJCedZWJWKuY679b%ApgS<)Ee#KWicn$Ir4`Mox^Hat+CWP^&-{^1iT~nyT$m z`PUN?60v|9hlGn7B-~=~ss9(|@h~VIBF5escXM`tP! z(2@_b!UhtAuSo-1!3L29b{d*3_`Xb7Q{nR8@TFk1ffY&XOnS>xtlTCKQc7kDksyMvKbPLmHTp65! 
zss|;SO?+de+MI6t&w%Ns@>figvAo^8 zev<>R-iN$)bSuPyp-ui^nZKd+OCCtAx07%eocn1nA>_2%;G#g(Ft5R*koX4^+b>HO~a@u)0u+gLh{g<+A0G=yfaW=`iS&TykMKy7aOsH}|(F>`T0Y}`@L zr&j9+UW~xweh&HDxF~8z9BSAHp@TpUS}(*%9U{ThmGr zZPz@%Wzn)~*dNi1mE>+}U%IU5&Hf&!gd`n+?yg^ZKRNyk`9>m=0 z{Xnm|9{V<3Aj#t6!##0*Pyt!Sb)b5HeZu0k4tMvk*IUgubTK7w>%3~}kyd~hB!8I> zSg0wwnv~ThL7cuXHjtdFHya~f!M%x<-*pF&BV7oceT870EUO)xY%vT#~-OI2LD5fH~SHCxCNXo6W zjrak1Z|I<8s_q1`b~^y?KDbl>BGB{9lp{i1cL}?d12#D0$h&iB27nGha6%HZVj~3|Sh0KWx`(Y-Gw_Q&YS}5pF_?jE)%yjo z;;!O&QYe49i{Q7~bhc|iQgznK|2(VN(D{6~Ml=p7R_y6Mmnp!%1QJY+mDH~vtQQI*}a)=blt z3O_5?>3ZIGLLn4ZE|PYq`XLzbW%zGk`?S*fkbaVg&gMZ372_j%!!TCmdJ<^wEUvKen?te?u9XX3GafERp{3ys7{5X0-?#)3p{>n z{pM>f71T0h-7@2%Q;s zsxu%`(~6EzV7_2^6|KDxzAG?3)j54FEcp@GIE_~u_jGE1)j{x71&gSSd^JH?Cgb~- zUK|c!Rane$rQW$3(?0twrqU|3v01t#aV+6e5deVlZ8{q*OAWV(M3J5cZ?9#FKVsjg zHzpRR%FS`3|JpecpBw@eC{+-a-pDOH<&8Z>l0Towl6d^&_^!~ZCNq+9G;e+s8}nLq zU{inRBc5e*R*f5hmsVTkUwO$cXWc+$eS54H?l(i_4gK2M;h~6NDt{Xm(?;tt*fq7i zmqFA=Wb%yNE_;xmsb&7*{`0;#gm;YLq6aA4mi~h5a#pj8^?eb;^wQg_kZnhcWHRVT zIOTg`o98M4(Nc|d+skZgPb1r^S~u&{=rPUr20&gig-BX-+wMNDeX(Jh9IgNO6ic7t ztXucCG%H?zyJo@wyo*-eloMJ#*t+Z4g_ljjB@vm%j2-e%`m~W?vQI8r#8IejpVYtC zf;>na^1%&fpt)IqHbO=`x~Lu#1C4RQ4iL7P@3F1q%O@@}&0d9I(u!ZqqD8oY2#B3M zc3o^U(~Ogd7du!Dj((AyuI8pPfTv9_EYpOsL0J3h9WRmFIhdWK9|{rbV@*?B^9%dw zRbQO^{^fmnUuIj}F>-%J|Ll8GqGZ1=6C3^07z&C!AYKC{O zo*23KXK6P}7w)du0+B5F#@B=x#o21p0eBj`F8*Vu`>U~v`>B~MzbaYWL~YrJE)fVN z5*j%Ui@Bs4$3Z$!js&)>6{98}jU23@_+V)}&tEF9h*CLRuvW7h5adrV$EZQ%4%y3V*F8jaCtzGae5CgQUWEBg1 z5M2NA?k6Gpo#U+Uwd-1Re48iy6r0f@+oV1_#IqZphvK9+m%ZJfAw~;BvOW_lS;q}Z z#Xa6Zwf1EPz4>sG#F((R;drK<0REVus!I>HRqKaeh&RNyN65Reic;>CLR4p8Q0@

    @0=hi}`uM zQ5nDO6#>C0>N!*p{yc*>ytj`#5Dn8vKAb*Z^YIXRP95ead|&`vJHNc#PGQ>?-#1*k9!$PTa3S=hj3OA`T`4U1^ z6Xa>-3S$n<%Art&pUdx;JJmwRu=im%ap;SS{ zv}1pHA-Dr#QNJo?8SP=w{5~;rR*Q(UuwNI%x;_aEJNzaR+g$iCB~Mf2iNBG%*eck% zg%{W%N#ckc86S4bHy7gwqD)5Jc7gYyw(X@k%=Zt6nsOkS#~j`7J__nt??}m<%-Wd)_ba0z88Tv0 zkf5N6=UlCuu*lkyc-|l3a{!A!DPooh+NWp11*w>JW27mXPOP+Wh%mlFa?4$H=1D=Z zQ+UNE?@?jOCr-bpKozP6$Qg92_-3@Iv?H`wU$Nji%xS}WHLUm*pW7n1V?ZU*n(3jtz%mygj1ub0_ui$xJS{;R~nCy)TLJ8{~m)%UthHQqY zR)jZ^t=1HoPr%igQqWdf)xym>x~jH2;20-aow_B&bf{!oMP&X7lYLs*}XqVY(Zum@QFcWc6EVMeLYKRc`kV zGaAVmLcUorrnUO72k{c7MyORfb&xADns0kL4^SETEZXbfaqM?t3Y9{vkoADfoANW{ z%3zQ_Ouq1Ak0Cv6L^f&rEG#3mP!UbK3whwaO`2$c5iSG_6nF?tn?Cdp|6>2+i&nqK z6o+k=%HthF3wR+uc>to~DfUpme<$I9CqEEBQm0vGh4bMnn4!%==7ekbgJpuynlc1Q z8b$_nEO5s|zVdTuaP)rkdw)QAN9mU)ZUYa9@F2RN{OwZsE>l4IJ(Z4`(U^=N3tRLg z6B-0nhHExf#e#-$gg%JD{oOF@{3g`V<2N8n{W>}E&y8IxoN;*i%dGtYA$psQfYO1k zp~Sw=8IhcFMXv*V+$&8@(^2vC5g$>jvDBx%7C8(6X~iBC?mrB=^fP*lOknS!;AIYk zig4v1JulPEsAN|@xi{SD%rz#ed4H-M^oQoT4Y-P9`LTIq97g@w2J&qyYTQfQ(LL|@ zu4UJA!17~qG+6eQF)8IgCvcfWx@J`dB2ZH$Z=LA5FP*R@B~xjRt{dGvUs=t{*V0Tx z!QcN8xF|wf&k!lOTCW&1P(>~!T-}+8%(2SpKT1?JL0mjADbatyOhK*n>6AaU(+*;e9i~>r3MKCnX`)_k*CQKWpO`?< zty)d!E9{W(4EMHA`HrHC95mt8fvVhsS$C3wbTA_or9_FgX+>uQA+O>!^1|MDjMfzv zj_?(-4@fRfV(5?u!0i%CZlU8Pp*@L8d!`h&Nv33db5$V~>c>gGp={dVuLL44`f_5f zmf8U45fdH3F%pdt=!pkeLl}*r@N}Dm>jP9phx3UU699f;kek>`cQ0-D&b*QA5)o8Q z<6cmuh0iryGrV3i_xnq7oW(1R(n`5kSLVk+{RRv5cxR4UsKjlv6T;L^ObPNV6EN@- zP?4QIq8REvJx8~3I_vc9F!pXb!6f4gS`T;+Z?ZQ+`!!!FuW>p_lGwH?@x2P;Wt?VO zM434w-`g`e!x@a0S_1T#?0v(+nfu6-;unpqH;dLAl^BoCqoum!MRu#yBSsF-#;6ar zx3x_r51ez9Lpi>_%h;-TDw1uH3e6n~kR#~~(4PIW+)A8%CZzSI>kenDg*9c#uwOlEQVQ!K$2GuK;cg;?z%1Q z4c#lraM^^vTC5sRE58IJ|DH5^fBpq7C1SI*BS)kLjzIVo?2Hfiit;O;I)y#1DXIo& z+c$E7p(6d9y)7ue%@9f@U<`!ydr{ymNGZDTe!KH_k(psCt@r0g5eCAnZx!N{bJAQ8 zqd3anD)LzbR=U2Q{aH@JZd@Noy_aGA9pA=BnQN(-ArXq#TqPd#E}bPtdac#SdHE}k z=OY=?+MyqlC^!g$U8aJlFKK~Y~4nzAYF#V$djm(HHh|5;PJokiH?62S zL<}HAa2?mx-46Teq^=i(I?2s#PCEeOp%ZaHtsfoMEb@gnE+JSD>^Z0YVmIX^y}yHd 
z+6T2at)xAmSxx)3zM1mcReFy&AX{9KHEmM-vVKeE?J_YN9N}@lpVL*hq5`)b{I-cm zf*(1y3(N4W^{nGiHXX1P8>2h68;ael`L3we>-?it3RmwCMu}G-v`E% zS#VT`5|S-F$6Yhub?}?_|0Gk&lZ$WR$aT0!p0&9on~m~Kp^Zb{p$Tcd+=>!zv?Q{^ zu8j#Ihk17v$$yIWtB=_Ia{``IQxToYK@VAsnks@Z&_W38LC34cHzNq`okR2|%Y?ED zn+^H13FeD6ZA6ku@QU%O%NFuOCix=R4=g5>R(Z^J zj`J$pqt_aR=6A*I)KD=5&2}7{PuyH>8w?>tGMvVbyU|YTI)=Hhsq~JP%qRY96#pp? z%=nIi=r5yCB}dZtx-j4N`+PP1PP6!Enup_)bbWu`8ZvWs)a#ZqY3=V$-AIy9Eqhe4 z8h+qG+x=*eb(NZmr><(>0X3(Ot!6=Vi&O8rad;d-*s^YGS(KI6olI?`I%am2wQ~%; z@gEhaLPJHIZR-2>37^x;4B)hT4T3`cAdy zw%vChYzf@J%JaH}&NWMZw2q#Zta5nN`^(~`$d$FF3qpSJNk$l$Vr3-MNBTYXI zd~zX@c^umnMWt%Fl0&&I?aUq|@l+mhzCNj|af@y^*}poaC&CVxn^QO8y?|Rk@QFQq zhpW5<98v<X9|J({a**+ zRLIct)Wd&`xJl0UI?mH$P^b-8cenZ^YS|6Il6b6FwQOMJb4?V9+{t7fip~GX z+q<95E7f2=R~n!#HBHv}m$M*(X!?Pr- zUvgwbk_Z#a4ev@v4Kh$1{!*XEPyvYmRWwq+hue6x>?CMXYT9Eit7sY(g(l#6NH*Lq zn#Utjg~kFp_@|5C8|V|q@z+<$?5@*EMZ+_N9Mu9o(Ll@)ak(vXqO07Y=OxJ_>07aP ztYX*ueXj>$%cy0<$ZWF9x4WLT+~scM)1VB808u7)lQVbCY_*puq_VM=N$Zogh_Ym^ z>DN`|6H0jxt(Jva=(t@oQE91<^+Wa>JnTbv+{-b=eEKe+-M^_^tQ$C#M$ zr?R$fD(*UZ%-8E!)*mq2@R(YDLqv$S%LT0eagNPS35CXpu&AcN`vXn&0vzGwk^#*};0S-M{jkRv&{n%(#q+(QVk}P}uXUY0o9zMYAcALa2 z7#tWz$%!ap7Dm8fBKItg+n}yAO+&Sf7MiD~=fJ1UtSH|iEB%Oczg=KAZXNx)q$#QR z782W*qNtbVo}0W&wQhZoJoa1Hb5-_+A#qwz5i-nYNr|)B)_tZ7J(G%_QzyoU4?LF- zCWPnDXN!mBx~VOEy{u4Efx??36O|2Mgqa2c?a?7HLmJLI5slc=x>G7NW7OdGfck7 z`B-3j4W5rRAVJzeW}LEPK2j=}t%a&GmlC)Wu(5slRD#7l6XKG1oo0r}9(uR&y2`nJLRe)8iayzfymK zzfWSo$|}*Xk@kVIUL5u z#yXBOKyVyIRAeNIcu^9~U-ZwzAFsANfn9UyT$af%7zTe6FMu9wwNzV@XHFJ+1}@QZ zw~BbAudcpO2rR0#T1k$L#mufuVuYU-h7xqYJ)S9=3&o~aDb)gVZ68jw2rXQ0Zf^DW z2`Fz*MJ1)^w@RZSaHFw8fBaqXv@BFjCP(Y*jy(F+bi z@0gd0VBN2a{Sfo7V+Af`EY_>d{HOKMN`$Z|ggkE7-mDEdHzONE=>hQxIL-VkdE+NMpz)~x#m8{MzThbKYy}Hk!q;qd^1sLzBlIl? 
zM6j|-*v^|$Y_e2z{u|3ph|ko4n7zJ%xk=ULqgae3*}rhyv!lTkP2QV8408RufBr|} zL_EpCvL>|hr8eN7vi9RMSZsd!vhn|ynHy8eu$P_|f#{u3PSh<gEz8eEl^72 zt0hKq-(dLp9}+WHF)~3FtE(EaUA=e5y!Gnr`H>0te0IrL1Yw6Ot98~y1*kcNqD zUA6nJ+Y_bP;tMZp4Qi@a^Yfqk2w6qD!xv1)J7%%>{wYtl2Wjc)_#~(mM6Rx3cV{Wh z77bJbUs~L5%qP1s=<=*fG`UUYnCcQgp)atvT(B}~knX)%&VL&dhDc}8xHVm_*G@Ly zZ>FNzi!Cm;uC=T(DW!X8yE%ddD~txcE=-Nu=Yn3tNb3oGtvajQ!Th8q?>E7wOp!El z8;uk$T&Lq;5i99~FIQfyx-sV~FOOMnw=N;SzydtGtOc(-0lGRLR@wRbRdp^i$`wGX zcJYpak6)5#hq12YNhxm_(|36&GV*YCvczT?cL)Q=%GO<)rqWR4C>vcnH7!Mgs+ZrH zP)4JH{=iQ>!9R5u(nN-Rw}i<8ZJz-LBoQsO)*pI$wK47kfD5;`@`DJ1^rGq{C`^8RyM#MAdXB zV}9oSEqWg*(OhK=qONE17G&vyCsYE%cynOTZO%DFJfzZkrJ4M;oR z5R6yZ@-cmU^Kg2&6uv#F>YqEmZp=ZtA#JO6*o`DgD1P2qpr$gqWbMmUEj)@LNOpC4 zy2Sx~)y4S5AZYW=*~{x{d%KOqQF$c3H@sU+ejC)uVxhs4x>A}ZGYQi3y9p}MsniB* zkJv-}Ese=dj-EKZrt0?q`X)o|5YvQR3m^z}-W;f-m`yIb#1(lCU?@5K1NtuUoLyWDThN)CdPGnQP9ZU zUO)Nwj_!Qfu8e_cUwD$lK;dF!KCMZSgj{OIw4iqvaUGxDYX4{jw)VSBnI+E16+02C zL?3nv&bzFsi-^p_clg*w`}2NFd#F}xHXTheI=YHfdVh-{sTfRQFE<&0bbkTguQt?R zbTn-Um2f^LB_;i*jcXzk-Z)KPuaJ;oFwNB{U5TdX$HLD<_ixU3XUzCpk_6mdb(Sd< ze2>kML~`a+IBr`n(6GywN7f0&_F-XpmNi~Nx~*+y3^$MG=t-kA1wRMgkcr%`nq7xb zl;PFbuJ5pJffG`~%k>lKv%2YJX*NHmoiae@$c<;!N@*v@cFD>w6T$VI7&; zWcs3wZ=h%UEa}}%$Uw_m5mqICG|%T1)%@3|psB~B^QqY-_w?*?ZL^F}prtN8P5q?&%jHd0h>59?5|%{!jkdy zzVl9BfS)fiYn?zB=ljRgh-}kE1cinPjSRYZclAQ|h=;-&EXOyg`m)D=Dysr$-XqyUe_iX9$Eb9{=(h)=MOazWXZ@?)o~OGaa)EbpRx>h88xrkOE*?V(x0->6 zA)l+d#lGU>o~)STUMY)dhXuK3X`*&DUv$aRA0`Xfr-4Ry7I+6Tl-U#2VV?OI2~RFb zN!U0E!7-D~W)q#OtfcH!1rt~aOLKXTh2^Q7KC7tgmeJa`kRuoWAO z8u<~dagUT~xS}%!s3_S|2ERALjIhe^<;hq3AV2yuj7}3(f~C?TRII$AY@#jG-?8J3HbzNgf&(SZ9`Na)On;kA~hyB2q#A%t%vGQL#dSfy6a zK*40DoH_XP$ei#xJiH#>LMAKu`8xwG_(_gKs%7b&K785oRAPNXrNgnG4TEU;_uwx+@2(OX7!M>nfaGN?(|D$2F~Ab zMwAU6{QN*#P>&o1n_5tkgr5Ely=cJKcjY>f4A0PQT4(A9vKp7sHHMk1byouJ<1v)9 zd1`*J55q&NFGp5$m&9sOG_<^uD%1fqP2r{?RmH}=>s-0@CWr$pUbgX0$xnA%%5**0 z_{n@XAA;$*j&mjs0&yWCC%FPq;B$0))NQ=h&qKoXD_4Xi%S)tLM|(^8`c2iv0saQ4 zO@v7|OQp?`b4uLEYUs=%@d{Kb8=Wz&?A>ahcm!^WC@c 
zgq}ge`F?!5I6|K;ej6-fJn;GNq@t@PyBOKL()3&D6B=tM7p(0pdO;l@z8J+EHm@{Q zgkq>{_B6^+uxZMbRBLO-%xz8m__}ib0lo@$E8(n?yQ<#u@fB7s#58G>Cuwiqj^*@@ z(TYgx1pb6|2U+`le$toHfg*8}n_i7%g)59u*$M_33(#ohq#LuK)d9+I&@gYFyt>8b z>*&rOtS#d|w}A@)SLwOmJvmLdU=8fGvF#+JS8 zKDGUl%t&Y|Fu_B3#jW~G0$OwOlpauj={@CRIJA>a{2iIb^iE#8#5J>c(hJWsQzn4v zU}aM6zE1()nFj?Yj%YAwrQXHnshgxQJT#IhNCpr(##{1b;o=$EA-p(IS)uZj z0djf;@kq1j^WoYP(x#8jCsoao7@=Y7tiz(T}&BLBF{H}9;qLk#t35v zVIqqJZ8)h;$__HwRarN@mpX2p_ZV&5|M@R~;+qH2#Xd(%9SxSD;QZPRmo)w?!I7G(2GmOHDoDGb-9hRFd~9m zQAmfdZenK+eE7*zS~gxEfGrNR3==_?59_^-c~?D*i7y+)A)4I5Keh!k2Qq9-n45~i}Hn*|23PPDD5H)g92VunH;O3E|`&4p!j}hlr zmK!-*Pb;LV%3I7JByS+ntA=imw7rIYfr_$PPHe(dDlKFQA&NK6lOvIIa%0hE<>*44rUY{Vo_7E%aP2;D!WE2?_mf4q|pm*UL358LQa8|Pq!cN5o<9=KboGZ>1n=XYwKEQoRaTw%Gg-#;EC}y5^^I zIq?<|W;>HtUi_Ioo+ce0CA{VTVehS?>ROg~?*t1TBoG{eYjF4A5Zv9}-61#xcXxMp z3+@oyEx5beH*(jQjY#+_yEdNUu3ts=KTH^(*zh)tyoMBb7=8fpRe&!cZl(HlppZMvdFXB1pqs~qq)WnE7KpuBw zFy7R69lU~R?N^~Q7=~MUgd0jPKqI|8z2>la5U~=XM4xWen`vrDl~;^Si083uS?@E# zk}w>nJ<<;MW-VJcMG+KJ6`H2zv_2Mmq)$ez%-10Tnc05RJvo{J%w*kHx~<$ zDQ>vYO?dqa6I}YU^j0BtVHC}8PYodgK1lB^aaK4*lU*-PkP%26jkBS8?KJzx{W)oe zSp>^5Eyjzp9?ZU013-gsK4Y+09j$krlxkX@HBMU&t?Rh;F^4gckA-hSh>x7xCkLeUHd6(DhlgfbT4wy0p9ETJ8oZm@Ti<+vJHi;RDm$5+zMHC$OmmHNEEvP8c*5%vjKNetz4<@2gX$Fqc zal}6#nsUtAx(*V|4a1#<+e8zmBrs^TgwD{L3VrKc@mj&{Zm8=SYet1+p6L~h{p+T! 
z_d{82N(wWih^hHbQtXE-+G>Z1iQGl1=fg#;5rLEO4EkeuzZ`-QJP}a*brVhs!2WbJ zgDp%m*zVW?0)v@I1X-&cVSKsTs!f2%-e|wQ9wlRB?JzwGB(}VhNKhP3@-yEJ7nPI@ z3-!w;)>%=jzV6lVCvTXI*FN1jRL^2?D4H|dm}zs@o8(SFZC`3ZSLM>8LY=$kM;7!4 z{0-h0`@WjnrL2e6bg{rqb`n&eESETgTX!7$g2;7_U9!ZPKxAp*7=oC%=q?)Q1~#7R zShSye$al%uvM_8q`R%GLs5Y2O%L#tNWT99Z8&Dz=Y`?*-r2zG3230Q+=A+lUdwhWe3hw87$j1a=LCg}IT{gZzkoS`3Z+7)2qTzySt!@U&u!X ztzB-LDUuGJ&*{RF#~KX*2WxgU zbroA=uvjApME~-+oN-E~xYy%NMO39sC}$w4#C}{6QQg&u*#j2{hu9jxGxZMUI#8GX z+{?yd%$&b?nA>L}kZK_n#YAu^%iIAnO=k)uGnZ`U%9xW#`iz!FNgivw&|gw_VTij> zj7VJOREyMIq(>DbZertkZrdy=>9%YCmZ)srr$;^MYcJZ(R@GdIyL0|c$k77FO)oNt zD(7_N)fc&MlfY5WEee+eB-7IxY2Ky>t-mYIq z&|Ii%LA(t<+fyX)DxJrBd>kBTP$8k2Z`=3~#C{^(?8mn>E8BpvHOl;i~=0df6Az8v6!lw5@F!Hg6$M@FQI;$`J* zYpeuPc8yi*^%QTGTY>xPusuhcz|a8W%A)Mof}wT+lE%1iBIky#y%QhBbEQpYxuym6 zfAEE>7BhG2zj;7=4W2kk50b((1HpLE#Sjf-Is=P81 zX<-e6j!MRs45>sgM8j}MW%VrUwfqBwqK(SiVS7rPIi|bP)z zB+P?i zdUCpZATk*6iv?V7I-!s$+LWMW@{>3!mmx2$61GQ;W}i#1G z9v>Fzs_p5l@}us!zGhb0*p}&sxo8YxJ@?yW8Lc2`Ut|NttK11>HgpjLW%m!_K@2Z0 zWXnXTs@gZB#rNdzRDXonu;{EktFRV-Pstp!Ivi9_pb0|JToUcfGgMln#jMt(7@7$j zg%|(wK8``8;MaYMjomQgY%BSuFd+g$G@vg2QFf)y#DCPlSf9ksixZtalEneb!61h% zIls5uj*3mcOkR2R>u5zE$RC~LQx_J2Eu%olr+Cy2kP^WLnDquiRbE%EF;PYmkULs; znUQ zq_PYKd9B%;`b5T4lX-FcyBoOcn#FfH>Fo2gh5I5e!Z);@_+BxRagA3v<4bNSqFe3F z*?=mez@Nx0wX%ZTy}$HHMTllP8rokg8vWp=1xbZ5G5Qg=ePLSreDp|Fu2DC$n)gR! 
z5DCmm_ZGVx`A8if;F8wYm?%wb=p-IdD#qu-h!{buJPd7p*8nG`H;xCYVq6RXUY{28 zaGDUsYn}?Ht}(JJ@qo)BfT|g)vAr*LA9ei+o$q(>48acM76eCIs9rF1YN;`KSzO`K zWN6zkz8q4$ID<}mG*hKO5XE;(byZ2#JjVYB+VXH&465MO%xGizy+_nO5=gb<(Y>jJ zL~~6|emJXOPkROaDXO-@%5&!@Im`!&g`0@e;GKL`rf{>#?E*nKAw7d1xR28M?a))m z+q|~hfO8^Lm*Tp~xAyu0TfWQGo^khe?x&Zk#n9_AH-90$IwbD(=KGuI9w&M63{G@U5c{Yf;Y?iYW=?lK zaC;-^CC{?HL;2ahi^+Z5oc=tmkw@Q~1G9xrF%?3M%^`rA^}tH5*dN?6L_sW%_+_j~ zEv^yoM0w^;t4 z-c0g~EP5AwGs+)q)L6KhG0>?}g}6mYc(2N}yfm2#+~xezY4Ek}hufdQz^uvzvg2?9 z!@;QXxyMrF3#Z6J&n8Ps4m_XdQ=qloMIdz*Hpfv=IZB}P z2J(sX_0wF3Dz^P!L-&=>AMd!Aa%fTFXT1}kR^}a6laR-yt8%oBiH&azDh(yo_15$Y zCmrTbGq~?DfvovVHn0g@+RC}SRrv1biiNrQ>>miB(w$KBGYPXsp_6ejn~y|)oIqq=Kaqs~HLq;9-z#F_DqwBj z3d1@Ii}|GafUC^naBS)qX$yYBKMaN2*!ZENQ^65(!wJea_?oN6>N%y^bT&sI*jq-b zuZDqC?z=Iwll&LyxUfxbmurrCzBdJ{x@`|1NvDhZ5~l`j!LF(}2x>VPx!683xy(k+ z9d)EQs!U{BJ9qI5&3M#Ov~FGd)!2zCuSmY-vWHLM)_rd-;#`@;I+Zf`eMu!JmV}5$ z*1lLS+0VUB9qK?B2=(oDACvj@z**i36P5V9-tcD<2{lQC+;|X=8O5J+&KN^@Z{3U9KSmd`E3e+-%e|J08s)JkKy04iC_B~0vRAm z5FAX^;RLi%e~#HdKV+c(R0e_XQbzsvzkdP4_X9);!i};ezlAY>4l7e2fGDB%RjR=M z=p}LhT@V_Xrr|ux&r99*_s%dg47rUzz=-UU~&}D~`7$ zx`AogVhu=`0L20apzujq+Pyi_0r~5#Fc8p~%;o^Oc3MJ0gVlN`I=#UZ9trU;E8xYA z_?HoAY+P=3YzGoIWMpIp2L}~ORciHzP;X288{sL#!0_W=wKUVxIsx3Y)0?m*g^F!L z1PPGaV4p<9+k@z&q^~ydAJy;IeBSz3gHQ(l>K^y2JGG#WfMHnyy@ z%JaWQ4Uu2#=6HG4?P5ex(;bD^-a!6We*?CdD@6am&2{|b{r z(!Uo<=T8Nl6HE2G|C!ptKd1J2UFF1|R(^dzzyZGL>PpQ0v!;IkK0iQ!MT@Ktjr~jD z^!Mv{o&H=~IHQ#RBSpfG0v2jui+RcK$@8aC-vHp7Q!4|q|M~sMpY~wWTsQaM_5hCq z_{KkgUhLo9=@m!_m>qmeObYRTo1OKa28km@8T$7r;I9<}2*3z|0YG$KR5Jc|*ZjYs z`F}(6|M#llF}pPdBxW9`YaOTY2d&+&K_S#!_Hqpp?=E&c&j(1h(dyS#0cs{2+dYtP zuG(mmtLo)~HD97!4d^?kHX!^PjR8CmE&w3~l;Oz8$kLZlL}GwoKR9Xj13|XiYTIto zPPPqzDrhu~=S!vI0l5b|&BLZQfZa@YJ#GYWy?Z1%e>-8QkRa11fIMzRSZuZ$k!a$| zO2N$B++3jP2fzTMQYuLWA1Mkr?oH$Z**p7kCcCo6!~Oj%#|;~JEx>lg9X{c->V9*z zJ5KQ71FzM(!tq??#!k9t8k>C)dWJ7R&DCO2^#A6JnwK@+wSy=AX>J@u4V~Tl8+)a9F*1c z1Q#3oN0aQ_94N*m?7ka_imIYg{b4X{)dD2s5tVA@gT9HANRWz2)A%Y!-4b#8bW`TB 
zrATby=Xyz+%E7AmQQcWzMeFNvBTB&HBhh)s8{DM^>+JWhHRzCcXUjEL+uRX-;z?o! z>CydvZy+C$A)_Z)oXA_D^0w3=7L4w^BTrNfJQy5^~?~61d?>iI^Zk3ZyX>N>)rfUzK~tYB+B8 zM*_rSd{re3(9y?KM;mrW)T7ZYZyZe3m0HsQY{wXiT-yJTTt$UyV`9@ZP*JvIFq$2y zQ|X9sSrha#g0mnWVU&zH&bp({HiEV4<>u>*v%PsCInMYsd?3moh>7GG`^rmHYm72i zU@eec+V`2D2>v#}cvXZ@6(dXO5}EIE{C}aI#?y=IqzOL{Y`lTPAbe#woNzWFMm-(T`(AUC$<)9|2OL~_ z)f1Q9)5ERWV2dvx4jx@c>0;_^g!$|{ru9OFw)4r#_rfe2+w6EsYqCU4W(9F4kK@Lr zIVHXaHak*-4(~bTY8@cG?qIe3GC?;5Ra{2@nErIZlvSoca^M@7MN1_O0YOF1v1|vu z4sb5GHU&_?RYG+BfA69D$Um8l|Apy%76$09T_lf)1cLl*BM&F-+!M(e$}hlhDuj>^ z#Ze^#X9v-%t$0m*H{jS*j0W*Y^oqTdFp=IctlgOEoFrY15MyJ#OIo)t4~J!~3f1FQ z+wEXxbW(ys`aGHhlb*!dZtsWNgLzk|?Q+xHqNQY6VDUXVV-f$MJq#-Us-0|7)9Ler zKQ5UaJ<5h+hO;dn3>9@aKutE7%xegyhNr=;_v>_Fbui9#kf}dsi`?A- zIFwV7Ik124gH!Y%9~3bF7@8|Y5hq`6`O2e_VY{5|P9rCQ6Z1xkh=_P4;}S1xkD}30 zlaVO{J!+WlzCB}h0YlYg z6t4#IngKO#4ggZVN$f&{f$0SfYqdt>_^x$5nd#pKP<0!%oo3Q zcz}t<@$z^fjV1L0;8csGz-TH_AvhcGsWsb{u-=BeSGPlq$ji;$+S$Qnv3eyqwKtic ztp5o2a5&H}b<7Up4dXQ_!49{ooKrGgtoSB_FAVpDjzl6w3Ft}x#d`L<&(?bpNZA*z zm*f73NPV8q8wSTef~lJrBqPUPIIZwpMv@vx(uBXLusfL1FbYns@8w_U>gd#&2M>;h z6+P$dE7I_YIPYZmWS(f}G2pytw7O`oQ0HB70nbb>o62=E+bm&o{oJC<_Nv?CcKr=? zG!1q0A)Om$)V)|58>ccMZz{bXT$lI~C`EO585C<_DK0nYw!W~HW83izzFduZ;{T#u#MaO{d28X5rbwQ@Jj z_r5Qfv{fI)fc3Br=Q_eT+do}B#)YXXWW*k*_#9oauN68dgyRebA6G=#4nn%DL(pT! 
z&^!@{A47+CS8p;6y^n0dX#InK8egqb1@r&NYUGk=S_8oEmKnxH3;oW(Z9^&m$Mew| z1?o!o@%F4}y&oZaE?RC z1kv`AZ7^VeeBdt1nxmfYBWhH>2+x$Q~mXUp{fac~OZ75VtB1X(+UjL3He;>X3rUbyp_E7rVK(vL4 ze3q7ue9|`p2oHb85YW-qgh{>2_z%W#`>lX5kdk+PZhtVto9$B+B!|{Lq(dHurN!av z2N#J3%B$PG@)kUy69om_QYm3?xRXk?uA2tF3vpLY*iVe52Pwk-zp;mme3gmo#A@1#MI ztEH;-MVmiIN)|BJG)|{l<7h*JgI3J?qg1dM!f2;D!uOo$#XL|5b4$D>(vrg6 zW%+fx#bA%$+IY%`PaIy5+on3Ej$VAUgi>}>(zk#orFvg3+I?`N_fHDn7@kJ>PkW^P zDiANk&Lg7q;e4Qugyp(BWtV$qMMXKCqB!_#GN^hLy((uLYkR2%<>Y!J%*e9QMk z`>6qd^3giAK3y+n%8+ZPzEa>3K>W^{nd^g@z}M(UHf&+qZ>DMGBCIXn$9EQhj!KaEhXQeL02fAVlR!8 z*R_WGi9vg*0g2MdCoxnuA7{Rjz~K3@VQ*45o(e_}^i1__0c<+lv0z`h32GMxm(lQIbwd^ZS11eudR_j|8_AYzn**7O0E7QofL%a(6^UjY%AquCCP zEz{wQmc&6GVnFVx)oz2V?MVhM=woaQGic|f(p2T&Kq~iAaC^d@a zCl(GOE}Rub;j{bYIKSBiX=JHNY^tNicyammNQ(1j-Qw5bs9GExrV_pHcpH8|ZYb~K zA?=!=Hlx7>GPK`O*IVwGrN{jXwiC9I9d!-x+v9Am9RStFQj#=152-;=V2V3Q zUl=wiKbmagQ^J6g!KBHU;ZbH}kQc*!8fGn^$-59iizcU~L*|qf4cJ49N9ZL&(~?I@ zeRW8#WV&PN`GAR$yZhmbbAEy}Ssi?;+acY3%%QO<5@;W{w%=8BMO=8hE##5F!(#Li z>l9($xtEWu9ZmZi_XqRGKM=OWl*4l=fwjjAn9TfC$23!)$)qU`Qe#}V?^aR)RCfDQ zYZRfY9uKANvdUMDgEF4OZS<~!N@atE2I;fTv&LK}O66h-#6F)OIpJ(6_3bG>r^QGT zo);l{QUGP zkp)Aj?X4|dV>$A#72g{8L9&QcB=n`R1jM;gielLMoB$XK64ucJ0#Gy-1}PAQF+tc8XU`YfADuX z0BI{1F|wjUq2v~eGq*fGBUHo^lun10R&JhWQzk*}Cv9%?>cC&>ezg7`UQ$9?(p8dJ zns2Ba^)S6dMa?`^*m59OesR}Az#7G)=sS;mASdR&#uogFuEXcct4G95xw8Ak=+=7m z^X~HVs2+B>fOs-k6O$Uzq2GR+2%ZEeUj<{i^mSU|(-j(h%$Qt^;m#~PWjV?-QF)y> zSrqEmjKeZ#gXd5^BJwioa0+Yd0l28?s%T-7opWkebf=dtRXWw%gqhrqc6Y4>_Nm6jOjT8yBCvC1;ZH(!vMa zgumY4AnBN!JpRZf6l3l>Cu+}v4Bh_=2TM@5>YMQ^l@w0q6I!4W z&Xc^NI=F4Yn9BSG6e1D2k}4O&oLD?s$u4LvqP7!a*BC0FVMr!z1U9pU+AC3A0Q;6t zTyP7AsU~SRTlTS3L><~??P6@BQo{JtCubWQ8g*^ZGDACt%Pt! 
zT@s?lI2FcnQZTy31jRUby&}CEM$Qc!nV~a%cObYxp%tdw;zT=h)q#=tGPdIUtxr0| zQ?m&_HEEhsEY2K76-c*j9Z1m8)*|<&D9}?H>U(}mJgt4G$4-1g|k7&usU9R8obFPx4o2gXiNj$1H)kwID z#vB}^Qc($xa39W)j_Nq#q!blFuYTbaA~o@D*lCs_I@nU*xq0}=Y$QO)^yA_T`3Z24 z@Vs4#{Wc%wy#z0`f0)UxTyob<(+$(nXuAVFmY)(ssnuF}%cG=F^*b8$)0fZr3`yk9 z*qNA}h*=kdz)0tq+Y7_&dTpj{iT9y7uk@ME>2WHY;B*QqTRIwBh@78+Y;gaQ{ygT{ zJA`W*4F@05&w~>~q;d%|7P4q6r8AQud@JvI`A}jR9P6D9fw+AxX%}~h_h%tOaD-0s z!F{lbb4qAlsqk>}(u_n;qi0G_H5J@RxUBuzMUAn0b^u*_qYnt-z)P@_M#?Hsyi>$&l(+7gzRIFZ z2IC*7moP&0C&D+_f`fR;RIAFx>BW7y72>EKZO{BE{>KC$5lZK2ty5W1n|C zVR@L-Jv%L02h}~vjZeynbz1kEdCOg*UQ^KZNI_MrVH0#dR*NBJ92KjN%y?Uj)y#!V z%yQC@4Q^iE*2K!a+Io0>Ica{{JD~zB^yxOPIgSeN3i#5ToXx9Q&G)XxfnFah zDw2cWMsh=FXVEa}r4Xj2#C=rXxBuZ})uC`smmR-ql0r57Tuj?IOU4gP7G-S6QyYD&e)0F0V z=jsp-U@LDH=*Kt+mS8%Y_^6fo^Yf`C(fMIC_wEbI3A!uZLA$m&4}#J?Mx)`(P_7ez zo!)B!VMEe4LcmxR^b|u&eR`cJJ;t&D`_5`e+gB%+41VhkqZ7fG3T8JeV=LGGWMtE_ z93>tSz@}C zQxu|BFFG8>=bEG5_Qx!_&7a6iF}4V6+CtTzwnhNF&ek_Xi*Ye?XOdWLHyS6d;9K85 zDM22m-LSuUTD|#MtWHt!eCPc^nZS2S52C5|8eoVj6$#s_m5hT@BR;l2p8vcYpSY3z z)5P#LL4si5FEKj7f2$4@mc+*$JPY|q`6kmGD9v>W10>3JX=k4hq?9ZA$VR@%@5z6z;Cmv zmaXyy@{%0~6}(aq7OuIA0M~i z#))J1xJ0b#Y2Y@2KeESuv1eN@1?o-QPAd{u$|2?FGR4f6g|;15;ZgO5rM1L%_fH$9 zjE3PB*D<{~`!6mwtp?ZD+A~;k=N`02MN#e=6H^x`u75dp4kdRE2=wNddU1cRN~fa* zlrKQ;;`>OEDRl9XERY=?nh|scA;NI0-|tmIV3HVhs~jaR!j;Kk5;4XI$DjE7E-T15 zQ2mHAe_58^n>;ccY#R=#_(xE;>?_8=Tzuf{Zt1tdIoE z#7p9Ur}biy#uvou@07N4LP~tf&rt?B`5fMg!NI{^AbIR&+>%~j zOc0X4E;){Npl*Vzp(CNyK7z2wp0M18>r8DeFWZw_@s{%M zvk4G#_QJtO2xAQ8WUwm&V&=O9U|)mHB{{c_z?INGE!W2XB54FQCCkW!PF+UPG<)&=D%r58DVImlQ+$#k! 
zfZDFHqpgg-Dlc%D`Yq^JaQkPF)C*E`5-oBzB>I2gEER}r6fYH-215T1;C_FB{BCsU zIU7RoKXKv;#Efw?rm4gK!Nc6+zQzcBI!Utb{&#B#Odt#xUH(bfe;(?0IQoTvC%f`R zoj&Z32kf_gq*L&VpzfShHNihR^*`OP>j`GWaNTYSC{O>~Cw@sN=75KW#WZpF!y5T* zN1no8h4wx>j4_cY;N%aJ^q1X5~h96;Soz%>Yvn`+BLLpMGE zZQ_0S@PUE?=0#HE^XG0zW`KucG@jMQDi`+kmoM_3(My8{<0mC%YDz947%15zEG{nI z-rLs^%~JrWwte5SfA1?wauxB7?332^a3b@H6Tyl=nQHwp5G>;sFc>lZCEwwhgY;!b z9Q6i=(W1rSa6%XrR0+{;9{y_r;U|0pNVYkR>_BDXR5<`GXKe*7_p}e!Ng#05$FUp=@M+&AIQvV4WrmIs*zoT^G7#>uCjQ z4I795C<+iov;ZB7aNbWC*5l)2UrJ5;ls$EUr2z1$ji$o5Hr2zLLnYv{m08s z8!@2899K*V&gJ#}nr9N&An*a8(Ts^MhA=;51LzG!|1+jmMTn5a<4#2#TW){H?fzO3 z8STixd|WNZ>-()r`?Jb8g@L8=wlmk{-;;ayOaH z72KF)cZdOCmgf3$JRqC`-y+k#TTL?9;1 z0p03N$Rnv6v39Rs?bp*Y`3BB=4{bEEFY)v4<$Gje-)h-w0T3@Fqb3`ZsfNtnnj9WX zf1wq0)6UcVaw;fE$HRZ9Ss1|XbMwyzy}hCK_OfMVH^h(6W@~CF*=;t@%oiCZDX?hP z9$S_sA@uW9DSu1_#l?lgW4qY&!H4Q01XwMFN{S7M+i)Aj&Ca&hIi9$_c`GV^-$#~w zDV52>cvMqDN&2FwoH+Y&LRZI=OZ%~5YN|T{4HC=3OwIGhZFVw~sV`{ic)P<%T2!QT zM2g$YT1Pq#XVEz7?s}rKGK^s8`S?EIT2>xkDnVtjoU0H6YVwKujCfq^%kRr(Jq`sS z({hZtNLL#zH-W;zWx3gFca$7{Fqf9*R;$dSxZ{#vb(+4c>-^k@aB~(uqzTGpg?~0M-3Hc@YRc+qc2K}8O#mUN9D@m!zS~5+5x)iXqCo=Rvx>8? 
zzU=p0RjQR?`-7%Gd1ZR#)P$(+@b$?15Fk&9M}MjKU|7Fz+E8FV5^K?A#2B9Rv(?j@ z+*G229C1s=)zPBOq**R_DY8F?%nt!RLvz`~{noQ`^V6xIq}v|Vq_5T4wh(m^BcV@4 zOT{72qPJAp^z6sZrgEyoK@gLb5M8rxbRFberYCK-As-rbtp-Vgm{~StK(l-PH{jqnWy0o!$4z?GA>M%seW z6zk~WZ9_A$#77A+KPDwp&y|+DgAI)|p=oz;l{SwW9UO-FqjW5;!+8fYlYN5xR0BG@ zhSUH;!nM_$OE2bf6?xOzL1H#T!YyV6MMWk?{ocM+8W+2p&|WXpV1{x1_xm7N$@_dU z)WajO;?U7=4$CG52f{v!pvUcc^iJ3GXnTulTTHwv9F(n+oSv(WN;*zij5}l!?w-@) zTrhkunoVmW6;(sh3g0$jaG`S2Ty^y$2;((u7nkGZ*+gf|!wMsIv19u5a z@JF;9hLVC{PwP@=SHHYq3mhg7M;ivY8YQaqlbfQ!xbzucXB5MsYI9>6>{?A3v~-e_ z6jM5kTqQY$Do%>VGE(}pOrqZC(ySFG9j!^uRsP0}2Y(yU7W0+4rg;M76X!mq$-SO8 zgd^umlxEK+Rw1%-zRipdq$4qgLS*RL}U8;$eM zO#+L#!@Ppv+EW9V!S_YT;w~;*^7fnuOf{oA?}_x0QUC1DE~#IxKQzz2$+3NXVVc^Did390Nhw=-t*xse-r^uE4utrANwenSES59!;V?MsL#Ov> zbGgbhInqruCGU7d$1+4j-79uo(jX3v)09Xer0jh;oWJPk@af&%4G!2zXQq7Pb2l9` zP+xYZMpbSoXySa!WQom<<3P}os%rX)o~%P{IhgdM*s@+^VGJcNr{;aO=o>>nmf|V^Og*GP8dNXObERvk0|O z*lzBypK1w(&F1O|8(8|3>iSW%t7J1zGX?qO6bViGzW0abU|J+8rOeQq#a7WSZBuIO zy{nkKs`b8nm|)s%pHkZNBXfHG#qF@FH040;^p^GEDQ>sJILjApx-u%}!y*P;);;06 zxt(xFSe#ec#m6(O#y3LeO$SFJ7-x)G(*$*wsd5G5E$1~fP;RC_T-0To51<-8f17C(nAo9Y z*o}|j{M#+{{QxlZ<1B{O`2pm&TWQOU2XBdp0)2h0qoNZ?KS7(8mIA374<}We(+W52 z9&7%^M}E~~{;Vko9;3U}UL|dm zZ}NWY4oo13fxC>TmDcN&&88M->4(rdVFR;mITqmM8CxUb_VpwF$?y>2wx_SUqk+zTrt)y*90IgK5{2W)Mm z=4F#;HyVR=XWD1c@y3V`IrcmmwiYmOKKK#z?vkQgkPh0wR-j2XeZt%?%?q#VYpw5* z+Id!&DI48+z{F|!7RmYzGJiEf@3XQ|gr_byRdKrwxlzq1w)sa@vIC!wXr{^9frHx( z42C)tQ`)~tq| zrelGTx{KZnM~+oTeegS+WcR45rm>3k49m(dqbdr`sc|z)46AUAt@WFvw2Mk6kim$4 zxA`%Qah+Uer9vEPFDxJ(c3QMfy6c z^Z4+pV|MM$w2Eon(2~ZqqS{@3?ELd-CkjN8LEb$m@r!ihPvrczb;$uV+Zzk!!T~VN zaz@E%!By2w9W8R`A1GCYO!D1Sl(NWt#l_f1)a7~$R}^;R-+!zy@EBl6l0!tM+^ZH6F>aVuQUnJ<^fqp&2sM0WDdsDLofcw2 zLbTtL*Ou^(nghiRokSUL$?`Bux)D;cM)hQ*kGiVD!)mu;+IDDlk%A7Wcr`{}Q(9J_ zCc7tb;^oHiVQTe_{CcKZjux%w+&cdR_h_8$vIX-{mDW;-y8Y&7yVVtcVpB!Bf}6u- zGx?UZ@xrepShx=~J^uV^xd)^Sp1!W%rPiMoy||FlHIQH0Lp|85^_w&=jZ%CAxlqhg z{jd?!G-}Zov;9)x^A){};-gVV1%(`hFfhxbI^;3lHm*{zRt9#$4)n 
zS63*C=h#Ow;O@St_P!u{r8-A*qMFL|@9iu6OW{ouy#83f&OM)^16K3o-5o9WL287v z{AH`OP++z}jL)JtWp_7z;tsm9^2WqOSbfcLQL(%G3C=pz6oWLbOBixC)xdB|o>dO( zg4E)|{Zpi0gl4{|FX+?iz!Gp?5i%^7 z(SBJ$V!R%;9IAb?ymkmv*~nvg0?$twF=NtMfXw zDimYNX@H+!3%`@zU=k76oYR%rlH(AKyXJXuBbAu%av*N|PM5aj)*MA9$Xw6g-XfBb zoAc`WWkB=^{J^Q_eQ5Z+et3}0+b47Jup_$lsN$g+X;J=HTpC^e(w1y0B*Xfu+Wu#O zi{ET(AxCfxn_p;}n!0@JLNO^(O_G?g1pRUJoCT1u4=y>DB&rlQ)>V=ZGM>H92G}X| z2XV>pVE+49aS!6Qs3*NB;T9YRjkZh|6V8*yUKRWPk9N>JCO#1#`l zGVzPaaU#dO>~j$Iq9)aW)ywFh13OA&31lY?wzXG;)NHjj;dD)IlRH#|x z4isG!7P!y;C{E(-;Z9{SJNSp<9Sh8~V~yDQD$r~d9?wSm2?U}%+5r$gNa8n?f@oPH zT+FAZ3$BM%%i=`N+mVSg!NggP!?~rIMA!3U(iWVSpC^~_Bf+P%s|f~7giI8cLCsB9 z&pNGg6dVaoytfBk5OKbwvgSA?Lx_Gtkyxezlp? zkOx6zu{}euYXJ3QWm5FhBzl->K{bXKH z(yCjo=v)Tk{=Jm1%MT7p!OOr<^l&Cdlsv-p^k|r_t5K0?rWBUUN`a@aiyhM$E%uH= zbaZRMjk<+}M`lz%UqjN?W-0@ zB#CFkt9u)cf~h&#S#?uWfk|>=fV2do@;go^6s%tc$y6Da%Ij>zt@-(RvGCh-M?VL& z^_iJtW8a>FO$+6!WvS`!{_3v{Lw(O?&a)@Q7b%*mL{xS{kg^%67neu0*4uzzg8*VRaypOics6u3tzH_o?sZS59_5RPW>T&L&>$VxsYaLN^q z?Nh(#Uu;w1s74*it4rm)M-r1%#4biKY3C@r#XR%UWcCgf%x~Oy{;YhNrlvXDw4pR~ zWvF#O68~BBuybRUjq^SC8)2vo=C9M>6SKIo%CWlU&T=K=w4_j&65ljR+h0J2WQ*Ny zS=}!!lpEi?OqB^778KQ~0RPI62hGjtA&=&tvLL6c>LmOyeC{9JS}1vjSV zGfv!Nzf7TQdyZM>-(K)?Vob zokXW^sJ=YZ>}c8&pY+iUYexC3SMv^5OqhqijMVg*tHz_Ni-@SAiJF@98@Dy;>&gHO z%t+PRybBfHlq+_EP>;yvH?g%xs*XlrAe_Fla_0ChOnq@7SvKN+?BMY5_0w_$9QG~` z_>yA^ky5g=_y&kiTOP)=SrB74rSXy$Vqx_tcF>eDvKE}pQk3e!sw%`3J|7d>xqLmd zSY@ndR=G&n#zDCAomZjCp9OJSgXM!KYIm396C3oJIcI#eM(>kH8BWUUTB;ixR2Y!<9r|M%cv&IcgzXr9rZvS}JczB=F58?F3Sdm6|d~mlbyerDY0p3i{1+0;7!#`Hh}S+lx#0VXh0*wA722zEP$7%BR;6c{T1@gpjsC z$F)emq=5dSnw&z0y0}YuP@{IZl37DOYLP@w-exPK{1jEC`H#zUBo8+e#`M&nMB3xl zdrdBnydPqB$ARl}22$>*TQD}ZZn!^H7R$W6xxu%K<`7FYFx=&TCa8htE0lr>KEv>w~k8@hl-CZUF z_95qbUw_B~o7=Y9Ksl#c|w@y%HZSF?K z1Qj#!8&g_LS{Z<)nXkY_$ueDb{gmKfI>}qBz0=&xI=Jvhlvvi!7T&}?ykYz-Q7L7J3&w7TPs=n z+xz_gxjF4zeOy&*lUcK5jH*6*fBW;gt&3dpgPaTU9f{d`8`aL`dcLud44TCd5ljD| zp{S5*cu3u`%Hs({7R|^5ck&c_OvXtQpanp;)Ge?3~)$W3<;$AGJo{#782hVocM38eyQX`Y>Xs<~w=Hynd$y0*J) 
zA<7rV7jsl*Ddme|YJ@mxiJlUznctuIS{+6-cml??qEP?1oudE!K`bLtenmK9F@)mB zAj^%SnH4adDZtLb_GrSoPUIcb`iC~tl;TJ=gvhWUE7WgVqFoM!g`I|84s)Y~H7*Ge zIDV{LVR^E!Z#@rAT$SHZ4XB^=hVJa}w2t$S>{8Mo7skoz61mPeO%F=~LJ}wR6>%eV zPcaYneL2J!%n%Y|YQKTw!Xjr$*Brcv$rL8`Cca_E`#`m(sp}F(D`T+a1|Dsw5O*Ew zIm%+F=%biC{lxETL0u|XUB5Px5`;Q&&pD@ zkyqtElxE4h4y4N(lWv3{f9h?W2X+fE%&^+xZQhGr036Nl?X#!yK}_+eZ&qJ2^Wt_h zjVc}z5MAqc7oz9%--+Xzms&9KGHNe%WG=`ogqxkJ9mFx(wAjPThyAWZe8| zl265_Z|&9i+!eF`*ZL-tvE-*$NaMKIc_9{^(l)UW73o9)70!iP3zRj7w^iCR^IN0lYPC`<$TR_|$!bh+E2-uDML7%B{-*ou+B*YEgU}w8LlaA)7MCIg zw6a=i3jrIxs!LMtu3joKKAB-x5Pb`d&|!de<>0jDO0?J5QflB#k6-D^RIrJkPH@2Y zMi<9O1v9Ny_}&Vwn&W0>jYIR)81$FNgaeN;$Kh7=8>!XQB|L+tn54jeD#b9b{rZ{l z+Tyl+x)&6uPcq4Y(HgOaY zB5sMbw%7VcJA<}&ojw%f6~(n%jdyDEBUNU|E{x_^)d*GS=%@(cHRbc}CcYy1yX&^D zixNq6bWn5$(VlrGYJMJm1o7X#ImXR|+Z^KpziP|n&FKSEk^I@tsyp*03!kTz#!^*x z2q(ONoCZh@j68Spxb!`u+Q9O5GCzu5f(Scdf4?jZsvQ(l{9}QQJ7aA)jDewIo zA!If7H|2YG4ehIO40Xfed|2pjUQWDI3Ax~)3@>aRT9~E_NlBO=!g)#4Go+#DETh-@ z-PK5E3k~y8=u=$~WSouP^jaSqW5P@AN^X~h@$Kl!mg*_7?*3J^`c-`97tVOWjo^sD zeP^~eUUze>qw0K=ntG3zF;5#%!Bs5L;Tp818nx9It?; z>MJ#x+}3^!v_0HgWnSJ+<%e|Qd#-pc?-S>nsx%!QD7=1uR22&6_R~jOg<=+W5D}B@ z!UuV6v5i>VD12gm#unYaH;tJS-mvKqBMn$nE^|BZdL>i8fKGJ_1?q7-Y_W@0m+Rz< z4krBP!%@T3`8E-8~o~M7^>imTKNOm0%Q|pFp8mD zcIGDD`3&QUXD?V=G6->xJC@6qh<#IhOTNJG6^&y5BUe)=(5iZchQWptIrVGMw}cb+ zIJ(<3n`I!I_&!=5bNddNr=n||+x>yye8|ej>MJ>wtkXL&b#}KCE?6Grg3t%I{5he( zW+i~M(-WVTgvw4on^Y?|6d}+&z;FD@TK{mPalVcN&sEhhmpZ8(sl3TFX`m)T;wn!> zIK)S4OiuF;$pI*M-ltE%Xq;h?7Dq^0XB*sFYA)>D=zvM&=JaC*3J?qEl81DG25(>7 zFR{iv*4XS~Pt*CV=l5cNOdeT3itrZAuYi@V%P8CJW;mrJox~IKd8m4O+bT^06^%FY ze6^J84uiECZJ?}NNVT^|me*%FujC9cb7fk{55Jh+d`nGDQ}~oPfm!xMv{89gi5YrS z`2dO2D?Csn&FF)S3#*;PCyEmRl1+l4Q(QTX4M%uHX9L(%RmVg~lVZyGMcXDn^?<0) zen5GHoLjM_pa^jp^i?WjSJVL= z)++XvHa63`S#a2D&=HbdQ^s3MNykeW8EJm}OQviNHaI^?mstGO>yNP2OIut-#|GF>t}BgHuFlO9c2**z!g|C( z%5>}D2L%ouDRGTH_X~q2LC9eu7R;@cpi_DN2bsY=;(Ti|__;ET^A5k)hVB$u2EKK+ 
zscSfj3$s=mxhsPgk~zo>SN3&T&*sYL;oL4F^q;&8c9CS07D!A3T+)Q|dt_LG^iB0%@N)2)vmO-rUV+Om+g6jOCNgej*wsfSn`jM%y77$zNF8DCVs^ZlA#P|B&s-)-^oQ(Ux#zM7hq_W z%zG$)#hS@zE#{VLLSE$caVOY^jjR;0=YGzPT;s5ly`Cv7H^OGnqC!MBL%>IN$F!hR zZq++^l-i!DZ;JKi(I&P)+oHbNB0L}{HC>4j#(eq161hSJ6idhN!Md0-hP2bwb<$5j z3F)!1MEJ?(>|NTt%kwJ7m8tr-O=rahZc1pw$n!2_3bVytdN@>2H-lVGC~q1s_9ny* zXI3J^Ouuyq?4I(#JYBdkYFq@FMt_K_$O9 zHSy}rZ6~lkiX#Z7)^$VQg*G~orfxeefnf-Yh|1LL>QUagka2AID4&r~yn#jU{L+)=(OF{hS z6#x32|G$$lV8bIsg5JYLFgRxYUWWbm?~PM}oxfl=t*quk$p1NL2sx6Hn-4ZN4C0AN zNqBdh3;ur}!XpYmOAhEh-|BnnKmUXB5G47fmm%Wwz%arGnpjE5bLjs*ust#++hxvv zLT+ct1lE5nx~_gHRO*3-#K6G7l9H0mO+)Xl?BB*vz;~=>=|#V{8!lIwMU_Ee{85~Z zO8}Y~^1y1ZpN@|$fRpVNV$^TR&U(6_tLqIQwMA~ITk~0JW2O9aZ@@`WY_re5nWYt$ zl@S3vny1^fv7QJ&ZJNQqrn4Ir6VM?LMC8$LSm_0{v=RG0R)R=p|IBrW_axj@R8;e| zLbY_Xw2gqs84V3BMQ#`1R}Bsgp;qDLyk#c`fJYw_ZRl@TzMH^j)svgZr9i`8(Stqu zLL3sFy~K8Ri(XX&9D}vT9S*)F%9_8WH0?A}8Q9_C_TmH+fU@b0d>R2Ep|`Z*Zq8rA z9bxfbqAkF~%X2psB%Jpp^v^k|KzQ_-W4!L~@294xmywqTLUnb-5V609@R@u@Lj&$1 zAuTN}z|(nrul)P=Ns?%Qw^*=3@tD^B`#1zB-spbzRGqL>)${)`tfB#DFktvXiUF2$ z|9zVH!TcJe!i@C!zjaW5CQBv>@CgP+;ivx?zQ1N1OiM)y)_)9Nf8Y}Zg&d^+{v%*$ z!3w|T;#H@k)}P(~o{Mn5=AzIS?myS&_goYPW?c9<{E)@J&-wp3ng6*n|1-?~@68!~ zxR++YF3`%-vU9Ecl!!_`?Xl(5Tb|*v2V1Wh&}$#ym2U&$cs77h^9h-MzR{&v-4CXW zfYp4w(x{Kj&EFtR3qXtk>@g=tM|v#y#?=agF5v%Xo|q8`=3O7%oNuos5kEomL=v$L zieO0!3V=OMjspD<7rsA-^enBct~ve%QNsuzY9@YlCXXdDlk@Sd7%OqtEFD856M1dN zsn3?_EQMVr=wM@EMdC0E6NRGQv*7{ufj)^?__>?jrr!bH{4p91;OjIQvA)%w zSJAQ@>bFlCkrwn@A!-4Y?-%9|)2SzkVUG!{#m1S$tysW;R^qCCys3uHCM1&I$T@4# zBavEEaSaWP5c^ttJ^$pCUHXWWFH^bZ_}L-7nASy1i)<|+M|H> zd7$(71BV3w66(qj3ZE;0U>x3wtRuO2agnIvPtJUchH z<~YQ_LS%8i+2i(EPfKfY&Eok$z3t#sd7SHcN?t&RQx%~00g`R9g@fa|HSwR&0(X8T z2BZq$XH%hX03&Od>QeFNWv36HdoW@c=S_Gi?M0+C!&Kxwjr0`~omgVkJO2g z+e2*$gf_e)SU_XP?(nxy{i^ieKE2{~OUjVQDVCCx6OT^<8V-?&`J0=YAA!J6XcI;B z7Lkw}WA)=N7Sn}CdyJH%q+lX3){4OhPVAC(Er3*N{kfB@n+o+k^Hf+-5mj5n61QaU zQ@N53`5=H_V6KRb#VDTf*c;CzbYw4G|w+&}%3JlsHOCN}Ldk@v}J*8j{qtpQE60LWOdz8fn*1aID$R%pipy 
zKre6cq2GDa+SA+UHRloCK$~-s2{^+;%hyZ=Z4d^;TMVH8oVVEw)!SyR zl@A8raYML{=lrCJhh9AMY@BrKt31?^eKNPIgYf3%w6gB(TmEM^CV0rq(nfBNKBUVo zH#@}V$)E5MZi&>Sw+;ndk2FdidcLo=0dy)yV~>-cAAJr3OxM;x z^w1yYC5XL$7@_Xmw!L^aj>+s~`F&fQ=#}Q0IexGT{TUREHdvsfEdHlgcazM)V#X)%v~0%s&4y zz%Gl2U&uc0Z{?6Z33FbM%x=-e|9a-lHa5Lv^tL1|2^)2*LxpdiTxuu>p`&Gep+5Dh z+g3!ug!>ru)d%PrpSIIYGj#Ti^ye!oE+*LxfKeolsM{5C8ac|NNj>onBn#S9@PR(( zKLmO=gS4R9@ZEUcSZaOvz~_rF(@FoQJKze%FO3Z^cl*_D+8S&-3L2KthYw39D^1DC z$>-zYyZF4_UGHJ7q_e?BMKpw%nVA83oTos$ECx1qkhwyCG6e#o^R7cv@Z&AostT``!TmVB3t7_DuE0wJXMg}!j6yJ&{z=`6 z>(cYngIFyl&1v1MVjL{S?r|R8;>m+cYZw8h+43x+sI*O@_MCP2xR}HY`okQIXt}~= zJA?bH{Z2z??$lTQT$`?)dD!(~{&ImiFLL5o)aPoD3F6K4AK_s*!g1KuUhWCo#Pz4C z>yjOY;-fV|O|D|FOS_?TS_1d|jz5CY)D{y2EG$NE*UF>9tgOtL2)HJ1Zk8&;u=|{S z+wW96?cFmp83&L5MJ$g{bW?f`SI`R1>2d7Oot^1z%l%i!$cAiIH{|;7*SuMvdpzS$Yo4 z{V-`a0Usdl+uXRu23{JhG(9wJzF)vFb=r3VG$C#Ek<=Nq4bpTaBsX7wu6M2Xr+$?% zm!Xz2`D=#x?2C+CRV5 zmG93ig6}t{L=vO$|8u*j*E~;KvXq^vjTA0a1JHiu}MgeMoJtI3^a z934_L-XrOAXRt5`=3?3z&MX1=@&OS0VS!tky;%+?qs4qvy=WLf;Tq?4RM{tru94&B zEP`t|Y1mDgrq)yQkwZ`t>PkIJgziq*V8Ez7F)th%NirD)DOL$Z=gu6wN-srq&85Jv z@APs^;C7;-SlMrNKxZGejGu9g6i-d1zO--ss^9~u(ZOqEjf#=%0s@w9_w%08mq)WL zs`QlRryI2zV?FAv_tkdk1|xXnlQ$}=4Sm`g=Dv^Tc}&R23yCvp3OLr~t@lMZB7(Qg zX=~i6YoK%GkGK36E=26#L@>gsNcZ|KDXgvSkI zjWzzxWccw*W|(4h#k1)YL)C>LZf1!k00)tWzGKHq&Bo6G-0M316T& z1WSlxtbfA@+GOnPI#Zlm=R&B=2sTkwzfr*D(KPZ!P1NXzBj1`DPk>QW1{FzRQ1H1; zR`^xCf>hSDMah~a8PXV~8Vvr@_4QlDJF!!HolT5EPDtny>#Y1&ZE%jqk9!gVojDYI z7!@ZNbhMw#MWTr0ai3y&mt6Cq{Cxc}FQ`Zpb_?nQeSELFVQP+ru&k9Ia;*} z(tGdhy!2zBmPYyLa2E0<>=tOd^R%U<(^DTH5(6tfo&JKFRJEzF%U8ZzC%1b`sJKn` z4i`aq-a^BO=KBYO^BgxZG9OFPq?C+PV?HNnKc3TO13wRqWQ}O$hd{Kw}Fcqlug**G(A` z!3c2AW8auI6HyfpCN~u7$zqdF?Tl8QvkDh?mtI^gKz&H)H?dO^SB~c0{+cfSDkzu` zaoSvz&KRo)O&Mbx(vWAPbYyh2my`vK^n@dr59)PpK&$Yb`+jZ$%2U`alADj%4<>$% z+t2g7qQy{iVHD+;a!6$fWm<&1dpD&fCJnMpWfZSV%Zpf6z3U94ws*K&&k-L|xMoub zwjoE>J0hD-ZGFx$1}A{(S4}=og1bYr-SgAHIFUdIG0)X^qAxbmT-;8VTxvb6CW%c7n3BS$q0AB z{C@WR7!?8)m!vzp%ICGG6A;-B)~Hf*bD#1Tm@rH(gb4BD9q6;9`(rI$a$qGaW=&gd 
zplG7b{iy;6sF!9x)E$1>hao2at_fkl3MDPY7kHAe;V-@;p0n9MG=zSes391(yAOaJ zu|{(Rn_#ZwkW||2ezth7Act*d_FTm6 zS}m}|jfDJOP7~A-g;eg@Q@|T{*#vBpM3~v1)<7B`nKaLvf&%+xBiim1(F&6{eqtXT9{I-am--y8i-xfm^&wFK&%VO)oCWJLnQ__^Q)9_7h;fk&TB{-+Ovc z2qtet_R|tYEhV3QjqQCreFKon*g(4Gp_S+36P4oCu+n99yLoFFJpnCFJ{U|Up29$s zdX5kQy#NW11zfM2z0~Cy3}bK2Zx8r_4_j zN}Q1ynJ46ybZcN zX_<@IeUzpc1gRFBxP025chp!)9&ivs1Lbm8WL~vNs@eVc`&_SF5NqoNb%}rUi^9A; z0K*Myod)+9jPa6R9`J##uu7<`Q4LWL>u0*LzM@-`^$QJ6djeBLJ{_P+o-HXve~Euv zs+@kIlg2r5tj}vtV!RZ6cQSFiKyu=~2Iru*>H@^4j-S*-4axb51ty5S&cImLbzoDD zsr!%Zb4H08zxE`_BZxGkr0{^s&b$l}l8|Td!;k~m^4@%Q@t7=?!QHo=27kq79O^cGXAe+pwVD@2 z5~*2SgamFTP+J0y<>Ec#mjRGx8hSVF2=g)Vx9u6_@PC3pKIx3U^sKc1J8T&=;r`T8&OI%jej!vq_ztCj@|IlTy*-fHC{cCQ{x{!;V#yVYvcT{C$ zYiBe+20?tV+bSY$l^cb@7vR5LcCsH73Q75{m05q^Um8kRR)%e|KtUqi>LOglM32f$ zXUBAhydA@E&Gc*CxA{%=#3E9;elu8E42-#q#jmt6&i08&~}&gy->c~*S~4k8<{8Lv}xX1A_olpVIkDHFbh8MOBPBk4=z^u zd@_Wd#`UUXYyNQ6pZDkn#k#BXm3hLibvIDK4uA%4eit|sVCL|xR^|Bc)5vQ&n8R_v zz5Q3-vN9nM2WJziso@UgrxD5sm%ck0Me~X6$y=PO^GZ51GP#BS|&9n@;Vdx zV!w7swAQZ~6%{QSFIKNwiMjOn^OD&r(Pj_t^g=Q4BfZ4Xfm96 z&C@Qu|1|UqBxvk`GUMf;)nIn2S= z@L~`xJ*Fw}Fq25h;Vu-a7lGp9YEm{mgLt!M(3DKJvNzzWAxyuo$mkLKe*ZB~{Yy)` zO1xT11rF++2LM-vt2v-?BdjHeIs#Hs#r7*Ma|0;S&QyT`dkQtU;h)`^PEbZ^V?X5! 
z!clTkVzCUoy~5CMBsWAGn-d&OKXT>*kDBY9Ky?K8%#ycGL+F}l63&rcI( zhj4V_l8`vC$%8NYKUf;xmZ4Hiz`sSo(%uNsyunc*&EU~AkLUQbGmBWDN7HQawV~Si z)lGd+Wq|Y%;6G>3uq8q40^wT3)nEI4-MR6D_jAK}ak-3PVU8C5AdorKbJdK-%0BA1 z5(?M$9^5~=tF*Lo;^CbE=KNSx6EM0{k3c7m+>SpC5%kF{-?m&SReA|d z7gt&(!GJygV^Q?lZ!aY!g~^$`Nl=0E;!At8kfAE?mwpcyu*%qA3Q;0F-~k)^o*|>{ zanY*0Bg3DlerFu6fqBDLtJeCP58_HfKsNF>iV6mo@iaPOqmRC?tlk))8W_Z|lJjk& zHfw2XBVaJC7(BScL8yG1S6OD)=nFm!mirXEadlJ{MBOnG92XJnJC*Ze2H}-v5*!>H z6ZtZcJqF0>9ajIDcNpD+kK_R@kb#(mZ@D;Spt4C>wIA^!wX>JXhatgdX4L`8Er5q#6| zGe9*wC&!th=v+3twzkV`??a5)zrrPe)+hTCizE2UA2`4`_lOj9s5~yrZ>F%iUfjx0 zPdaUWw|YpU_NSx{2o0`;z`eHDpc&zUM8TiJKU&cgtTGR=#e3i@{5H$^HDNN;VMXin zQKc{{qUBSiGu17soX~=0z%>}$Pg^`vfj5koI_9@JFtb0!XOuz+GA~)999{O&-(E2P zOgxc~<&S`&Ny{Id!`~Z%Z4>}OMo*U==|4yofNzTUFW(gG-+|Td`uASKFObF1P_NLR z#Mke=x&5k@Bb(g_|6a8J4%(k$fbzDF`vRx=zwZsO8o~SJ0;BuqVE^98(EG*MU}VIH z{@*#8WCM;S3-G`1+HaE0)q9{M{;+MbmGQ5)=+_Pb;}r8>#wnivVVvS41)OJFvo*mV zQOEDS0sbrzzx-Jk|Ga$u-WX?uP4sY>#>K_`{Q2|#{{H#-c?&1PUz`+*abj3j4i0tT zPemm*F7CST+-dmt_w-<{A}114Q@sJFA-N=08gA~!w&+dHzYg~PjUS*{0Ihk0fU?61tJ8+HT0iUiRAFBsq0@_(10 z49HPVL^R(d^z%P+YLwfD&{7Gu0XLf0xq_{uMCX9uYaeZp-Ci7U}@k>I$ReYv8~ zUx>cD7vJ(rvMn%B{^w}`6Z48MLdgFRG=hbNEl3&c zWmIkf8x}%3V}GzvaVrpSDhAvJhsfQm1h%WVt(Jm!rSe~%SmP*r32D8ZV6R8VhWKn0 zOdEU1cAT-mhrE6@3+mIuN#b-&H&>Z$MF z?GN-!Omrdtzri%+^wf#bY5d%5Y^y(7oh-Gq;t9Df?rLrDq#W+bSDNyTjWH;w(%Ib3 z@+G1GUsyeNQ?*%t#cZ$sU!DteNy#(dyNLL%`)3;k78MJ2x-HT`+lVih16km%CZi=* z=?73orciA8Tl&N}z8nVHgCIIJXUet1>ycc@NUm&e#I5#_ZYp(nun)FLrPUcOoeCn- z%>@p#>^)_x_%B-RdT`W#Wm{UtNpAwGkv+62urmzfu%Xkc4AVk{QNVO9&$Ti8&k7v~AQ(OkmgtmltV`5*P@8 z5abq;KPVGvY^p-VMI^|5kN<%;iT3$pW@WC8mDqCQ>zx{CR@tlfmUtJ@ztvM2>M&OV z*$y(6i$?il)9wD(Ztg-IknZktZ}oI%Cdo#Tq$K-gFSfbOT{u}i9*J&5PZ!P|K(Z;? 
zDe6VAXyYCBTvDAUb1`cXIJfoViE&SbRizgHTDiqM zp|tE=sx3hbnUCQS4VH500|-`!1h00-@29C#-?1eNUbZ6?`b@Ex z%a2(lr6n}qP|}p^DX|;;(}gt$>&#nob+6!zBZO;CbU^f0n__eZL7PRS<_d^nKV&E=sNV)$9B{T}{M# zIsyYdl5K=N78e#oNCMAaEP5@}>2-?YF--nDaHcR$BC50T=EzE~Woa@xA5K}xxWU=J zHhsu z?+brkZP<{rG*dgP&a64-CB`~Qh;4q?2^_qSk6GSv3RSS+@hw3t<3cW`e_knNm>d4M z0@GH|Ko7wfXU*>UxRe~X(VdhXm^=RlK@64pz%DKBY7Vo^!1a-N+jY2yo1t&Yyre02 zdhY^fX{dO!mD_mLwVGjJbC`IZyH3(0<}>$G4B<-EAC}pV-?oxzO&Ir60o->`SnBTH zPkeel4%H3YCn4x#xZGFq4qG|<#ZLPC-EZ;ug;TsOZf)f&zkT~wv8T{>C-_sKt|>kF zrP@Kcq=VD5KN@!bwX%_+^FEll(0ElW5YcSIzclXo-Id-vWB4N2yNIHM!?k|CcN!@&oC@g2 z!TvqLBD(>kibUc@NBJh#>T-|~vkloJCUkr%P}&A1Hi4wH$zX0Hc5RSBRAes4BFZvB zmfMB1-+pF=!}~RLa#2zhIlX#!q_NBt9rENRaWSMf5U7YBxt`SN6VLYVEC?_cCPh7* zs2Aq;o&Zc|%P_@icc(ReMQSCkgyy2;eXW(cyp`l1s(HnO(YKL_Qdf{50(x-_BInY| zfc>SJSN;V8^A+Z)xo|KVT@?)Ymgn`a;*^fu>6995-!wV)w6RX z#oArE)w^gfUe8nMQj?k`RLc?BPcrW#(W)YH11oS8gMFnnBWRA$F@!HH0zYG6LZ;`d z+jJ;$yKmu!mNxB2TO6Xw*Rc3-x;o`o?ToEB)ioe6jRj(k>cU$+o0~ZPHU3?eO)!m?3BUPWMZnFGc1~TgJq+{kFoY@F|`0A zQdCsOBW^%S66Ly+kIn?;mTr(NR(;kJOG&%INnvfiRacIjd^zfGD9jWEMbMeA@Yq4< zAY0tY6;Xd5I@-w7d#RxYTeBn+&8FnaLs*=p><_6(W^+nlDX;|~v#f_OeRCzX$)wUy zUsi6_^MEZQ;=K82`|*ZI;Y(;;S!xy)w-;CEnIL58+0ByLLU&1PBli=h@{!z^*)Kb3 z2`EmD;{M+YM;|OF_p%2=wG*==t@Vb~T3n0rC*As=bO*l7Ncmb-UX(K1aT+hKNAA|i zj_l_!4S3Hi0z{Z8nh`%ADOnMsKkMl$+FxyvUuKk}m-pV;pJz4m2D53D>JgR0k^ao^ ze0fa#&V88F`DKlfUbJ!UAz!|0gIkp>#lgZNEKkOLJB6zw!nKz~9qLO#fumj6tLr_P zu!w6?c7BHApt2)Q*L>yS9%YT*62rG?W}gZ=Xw`B&XC9~azVS1O&`)dYXof6H_Lsx^ zO&%#vm|Lf1=cI_yzQ5eb#*P9x$gkO!!yL8Eo`*8x5myZnp_}me zdMj8qJ|md7I|{k%;77>y8BpG#3l<lFQc&zr`@w!hSHxZPe#JU_Y?_Oh|_?1HMAf zv-A`+F+=#K+GbgwQYy-jl}Ox{;>VoW0ym%hO2Pyi;qzh5xl;YNjsu>>5e=uk9RS?|2AVw?4z37O!#r1Q-A||z>W_e zfVkVZHQMKWQ*&+Kw;T*kv36Q`-8=ZT!&v@${IKqJR!Ssxua5$Lms;_g zuwl>s<6XwIW6$)SDEtjlh^JW?vdwEEy$MHUnk!a*XHu5FL^}M^uYsMPVXU}-2Cfm$ z=DtC^$odFzE^A^P2!|q<=XrDWch~;n4=R-GnCbHjgZM#-h(SlWQCcZb(>zU`RN!U-_*DpH)6@s3N$xMii-5THaOF|w9MUmZ56Y_ z){)vsq6toZ1vL4j4^s5ucYTPy4BAzW{W|Oxb9#lF93KdTYH<9$pNrMs4=KzJb;c** 
zDazG2Yr#=MdSuxLT5Z$r!EP5vskNJnx&xgkljNzhG1sD}-V$cTLnFcGi3!IyJViU{ zv}L?r^u7-&7>z&Y>`q5Jg(71%_QW31IxMBV^?_rW5w8EhA>ECRnZS`Ip z#1qR@$t%9iH57>_e1B&iw9EeSd&8;l(kqa5Ze>=s#G0&qEuD05nhjq*40xq{`FUx# ziegkMez7lzzl5IRjm^*q%35VkpBfBy@!U?}jg4f=z=?q;=9H#ID92K-u~0oj?>a$} z;X`kt`CQ}|$<^%9QOooadJ8DCOm@>JA)E;6y7(VGMZ8((zM8Af?+Ybk4{&%OUaU2| zwcID0jd|#OC)6Hi7z=HC{wc$rq6I@_pWnc4ec6H_NMEY4lE;2#6>^WKdk3e~8C>#? zRERicbxptx07M{y9H^hU+Y?B%&`aEOlQRE>BqaRh3EH`vm_LocgVI%L&)w4%Es zuQN;3xTYf>9~*Hj!8DU}u~f*l1z^BDr2fT#Awf3Vs>;npF4t@8UV0P#9TUzJS@odm ze#m#B**&Z;p2AZ#foT6TnhqVqf00;BSXmw$^W!Ht`LOjIDLhacjJ*VHbrt*xs8E94 zt-kK-ze?jE=Kk<>ZA)A1Si%q$%EpaS@GTC(%;@O}TO&;iv#D{bCemb^=ux=|t1>^! zV!?K3Ql}R^g5TrTbI?KwoblYE%Hzc~xay#dpSot*{td;38M+*o3!Usje7YKFuECxm ziwM>fK@HmVxqUg=mwlY5j>d@URw6#3EiiBCm{;neZ2d5wP6U|;-zwDmy2 zQ4J0zX5DjiK`iR%&3=PrHfnMP#oAr^4tyVv>5q^mnEwNQMIlY={(ASI6a0uen1*Cw zAgsx2`)54~hkCaX85ygJGLqD(#{O;Uqu0LHvh!v){=-cN#ix+4nQ)%N1Krh@C5hqE z(}wBaaFw&hI#M~lzt}1to!o`Mo*TNeQBXV!Dp4C$0V->9u=0F9>WgT@Z`R8U50&hq zqWbR48uj-F!PZ*uL!=4zy|%NYB3V&+zWEqMm9j$n{PvEG+9b<>d^tP4ANvcm7nWLM zZ!zE}H{;`frV$d|mYUJFnq2mZIM~@-FqB-Dq^*wV5qAn?qDb)^+NFOw8;vJUwOs`X z-{DtUP5`lZBUWk?Eqa1=U0)K4c=3^IecR&2=*~L1#o-E#*&+*iHm@NKU;h3lrJ;uG zo)bKWvOUeQ+5ud zx(e2R&{a}}-lu)E3E|l-l^w+hvvH1h(R%B8svw7D#xa76`7$6Er~fMEKpku8y6vol9pjU=8+p}J++fQkTQm1vYIE=N>0aoY7PY;z`5Z`Ot zJ!$9-!VQe2;r!L@G;2I0beKld57g>Fh8l*3>LHdD$=fqS@hjXuk9*V8ZeL-86)uk= zQ#m3CKY^+IbashP<^4It*y{BtL;150B!SQe6_vKEUmhyWg(LrMd{z>k;6Yb<#E=|) zzW3|o)HUEghJUl>f1EOb!)wT2Ig}oySv^c-(PGaFp^r@BF&<VlN?` z>vs9Jc{k;y1?{%s6jvRg4@EirxEgt+lEc07Mh4yB{$BZkllG9F2T2HaE_GF+8wdTl zN}N_a_SH}Fa_ul>Hf*FDYP`z0EPHEf6Ze@sOP8m*4SSP=PfpFrv>#Zw1^VpLs2PsA zX!dAy#pj_+wQAYhV!uH3I$8}Qd!8Y8>4`B-vFGZkyq};Bzy2V-EAyvLQ4Q~hM32YJ zh3NDmi0F|2jZV#@Z;poswg{GejpK8F^U?z^e#xRbMuKxhn1nehT}`lbP+lHf&z)Sy zjm95^`RvD@n}I6yJHUoxFMBmwh_9gNr77F z2^*ooqaz%r+)CaZ`c7@bMLcryEvHTqbbLX1EpH3=ozUUaV-ZFj)?b~}JNTDqQHv8S zmf;i@Ohx%d(yW3{P$CR}&_RdHZLzqWDDe5#%Q~i~Dxp4i3xA-61*-#|%sl9qr?IE1 
z2GK*!uhG$;ZEnp?Y_g!_KciNufeN)YC=&(y6i*cP?(6Rq8_6^TcaeFw9GW4l>~LR` z!@g2_B9oN-{4mf`6IRLHt*)i~&fDQqg}|kdWHxdy{mm;*ZV`9>57oXeZSb*>6!tfV z@Ne5wM2?sorfqTx8dZs=H0&{%WxA0)DOed#PA8PUa?#Mi=SGJVWbed2Wa8W|&F|R@ zd`{8MzY{h5v?MATbz?J}5?e@r>Nob-B1S1|)fJjz^>Edr-6y4~lRjdndm)h$+T7-1 zc8i@FYUS4tun?m){|<5?mXo*!YUAZMtGGt$ZAx|`;g zN9@|;Th?9eaI`D>0JQuG6p4 zdKbA#o;L<5d6ukywooS8=)Gixnj@ht)8ny^rM@$HGr`Vls4)O`;#0>%zQBth1! zyTz^`W^HtDL-L4ByOL zon+0O)dLqoLU0hmaTDh$P2^R>cGqjC-3;Sv(iZXc?E2$oS)p#ks#jgG@SEuSzsIzl3)3#2~Wi;H~!=0@20 zc4u!1H`Q)=3J&)c+-9g!evgKLqPbKxP@|?`0ScIQuUlQrQ-mL-{k~9SE`Q^u}C)^Tl{Les1d!6g&M(PQ+`ius!2}>V*>-I znH9CC)u@4sAQkGSN}&(~3Wh8%4rCmXo&VyoJYBm7uhWVeRS!%r+M>gxq|LIyoAS^` zb}iM9;@*bi1kS&iBtBiFd|G2RACI+N**RLR$+|+xV<97cEmXO4z?jUwdpA1ON-{p* zIb+t8Jl7S=FY*N?nb|O@xXB3t3hSLDl^F=rZy9WWCN;$~G^dZISakD~!7vHQq~nOH z1NAc*)CaR$6y=oo za@o`)1yfMSwPu=}-RS4F-rNbY8on`DaifRU%3+3w%-t)c6uwZ?V_&u};5Lm=D$G{4 zz9!5Mdymz0{xJ`iGr$$IOKl4apfcl!5>(=n=QT9)^i>!z&sE!6NBuAM-ZH3;bz8#? zgb>_i;_eO;cPF?@a0m{;-9jL^Cb&Dn-8Fb{clV&d9d2i>mF&IGK2^8s{<~Etzo>k4 zPkQ#JW4z;eZ(iPBrOLbAD@I*wK!n4#G(9S^NJ>Q)}s^iyel%anF-)zsS)yc1Mdx!%T}VN|N!dlhHV znqE@LA>BcrKH;TY8LMU_AIHW9Yu+tIi|W#@fdeY3o3p1xmqa6>(o@sO#5PYPBT>HS znndKVvHn!Rf4aIqsc(8EWfp})7BL-i?Q%G_rksK3*koxbc)A>^x32}Bxt4WBgc9ew zy@S1ce-x%ywfqnn)swdb*U0KMgT&iiSaBDr`9X5C?_qjpkAScSZm5E{wIYdy`uNwC_r>*0R8;9hzWO}5h(G7z2mq#N1Txf@^8PqdZQK@k`K5=!@vY17{4M$ zM8TiYv9j%wuOv6e=^@fyl9FMoK}76H!9A?#yPHzlA`ebBP^BfBh!i~9z4ps z#xJx*RUPL8<9)>PNJ@3D4)RR~u@0BDx0f;@@89vYxs;BwWLK5+bvN_I0%n^6;86zV zu!oIg++ZGOVOf7K&wGyHk2NS6yh|7u(!wzUbuLH6i*<$k!A_<+OQ3=mUDATbW-mu9 z%pVC86O(9`o)kCSS7vuuO;Ay3Fj^ZvCN-t+@aDw1aDq8%94MFO2cX&sLyyH1r6NP8 zz(Da*%1-Ke#rV+^XJP%FQ_X;=!clx_&lM?h70MmMQ{D-hfZlGQco`o8RD_C#jir$I`g{-8we!oAG@1(4^uiA$T(Hd|@N|SlBJ7Q`&K?~~YgAQD zpInNoY=h3p^d;7)P1iXtaZ9n5<2(NitCC6#h0rL6KG8XrX_cCRoR-0?+DAJDCBWqg z7Z}3G#F*adQp%&B?@2)+yorMtc+@x0s*oOF;vp-W-2>U^mf4hFa=Gp9!|V3bq}4dMAC2vgT*O!9mg~?cUy~s6K-5tNT+R?Ec`6A3<3j|Z1`oY395B7 ze2?*^?&b{7TMvY_68^XUB=b?|Aqi~VEGs;Gs%~%Z?;Z*$!xQj8O|HJKCKOMNyUn|M 
z^%z!LEj8}d&FDMPFJKICFdwX3F+yxhiyA%QC^;p>KdFI}Bh@!EMvsk=s5qK}A3M$u z2s3)ROfv&&Pi+fM;4ZcAuEgR0O!s5xndS)mo9H>vB|lhAo*tO%w;#&e;2a}LBB3CO z4dyjdy(XI>Fc1`CM(TY~(nB@>O*!I5K6bhT_Be!Q`=4p`4^Se)AF^36RDb;0C;cTC z_@x*Kcp+{apd;n-UxWir6f+G|C15t-*d7TD$28O<>FBrg8Oe8 zj#LLSq~Wyf_V%{2jKRY5hyL@ZsU$ za2ZZ!hsS2TupIpI=R!5r3`|VJAt#c~WXVq#(v z5)#tW35keCi-rG@WBH(dEiLWw3i|x{b7!YWJKV${+prEM$RiruM@#C6{~iQ@U(*u& zdO&+n$zMiIf7a_?KM)&0VnjgX7z_U%ioZo-!CpW^jBmue=>KeiNd$&Y$DZb&^!}eQ zl>szt=^J1DR|_Pi&^U;h2kVb*-fyi;KNZk$JDbS=Uo9VC6~WNqk+1&e;}D+#4H8f6 zrpW)*@_&!=e{bb?_x-;P_G@tee;O-0KV4ba*)IVl;(Gb4XGD%f#9a@QPgb2)t>rXO zNUQH0+4rk--Iy7KMX&kGATA0THFZ8uCP6d^N!;#r*HZ1!&`|XV4;?2bCkF?X8W7U< z8A;`~u@wa*gOZV;g=0yNy@hEd9qZ3QHVW@dd8B3he^eaFWe1_4b}$_Q0>`xnfK0-~ z#DuKG;PfxkS{=xB^a#!+yRHCO!0u>r*fdYMdAZGYwGCO|#^7>)`rDDR!OGd$8Bo}q z!x)BjhYU)ABw0PG$Llbh+ME zpBxSzep%sFDmq*S1Sh5DE2gHsY#cVzwO0X7iHV7GSmRudro%wRVR$%nm^hB2K|P+! z5o`NXlgX_^*w>BHddsS+eG}*8?i1gPn)kZ!B-e-4>|_$)?p~gNyx|!|)puGAV;3{p z_Q_QqNB}QG*Hzd1`6)6nQNmDP!%VYz2JkS1^$7K>yqzvDNzgGG9I^(d;6)MHF10PY z?WU!*%y|krC!KHh>;Nmxa-(Ab_RDl-bP$VCH}H5d{zM9)04gQ_pILEDj1q-%{06AK zy4o8TMTDHF)F{{cRVofkitBs#gNiRF3`@`M+be*T;#{q826nnsQQ)U|*DmHKs-xqz zByJ3tJZHG?;Ar|>j7z!+6NlAMpu%O?SSVBs0EHQs8)dvc4>#ZZU$0}u^nOTdKbKcq zZ4+5;49pZ2zC1AVT=SEl!L6=73JDp;Ra4Qhv=mj1l>sAx41AAjaorRbg~GDo9^i$* z@<~Sb=NVR0E75@-sa)f2Ubm+>Y#Z?KFCQ$L?%y9wYs|hGP2<7ojkM9J$lO@4`Z6&1 z5+@=fE{?(%Cs*6K^Iof#V)D;&PCh0K3w85uN5d8{l4OShDRo=cGU~T^EjKy4_3=4u zNwqj!4Jt@QxSp&$mis=+sM;Sl?piu2S*a0mJG`6Bm*evM9DwtDQ6LD*io$`%VSNMe zD40bsl_U{GJQ@K9@!{Sv3;H3L>p7Cr=7QyXU)a0auVQgbF#krcOQ5t$Yk!Rlt zb$@f}QoGC49v=8!Oy0p^b1W-5SV>JS@bW-+z;buQH01K(T+OtalqsuOy_|zd%V=c4 z62v{>GeCnB@mckL_RC7+5hvB|O(l82ml#9J^^6Oc-RH zML_+c_u7ZkP&&(ISIO5odH{QHfbI+U%7?%R@PW4|^o#)jb=R`MCjIyRLWmp{rqMhV zZ1Gt!93Zg(@QMWdD6sF7S4(FGpSy!(!EK-5Q)P~;u-Ux=|D`Ib>i5yn41Ui#dUjy% zmFs(1tp6RK@`2(5S1EOs^*pQZpylBxcY!>0Z)oV(ACu^V13KfK$b#X=GsE^zw;UGF zht&f^Z@kXV@NL_5KZyzZ-9FuKrnwLMqF{=(AoUe%;2pPAYq>t|3J%Jx8v1S_bbWndTPEMGZ5VRz1z1}@#b^H;Oi}EGUvyaSul=GyC$o4VNL>ESn~?kH 
z9-t%tLSc|;u$Ul02d`N`^BXlLZ*9FRQD$@ApIT}4#Ky#IJ?+KQNi@m6ZX__w~tYJ5Ygh9b!4F=gELDrX^;?1XOcyNit)hh68%XQr+g6Wd;Y74WKOf zoLLWJ0WbX}O$KGV_904}A!2=KlR z2c+d26)IG-I#mE5D2HM~s7oK2qTNf+z6bLw(pQZVO%ghBNOUXIu`b1}2t3ZBbHCI5 z;&SMT+}zyc77JWZv}}3nPQ4{An*=PtV!1mY++l3H$(C`4!%Oo$KObf@^SC($Fg1xXOlqps zAG0BXd-Yml^9lJa(LzxzsP(wb&f>NA0b_M!-LZMoG}8^v*PNhZ{-N03S>OI>lEmvU zi`HkLGA+`h++CWyj&7ZTlrF?0M5cSywa3xX-%EXc|4e^OY{h>PZi9 zUI)2H4gpKkMoO`O=O9CUa956Gl;x9aCWrCw(}fhV6BHj`{i{k=7|fD5xFEp9)C1aX z1t>xS;nL#0;0_=uaSKmpw~C)~8&ky!+fH>HK{fpg)#A|IL=Th@fDk=_(!j9l1RaNv zar1x$;Wm81kgrHHU%fU$sF*vHXSOvUYpslO6C-vQag{z{yzKOqmq*${e|rg#SH2Uj zW6UfFVOO-ltIEB7o6Ibc@{3Zn;(5|2NTut(9XE!zmu$_!7X`p|9QFY8Q+!f15!f+TP|J_#_jW46)GI#sNBscA<^EFp zb-ln}5SQeiz|351ok(eayPZJxx_rIcRdP3tK6UR$b3Ttfb-TY?#w8%TF1GpE>{oA0 zT2NLt^4O2RzD(FbC zh~h`4VGA8jRYJTTU8@#CuG@+k#P8bfkI`3!ANZaC&X5bI0n(o(rx_i_CRYhMpR&gY9s1^eJuer7Xe zT>`>x)G`e23b7i*>;umB+X(8ix~VU{0I0(~r7o_|}C9!@y6W zS>|gY6}_W^&-7=gk8Z%?fBMto`SDs*S=yK%w&X)7Z6Psm1f6t1`vjX{SQ_0snPC+IXBB%s*nuN%-fyK!vgg;A8Ps?BJ)&{ zdE(4iX!Xt!d+@+5St!svC+>zN=k@ZV*91)VC?W~QIPa`XAnD$Q)PO>Mkz@#?f3CRJ zQcM6|*TG*ga!!BQQV(nWOREROI7RR*y0zWM6;KV&C(Qc!DzmsHDy4g1Ja1JaLfrQ^ zNL4@eSp}1BZ8l1$_W0wXIS}X3lRgr^E>?kKYe`9f&XO{CSJ+G+%OZhrHl-RG9jqG8 zbqRnw;9nwGe~E`-3k8vp*Suxve9b+E#EUg6ohJ~6Ayx7ZtJbhmZ zp4iScLuz+0XgPM%@9~T>{4ozh2vaY#KeOhp%%7jsCm7L_W^RGT_hf740V;mAx_~ZG zU3o(}%I{Vwp)<5lK>#Q0Qc9dsJ+|@TtVu(!us56idU=-bMn#vr0_b>jWUqcEvD2xO zKis9HABTQ4iX*^y7}PI1#U4y+d-AZBiy#$VY#1xnkMLtrH&*u*C7Ba=_$j*HUGAwj zYMVB4(tJbH;YoDMgPbC3^b6@S&WxZh328;PX{&wwj>#JvbDc>~cfB=HF&qRZ+(Z4H z`FDheirA014-U*P#wWLBaEmicGo0lWtR(4a@RB|FR2RmEq%)t9R3e+SZ@nY?*_2+7 zbVS6J#IHYh0c1>esWKp=o>MXXf;w(BO+Z`%f+Uu|=n*N+;+s$jRK4 z=Q=b$d{m9c3;4?456(Xi0aL1Ne3#vmRpJ#%;Q9oXCwV~^w)e&$0$Kq4-ned^-U!QxA zY3PfWYZ*7~5*{?zBbB_TeH+}3si8?FU!&{OXyrdySA%~q1UDLno#OMr5}lZN*xY5< zupQ5Xpk!+eUUG~&Bd|4x>HN9?z;Xt>RkUuKq$gd6+Ek*#$5JtafUeBn?#$DEqFF9e zA`2}HlNQJwZ{$oTn~qT~)vXW#>LdJ_R#I?6{BzA8IW&=K<$C4kFwCQz^7=HT`RF1K8jjShwdi|Bh~`2oq|;Q+4s?9Xk*Do%m}-W 
z^IArsG({bA@?us!7g8ls0ru?>hb((c6xWe86uZE>(6guC;@WXeCxNu-@2(F@2^uf* zYmD1+Uit1fJ@K~C`(Ed?qojnmpWK-t2swq&Ln=h~dT*WUS||LWWOP_fZ_Mdv-)%2z zp&;FI@;;#;y{H#1=VzU3x&2JZ3kvhenz64CFAq7FKGaL~-kuLPyU>(N85v{JZ`4DE zbO0zWc}Th$x%!pvoIsh*!j|7TU=#(p?j1OEsaT{ z!Duq!(s8EAqDf3>0Ow_Au%9a6i*+s_1L@w35@hUMBGn8{xOSsO>oV%W4W#_yGJ} zIGk5mGB$!_NH19RI+ue=cqr{$<(=1mUO93-a(D&Wh7vrvf=FecWo^BqeKNESzk5xj z3LPt-q&$l??)LmR^Zg5EkK2~i6#Hxqh-^*c9EqS4Tl&;-)mvRhx`gO{>&t+&>P>eq z>8e4sMZ5imHT9DRw!F{sc`^~_aF%(?(+MvIF*%*jwIYg@$|jRfZ1==)uos-|Mji6a zuWwH2(@of4D44uZ_gNd9g3PP$|JuY*!vv^Urj(X-ni z%H?Xi;`j>`nex5~c3m-})ZrSPRC?pv+w<&itu8VCnBK&=F}>7A;nh7kZ0T92#JtA6 z;V;CDrGP24iTE5IUUzlR=Mm!;V`6kSL-P69RsHZIh=}M$aW2KS&M1(4K}*dfkz=^F z*vw#YWg3=)R7d-04RSJ{>5Ubzy?il&P+QYCIJWhMCdz8(-@~oMBpAty=gI9z>_3OyE zx&Xs;g^I}{KuVY=&PabpReZd^o}Ggk4;C&;_IeCDeks@?3a$; zD4HX|5=p?Tu5dbIT~Q4qFFX;fDQso>xj+Z0l6^39D@?ty`Eyj15ount@C$&qBgQ~# zTAail>P)tbmpKApG6`lbzu_hs`9lfy<}%g#rPq)Xh4Rjuy@Z34b#@MQ>5QaxfQQS- zgXEy5v6w{eXP=!~b^4MGDpjAn_%Sf2!9*nM@vt5QcWQgO0p}{yI0-FZs$&r!^o4T1 z`@sM4z6;yJ$u@lmb}c=@L|dl6d1Uum_x)IC%p52qDq{~I^mH(+#1YR$1}q^--XV;q z(eHR=<7|9dn<`Wh-=MIg!3ZLXX4(MC+!`@PXGXH!qk2Li-OL#UaEAvQX~om5*Ck|Ra7Zi)RsfmSq!6Ey4>pO zxL0M6o*9ncUOBhlr?EC+(q7*kYyum?O&v#bFh}`oD`q$5K!aY9^6V+A@}B;U_J=$x z%(D6wxOd(JKd3ZAUL3n;s2x1F8K<_mX;E^d2q)V2MH0Bx1{r2+7Q$m15)mlr&K>`^?h{(JYA|EQurDHCP?-lr`uckHPu)@w>R+)sY3X;uzK8=yoi5p475k3;$ce;xOr&(>FRc9cKpVlY6b)!e<)O&jrP8TSiuoxU)R!M z)Gms=6h0SvWk@`KbGiwDdeH&8j5(+mvCjUCWG3nxvr;f3w>+AHd>A&<)to(Fg8`j8 z3<*rjYeF}nIljv&6{Bz7QJ!AL!yaA%6zW=HnwZ*^aw!~$t%25}({2e-V1##_#?v0G z&sX*B5H!iJF|UEsdbUciZZL$~zOzwQM5PVc4vx`DEkK+;QN)J(xg70HQb%SjQ@-6@rB=3HMmAC?N6E zMqaSz1K9jfu$lEfZxYoe+RPP19X&sd?mTpc^r#~4 z-*#4;YN4ZR1Ls%a((9FCvFD#ObZqo`OC?nm`a?GL)0-IsDMrvNF0;DB)vx2|35cwh z4VfE?+V+b~3FyM!&lxlx)$J^pv|V>UKkDPpOnsC8{QY}FZSu=f^YhK9dJAujlgm42 zjtb8eGfV*Qq+Mr?T3)s_%Cw^!CR}|Jr}#V(cqHGe7jrXWluzLz(gfhi1aH<}W<2IW z{5dpo!QK#%!erXR0HnefqohXeXt#=yW=>i70iyr~n~ahC9kO*mi!f^UOvPMgzTU7% zPA8kS7yw3B{*|ZIcFfH)>P~Tn8U_nY2eFxrbjK22=1OwM$@jC>R#Gxp`KiX!%2P`q 
zc(nqq7y;VO*nAw?O>t^r_+psv8@I8z1JI^JC=F%v-$EN35q1*SSC(L{H`%7=u= zR>ASZO1Xw>cB!HFp7Cs;6kB~rl%$$#hSWeIn7t6eJt!a#ySaen0`LlLdt3`4$UEHw z0Bt5dbKAjQSBFe98%u{+wxJWfx@5dOK;O0MC%rB7aOr_Y>janJDyzFg!E$p1icEj( z8Ny59_QyJRgMws#dOCDOIZwQ&n;eT_-0DQy!mGfG{pmofvQurA>`W)d;z{x)Y_#|0 z=1-ND*4EBx>vZL^wAAKV9OGyA$6?x4x#IcxJWN35Sk{dtzF_NB9QS^J{&h?5FW8L8 zKd_nc^jfRgP<2BFx^>BOMw$=2I7sY>>aIH8>UBDx4l063c3fjU7_CL`(j z@x+(p>)PuqZjiB!Y(Pppz1Bvx0SY3@UY9i5Ts+TOe_TQ@vA_rJFN$J`iR31^&HT{i zNC04$ns_0RZ)$#IJX-aF;f~umy2k>P* z%X*FAxvaLjdRLXn_-ka^fm^7DfW34{1^;{Rpf2=vc>#!W;*{S7vQ)@^c458DU#5e5 znP*4#rPj7cqCZcxC`MfC4Q^Yuq@4aXmiZNn?|>B{#&^V3h%T$b0ah9kQf9Y`qs!Tp zT@;VJXS!py$e-zf%wx+4CIJA?HN=^?*IHSMgoWmT>1xlCH@y8j^Z!ujMaZ(XOP}}$gULCvEmSwccWPfyRo!^6wVi(2*PGusuf z?AnMU5grfe8J`mqgRypYcSqCt@4KyOwSNPP2$F?(gL{1+5A+KQ3!}q) z?-y5rZS_pnY2_(7pdg#&5+vdg{C4(6klgMwN+Os21_0`hbd3SxtKL9hT>i6Ob;>_q zKMDurx*m*KZ#a%(`2E$9XhlpYi*-U0=C214fY!F@|^L?O+1cLYJJkFL-*Ri_) zm;m{?02WE(T<&M@F3zVl^lcJWs+9lgcYeX-b^ycX)Hfag9*e(wJx}vbcQlv$rBF(& zii?d6h&$%8Dkfm!F8+0_0IsM2GFtTc3qW=QV4wfTMNGdg(gn!Zf|XN}o|o~L|Ilri z{ea|2w(D}UYt))RmDTSD^7-}a0C;APq~I^`jKr@G=fZ)(7w`ftcT;vyiJHy-1)~A} z8wSs>B2*$rsSt_RCZN=_B>1lm4NMl1(Rb^d*fIXM9;AA(1?s>gr+^56F&M-c&9NCISL z|7XM4_^+{YLDzrvUt1~luW89zLjAcyvQCXQSC|NpVdwt>y-3qlXOg{}cW32~ z4-#3&wAro*<-Bb2m@6)&s^yNgIa^X#DD<|#v9sMJOPJ5A9qkyM3YQU^coMF*Qi^Tq zEl?^35rIv1y;?(U_aV&WZfDH7H4dUFFm@^Z&0Ay%ZQqu^Xn5$gS=}7yuJ?HoVNk`cS88LRF{<*Y%m3fwazpO zKAYpvNz)n;B}A)TiSg73;nOp{!xZ*b*KgHNrZL8C3u*fQwP$f589No*%$%yhmQVZY z_v+FOaG1cpK6Ia)$O|e}x96ZD$&&nBKPH^km35kPZYSh0Aouc46zR?Vh}@alPH+I> z;NeZfY7~f_ivd$Sb0&SU;dGZw66~|!=5`8OETiiJjG8KdQNw*0Gxes)p`K64p^2Q^ zA^GxyXECk~M~ub6WTWYfyA-FsbFRvU4K@N`QmDUC;+}83Q2gU~nE&-AUlc$0u#rU@ zis=^`+Iq*MRjAE`bm`> z$&@ntN|6|!2FC(P{~*aojL_4ZG+yk(MKe|qD!*Qee5Rqh5({2#abRJ5&uAXvL zc~Q}XDPDTIi>V9J(AZPEJeSGcMvtXTjD+=hx?xu)9_rDO7d=p`<1DMP0B@ig;di}1m z#gK}V#zm-ZSv;qAjBIgO_^Z5l|2~56l~)#B_`1C7x+e*%3yvnCF+T}SK9ob8wJ>P_DNP7zSWu> zasPvJR-MrCw?vQQqrJwpb!_MOT}?^N1scnarW>mX4fW(tVZ$?cR6?90*4l9XKb60m 
zR`-oMOQ;*uD=n*7cQt1zXMSxt56|_bp9FU-<=no~dYsd79oMvNv{1%3oF14gFWS(G>+S85p~JFBdesJQmoQX|>W}L41VCZ1vtr zQe@))^4Wuu^4K1UM#uMp`l<2U+iM6Eb332Te{ANxvqeemN8>5lW!CI^Qd|~DYFQdg zIJk;{3)Q2hs>OX!pw?466C@GN)vR6L9EcP*QO~}6cg}RMED36FHV*PE3`t5-bn4G& zuiuL+E!*8`f8QU)k~}i-9VdN-o1N!QQ>DUw-~men5_O!j9-EKNhwo#V-RG1<>er3~ zp>1=`*Y*CdbD}gZ_HpRPxy(k>ozu%oN>s^5fFdc_HSuUW<=L`ih#{`99oQ7c%1RM% z8Nb)uvg};yjW0{f*d|QZev;W-T4|KN>e{}zN$c%mslEPtR`rH9NvzZxdxdEgY+hDg zlAoV~^L_|@YS-nsI1DF6Ax}#yLjDoZ+J_KHO)Zg&7q~0NEpGf6Y`sUcm6(9l?OlTe zOYyivD_QfMrLEpxC_YD}I`vBmC{xY-DJ*|E^KVBaG} z_7R0}MaAUFClyQj!VB)US)OV30iLImuuqWT^LA@u*2h-+i2dhjH)@ElG@PKTB7B%3VCvnvceE$uq#MJaPKx%MP>8sPAe2utn{ zI6PRK2-Zee?`3>^Bj`c3PX>o(x3vXV@!WR|Ca9Eu1k&LExdhcZ>@ zB$S}h65bwNB$U<6x6l~JVDsi;U#`E{40t)U=2Ql(y+mN~TjT0ybN8Gp83IoXsU6kUyNrve7}##TzK14~z4Bd4hpo$RM$3v{1mz z!SvpqUEDMk;XN-^@O}-3>BFDRtKCPGJ-6&icDu;@n6IqsUfWAI+zC->H`POMlLETB1j5lBO7dVY)buzG&^6tJ z5q0^Ht3oc0IzDKX8GF2Q_6{89mKo)>LiF>zPehiwLTG#{=#% zgSUsD^O=ng5ZT!y-rLxLYu<0)hKI}YOvKLBK7hIWL%F~jQ=zij`3JSZ^rUAj((2^6 zXR)Wz@g5WcmQv2{)vg*~^3KFgN@`#6j!Dgp+r{y>m^N9ef?P5U5qh&F>&5SREur6WtQo(`btR&fXK9(OLwP9N%62851x6h0s9 z7LU98C7&;=+%YT<-WcSxek-s-4^WKZfL z9!+ayhAGEq|HGZ$kHTwjMz%|;{UDl3b1!;%e#!}tCyy-Ej1-Rxg(K}J1F!Rv`0yjW2zoHTTDGDAI>B8-b?tg^59k@3ecP7f-w zXnf46t>VY(vUGcQEh0s`CfJf+%d6;=(|Uvt_Fj zt@Qs<2N@77F=UKkjLPD6sW`b`c&zz4KR38yG2uRW^|U$V6Pww4rSYWs`L2n{ZBsPN z%S+$I3J9BqeA^5^xRM!N3;_2Nci^R045XofMmzdzrvD<#6>D8F!&sH6MZ zuv2m7@?ygNZ4{xV$84*?yWKgDf@V`>M%%#2w+Sq5HTF6RsqArt$kvxr@=T1?D=auZ zMHy{SR@s`;#RrluqIL*)w&Y$dBUO{^a8t@}3&rM8IfjnObHcb{S-`=J_`US=N^ETf zdHSZ0CS-OnJ#(5S5$@-bxd$Ues%*|5KodVI=BIHn&76I3Q?77X2B^zLkB?L#C))%aN}(V{LV)}_c>PeqSzUn0U*YQ{+67Nhr4 zW>Ke<3M^hZE!U7h7hqooNZf-=tZ4;UQerug#t*J@BqO|&XM2?7OINJYl%ihQ{%p(K zZ!DHgK>~5&S6I`2Thq(S-OpK7RttXfJ5=R&MTJ6=eOd{KdX6V4?2nmeWo9PquC#gt^t?CkU63lrvjQi)j(E#$}Ob$!b*mGkvIN| ze=zsFgpzBwU`lP0Y$2KF6N5Ah(-mt=yEAYs` z0b4?}G!3%iIN~#+rLBIlEb*>!put4ca-&{fv^idUv2X2VOqIaL7H7U4fsL$3P4X6$ z;7h!(m^c2e>QWk;Zbl?%R`u=Xs^cS=`(1A3Ths{l`qa>NO=}KQwU|q=31xw4qY_2m 
zEenVEd%U+wadLr8PgarjbI>#|P4IFe5SNv$cac8o^<)*^G^ARDh~9sFA1aq+g#r;c zB?2Zbc<(g|Qd{DBy3J#EmBV7tm^s7w>_11Q$FJEo54Vl%c{^@P6ZS_mLq2v4MJfzl z;q(H{a6t_CVq+XyQ>0T0WvuJqyprh*Rx?{jYqRdqxW8J@psShFcX#Sa4?Vv+Oj1!* zU2v%P&=NE2G{X4iPi^JDOJhkFJDyUDQ)xe@niw0bW>c};riRmh#3l#v zdpk9#l1Lex>bBF(o9c2UFKwwp?uKv&`oF#Mwbs7Et5kf3@K+a(Y|xwl>C!;Dn7 z(^g%pSE02HFY}JF?w#t@+wjm(?Rx!p0|3>>o_{L)FRIUz(h)@$KDtL}GtAY%XYO`f zTY_o+TidWag~#|R81z4Zr6YME_qv0z47{^DBS~Q=h_JL4l{K5Rs4&k96k{3|(u;uo zs>E7q(Xd;|D4!-zPI4UbJE0KNyzRF7Z7?wjHzSOk ziRKbGZI(#}pdYZ6RBFYXuKgKs#CMP>^3D$9jZHx5JY^Q23x=ZB#FmrXjYQrqZHSyK zcW<^;NoKP~*!eD{dY$y@jllH;f6zW-gWon#?ar(~%-k%2aDGkuZi#07l8E+%AAYzx zLlEjYKmh|Bt7_0JlB2`g$8Dt)h=m6yjov7xNg5Q9^_*JP&{?!0Z1)v&o83= z3EkdZTAsi#olL_+gJfvxch27vrB`1g5mDB?7Bja$PUoM)hb*omcA<@BUKnGdc;&Bg z^0v}JTb%e?MpeF?a1#|}U7aGLOLdFkcQk7*Zf1bTYzq z{L67SsH%#=j08Q+(R~fy@5~?}WP^)LY?+gSefbkIqj1<~Eiz29(2m_0jk2D?Wcb%I z|_`Xx&g z_O!)1j`(>K6!s^7d@pB^3mb5=&Dtp`%$9Fa@$(5&j!K`N<3@g&6M!lC$s7zmGB%_x zM?Eg~WrLNiaEjJZ&1aFviM4V-(ss6mtW(BFQdVkS+#9vv(n-2o!#}wihz}nbJx!sK zmhqH_3n)VqAV{>BUwBZKzy%GKvIri26GZUE{3g2?>!ZXVp2Iw2OOwX-M~yYc2*p!j z-n|7LdVI;=on~65`GY|JB$xav<($_l1}X5e>Z<8NWtw_>$ZCS)-iqaks;GQ2>V6-+ zI6znSWAgRj)({~tPsOx~EBMXu5_#Tzsdq)**2`jbi;*i?WkO{cE1hxBu$DUvJA4`} z%M4p#kM!e2rewAZxUNnkw+2BPpxT745B!Awest{UAb<6p7`IZqxr>I4-l~=1KH$5B z^K8UR3RzPbuMI0#ojXTjs(?!OViyA4$>SWQM^+X>wWDFDT-wv};g{IJ8^Yh!-NJXZ zymikYBV)N*EFC{U4z0d)$0-ZmTk<+DBb&xX;J6+vY1+K#P*-3-VnBbt4av8^Q#c8s z@>!;T0bS?d5f^pF(FY-9pno(sA-N!GVFEp~FSkgQ^M%DP6whGIEW3(^G0Kvtbuabn zWFu~!ANULyM2D+;N??w(s48n3)Z$X3;HsgE#4+0+m0bU-J{OV8%n6k^Bqxa<>-K8F zOJKYVf&Fq3Fr3Qo*#asS^~z&spTxGxFmH`TJjBTMO@6Vqhp>HXJ^u6v6mF+J-hG5; z-bG1~HFt>{Qz66B5l28j?IMINOFO-Y{WzRkLLVyvsU(>p-rca;D^W?BKf^amBM{U4 z&4x641`O)^!m4|}HoU5>I#I(>V&`R((Uy3?MN@oGeLF1k1eG&_KK?o%Ce*a2wvFF- z`KR?ksdOwzyPj$^`Jfe#wYy->GhC&o!ABvSQVc@dYswKrJJsCXpsR|mQo4p=p)rPk zX2hb>3rQqwg#V?S;9=;qPKd?k;rOhNVee9ZtR_omb)F!RWN3CZ?V!&>%dyuUHicZA z%WmM8{@7;XSMT1Wouo&ucuxtiuJEt&OSb2EYW=IIDIi%W2qU6vav~j6&@BqIVF|Ca6Q3Mko(et(Bi~Z70a#*laD_dlO0n2M* 
zTqUog^)66ZNUh<6<@)f;xR{8xqN!+3VwUmUH~sJc;8Uwh3opT8JMpC4w= z8rQ`!rJ^)qW;Qi2ii+n~V@UUZ`E?J9$%wy}vu$NAC!0OO@L;vaV^(YLRelYH;8Wb_ zF;{rerN+`bl29^8Q&*45i2iYF@;!i}(@T83)f%5nC)p{Kgw|LRcUog=hF5fuOH@6K zYQhRw!Bkt6W%A`w8LsGr!+y~`&0{zArLlNW)n!+H$k)7T(rYs~CA60Km}g>$Q(e=T zay5A$T;%{&-W!)=Yv$BuKZ+{n6O43(?g2LnTeNK>9+@Mf-1WmfIM(PIb|1x#*$wxUKoGS?{}JVS^tpEF{{_~g zc=HR^<0_*;A{RUiVi}%p#rj`SJ;tw2zcHJlpjb5S6menrqWYIsdj=OCmzH%>>(`p_ z<{waAp~&GBp(c@=GDB6@!3_daGXunwevM^jy>R`AXO9%{BzeU)^h&_>;fP_j#t>{lbbFtv^_k8g$)o@Fm_6k&8Psfr~Jo~(=8C+y4%@G33 zx7l;Ty_-8oAgkjuu4P6;!dx_Mm5(b2R@38emXy1t>|6IoRY|)lF1YS67SuXIv>SBq zU@b(fdxI^?j%y9(DiW>Ua@I*c_<4ECQ?+=1?W2`+v#4N*k3ELs^(Ci;mE)f2 zde8oEXrE{^5r~fq%Uywu)hQY^yNwYkQ@2L2_*+9vXB>?(y4{|4K&B&`rJ+fI=m9-R zWoI{5GxR3UN@eQEf<)XNA}h z+p*kq%lwwxkUR!D#wQ}wzV5Tq(~PoNxA>qrf)y8gaaDPTqn9dqLQ%e>?Jm-`QU>bj ziLM&nFeI%-t>dvCn!9xRQ3J` z2-Koh$P+|is4BVgS&!ZpaDw`_(yBIsNL!)MyJmV^wjmJ|p8%FzB@$lcN9>s2~|{`Plc% z-HMCb4l0|g@uAf!UU3`~Zmhp*cs?fY7s8~1vOYIZr8`S zcyY{s5J9T`d?`SE@#>BH0F;J`Je(<}W$2HBcT2tqMVc!3LA{sv%E$LpEeiJn6sIRL z%xo$)$VbT!bC6Mk9X!C+fY}iLxirW&X(6Pn=8mUMIrIHHSNq$QU|DX2WGANI2>-SA?g1b8e zcXxlV03oWAHTsyLJyBn*L8h8at$kLUmzm8;$5Dt7r*msZEemYc1OO)g>XGNqs) znMBTdUVXXuXe}QZueQs>)j*JsuHSs;h))WY*&XFuHvo?{M<9rzXfONh99v`J49kwI zbX0sFIz{*LL&*He$mD%;Q)1fkST&=@v#`M@1&}-J;G|-+>&arBzwbY0+x<-wk{1WW zLUST5Wu;u#KOE=RNHUFhmv4k6@5i~HTIU6<6Xjntp(CW>XGHn)eIX)&`y%U)Jxiya zBu`?vfaFNBK7AkQUpM;us{b!u@>`DL-~Q4aWT-|PT0z_O@b{}!hzQ+1c3M?ee@vNu z-n)At!{ocuGBQfw0m-MpO{x3;RF zK2%0d{kc<`cMFj2Nx#zL(Vd`*?OAL!B12q|ZhN{l2t?(otE&S_rKx$LxjzCYPed@2 zBwHv@nGo~gLam}dx9qd$<_t(K^+7BnmO(qHxq9Ja@DkM6lSa-J<%uTrJEC2;ickJY zO9B)QQ1Air+>C;P0+-7}AdwXWCFhM};fKG)XDfj%9f4@6?l(VYW@gyUr|7h5M!v$H zK0g%fZ@pE+Cm{;x|B@yol%f^@LF)ga%fhhk2#{Mb1@V6T+?f&~LF}j|pH3=>ZcyAp$8^IY5K+M z?)ORDc5_R9Kv+pQK6Axl_}NHhen!m7ypZMWf#WU%PeEwWFTcBiCJ%4#Hf7uF6RG`y%Bh~`Y*QT@OiR@)YL{igB zfpmx2$qu;E1Ol#C8z`Y3SM5Yl2)O8IXk<1oCjiz))8oUfyuh^>!rM$h!5SYLj?(Zv$2t*dZkq0ul60oFZP0r{;jI?}G%w0@Z}NmV|p>K>lfrEN2uE!&|?4Z(tPKkvzq zx8E`v3g?md`kU|Lc({<;yPKvvMH#CoqfBH18lKdM?Xpus)8<58W 
zEFnwEfDd;-l*@4lgN9QpBvwx9;^d$}@A$|hfR2fdxJQLV1X5bsVub?oSdXm)6TNGI zT(k=5MK=~JM&(+S#otQfa(_M@Tz?2eUS+r2SN7YuU#vJdFaB3) zLZ?)?j5RHzxnFIp1kaV`gD@Df@-9}a>)2C)h*>f1k&41=e1Qg?88tfTe6Sq_^Asa@ z6_waKgvf+lpUZMHnSU8MBv*V$kr79)fZVtwEdf@Vg;PKbPO)Y+qIH-=7(NgNMu`{< zyb+MO`4RO1Ag|l8*X2|?5dJ?QgvkGQ5kf%=d3h`ax<{x9ypA05PVNuaH0A1}8k?J4 zllg-(vGL_)xS+6#)Q>@;8RWhmJ`eBSeLJ7ujGM03X8U}pu4)nxX!=H`JP@kp@nC*B z%cH_imP!u-p_ahDy);*8zPLyVP11Ba#EY~p{=IP(r$7&$JDb^0^AkYDaaVf}dB>c!3xrW4T9Bjz ze9l+fhpGtI0759N;07RXVsqILBl6nslCyM&6NSf0f+KkFN8kN&?!-_{<4MygHSjG! zhka8ZE`%f+$!WEaGX&k0U)+Q%`86ua%*+hnc_NG5-|Q7nay2!$RFo z^$=Jp52&lMTEzAAOX41lLUX;tZ zES(&PiLGAND5`10C0d@TE^aI-k!|K!awFu@>m8^Nm#CimZndbT#$j2e<)oinGQOOl za$mLTb7kLF7*LOHmvp7^! zvl(LU412r$Z#YnSULFlUIG1(fxe7c&j1`GLlMhDtXu0uV6fV-YK7&$#4aD6Bk96a- z0t!=$qaY&#&(ODSx;D1Gh>~Go#tnVT+Y`4=i89%W8}oZ}0j1VgcPE|ZARB^SKDr=I zmKR|lbOq4Oy-un7qTdd%f%q@8QvtvT>Su86 zuY-QuqZtySi2V0w<1A>u47Uan%GPZHOve8UG7#GYU;h6Z87S`mB^iiEBtX7AB=pms zbsKlB0=3Uc&h>TOkJk=+egV2{>|VcaAO?n3w!17oc**%&JwuSayH`cz#G71s35MR z`ZPzK&4v_w{N zmMwKE$j~3-JNAj4zZdG7T*PGx&}^TIwpA5|8NILgQTowkws(D$YOfT;n;u+r4mw0dW zJOwSwT*^=d&8rqvd4B*oVrDKrRiN%|FWUqJDObMy7P?)yv-1JIY%J*uP7vSq#@Duo z(r&_b8+Ne)WK7R^lmz4?gCiDrLs4WQzS2NNCo6A0Ow&6I@PQVz)}U062lWlIA1BA{ zNb8Yj0Pn^>Ga4|0tG&cf7P1q`^2Uo7T_8o@BDD9R5I7Drr`%w+amJqj&}qQroSV(t zwgoV0umV+eoW->iZ3C!?z{+p#bLh{Pds#xfBi+-{KW^NDRmvy-Tx!%7l|mwTHK%$z zE^lZ&P@><8Hz0Sn@pV05vG6aXkez$eHq8+4a2Ar_wOGPgja$9x3=^-;x}GV1K}JS7 z87^z80H@U_g!+TUc`tFtn@h*am%4TXGYh5d(eXJw;-!t5&HQ6*UE8bdL*ZTUhSZbZ za+~TQsnf1;Cq)QHEIwX9%Q8(}=jUw4y)1S?wus9h{0$j0*99O$mwaQ`dGq~#ng`HXo?&J0gA2b7r`;?AeNWSf^m@Q3{d)f6_vO*0v z?JBC-8sVisN2|VMU1ku?mJVj*;IsEV)(+c0Y;judP3v9_IZkwXX#kKnj7JYqZ?ks> z>KR-vi}uoP7R3VgguKQ-ci(}iP?rMUg71?De#?@<0Dnj4*dZ4~CBaL^Cba|SoLISR zgl!=&cwe!@oM-JHsVnZ?48JA;E+4SrEoi|KIO6Eq(Ak2#69!gBwcaS0dJSWTMX*YM zRC^2|s}=(DEe!x2l{}fAq{xuxRFR7hSLM<2BHUk>$vK;+pI@ZG76^-3wpKzSC8~G_ zh>3>|`)59?kTar~=sSLYF`TJ_T^i|RYHs7`t2Qz)fE>tQVPH#b=wY_c2!;lyuK#`l zhb_2;)e@^tOnA(j5&;he?k!WyCKQPVLRrYBYkG)yOwBT?!;c+;BAFx+oa`0P7~tEx 
zLN8pDTf-=byIYB!KjJGv=&NliqRZ=Qqw_{{jvJA1dPB!8YHNVK=$O74-9&s8M6Zx`$)Y z$#Z}R`*ZoB3YD|T0FuK6T{gpoGpD?9F*U{rm;0Sm9e6X9`zwJfVsOjtfs>|-Z)P(6 zR4VJNK~@DXe*%M_!s%B1T2Ce$Isd4F8NIH2ZDggoy{9<6RTt|01uFM|9&(#pJFZM5 zG+Z!cXFK{y207`H;llu40x!&h+vQ^0c3`#-${HDy9K;fw0jjuUO2IRAJ0MobAQV(8 z!QC}sl9_rY1yzh{VQ%g{N+pZVEV^JB@TCOR-k+2@>^%vgf4?ZYkMj@q)e4h!Wl#W_ zfv`ADnqG2S#Q4-+2*_D_ZYc-lHZGtu=^b$mv2)*TtsYKC7|5JW1a?X*kmAP_NbI{~u^YwHjj!^gH(0|2HmX5A`VA%D;7}X`R6=D7$*qsS;%hRl zY9w=gzX1;pRvu?adS7wg!(cbeA9Pe#FmTx`|5>%bCN7o+=>sV2Y|&1>8yqQTSXbay z{XDqVp>S8v=Zz>hSnarvYsJ0DQreK+Hl4cqLz~EO6vZnaKd5=$BQ<@tFoM1OoVWKy zEI}AxP$uZ@o=8LjAt8fMcz(B2!CMM$yq^TU>qJlTsRUEq9$3=2zqP3N%%kBHb~+*R z1cODd(0YTBVs(7y!aFb#__g3G*U%48k#1Ho^h>?9#OUYCp87pGG8R}MJMLz?{hd6K zbIlkh6)MvQUth0G)>1L0JoIsbzRwB~obqKlp9S=}dWHMSPiXFr{RPpPt8HS}BKZ#O z#ncLV-h3#{DZ-v5X!ucvUqpO(cf*D&Zb1{Rr(ndT%&Zoop?o}gZ@uFC6NZS4#iL7) z4J0GQ>-P}1`UNBaBA=mRWW-8kF7?E8?ESq8)dGz=pd3(gYura}A`z(&gc#&^^|X~H zixcl;gs(A*8dGVPrYquxmYd{n&3fEPBu<1+hM1ZfzwcJB%(FYtF#2w&#OTcJ-dD=-0yTR*tgm9jttaL+hF-j2v6T`NR+4?+ zP7O>YhbC_wsr)SO;$C7BzfrjpNhc{&TPG0ICGBP9OBG4H`qFWi5YFc~>gy!&mOq8B z?Oo@qg9kCT5B6nmp7yGGNk9+o0s@4Cz6WfNqxeyL^&k!@e;Ecl2xkStmtvK->-wOu zG~NUCYlzwwldS3$Iwlz@;U*7=`Gr1PU#pn1rvd_;*0v+Ul&1?_y1B)ga>^^a`HiEvpLKoFf}2ppN3!n=#y0HBMk+33wo}nZo zDDr^$MhROzt*B@ksmFdgS0>K!h@<%em!;t1E-;BLCPpE#tjqpzF`o~t-K@*gkkxXL z$4I!c$CcHgV9nIx!&^f~vNzJ488A^&To~=u8N&*_!zQWC@TBxK=%mJYk#|?j9_N#) zwgK5RHvJjIzYf!g?O*S(GETV~V6Tm?_QlZpZlh`M>8K^p=^#%fuaP|Y^n^TM&g!=J z!qaUmvx-4ncApGp-ib`~EX43IE^Nl>GVCK{s!vz4yTgR1Uc3fR;n8Q?EnAoL4R|fC zp6CurVbO^g&E{zr&5UCzlAxruiR0HjpAJ#p-Hd!$+} z+N(%KOdE1BB8d~b#6IoQ6BSfNK@k)~mAXx*m9r($c)hjHZjL|K3=SQwKq5c9Q0unQ zF^14vGo)SrSwwz*PDwC(pc}k0Ko_GgbXbxzo}8%b_+ZD*r^^EeS%Hx-KhV;2#kioT zs7qAHCy+E7i>ED&H@!a7ZEG`yV)_2i(c2_Q`I};$E=#|83AXvOYs`Q+!Y1Pc$>Yk~ zi4XqkI#%oiU+#iRA^;&gw@_Y^9N~CpcYG{g&eHy-NyJXcgMKx7A(MDf$sKlpW2ir=-ItuPQTRo_-3{3q`##0~A9n(K% ze*`#65=k_#0&HVoEh=S66F91^=k+SUYmjuX`x;4o4vzg0;9|j1)ENa6bDkq@@|i zqO(JwU6kj1D$5@Et8yBoUkS=W!!wm+JX>wgds%CnVJ2dT542kbGZ&ft4)~?K#d28B 
zdgyMPu*EPLs$XETwRoMeXJkZo#1`}fv4}`L;M>IX zEqb`57LY7wN=P>1ZF#>GZs^0bw$$$x)%y$p00-i+9sh+|yo8+-)To|hm}(jhHO)#h z3}b|H1nqTL{yyXg-w18IT-TlN{+{UV-WZp6r^ANtjwJ)mUFP^oOMU~Crf>`-D!Yk1 zw73v_?EQ60B8DY(TUi1y9}HHq8pDv=!6^4PFzJwbj^z!U)B=6tkb!MCU#EJWEoSW}>->-Bx+yJKa!yNcG=dM8T(k}PU2@Tc3k_W0C)hZ?aag>Kg$%;F2)^D` zUA!9qpg5%WedVBm+J_1ih{wcK~%Y8WN zyGi`xFrzHm-o_<6sjCAN~8gU(voWZNH)$&dNZV0lV|x6)6N|(ut8mA6WwE zahOvq=c@u0NcLhzGF*1i+f4{x*@I8XX}PF`X~B2~=F}e6?RAKND-?GYnWvBc8&v57 zY&>9;&#)XPa)wYQC4wEmBUkBk14$|eWQmcJ{2FNyi+rg;LUCK*dNFIF|5g}D%MKZ2 z^`<6~!)js1q@C+k%c%by@{9$K@vyQ_>q@TR)SEWawvS|{AgF#ar^>v%S5$g+?H6#E z0eMiRxmPoK1Y|{IJfS3htTpuz3SSaYXz@@cf5`Oai#l#Vqj9MYY9L85}R&ePQ;fvls z=iU5`gNcGAHJ}yx7UU@XbNA6gph2}jko6lnp4(ka>XpZWK zb57q{MWKj8qf;;K zA>Xwq8({(+SSqwO1s8mDsyqtb|ylC)?(5~o*(j|ZxZ4cC9_ ze9PjWH3O_vr*bmaX|(`kqJo0L+qVD|XDvTr?$63X2z3Xr%2`+x}xM2)YD_MN@^E)AIZla(}iFqCuXy6%}l~ zXg~~o!%@@iiuT#bvw;W0m+rug;E6XPH*aJi)zzrn3sBW`C9`ObY z5GVnO(yo@fe|}0|23SFb>gBH!!1Z+hJTzhq0;=VM>walBkfcVBLEvmebbB~e-qaPJ z50*T&1q-Y=8@zbqa)?lhI13ig799i3$xg+7VD?z_c)X|P$3lEc;uZxWo(;6Is{8p` zEre1o$+J8X({PM|m6!Cd4*J3iNME)9_&n6sxJ-tUVrW&PV4o`769uf712>Ld(@&ot zP#A%rB%T~$0gPVb+48}U-YSI}7`lTKpg%Q&MeCJ`8>$u%lbjrSXRj4(U+SL1^%o}$ z`wjx&MTNZ212$cRybk*-MD!9*nc2YQh;u>fJ&|P0`sByh$EJ2K{;mYkgqFj_dggn; zj1O-Flf?U{T-f9hN*Tt<8x%B?owIvCGGrfj`DbMTIKL^B{zyN+zE7SGzpTkk;*Y7ig<`>VwQBNa?S^^ka zecCLD|1nUaexLcJR!;dJ&*uhPNT=CkP!9oT=o`43sxbZFIt)$JkwY3e*d%BoSJ(M# z#tWPsi-Ek1X!o_3JIXP}`$~6H2BnQV1I1=^iXXQ9oUvDy#@lwh^xeahKcLN?XCmw% zh#gaW8rA!%ygX+zP|GL$B(x!bxZdyx@P1eD2_?kAz6jl7F-j$J%tfX0b9r5{7GUf2 z?_PV`IpII+9)*HXC8gvIJ4^Icu+`s4%+E+$#bJc-#MdO)L zDCCa^-blMVJQZN7VK{b+HMTjX=T6PFo7xA|3J=u@tjrB7pB2iD=d0EW3)Am5gJ5N3 zK=xT#_x)Rgs~gY_O@^}&%R#-7eztc>r`4JjV3wRim%}*g!|$3t>7)*S+-+EA{^3bI{?_iRf?JF;IFe&_q7($ z|Hi-Qm`81AZMmg5|7G&#BMXmp!-Rhk_{R9|w?gRM@&{}lot9E7c^Bot+)qy16h=6j z;gIIRc~I^$iVDLfOUY9)?nRTIe@xzu2A+YRlPK3>hF`iHypnU~<+1ZP`z(p0RHe=`?S!V-;fw=7yQltBI`bjk4tIQWhU-ZP$w! 
zXm8^N6XmM&K{A14r}eBRs+-%wS4x`mjE+YSIp2k+OH%!-->3|DI}|(K=PL3j|J8THLFp1)97faW#|dRDvzeqOYWC#<(hdezhuGPl!}HC9 z+v!5v%J%i&DVJ>M<>Evh;7uALL4@b^uSdoN^zuDsD065cvE_G0sqi$c&#O93AK3CW zi%NHCeQ3UB>Ce&9O>5+wwzAxPz{DfWb`*2iQ)OV6#s0pNWA1P_>|b?%!u(+~bMC>m z%3z{y;i8!+CZ-G-gJOCNHd$APc;4XfP@(qVs|3315r%po*uVswW3keQepTDBYOd_s zvYzXey;!*9G7WSKqSTXAOJ*$NYF*CqM^J|2f{k5~sDqdARR?=rdRgPv8_C3ygI};y zE!(fs8jP-|3`5ImsjmEZH9c%F_TO7Z2OA&_(^{vhrkt*ReYaGTY0(DuG-U~WfjL`% ze4XQ6ldo(w2Ua|oEq`QX!8bMcEiUpUBdOKS>WGWYgKMG7cG%DHA_n?BE0kdm&Ba6e z?sKG0$JFsN*f zL&6=-lo_dJ=qL|l5G9ymVg4NVM}(wh9%4m_nmcKM`@DVjvjlGyYtcnOVBDj=3e%yc z3QIv?Y%D#$pl2|Ky&n_{ffwq*?ON0EeL)DRU$WWk4fU&S+d(Y-EEvW5immX<>ORUFxRHwWQ=0H zt#Gw7y(s4BcNcHfn6e{Sw3G1;?b@1;Q{VrId_l;YSbLsnscWekwkYV}wB0AI7Nud? zMNwI(_Pj`wgz%bR#Z6pD9wrjFYi9SyUF4UaT@|`r|Fv0C(^p>JpFEVY==CK_J;=Og z@3I*PS`Mt@m?1y;;?z*cF2Yu@W7Jt^KXkyZ?jh$LytolPojzT&i7MNUHCz%A4t`@v}Pe{qU?`X}z{;XbU#0|7Q zMH`$b2)k;rY61_t$naj6+d}c$CR(2l{;29*Sy(FP^_U)W`jViblE!05R~6%6c~+LX z8RHcQ@oRF>msZ=l;%~2lM?dDxKIDf^l@}i9s_&?C6>X+&Z9cjbAEn7TEBWiFv3sfd zm85;a1!h@|+%7p@_a@;PdmLW*C$AU*KL4c<588XnrtZK$IzS|n|LXzRCOMX@9+Sf`h_}J zQu}B}Yp?gUbDrI*nnIVaC(*oMHrv50kbZim^f*`K_4*(Rb2s3)ncLL+@R7z%AAhqfXL1@8;jef?*&yNqM z$^0N=K4|o;qG|aQbb1;vxD903KvAw)pu&z@5H|_oeiTOp_TQ$blkRv8vu1Lv^P!RC z;*DJhX&q*Hz1)+dII~fgf;~&Mx4%2zQ9wUF`(EO~JOE2zDH7viLT!FAz$@3I^B z>u|gb6>@%ASv8YV59))wxN?)t9%<*&y!}{Rb=*Ub^r!VtF*Ku#YGyFOD3IMpRV&&v zl#%Dyg7Yuux#$b`oIWgTD@E5aQ2W!2N0yJF!Qe?jeQ{t=N&WRH1~XuFcG`pP)f&Iy z0ii-f)Lc2$Jh*LbQNnHp^5U#J;Wu-}_YU(BO)ngBM_A=y_l`NM<~Z<$T2)#X(Ju= zHZ`%xPp=YkDx|idkELie!D00LN>P*Jxsr8|lSN0XSLm!}M&7j!F4 zO0PM|5twJeJ$oNXb(qy$E`2>Uo2lbu#s$`+NF#T^h8%`17kKhHv;F!!UHsbN7DsjaJDe<@eDach+Vaoq>qpd{ zFzMGDXA(syX1ArX8HKNO8L((`G%89Z#qhg<-&Sd zOGg^Z#`vp ztJY@zM-%+T3lE@hMPSbAMcb*|93Qt6C_<9kAoG8(^^QZ|>uGmRYzx7Fnt`&1E$%}i zLN$Cgl=6+(;n**5f|`lx_4XF=;!u}j_Jd)4-J#_CPa|=0Y)!e(JR!9DNyLFe;Z?Ny zIZlnA_Gf-YMg4??{1_ZyTz&gJnb&p~4y{X9Q$WRn7HpX;s-Vv&-|L6pl9*jU3XJqU z>i0(5l#`ffkfE^OZg?n@Bw6-hGC!}{{wVluY7sa2UL0WX>MS%FPDlN*o>#__859F1 
znz@3bnpTwNQV$&#e_JBKQQy=Aj}mW*1GAEP?f`2^x$KzDZZt?#1ZorlD#+?K*6y%dGQe^hHbz>DBo5 ziP?b;*&=L0(II*I*0}7d22SQgOg?Vt4;4-&lIF0za@My!?stu0w3LZu`%U-Q z?0>!c3(v^aX~F(09<22-eENCZwE9I7%MqW^yrN0Is$2F;$d?p;6h>&6&$~TTi4{_2 zTn{G4BE7TwLQOPi88biF%ZCJ3rnmYKDqCA`233=i`XxfYG+rJrrG%h}xSP2@Y%V&h zyagv(x74hX+=F3vY`7%kfNjVuVH!7>ot{uyoCp!)>ggo?c4W!-xDeSscR(cG*y@Ob zfT!$~_g5?3nrzLw(EGA*&CYaSLOPT{n2fs0SQ3PQq8S;fXKi?blpRXX1)MbOi{iK2My_{(kz57jA;Wqnl-jt z{8vyMu%A>z+2#vlMG;~RLyJ68_L4KSq&xizxWArvd97H!K)9G=f5W~(XaD&WWX2@f zOUWN<88jNu2s#d{sK7ZQvZH@hU>q$aCZAIz$%#5?#$mj#rcA+eZshA;WpH=_bz6TR z&)tLlO2vPWRU9_=K$`PbV*9fIH)Je5Vq}eEQE$h0$BPf*KR~c)EpV;XOt=a=NxDF~ z8FAhRJH^|Bez9~VioM|pg=2n=UB(%mWu$(=5hXWPhFr@|mgm(lDU238Xx-8Hjl>q} z$n|}zw}1Lcb|=)CNxg%atN@iT-R3YRo#)HzSs;Zm_e7`iA+HBvz4r`#?5G@N7i=kI zEdA}t#X+%o)f?B%4%oZgqE4wk1e^;cEtj*M;O?-=%O!7tIp+C+Hj0R?U$+QV?7x@3 z1`!-zc`UpQ5h*WH0n#JvT*F4mmgpBTv7r{4amV2aSPCLGss~WR%1c5G(0T*Q9X|1B za1R9;ae(9S%-zxKb7rS0Qq82s0-PCab;?*;Qi3R=lj%#P4&&xiGD2(K$SV zLn%L4t`#YUOjH(VP*5?D0T~r{R#XfeP>N@T*jxTKu~N?neYg(E>@@ECw%lN ziDKDK-L5wd?{)s*Jz@GkPH(~x(e*9S?ryaPZ82W5rx9%%9*7t3k0ci|KVg#bv^hkE z6L8t|M^jS`6AEaVu>FSc1fIOX=Gdo%!z5NZKU6PAWaETWHkeHRM1RQRcz8`Fq4%DF zo?0D9-q1Zb=uuP@#=TWF8@hefXZ}V7Q!67Ke|3>-SWGgAPY7G*tEO$+*(2m*-jo=; z3Se~?-*mAzdxejG$h#bHJmI|9FYK+U?aH3qPYcAD4kVJ>doZOGz;yz)S) z+CSCSnRU)n-zIxCm6mVoW>7YxO~#5WoX3l|%GAcynJ4~JWMbx|a-EXD28QgDPkw!wpv}x0u>89&HE~CgKO^rY? zrr&S6X;{ugDd3aBg%xlT@W6eYVQ(bo3g9^H)HY>2{IxyD|225guD`^^e=ILA)2)+S z;z+5qi^8b+Un{4MWFa%W&{Sa8K9(sh)T;g9M&yp|$kT<2|$NtA+zN%Y|bGKJe+#cdj1x>s?FaiN!$pXTz0^&I^VzwS1#cFj7hyB&0OXjoq#|L9!zzu#fkbiBby-s-I6;wgN)VD$O+I|kA5S2kX~F>Ob^AMIUJd5 zP{$HuqbNgcq1>aF*tg&^<*>jt%xT3X!V?2m(L8k0HWY@p)z$!S0&YkvKFXJ*Og)6_ zdYNk3TYuZx(*{s_yrz9Uw5cc>Gd|kJ=%0LkjWv65$rl|7w)I-I%SGbE|5$Z(;H)4% zero;$dRrGo*2jN+d)6e0eM3QX-0gFMC7zr=s~oF4P^97W-UofNptbr++NgJ{uTBOZ z^~-`n^S-{)o{W{ihMx2>=mQ5mqr6(kjQ%en#Z#=VaEWh8>~`B7vHX>F^AmJ~4j^0= zJ=j#?l&)U9ICMi=yF8F#jWz|DJjq=7Oo##mM+QmA>B?3=ClJRF+EjNcno9-Ux=p{2Xe&_Bf#zlE3;glsit@_j=*;eKqGrmnHNx`y;fzpmxh)i! 
z6*FgI8hcVj_Ls58KI}&>s8W^{lJ2Xk_|eV4%lKIgNk5rby=GuC z^F{-b$;AxbSoF99$v;YE1!`iZI#rjK+}ok<8#Qdu6j<3A<v!`+TwYg&jT~U06mZu-P0_i=Bj#7q$t<4It3k2< zZScxv(>#yZht~+8GFnqp(^8_VDCBi7D019CJYT2OD_mb&i)@~$@!O`$aDU_hTVRw6S_9SJujqAMc(*wtm!c2jyr)=3^P!^zU0W@?Y@zZ zEh$kr14TGziAmaKV|!Thp$HO}%VG6nchI?4tpf_&++yK(kC;%9mh_ywd`&VEf?`?H z&MbKQ#V^7HOpN(ixNxf*=1Cz0P8OthLl9A$nMbPYOZHJIR`O)Akh^jeYbAD7yT%n( zJTPdg^S;}C)M|S>DT=#S?a*^KD<5wn?UvNTe!J_|qgpN1pTmcc@w}qha7{M0mj$f} zU%M`C)}0qR8YDXRs2Ed3$hsx8lPBy5EblERQp%%r`!nR!&z$oVa$hOse)hIsaow>| zOjNH;Xj)O2CQaW*bb4KGF@+rhVq12&p!>oUc_Pjlq$B><4wyh%xyitV$l{eOh!!XA z*RRptFI{^5B^^J-W|&k`4df#;PG*={7{KKsP^XR7k40Z7(r1Rgw2lcNJ|Do(nm-%n zta^d4N;ZZtfaOG&t^$?tEk}YuMH>y@02(n;e)9Tp%C};VG+eQG1h?nq_!RYCA))TK zP6H6khs!(>=RV2Sx@t_g*O4a_nI*Q;lD*!RG7&zQpIJE0A;4YYLa@HwhMZeI-qA_Y z$h_gaWSb7_(V zE2@ML-mgWrA5EpUz5|6ov_{PIuLfga7$jLw($ptFHs>W`NI+QhJMQi}B=KGr$HW|? zk)|tu!lGL?84Q*PZ~M^E6-9Cx0qh3>V_|DMsjnyoJ0m`KdZE0QIzXX;EN#^$woofz z*H&S9SK~@+_tAeg+Oz)48;U$+Mmn@)POaMUPMIw&4F5GZTU*;~)%}X>LOI^0DUIpo z%$mlD^P@Dt470i{vuwkZ`MRduXLZtH5Pm$NDx|QRcniINpqV7>;v|+!1urNizY zZ{clgfcL7(*2}%3=%VoE{kQ%50&Hf=D9l%RAIC491moGzXzDKeJAh$5@*GL~b1q$Q zn_w7GxS)bgsa5i!MXJ9zV)t~mECQyt_T^EdW1vs!E9cK*d4$P#HU~fXE8yZgm|Ez(2*H&$1F4`GR`$`p+6AfD=3`#1YXjfvAd#Ee6s0 z_Ii7>Cvw|(rg%BlUz()bD<;9f_D2rVmHfB`uRKr6@~BMNZQ7}#G8K(`4}GuG6&RYv zS<=a9;33J`U2u-x%Voi8K6+%z5W8cZDi2H8SMkM8hr|M2O!4I@4>{SkY*BM6bkAPC zj%3do@WA>x2%7y}k*O9*#?)q{m$TT$DFCfnurRw&FggXWuMU29@OCOLI`re5CVJ(B z?n@JIHYuzEA=m1D+gfEAP6%^Afo!R2y>z#GvR>#F27q;tsx37g?`pU(=5N1FPJRf; zOv4ZkQ~y1yQD-YgCpb&BybzTmR-b+RB6yc=q^}S;@~ES|W3TIihmo&?8-$`H{AON$ z*o0~T;!gVpNe8>tN0k=t4$L;n2F*7F27|1}$+Y^u`Y3*0i>pJssREn+h_J7TQ7kQ2 z{C4oyd9P@)v8p5O*I46~%vRgHpt?t4WQ&`5c7vH2fu)CKYa_EZ;rguftLah|^%XOm znBa5&*1`1jjk{}5LG=WUm-x3+Z&mwV?LiY143g7+s1EJJGb~3E`@*V1y4zP}dckWW zC!kDSZ0co+@3%o>$a_f`GxTKigAI!IsJlM(X3%Py(>?iqQ&YTGcf++;Gp_=1|R z0}|@kC3A+nb5=V6r=+!jQl!3*@i5=+!6a4uINN@{v_Rr>Jp}uBSWuLSg*#xt|9F+O zDKeuARkPL5p8Q%d?)(E8N9xF@EXJxOrf&a5MXkdm&D7CUyf@uWuSS$t-t;?W*7E%* 
zFPy3?k!cN;0r57!ydxCnRqFfGyIMTaBpZ(>UP}1AWcyTiY+^$$p`meX@|MfiKybNq z&kOQ&v&tuMkBVcERB*)OXc14-u1Xs5CAn>@?#CP4q(-jCWA|GE+L9V(dinxjm{25} zGj9|`ia>dg$p54%^+VgUI*>=`f)<_oPGebF9~i*(rixgDR}=#ami!aWvedZ-Sd+T5P_SV}( za)~`H<(||y*i?Xc1UiWC_Swn(q<2C8{aTO=sQyo~gy(N3_-`eQP^3xo^MC#vM1Vnj zVEfS`zJ~nILhGdgg~q4)wIBCn(R+H&cb-D;LP4p+(B#H;?|5asf?NK)7kQlLRcg=P z`Ri)Uizht=wG?sgg{Gn+8u8}lW)?XO;@|1RRt!kqDk=Hovazvg(V?yJJ=K)gZ~4L$ z0BNC7%$Ac~@yU70Hg-eGWI4wcki_R)#wh$RZi}Q$-e`^E(GqaU+S(cru3Gj7>EiX0 zq~z1jcO4Ba27I4(Mm*s?lmAq{klAlR1q|Eow28NaoXbfX@mv~zRkByGw-BJ9p|4)} zQrmDgCWF--!UgxI`X$-C-s6Ra3qCj*8m`AP8y+@4x-(e4Ih)LK|N9s8UcPWx0#_^E z4)tz8bTkkK`{M$=|GB8x1-U0{)$Nd@Rj)K=YjCmAZ z!Qn5`>r0uO0Hsa9Wz&^$H%_DwDa?uoenCjfqkvpC+6}-ixCYV)3Zf7wQ2sFxX+{4Q z+CWH%K`(w9mj$wbawNRLqEL|w`m5pekWq7ri#@J^!1y964z3n6|dItgLL_uoNunKJex899O&u60v%WFVL$nw9T$on7*pOizie!t%lqemspNMF=sYf-gi+5w z@csr2YGF5LF0TJ5wc76z#0t7RLxi5jp|C74+R~c%@bRBacuy|`PY09$yg#+^c|!T~ zLM>^pfy#NSONjWyZF+t!sMz1r%I5IWe~i$#Xh7xUFd{)q{1Gobml|AV&Ts6DtzY`|^JQNN<-? literal 0 HcmV?d00001 diff --git a/prowler/__main__.py b/prowler/__main__.py index 88288d9c43..7e03541ec2 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -6,6 +6,7 @@ import sys from colorama import Fore, Style +from prowler.config.config import get_available_compliance_frameworks from prowler.lib.banner import print_banner from prowler.lib.check.check import ( bulk_load_checks_metadata, @@ -32,7 +33,7 @@ from prowler.lib.check.custom_checks_metadata import ( ) from prowler.lib.cli.parser import ProwlerArgumentParser from prowler.lib.logger import logger, set_logging_config -from prowler.lib.outputs.compliance import display_compliance_table +from prowler.lib.outputs.compliance.compliance import display_compliance_table from prowler.lib.outputs.html import add_html_footer, fill_html_overview_statistics from prowler.lib.outputs.json import close_json from prowler.lib.outputs.outputs import extract_findings_statistics @@ -81,6 +82,9 @@ def prowler(): # We treat the compliance framework as 
another output format if compliance_framework: args.output_modes.extend(compliance_framework) + # If no input compliance framework, set all + else: + args.output_modes.extend(get_available_compliance_frameworks(provider)) # Set Logger configuration set_logging_config(args.log_level, args.log_file, args.only_logs) @@ -311,8 +315,12 @@ def prowler(): provider, ) - if compliance_framework and findings: - for compliance in compliance_framework: + if findings: + compliance_overview = False + if not compliance_framework: + compliance_overview = True + compliance_framework = get_available_compliance_frameworks(provider) + for compliance in sorted(compliance_framework): # Display compliance table display_compliance_table( findings, @@ -320,6 +328,11 @@ def prowler(): compliance, audit_output_options.output_filename, audit_output_options.output_directory, + compliance_overview, + ) + if compliance_overview: + print( + f"\nDetailed compliance results are in {Fore.YELLOW}{audit_output_options.output_directory}/compliance/{Style.RESET_ALL}\n" ) # If custom checks were passed, remove the modules diff --git a/prowler/config/config.py b/prowler/config/config.py index 859907b186..62d41b6758 100644 --- a/prowler/config/config.py +++ b/prowler/config/config.py @@ -26,9 +26,12 @@ banner_color = "\033[1;92m" actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__))) -def get_available_compliance_frameworks(): +def get_available_compliance_frameworks(provider=None): available_compliance_frameworks = [] - for provider in ["aws", "gcp", "azure"]: + providers = ["aws", "gcp", "azure"] + if provider: + providers = [provider] + for provider in providers: with os.scandir(f"{actual_directory}/../compliance/{provider}") as files: for file in files: if file.is_file() and file.name.endswith(".json"): diff --git a/prowler/lib/outputs/compliance.py b/prowler/lib/outputs/compliance.py deleted file mode 100644 index 4395af2d3c..0000000000 --- a/prowler/lib/outputs/compliance.py +++ 
/dev/null @@ -1,641 +0,0 @@ -import sys -from csv import DictWriter - -from colorama import Fore, Style -from tabulate import tabulate - -from prowler.config.config import orange_color, timestamp -from prowler.lib.check.models import Check_Report -from prowler.lib.logger import logger -from prowler.lib.outputs.models import ( - Check_Output_CSV_AWS_CIS, - Check_Output_CSV_AWS_ISO27001_2013, - Check_Output_CSV_AWS_Well_Architected, - Check_Output_CSV_ENS_RD2022, - Check_Output_CSV_GCP_CIS, - Check_Output_CSV_Generic_Compliance, - Check_Output_MITRE_ATTACK, - generate_csv_fields, - unroll_list, -) -from prowler.lib.utils.utils import outputs_unix_timestamp - - -def add_manual_controls(output_options, audit_info, file_descriptors): - try: - # Check if MANUAL control was already added to output - if "manual_check" in output_options.bulk_checks_metadata: - manual_finding = Check_Report( - output_options.bulk_checks_metadata["manual_check"].json() - ) - manual_finding.status = "INFO" - manual_finding.status_extended = "Manual check" - manual_finding.resource_id = "manual_check" - manual_finding.resource_name = "Manual check" - manual_finding.region = "" - manual_finding.location = "" - manual_finding.project_id = "" - fill_compliance( - output_options, manual_finding, audit_info, file_descriptors - ) - del output_options.bulk_checks_metadata["manual_check"] - except Exception as error: - logger.error( - f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) - - -def fill_compliance(output_options, finding, audit_info, file_descriptors): - try: - # We have to retrieve all the check's compliance requirements - check_compliance = output_options.bulk_checks_metadata[ - finding.check_metadata.CheckID - ].Compliance - for compliance in check_compliance: - csv_header = compliance_row = compliance_output = None - if ( - compliance.Framework == "ENS" - and compliance.Version == "RD2022" - and "ens_rd2022_aws" in output_options.output_modes - ): - 
compliance_output = "ens_rd2022_aws" - for requirement in compliance.Requirements: - requirement_description = requirement.Description - requirement_id = requirement.Id - for attribute in requirement.Attributes: - compliance_row = Check_Output_CSV_ENS_RD2022( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - AccountId=audit_info.audited_account, - Region=finding.region, - AssessmentDate=outputs_unix_timestamp( - output_options.unix_timestamp, timestamp - ), - Requirements_Id=requirement_id, - Requirements_Description=requirement_description, - Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl, - Requirements_Attributes_Marco=attribute.Marco, - Requirements_Attributes_Categoria=attribute.Categoria, - Requirements_Attributes_DescripcionControl=attribute.DescripcionControl, - Requirements_Attributes_Nivel=attribute.Nivel, - Requirements_Attributes_Tipo=attribute.Tipo, - Requirements_Attributes_Dimensiones=",".join( - attribute.Dimensiones - ), - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - CheckId=finding.check_metadata.CheckID, - ) - - csv_header = generate_csv_fields(Check_Output_CSV_ENS_RD2022) - - elif compliance.Framework == "CIS" and "cis_" in str( - output_options.output_modes - ): - compliance_output = ( - "cis_" + compliance.Version + "_" + compliance.Provider.lower() - ) - # Only with the version of CIS that was selected - if compliance_output in str(output_options.output_modes): - for requirement in compliance.Requirements: - requirement_description = requirement.Description - requirement_id = requirement.Id - for attribute in requirement.Attributes: - if compliance.Provider == "AWS": - compliance_row = Check_Output_CSV_AWS_CIS( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - AccountId=audit_info.audited_account, - Region=finding.region, - AssessmentDate=outputs_unix_timestamp( - output_options.unix_timestamp, timestamp 
- ), - Requirements_Id=requirement_id, - Requirements_Description=requirement_description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - Requirements_Attributes_References=attribute.References, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - CheckId=finding.check_metadata.CheckID, - ) - csv_header = generate_csv_fields( - Check_Output_CSV_AWS_CIS - ) - elif compliance.Provider == "GCP": - compliance_row = Check_Output_CSV_GCP_CIS( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - ProjectId=finding.project_id, - Location=finding.location.lower(), - AssessmentDate=outputs_unix_timestamp( - output_options.unix_timestamp, timestamp - ), - Requirements_Id=requirement_id, - Requirements_Description=requirement_description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - 
Requirements_Attributes_References=attribute.References, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - ResourceName=finding.resource_name, - CheckId=finding.check_metadata.CheckID, - ) - csv_header = generate_csv_fields( - Check_Output_CSV_GCP_CIS - ) - - elif ( - "AWS-Well-Architected-Framework" in compliance.Framework - and compliance.Provider == "AWS" - ): - compliance_output = compliance.Framework - if compliance.Version != "": - compliance_output += "_" + compliance.Version - if compliance.Provider != "": - compliance_output += "_" + compliance.Provider - - compliance_output = compliance_output.lower().replace("-", "_") - if compliance_output in output_options.output_modes: - for requirement in compliance.Requirements: - requirement_description = requirement.Description - requirement_id = requirement.Id - for attribute in requirement.Attributes: - compliance_row = Check_Output_CSV_AWS_Well_Architected( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - AccountId=audit_info.audited_account, - Region=finding.region, - AssessmentDate=outputs_unix_timestamp( - output_options.unix_timestamp, timestamp - ), - Requirements_Id=requirement_id, - Requirements_Description=requirement_description, - Requirements_Attributes_Name=attribute.Name, - Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId, - Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_SubSection=attribute.SubSection, - Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk, - Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl, - Status=finding.status, - StatusExtended=finding.status_extended, - 
ResourceId=finding.resource_id, - CheckId=finding.check_metadata.CheckID, - ) - - csv_header = generate_csv_fields( - Check_Output_CSV_AWS_Well_Architected - ) - - elif ( - compliance.Framework == "ISO27001" - and compliance.Version == "2013" - and compliance.Provider == "AWS" - ): - compliance_output = compliance.Framework - if compliance.Version != "": - compliance_output += "_" + compliance.Version - if compliance.Provider != "": - compliance_output += "_" + compliance.Provider - - compliance_output = compliance_output.lower().replace("-", "_") - if compliance_output in output_options.output_modes: - for requirement in compliance.Requirements: - requirement_description = requirement.Description - requirement_id = requirement.Id - requirement_name = requirement.Name - for attribute in requirement.Attributes: - compliance_row = Check_Output_CSV_AWS_ISO27001_2013( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - AccountId=audit_info.audited_account, - Region=finding.region, - AssessmentDate=outputs_unix_timestamp( - output_options.unix_timestamp, timestamp - ), - Requirements_Id=requirement_id, - Requirements_Name=requirement_name, - Requirements_Description=requirement_description, - Requirements_Attributes_Category=attribute.Category, - Requirements_Attributes_Objetive_ID=attribute.Objetive_ID, - Requirements_Attributes_Objetive_Name=attribute.Objetive_Name, - Requirements_Attributes_Check_Summary=attribute.Check_Summary, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - CheckId=finding.check_metadata.CheckID, - ) - - csv_header = generate_csv_fields(Check_Output_CSV_AWS_ISO27001_2013) - - elif ( - compliance.Framework == "MITRE-ATTACK" - and compliance.Version == "" - and compliance.Provider == "AWS" - ): - compliance_output = compliance.Framework - if compliance.Version != "": - compliance_output += "_" + compliance.Version - if compliance.Provider != "": - compliance_output 
+= "_" + compliance.Provider - - compliance_output = compliance_output.lower().replace("-", "_") - if compliance_output in output_options.output_modes: - for requirement in compliance.Requirements: - requirement_description = requirement.Description - requirement_id = requirement.Id - requirement_name = requirement.Name - attributes_aws_services = "" - attributes_categories = "" - attributes_values = "" - attributes_comments = "" - for attribute in requirement.Attributes: - attributes_aws_services += attribute.AWSService + "\n" - attributes_categories += attribute.Category + "\n" - attributes_values += attribute.Value + "\n" - attributes_comments += attribute.Comment + "\n" - compliance_row = Check_Output_MITRE_ATTACK( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - AccountId=audit_info.audited_account, - Region=finding.region, - AssessmentDate=outputs_unix_timestamp( - output_options.unix_timestamp, timestamp - ), - Requirements_Id=requirement_id, - Requirements_Description=requirement_description, - Requirements_Name=requirement_name, - Requirements_Tactics=unroll_list(requirement.Tactics), - Requirements_SubTechniques=unroll_list( - requirement.SubTechniques - ), - Requirements_Platforms=unroll_list(requirement.Platforms), - Requirements_TechniqueURL=requirement.TechniqueURL, - Requirements_Attributes_AWSServices=attributes_aws_services, - Requirements_Attributes_Categories=attributes_categories, - Requirements_Attributes_Values=attributes_values, - Requirements_Attributes_Comments=attributes_comments, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - CheckId=finding.check_metadata.CheckID, - ) - - csv_header = generate_csv_fields(Check_Output_MITRE_ATTACK) - - else: - compliance_output = compliance.Framework - if compliance.Version != "": - compliance_output += "_" + compliance.Version - if compliance.Provider != "": - compliance_output += "_" + compliance.Provider - - 
compliance_output = compliance_output.lower().replace("-", "_") - if compliance_output in output_options.output_modes: - for requirement in compliance.Requirements: - requirement_description = requirement.Description - requirement_id = requirement.Id - for attribute in requirement.Attributes: - compliance_row = Check_Output_CSV_Generic_Compliance( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - AccountId=audit_info.audited_account, - Region=finding.region, - AssessmentDate=outputs_unix_timestamp( - output_options.unix_timestamp, timestamp - ), - Requirements_Id=requirement_id, - Requirements_Description=requirement_description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_SubSection=attribute.SubSection, - Requirements_Attributes_SubGroup=attribute.SubGroup, - Requirements_Attributes_Service=attribute.Service, - Requirements_Attributes_Soc_Type=attribute.Soc_Type, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - CheckId=finding.check_metadata.CheckID, - ) - - csv_header = generate_csv_fields( - Check_Output_CSV_Generic_Compliance - ) - - if compliance_row: - csv_writer = DictWriter( - file_descriptors[compliance_output], - fieldnames=csv_header, - delimiter=";", - ) - csv_writer.writerow(compliance_row.__dict__) - except Exception as error: - logger.error( - f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) - - -def display_compliance_table( - findings: list, - bulk_checks_metadata: dict, - compliance_framework: str, - output_filename: str, - output_directory: str, -): - try: - if "ens_rd2022_aws" == compliance_framework: - marcos = {} - ens_compliance_table = { - "Proveedor": [], - "Marco/Categoria": [], - "Estado": [], - "Alto": [], - "Medio": [], - "Bajo": [], - "Opcional": [], - } - pass_count = fail_count = 0 - for finding in findings: - check = bulk_checks_metadata[finding.check_metadata.CheckID] - 
check_compliances = check.Compliance - for compliance in check_compliances: - if ( - compliance.Framework == "ENS" - and compliance.Provider == "AWS" - and compliance.Version == "RD2022" - ): - compliance_version = compliance.Version - compliance_fm = compliance.Framework - compliance_provider = compliance.Provider - for requirement in compliance.Requirements: - for attribute in requirement.Attributes: - marco_categoria = ( - f"{attribute.Marco}/{attribute.Categoria}" - ) - # Check if Marco/Categoria exists - if marco_categoria not in marcos: - marcos[marco_categoria] = { - "Estado": f"{Fore.GREEN}CUMPLE{Style.RESET_ALL}", - "Opcional": 0, - "Alto": 0, - "Medio": 0, - "Bajo": 0, - } - if finding.status == "FAIL": - fail_count += 1 - marcos[marco_categoria][ - "Estado" - ] = f"{Fore.RED}NO CUMPLE{Style.RESET_ALL}" - elif finding.status == "PASS": - pass_count += 1 - if attribute.Nivel == "opcional": - marcos[marco_categoria]["Opcional"] += 1 - elif attribute.Nivel == "alto": - marcos[marco_categoria]["Alto"] += 1 - elif attribute.Nivel == "medio": - marcos[marco_categoria]["Medio"] += 1 - elif attribute.Nivel == "bajo": - marcos[marco_categoria]["Bajo"] += 1 - - # Add results to table - for marco in sorted(marcos): - ens_compliance_table["Proveedor"].append(compliance.Provider) - ens_compliance_table["Marco/Categoria"].append(marco) - ens_compliance_table["Estado"].append(marcos[marco]["Estado"]) - ens_compliance_table["Opcional"].append( - f"{Fore.BLUE}{marcos[marco]['Opcional']}{Style.RESET_ALL}" - ) - ens_compliance_table["Alto"].append( - f"{Fore.LIGHTRED_EX}{marcos[marco]['Alto']}{Style.RESET_ALL}" - ) - ens_compliance_table["Medio"].append( - f"{orange_color}{marcos[marco]['Medio']}{Style.RESET_ALL}" - ) - ens_compliance_table["Bajo"].append( - f"{Fore.YELLOW}{marcos[marco]['Bajo']}{Style.RESET_ALL}" - ) - if fail_count + pass_count < 0: - print( - f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm} {compliance_version} - 
{compliance_provider}{Style.RESET_ALL}.\n" - ) - else: - print( - f"\nEstado de Cumplimiento de {Fore.YELLOW}{compliance_fm} {compliance_version} - {compliance_provider}{Style.RESET_ALL}:" - ) - overview_table = [ - [ - f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) NO CUMPLE{Style.RESET_ALL}", - f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) CUMPLE{Style.RESET_ALL}", - ] - ] - print(tabulate(overview_table, tablefmt="rounded_grid")) - print( - f"\nResultados de {Fore.YELLOW}{compliance_fm} {compliance_version} - {compliance_provider}{Style.RESET_ALL}:" - ) - print( - tabulate( - ens_compliance_table, headers="keys", tablefmt="rounded_grid" - ) - ) - print( - f"{Style.BRIGHT}* Solo aparece el Marco/Categoria que contiene resultados.{Style.RESET_ALL}" - ) - print(f"\nResultados detallados de {compliance_fm} en:") - print( - f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n" - ) - elif "cis_" in compliance_framework: - sections = {} - cis_compliance_table = { - "Provider": [], - "Section": [], - "Level 1": [], - "Level 2": [], - } - pass_count = fail_count = 0 - for finding in findings: - check = bulk_checks_metadata[finding.check_metadata.CheckID] - check_compliances = check.Compliance - for compliance in check_compliances: - if ( - compliance.Framework == "CIS" - and compliance.Version in compliance_framework - ): - compliance_version = compliance.Version - compliance_fm = compliance.Framework - for requirement in compliance.Requirements: - for attribute in requirement.Attributes: - section = attribute.Section - # Check if Section exists - if section not in sections: - sections[section] = { - "Status": f"{Fore.GREEN}PASS{Style.RESET_ALL}", - "Level 1": {"FAIL": 0, "PASS": 0}, - "Level 2": {"FAIL": 0, "PASS": 0}, - } - if finding.status == "FAIL": - fail_count += 1 - elif finding.status == "PASS": - pass_count += 1 - if attribute.Profile == "Level 1": - if finding.status == 
"FAIL": - sections[section]["Level 1"]["FAIL"] += 1 - else: - sections[section]["Level 1"]["PASS"] += 1 - elif attribute.Profile == "Level 2": - if finding.status == "FAIL": - sections[section]["Level 2"]["FAIL"] += 1 - else: - sections[section]["Level 2"]["PASS"] += 1 - - # Add results to table - sections = dict(sorted(sections.items())) - for section in sections: - cis_compliance_table["Provider"].append(compliance.Provider) - cis_compliance_table["Section"].append(section) - if sections[section]["Level 1"]["FAIL"] > 0: - cis_compliance_table["Level 1"].append( - f"{Fore.RED}FAIL({sections[section]['Level 1']['FAIL']}){Style.RESET_ALL}" - ) - else: - cis_compliance_table["Level 1"].append( - f"{Fore.GREEN}PASS({sections[section]['Level 1']['PASS']}){Style.RESET_ALL}" - ) - if sections[section]["Level 2"]["FAIL"] > 0: - cis_compliance_table["Level 2"].append( - f"{Fore.RED}FAIL({sections[section]['Level 2']['FAIL']}){Style.RESET_ALL}" - ) - else: - cis_compliance_table["Level 2"].append( - f"{Fore.GREEN}PASS({sections[section]['Level 2']['PASS']}){Style.RESET_ALL}" - ) - if fail_count + pass_count < 1: - print( - f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}-{compliance_version}{Style.RESET_ALL}.\n" - ) - else: - print( - f"\nCompliance Status of {Fore.YELLOW}{compliance_fm}-{compliance_version}{Style.RESET_ALL} Framework:" - ) - overview_table = [ - [ - f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}", - f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}", - ] - ] - print(tabulate(overview_table, tablefmt="rounded_grid")) - print( - f"\nFramework {Fore.YELLOW}{compliance_fm}-{compliance_version}{Style.RESET_ALL} Results:" - ) - print( - tabulate( - cis_compliance_table, headers="keys", tablefmt="rounded_grid" - ) - ) - print( - f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" - ) - print(f"\nDetailed 
results of {compliance_fm} are in:") - print( - f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n" - ) - elif "mitre_attack" in compliance_framework: - tactics = {} - mitre_compliance_table = { - "Provider": [], - "Tactic": [], - "Status": [], - } - pass_count = fail_count = 0 - for finding in findings: - check = bulk_checks_metadata[finding.check_metadata.CheckID] - check_compliances = check.Compliance - for compliance in check_compliances: - if ( - "MITRE-ATTACK" in compliance.Framework - and compliance.Version in compliance_framework - ): - compliance_fm = compliance.Framework - for requirement in compliance.Requirements: - for tactic in requirement.Tactics: - if tactic not in tactics: - tactics[tactic] = {"FAIL": 0, "PASS": 0} - if finding.status == "FAIL": - fail_count += 1 - tactics[tactic]["FAIL"] += 1 - elif finding.status == "PASS": - pass_count += 1 - tactics[tactic]["PASS"] += 1 - - # Add results to table - tactics = dict(sorted(tactics.items())) - for tactic in tactics: - mitre_compliance_table["Provider"].append(compliance.Provider) - mitre_compliance_table["Tactic"].append(tactic) - if tactics[tactic]["FAIL"] > 0: - mitre_compliance_table["Status"].append( - f"{Fore.RED}FAIL({tactics[tactic]['FAIL']}){Style.RESET_ALL}" - ) - else: - mitre_compliance_table["Status"].append( - f"{Fore.GREEN}PASS({tactics[tactic]['PASS']}){Style.RESET_ALL}" - ) - if fail_count + pass_count < 1: - print( - f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL}.\n" - ) - else: - print( - f"\nCompliance Status of {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL} Framework:" - ) - overview_table = [ - [ - f"{Fore.RED}{round(fail_count/(fail_count+pass_count)*100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}", - f"{Fore.GREEN}{round(pass_count/(fail_count+pass_count)*100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}", - ] - ] - print(tabulate(overview_table, tablefmt="rounded_grid")) - print( - f"\nFramework 
{Fore.YELLOW}{compliance_fm}{Style.RESET_ALL} Results:" - ) - print( - tabulate( - mitre_compliance_table, headers="keys", tablefmt="rounded_grid" - ) - ) - print( - f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" - ) - print(f"\nDetailed results of {compliance_fm} are in:") - print( - f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n" - ) - else: - print(f"\nDetailed results of {compliance_framework.upper()} are in:") - print( - f" - CSV: {output_directory}/{output_filename}_{compliance_framework}.csv\n" - ) - except Exception as error: - logger.critical( - f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}" - ) - sys.exit(1) diff --git a/prowler/lib/outputs/compliance/__init__.py b/prowler/lib/outputs/compliance/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/lib/outputs/compliance/aws_well_architected_framework.py b/prowler/lib/outputs/compliance/aws_well_architected_framework.py new file mode 100644 index 0000000000..f13d2f935e --- /dev/null +++ b/prowler/lib/outputs/compliance/aws_well_architected_framework.py @@ -0,0 +1,55 @@ +from csv import DictWriter + +from prowler.config.config import timestamp +from prowler.lib.outputs.models import ( + Check_Output_CSV_AWS_Well_Architected, + generate_csv_fields, +) +from prowler.lib.utils.utils import outputs_unix_timestamp + + +def write_compliance_row_aws_well_architected_framework( + file_descriptors, finding, compliance, output_options, audit_info +): + compliance_output = compliance.Framework + if compliance.Version != "": + compliance_output += "_" + compliance.Version + if compliance.Provider != "": + compliance_output += "_" + compliance.Provider + compliance_output = compliance_output.lower().replace("-", "_") + csv_header = generate_csv_fields(Check_Output_CSV_AWS_Well_Architected) + csv_writer = DictWriter( + file_descriptors[compliance_output], + fieldnames=csv_header, + delimiter=";", + ) + for 
requirement in compliance.Requirements: + requirement_description = requirement.Description + requirement_id = requirement.Id + for attribute in requirement.Attributes: + compliance_row = Check_Output_CSV_AWS_Well_Architected( + Provider=finding.check_metadata.Provider, + Description=compliance.Description, + AccountId=audit_info.audited_account, + Region=finding.region, + AssessmentDate=outputs_unix_timestamp( + output_options.unix_timestamp, timestamp + ), + Requirements_Id=requirement_id, + Requirements_Description=requirement_description, + Requirements_Attributes_Name=attribute.Name, + Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId, + Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_SubSection=attribute.SubSection, + Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk, + Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + CheckId=finding.check_metadata.CheckID, + ) + + csv_writer.writerow(compliance_row.__dict__) diff --git a/prowler/lib/outputs/compliance/cis.py b/prowler/lib/outputs/compliance/cis.py new file mode 100644 index 0000000000..5cdfa250f9 --- /dev/null +++ b/prowler/lib/outputs/compliance/cis.py @@ -0,0 +1,36 @@ +from prowler.lib.outputs.compliance.cis_aws import generate_compliance_row_cis_aws +from prowler.lib.outputs.compliance.cis_gcp import generate_compliance_row_cis_gcp +from prowler.lib.outputs.csv import write_csv + + +def write_compliance_row_cis( + file_descriptors, + finding, + compliance, + output_options, + audit_info, + input_compliance_frameworks, +): + compliance_output = "cis_" + compliance.Version + "_" + 
compliance.Provider.lower() + + # Only with the version of CIS that was selected + if compliance_output in str(input_compliance_frameworks): + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + if compliance.Provider == "AWS": + (compliance_row, csv_header) = generate_compliance_row_cis_aws( + finding, + compliance, + requirement, + attribute, + output_options, + audit_info, + ) + elif compliance.Provider == "GCP": + (compliance_row, csv_header) = generate_compliance_row_cis_gcp( + finding, compliance, output_options + ) + + write_csv( + file_descriptors[compliance_output], csv_header, compliance_row + ) diff --git a/prowler/lib/outputs/compliance/cis_aws.py b/prowler/lib/outputs/compliance/cis_aws.py new file mode 100644 index 0000000000..31604da5ee --- /dev/null +++ b/prowler/lib/outputs/compliance/cis_aws.py @@ -0,0 +1,34 @@ +from prowler.config.config import timestamp +from prowler.lib.outputs.models import Check_Output_CSV_AWS_CIS, generate_csv_fields +from prowler.lib.utils.utils import outputs_unix_timestamp + + +def generate_compliance_row_cis_aws( + finding, compliance, requirement, attribute, output_options, audit_info +): + compliance_row = Check_Output_CSV_AWS_CIS( + Provider=finding.check_metadata.Provider, + Description=compliance.Description, + AccountId=audit_info.audited_account, + Region=finding.region, + AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + 
Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + CheckId=finding.check_metadata.CheckID, + ) + csv_header = generate_csv_fields(Check_Output_CSV_AWS_CIS) + + return compliance_row, csv_header diff --git a/prowler/lib/outputs/compliance/cis_gcp.py b/prowler/lib/outputs/compliance/cis_gcp.py new file mode 100644 index 0000000000..bbcbb2ff33 --- /dev/null +++ b/prowler/lib/outputs/compliance/cis_gcp.py @@ -0,0 +1,35 @@ +from prowler.config.config import timestamp +from prowler.lib.outputs.models import Check_Output_CSV_GCP_CIS, generate_csv_fields +from prowler.lib.utils.utils import outputs_unix_timestamp + + +def generate_compliance_row_cis_gcp( + finding, compliance, requirement, attribute, output_options +): + compliance_row = Check_Output_CSV_GCP_CIS( + Provider=finding.check_metadata.Provider, + Description=compliance.Description, + ProjectId=finding.project_id, + Location=finding.location.lower(), + AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + 
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + ResourceName=finding.resource_name, + CheckId=finding.check_metadata.CheckID, + ) + csv_header = generate_csv_fields(Check_Output_CSV_GCP_CIS) + + return compliance_row, csv_header diff --git a/prowler/lib/outputs/compliance/compliance.py b/prowler/lib/outputs/compliance/compliance.py new file mode 100644 index 0000000000..b567546590 --- /dev/null +++ b/prowler/lib/outputs/compliance/compliance.py @@ -0,0 +1,472 @@ +import sys + +from colorama import Fore, Style +from tabulate import tabulate + +from prowler.config.config import orange_color +from prowler.lib.check.models import Check_Report +from prowler.lib.logger import logger +from prowler.lib.outputs.compliance.aws_well_architected_framework import ( + write_compliance_row_aws_well_architected_framework, +) +from prowler.lib.outputs.compliance.cis import write_compliance_row_cis +from prowler.lib.outputs.compliance.ens_rd2022_aws import ( + write_compliance_row_ens_rd2022_aws, +) +from prowler.lib.outputs.compliance.generic import write_compliance_row_generic +from prowler.lib.outputs.compliance.iso27001_2013_aws import ( + write_compliance_row_iso27001_2013_aws, +) +from prowler.lib.outputs.compliance.mitre_attack_aws import ( + write_compliance_row_mitre_attack_aws, +) + + +def add_manual_controls( + output_options, audit_info, file_descriptors, input_compliance_frameworks +): + try: + # Check if MANUAL control was already added to output + if "manual_check" in output_options.bulk_checks_metadata: + manual_finding = Check_Report( + output_options.bulk_checks_metadata["manual_check"].json() + ) + manual_finding.status = "INFO" + manual_finding.status_extended = "Manual check" + manual_finding.resource_id = "manual_check" + manual_finding.resource_name = "Manual check" + 
manual_finding.region = "" + manual_finding.location = "" + manual_finding.project_id = "" + fill_compliance( + output_options, + manual_finding, + audit_info, + file_descriptors, + input_compliance_frameworks, + ) + del output_options.bulk_checks_metadata["manual_check"] + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + +def get_check_compliance_frameworks_in_input( + check_id, bulk_checks_metadata, input_compliance_frameworks +): + """get_check_compliance_frameworks_in_input returns a list of Compliance for the given check if the compliance framework is present in the input compliance to execute""" + check_compliances = [] + if bulk_checks_metadata and bulk_checks_metadata[check_id]: + for compliance in bulk_checks_metadata[check_id].Compliance: + compliance_name = "" + if compliance.Version: + compliance_name = ( + compliance.Framework.lower() + + "_" + + compliance.Version.lower() + + "_" + + compliance.Provider.lower() + ) + else: + compliance_name = ( + compliance.Framework.lower() + "_" + compliance.Provider.lower() + ) + if compliance_name.replace("-", "_") in input_compliance_frameworks: + check_compliances.append(compliance) + + return check_compliances + + +def fill_compliance( + output_options, finding, audit_info, file_descriptors, input_compliance_frameworks +): + try: + # We have to retrieve all the check's compliance requirements and get the ones matching with the input ones + check_compliances = get_check_compliance_frameworks_in_input( + finding.check_metadata.CheckID, + output_options.bulk_checks_metadata, + input_compliance_frameworks, + ) + + for compliance in check_compliances: + if compliance.Framework == "ENS" and compliance.Version == "RD2022": + write_compliance_row_ens_rd2022_aws( + file_descriptors, finding, compliance, output_options, audit_info + ) + + elif compliance.Framework == "CIS": + write_compliance_row_cis( + file_descriptors, + finding, + compliance, 
+ output_options, + audit_info, + input_compliance_frameworks, + ) + + elif ( + "AWS-Well-Architected-Framework" in compliance.Framework + and compliance.Provider == "AWS" + ): + write_compliance_row_aws_well_architected_framework( + file_descriptors, finding, compliance, output_options, audit_info + ) + + elif ( + compliance.Framework == "ISO27001" + and compliance.Version == "2013" + and compliance.Provider == "AWS" + ): + write_compliance_row_iso27001_2013_aws( + file_descriptors, finding, compliance, output_options, audit_info + ) + + elif ( + compliance.Framework == "MITRE-ATTACK" + and compliance.Version == "" + and compliance.Provider == "AWS" + ): + write_compliance_row_mitre_attack_aws( + file_descriptors, finding, compliance, output_options, audit_info + ) + + else: + write_compliance_row_generic( + file_descriptors, finding, compliance, output_options, audit_info + ) + + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + +def display_compliance_table( + findings: list, + bulk_checks_metadata: dict, + compliance_framework: str, + output_filename: str, + output_directory: str, + compliance_overview: bool, +): + try: + if "ens_rd2022_aws" == compliance_framework: + marcos = {} + ens_compliance_table = { + "Proveedor": [], + "Marco/Categoria": [], + "Estado": [], + "Alto": [], + "Medio": [], + "Bajo": [], + "Opcional": [], + } + pass_count = fail_count = 0 + for finding in findings: + check = bulk_checks_metadata[finding.check_metadata.CheckID] + check_compliances = check.Compliance + for compliance in check_compliances: + if ( + compliance.Framework == "ENS" + and compliance.Provider == "AWS" + and compliance.Version == "RD2022" + ): + compliance_version = compliance.Version + compliance_fm = compliance.Framework + compliance_provider = compliance.Provider + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + marco_categoria = ( + 
 f"{attribute.Marco}/{attribute.Categoria}" + ) + # Check if Marco/Categoria exists + if marco_categoria not in marcos: + marcos[marco_categoria] = { + "Estado": f"{Fore.GREEN}CUMPLE{Style.RESET_ALL}", + "Opcional": 0, + "Alto": 0, + "Medio": 0, + "Bajo": 0, + } + if finding.status == "FAIL": + fail_count += 1 + marcos[marco_categoria][ + "Estado" + ] = f"{Fore.RED}NO CUMPLE{Style.RESET_ALL}" + elif finding.status == "PASS": + pass_count += 1 + if attribute.Nivel == "opcional": + marcos[marco_categoria]["Opcional"] += 1 + elif attribute.Nivel == "alto": + marcos[marco_categoria]["Alto"] += 1 + elif attribute.Nivel == "medio": + marcos[marco_categoria]["Medio"] += 1 + elif attribute.Nivel == "bajo": + marcos[marco_categoria]["Bajo"] += 1 + + # Add results to table + for marco in sorted(marcos): + ens_compliance_table["Proveedor"].append(compliance.Provider) + ens_compliance_table["Marco/Categoria"].append(marco) + ens_compliance_table["Estado"].append(marcos[marco]["Estado"]) + ens_compliance_table["Opcional"].append( + f"{Fore.BLUE}{marcos[marco]['Opcional']}{Style.RESET_ALL}" + ) + ens_compliance_table["Alto"].append( + f"{Fore.LIGHTRED_EX}{marcos[marco]['Alto']}{Style.RESET_ALL}" + ) + ens_compliance_table["Medio"].append( + f"{orange_color}{marcos[marco]['Medio']}{Style.RESET_ALL}" + ) + ens_compliance_table["Bajo"].append( + f"{Fore.YELLOW}{marcos[marco]['Bajo']}{Style.RESET_ALL}" + ) + if fail_count + pass_count < 1: + print( + f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}_{compliance_version}_{compliance_provider}{Style.RESET_ALL}.\n" + ) + else: + print( + f"\nEstado de Cumplimiento de {Fore.YELLOW}{compliance_fm}_{compliance_version}_{compliance_provider}{Style.RESET_ALL}:" + ) + overview_table = [ + [ + f"{Fore.RED}{round(fail_count / (fail_count + pass_count) * 100, 2)}% ({fail_count}) NO CUMPLE{Style.RESET_ALL}", + f"{Fore.GREEN}{round(pass_count / (fail_count + pass_count) * 100, 2)}% ({pass_count}) CUMPLE{Style.RESET_ALL}", 
+ ] + ] + print(tabulate(overview_table, tablefmt="rounded_grid")) + if not compliance_overview: + print( + f"\nResultados de {Fore.YELLOW}{compliance_fm}_{compliance_version}_{compliance_provider}{Style.RESET_ALL}:" + ) + print( + tabulate( + ens_compliance_table, + headers="keys", + tablefmt="rounded_grid", + ) + ) + print( + f"{Style.BRIGHT}* Solo aparece el Marco/Categoria que contiene resultados.{Style.RESET_ALL}" + ) + print(f"\nResultados detallados de {compliance_fm} en:") + print( + f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" + ) + elif "cis_" in compliance_framework: + sections = {} + cis_compliance_table = { + "Provider": [], + "Section": [], + "Level 1": [], + "Level 2": [], + } + pass_count = fail_count = 0 + for finding in findings: + check = bulk_checks_metadata[finding.check_metadata.CheckID] + check_compliances = check.Compliance + for compliance in check_compliances: + if ( + compliance.Framework == "CIS" + and compliance.Version in compliance_framework + ): + compliance_version = compliance.Version + compliance_fm = compliance.Framework + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + section = attribute.Section + # Check if Section exists + if section not in sections: + sections[section] = { + "Status": f"{Fore.GREEN}PASS{Style.RESET_ALL}", + "Level 1": {"FAIL": 0, "PASS": 0}, + "Level 2": {"FAIL": 0, "PASS": 0}, + } + if finding.status == "FAIL": + fail_count += 1 + elif finding.status == "PASS": + pass_count += 1 + if attribute.Profile == "Level 1": + if finding.status == "FAIL": + sections[section]["Level 1"]["FAIL"] += 1 + else: + sections[section]["Level 1"]["PASS"] += 1 + elif attribute.Profile == "Level 2": + if finding.status == "FAIL": + sections[section]["Level 2"]["FAIL"] += 1 + else: + sections[section]["Level 2"]["PASS"] += 1 + + # Add results to table + sections = dict(sorted(sections.items())) + for section in sections: + 
cis_compliance_table["Provider"].append(compliance.Provider) + cis_compliance_table["Section"].append(section) + if sections[section]["Level 1"]["FAIL"] > 0: + cis_compliance_table["Level 1"].append( + f"{Fore.RED}FAIL({sections[section]['Level 1']['FAIL']}){Style.RESET_ALL}" + ) + else: + cis_compliance_table["Level 1"].append( + f"{Fore.GREEN}PASS({sections[section]['Level 1']['PASS']}){Style.RESET_ALL}" + ) + if sections[section]["Level 2"]["FAIL"] > 0: + cis_compliance_table["Level 2"].append( + f"{Fore.RED}FAIL({sections[section]['Level 2']['FAIL']}){Style.RESET_ALL}" + ) + else: + cis_compliance_table["Level 2"].append( + f"{Fore.GREEN}PASS({sections[section]['Level 2']['PASS']}){Style.RESET_ALL}" + ) + if fail_count + pass_count < 1: + print( + f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}_{compliance_version}{Style.RESET_ALL}.\n" + ) + else: + print( + f"\nCompliance Status of {Fore.YELLOW}{compliance_fm}_{compliance_version}{Style.RESET_ALL} Framework:" + ) + overview_table = [ + [ + f"{Fore.RED}{round(fail_count / (fail_count + pass_count) * 100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}", + f"{Fore.GREEN}{round(pass_count / (fail_count + pass_count) * 100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}", + ] + ] + print(tabulate(overview_table, tablefmt="rounded_grid")) + if not compliance_overview: + print( + f"\nFramework {Fore.YELLOW}{compliance_fm}_{compliance_version}{Style.RESET_ALL} Results:" + ) + print( + tabulate( + cis_compliance_table, + headers="keys", + tablefmt="rounded_grid", + ) + ) + print( + f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" + ) + print(f"\nDetailed results of {compliance_fm} are in:") + print( + f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" + ) + elif "mitre_attack" in compliance_framework: + tactics = {} + mitre_compliance_table = { + "Provider": [], + "Tactic": [], + "Status": [], + } + pass_count = fail_count = 0 + for 
finding in findings: + check = bulk_checks_metadata[finding.check_metadata.CheckID] + check_compliances = check.Compliance + for compliance in check_compliances: + if ( + "MITRE-ATTACK" in compliance.Framework + and compliance.Version in compliance_framework + ): + compliance_fm = compliance.Framework + for requirement in compliance.Requirements: + for tactic in requirement.Tactics: + if tactic not in tactics: + tactics[tactic] = {"FAIL": 0, "PASS": 0} + if finding.status == "FAIL": + fail_count += 1 + tactics[tactic]["FAIL"] += 1 + elif finding.status == "PASS": + pass_count += 1 + tactics[tactic]["PASS"] += 1 + + # Add results to table + tactics = dict(sorted(tactics.items())) + for tactic in tactics: + mitre_compliance_table["Provider"].append(compliance.Provider) + mitre_compliance_table["Tactic"].append(tactic) + if tactics[tactic]["FAIL"] > 0: + mitre_compliance_table["Status"].append( + f"{Fore.RED}FAIL({tactics[tactic]['FAIL']}){Style.RESET_ALL}" + ) + else: + mitre_compliance_table["Status"].append( + f"{Fore.GREEN}PASS({tactics[tactic]['PASS']}){Style.RESET_ALL}" + ) + if fail_count + pass_count < 1: + print( + f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL}.\n" + ) + else: + print( + f"\nCompliance Status of {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL} Framework:" + ) + overview_table = [ + [ + f"{Fore.RED}{round(fail_count / (fail_count + pass_count) * 100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}", + f"{Fore.GREEN}{round(pass_count / (fail_count + pass_count) * 100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}", + ] + ] + print(tabulate(overview_table, tablefmt="rounded_grid")) + if not compliance_overview: + print( + f"\nFramework {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL} Results:" + ) + print( + tabulate( + mitre_compliance_table, + headers="keys", + tablefmt="rounded_grid", + ) + ) + print( + f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" + ) + print(f"\nDetailed results of 
{compliance_fm} are in:") + print( + f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" + ) + else: + pass_count = fail_count = 0 + for finding in findings: + check = bulk_checks_metadata[finding.check_metadata.CheckID] + check_compliances = check.Compliance + for compliance in check_compliances: + if ( + compliance.Framework.upper() + in compliance_framework.upper().replace("_", "-") + and compliance.Version in compliance_framework.upper() + and compliance.Provider in compliance_framework.upper() + ): + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + if finding.status == "FAIL": + fail_count += 1 + elif finding.status == "PASS": + pass_count += 1 + if fail_count + pass_count < 1: + print( + f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}.\n" + ) + else: + print( + f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:" + ) + overview_table = [ + [ + f"{Fore.RED}{round(fail_count / (fail_count + pass_count) * 100, 2)}% ({fail_count}) FAIL{Style.RESET_ALL}", + f"{Fore.GREEN}{round(pass_count / (fail_count + pass_count) * 100, 2)}% ({pass_count}) PASS{Style.RESET_ALL}", + ] + ] + print(tabulate(overview_table, tablefmt="rounded_grid")) + if not compliance_overview: + print(f"\nDetailed results of {compliance_framework.upper()} are in:") + print( + f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" + ) + except Exception as error: + logger.critical( + f"{error.__class__.__name__}:{error.__traceback__.tb_lineno} -- {error}" + ) + sys.exit(1) diff --git a/prowler/lib/outputs/compliance/ens_rd2022_aws.py b/prowler/lib/outputs/compliance/ens_rd2022_aws.py new file mode 100644 index 0000000000..6b8759acf6 --- /dev/null +++ b/prowler/lib/outputs/compliance/ens_rd2022_aws.py @@ -0,0 +1,45 @@ +from csv import DictWriter + +from prowler.config.config import 
timestamp +from prowler.lib.outputs.models import Check_Output_CSV_ENS_RD2022, generate_csv_fields +from prowler.lib.utils.utils import outputs_unix_timestamp + + +def write_compliance_row_ens_rd2022_aws( + file_descriptors, finding, compliance, output_options, audit_info +): + compliance_output = "ens_rd2022_aws" + csv_header = generate_csv_fields(Check_Output_CSV_ENS_RD2022) + csv_writer = DictWriter( + file_descriptors[compliance_output], + fieldnames=csv_header, + delimiter=";", + ) + for requirement in compliance.Requirements: + requirement_description = requirement.Description + requirement_id = requirement.Id + for attribute in requirement.Attributes: + compliance_row = Check_Output_CSV_ENS_RD2022( + Provider=finding.check_metadata.Provider, + Description=compliance.Description, + AccountId=audit_info.audited_account, + Region=finding.region, + AssessmentDate=outputs_unix_timestamp( + output_options.unix_timestamp, timestamp + ), + Requirements_Id=requirement_id, + Requirements_Description=requirement_description, + Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl, + Requirements_Attributes_Marco=attribute.Marco, + Requirements_Attributes_Categoria=attribute.Categoria, + Requirements_Attributes_DescripcionControl=attribute.DescripcionControl, + Requirements_Attributes_Nivel=attribute.Nivel, + Requirements_Attributes_Tipo=attribute.Tipo, + Requirements_Attributes_Dimensiones=",".join(attribute.Dimensiones), + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + CheckId=finding.check_metadata.CheckID, + ) + + csv_writer.writerow(compliance_row.__dict__) diff --git a/prowler/lib/outputs/compliance/generic.py b/prowler/lib/outputs/compliance/generic.py new file mode 100644 index 0000000000..f71621aa82 --- /dev/null +++ b/prowler/lib/outputs/compliance/generic.py @@ -0,0 +1,51 @@ +from csv import DictWriter + +from prowler.config.config import timestamp +from prowler.lib.outputs.models import ( + 
Check_Output_CSV_Generic_Compliance, + generate_csv_fields, +) +from prowler.lib.utils.utils import outputs_unix_timestamp + + +def write_compliance_row_generic( + file_descriptors, finding, compliance, output_options, audit_info +): + compliance_output = compliance.Framework + if compliance.Version != "": + compliance_output += "_" + compliance.Version + if compliance.Provider != "": + compliance_output += "_" + compliance.Provider + + compliance_output = compliance_output.lower().replace("-", "_") + csv_header = generate_csv_fields(Check_Output_CSV_Generic_Compliance) + csv_writer = DictWriter( + file_descriptors[compliance_output], + fieldnames=csv_header, + delimiter=";", + ) + for requirement in compliance.Requirements: + requirement_description = requirement.Description + requirement_id = requirement.Id + for attribute in requirement.Attributes: + compliance_row = Check_Output_CSV_Generic_Compliance( + Provider=finding.check_metadata.Provider, + Description=compliance.Description, + AccountId=audit_info.audited_account, + Region=finding.region, + AssessmentDate=outputs_unix_timestamp( + output_options.unix_timestamp, timestamp + ), + Requirements_Id=requirement_id, + Requirements_Description=requirement_description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_SubSection=attribute.SubSection, + Requirements_Attributes_SubGroup=attribute.SubGroup, + Requirements_Attributes_Service=attribute.Service, + Requirements_Attributes_Soc_Type=attribute.Soc_Type, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + CheckId=finding.check_metadata.CheckID, + ) + csv_writer.writerow(compliance_row.__dict__) diff --git a/prowler/lib/outputs/compliance/iso27001_2013_aws.py b/prowler/lib/outputs/compliance/iso27001_2013_aws.py new file mode 100644 index 0000000000..8b6f7c7030 --- /dev/null +++ b/prowler/lib/outputs/compliance/iso27001_2013_aws.py @@ -0,0 +1,53 @@ +from csv import DictWriter + 
+from prowler.config.config import timestamp +from prowler.lib.outputs.models import ( + Check_Output_CSV_AWS_ISO27001_2013, + generate_csv_fields, +) +from prowler.lib.utils.utils import outputs_unix_timestamp + + +def write_compliance_row_iso27001_2013_aws( + file_descriptors, finding, compliance, output_options, audit_info +): + compliance_output = compliance.Framework + if compliance.Version != "": + compliance_output += "_" + compliance.Version + if compliance.Provider != "": + compliance_output += "_" + compliance.Provider + + compliance_output = compliance_output.lower().replace("-", "_") + csv_header = generate_csv_fields(Check_Output_CSV_AWS_ISO27001_2013) + csv_writer = DictWriter( + file_descriptors[compliance_output], + fieldnames=csv_header, + delimiter=";", + ) + for requirement in compliance.Requirements: + requirement_description = requirement.Description + requirement_id = requirement.Id + requirement_name = requirement.Name + for attribute in requirement.Attributes: + compliance_row = Check_Output_CSV_AWS_ISO27001_2013( + Provider=finding.check_metadata.Provider, + Description=compliance.Description, + AccountId=audit_info.audited_account, + Region=finding.region, + AssessmentDate=outputs_unix_timestamp( + output_options.unix_timestamp, timestamp + ), + Requirements_Id=requirement_id, + Requirements_Name=requirement_name, + Requirements_Description=requirement_description, + Requirements_Attributes_Category=attribute.Category, + Requirements_Attributes_Objetive_ID=attribute.Objetive_ID, + Requirements_Attributes_Objetive_Name=attribute.Objetive_Name, + Requirements_Attributes_Check_Summary=attribute.Check_Summary, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + CheckId=finding.check_metadata.CheckID, + ) + + csv_writer.writerow(compliance_row.__dict__) diff --git a/prowler/lib/outputs/compliance/mitre_attack_aws.py b/prowler/lib/outputs/compliance/mitre_attack_aws.py new file mode 100644 index 
0000000000..cfffa62f7b --- /dev/null +++ b/prowler/lib/outputs/compliance/mitre_attack_aws.py @@ -0,0 +1,66 @@ +from csv import DictWriter + +from prowler.config.config import timestamp +from prowler.lib.outputs.models import ( + Check_Output_MITRE_ATTACK, + generate_csv_fields, + unroll_list, +) +from prowler.lib.utils.utils import outputs_unix_timestamp + + +def write_compliance_row_mitre_attack_aws( + file_descriptors, finding, compliance, output_options, audit_info +): + compliance_output = compliance.Framework + if compliance.Version != "": + compliance_output += "_" + compliance.Version + if compliance.Provider != "": + compliance_output += "_" + compliance.Provider + + compliance_output = compliance_output.lower().replace("-", "_") + csv_header = generate_csv_fields(Check_Output_MITRE_ATTACK) + csv_writer = DictWriter( + file_descriptors[compliance_output], + fieldnames=csv_header, + delimiter=";", + ) + for requirement in compliance.Requirements: + requirement_description = requirement.Description + requirement_id = requirement.Id + requirement_name = requirement.Name + attributes_aws_services = "" + attributes_categories = "" + attributes_values = "" + attributes_comments = "" + for attribute in requirement.Attributes: + attributes_aws_services += attribute.AWSService + "\n" + attributes_categories += attribute.Category + "\n" + attributes_values += attribute.Value + "\n" + attributes_comments += attribute.Comment + "\n" + compliance_row = Check_Output_MITRE_ATTACK( + Provider=finding.check_metadata.Provider, + Description=compliance.Description, + AccountId=audit_info.audited_account, + Region=finding.region, + AssessmentDate=outputs_unix_timestamp( + output_options.unix_timestamp, timestamp + ), + Requirements_Id=requirement_id, + Requirements_Description=requirement_description, + Requirements_Name=requirement_name, + Requirements_Tactics=unroll_list(requirement.Tactics), + Requirements_SubTechniques=unroll_list(requirement.SubTechniques), + 
Requirements_Platforms=unroll_list(requirement.Platforms), + Requirements_TechniqueURL=requirement.TechniqueURL, + Requirements_Attributes_AWSServices=attributes_aws_services, + Requirements_Attributes_Categories=attributes_categories, + Requirements_Attributes_Values=attributes_values, + Requirements_Attributes_Comments=attributes_comments, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + CheckId=finding.check_metadata.CheckID, + ) + + csv_writer.writerow(compliance_row.__dict__) diff --git a/prowler/lib/outputs/csv.py b/prowler/lib/outputs/csv.py new file mode 100644 index 0000000000..c3ebfd7e33 --- /dev/null +++ b/prowler/lib/outputs/csv.py @@ -0,0 +1,10 @@ +from csv import DictWriter + + +def write_csv(file_descriptor, headers, row): + csv_writer = DictWriter( + file_descriptor, + fieldnames=headers, + delimiter=";", + ) + csv_writer.writerow(row.__dict__) diff --git a/prowler/lib/outputs/file_descriptors.py b/prowler/lib/outputs/file_descriptors.py index 9b5def4d22..1e3a0d0b9c 100644 --- a/prowler/lib/outputs/file_descriptors.py +++ b/prowler/lib/outputs/file_descriptors.py @@ -23,6 +23,7 @@ from prowler.lib.outputs.models import ( ) from prowler.lib.utils.utils import file_exists, open_file from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info +from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info from prowler.providers.common.outputs import get_provider_output_model from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info @@ -108,7 +109,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit elif isinstance(audit_info, GCP_Audit_Info): if output_mode == "cis_2.0_gcp": - filename = f"{output_directory}/{output_filename}_cis_2.0_gcp{csv_file_suffix}" + filename = f"{output_directory}/compliance/{output_filename}_cis_2.0_gcp{csv_file_suffix}" file_descriptor = initialize_file_descriptor( filename, output_mode, audit_info, 
Check_Output_CSV_GCP_CIS ) @@ -123,7 +124,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit file_descriptors.update({output_mode: file_descriptor}) elif output_mode == "ens_rd2022_aws": - filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}" + filename = f"{output_directory}/compliance/{output_filename}_ens_rd2022_aws{csv_file_suffix}" file_descriptor = initialize_file_descriptor( filename, output_mode, @@ -133,14 +134,14 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit file_descriptors.update({output_mode: file_descriptor}) elif output_mode == "cis_1.5_aws": - filename = f"{output_directory}/{output_filename}_cis_1.5_aws{csv_file_suffix}" + filename = f"{output_directory}/compliance/{output_filename}_cis_1.5_aws{csv_file_suffix}" file_descriptor = initialize_file_descriptor( filename, output_mode, audit_info, Check_Output_CSV_AWS_CIS ) file_descriptors.update({output_mode: file_descriptor}) elif output_mode == "cis_1.4_aws": - filename = f"{output_directory}/{output_filename}_cis_1.4_aws{csv_file_suffix}" + filename = f"{output_directory}/compliance/{output_filename}_cis_1.4_aws{csv_file_suffix}" file_descriptor = initialize_file_descriptor( filename, output_mode, audit_info, Check_Output_CSV_AWS_CIS ) @@ -150,7 +151,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit output_mode == "aws_well_architected_framework_security_pillar_aws" ): - filename = f"{output_directory}/{output_filename}_aws_well_architected_framework_security_pillar_aws{csv_file_suffix}" + filename = f"{output_directory}/compliance/{output_filename}_aws_well_architected_framework_security_pillar_aws{csv_file_suffix}" file_descriptor = initialize_file_descriptor( filename, output_mode, @@ -163,7 +164,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit output_mode == "aws_well_architected_framework_reliability_pillar_aws" ): - 
filename = f"{output_directory}/{output_filename}_aws_well_architected_framework_reliability_pillar_aws{csv_file_suffix}" + filename = f"{output_directory}/compliance/{output_filename}_aws_well_architected_framework_reliability_pillar_aws{csv_file_suffix}" file_descriptor = initialize_file_descriptor( filename, output_mode, @@ -173,7 +174,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit file_descriptors.update({output_mode: file_descriptor}) elif output_mode == "iso27001_2013_aws": - filename = f"{output_directory}/{output_filename}_iso27001_2013_aws{csv_file_suffix}" + filename = f"{output_directory}/compliance/{output_filename}_iso27001_2013_aws{csv_file_suffix}" file_descriptor = initialize_file_descriptor( filename, output_mode, @@ -183,7 +184,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit file_descriptors.update({output_mode: file_descriptor}) elif output_mode == "mitre_attack_aws": - filename = f"{output_directory}/{output_filename}_mitre_attack_aws{csv_file_suffix}" + filename = f"{output_directory}/compliance/{output_filename}_mitre_attack_aws{csv_file_suffix}" file_descriptor = initialize_file_descriptor( filename, output_mode, @@ -194,14 +195,26 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit else: # Generic Compliance framework - filename = f"{output_directory}/{output_filename}_{output_mode}{csv_file_suffix}" - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - audit_info, - Check_Output_CSV_Generic_Compliance, - ) - file_descriptors.update({output_mode: file_descriptor}) + if ( + isinstance(audit_info, AWS_Audit_Info) + and "aws" in output_mode + or ( + isinstance(audit_info, Azure_Audit_Info) + and "azure" in output_mode + ) + or ( + isinstance(audit_info, GCP_Audit_Info) + and "gcp" in output_mode + ) + ): + filename = f"{output_directory}/compliance/{output_filename}_{output_mode}{csv_file_suffix}" + file_descriptor 
= initialize_file_descriptor( + filename, + output_mode, + audit_info, + Check_Output_CSV_Generic_Compliance, + ) + file_descriptors.update({output_mode: file_descriptor}) except Exception as error: logger.error( diff --git a/prowler/lib/outputs/models.py b/prowler/lib/outputs/models.py index 09f42b6e12..faddd07f54 100644 --- a/prowler/lib/outputs/models.py +++ b/prowler/lib/outputs/models.py @@ -13,7 +13,16 @@ from prowler.lib.utils.utils import outputs_unix_timestamp from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info -def get_check_compliance(finding, provider, output_options): +def get_check_compliance(finding, provider, output_options) -> dict: + """get_check_compliance returns a map with the compliance framework as key and the requirements where the finding's check is present. + + Example: + + { + "CIS-1.4": ["2.1.3"], + "CIS-1.5": ["2.1.3"], + } + """ try: check_compliance = {} # We have to retrieve all the check's compliance requirements diff --git a/prowler/lib/outputs/outputs.py b/prowler/lib/outputs/outputs.py index 288a58152c..f0666516aa 100644 --- a/prowler/lib/outputs/outputs.py +++ b/prowler/lib/outputs/outputs.py @@ -4,7 +4,10 @@ from colorama import Fore, Style from prowler.config.config import available_compliance_frameworks, orange_color from prowler.lib.logger import logger -from prowler.lib.outputs.compliance import add_manual_controls, fill_compliance +from prowler.lib.outputs.compliance.compliance import ( + add_manual_controls, + fill_compliance, +) from prowler.lib.outputs.file_descriptors import fill_file_descriptors from prowler.lib.outputs.html import fill_html from prowler.lib.outputs.json import fill_json_asff, fill_json_ocsf @@ -63,22 +66,26 @@ def report(check_findings, output_options, audit_info): if file_descriptors: # Check if --quiet to only add fails to outputs if not (finding.status != "FAIL" and output_options.is_quiet): - if any( - compliance in output_options.output_modes - for compliance in 
available_compliance_frameworks - ): - fill_compliance( - output_options, - finding, - audit_info, - file_descriptors, + input_compliance_frameworks = list( + set(output_options.output_modes).intersection( + available_compliance_frameworks ) + ) - add_manual_controls( - output_options, - audit_info, - file_descriptors, - ) + fill_compliance( + output_options, + finding, + audit_info, + file_descriptors, + input_compliance_frameworks, + ) + + add_manual_controls( + output_options, + audit_info, + file_descriptors, + input_compliance_frameworks, + ) # AWS specific outputs if finding.check_metadata.Provider == "aws": diff --git a/prowler/providers/common/outputs.py b/prowler/providers/common/outputs.py index 3716df30cc..adcddbcbee 100644 --- a/prowler/providers/common/outputs.py +++ b/prowler/providers/common/outputs.py @@ -70,6 +70,9 @@ class Provider_Output_Options: if not isdir(arguments.output_directory): if arguments.output_modes: makedirs(arguments.output_directory) + if not isdir(arguments.output_directory + "/compliance"): + if arguments.output_modes: + makedirs(arguments.output_directory + "/compliance") class Azure_Output_Options(Provider_Output_Options): diff --git a/tests/lib/outputs/compliance/compliance_test.py b/tests/lib/outputs/compliance/compliance_test.py new file mode 100644 index 0000000000..6be964a8fa --- /dev/null +++ b/tests/lib/outputs/compliance/compliance_test.py @@ -0,0 +1,111 @@ +from mock import MagicMock + +from prowler.lib.check.compliance_models import ( + CIS_Requirement_Attribute, + Compliance_Base_Model, + Compliance_Requirement, +) +from prowler.lib.outputs.compliance.compliance import ( + get_check_compliance_frameworks_in_input, +) + +CIS_1_4_AWS_NAME = "cis_1.4_aws" +CIS_1_4_AWS = Compliance_Base_Model( + Framework="CIS", + Provider="AWS", + Version="1.4", + Description="The CIS Benchmark for CIS Amazon Web Services Foundations Benchmark, v1.4.0, Level 1 and 2 provides prescriptive guidance for configuring security options for a 
subset of Amazon Web Services. It has an emphasis on foundational, testable, and architecture agnostic settings", + Requirements=[ + Compliance_Requirement( + Checks=[], + Id="2.1.3", + Description="Ensure MFA Delete is enabled on S3 buckets", + Attributes=[ + CIS_Requirement_Attribute( + Section="2.1. Simple Storage Service (S3)", + Profile="Level 1", + AssessmentStatus="Automated", + Description="Once MFA Delete is enabled on your sensitive and classified S3 bucket it requires the user to have two forms of authentication.", + RationaleStatement="Adding MFA delete to an S3 bucket, requires additional authentication when you change the version state of your bucket or you delete and object version adding another layer of security in the event your security credentials are compromised or unauthorized access is granted.", + ImpactStatement="", + RemediationProcedure="Perform the steps below to enable MFA delete on an S3 bucket.\n\nNote:\n-You cannot enable MFA Delete using the AWS Management Console. You must use the AWS CLI or API.\n-You must use your 'root' account to enable MFA Delete on S3 buckets.\n\n**From Command line:**\n\n1. Run the s3api put-bucket-versioning command\n\n```\naws s3api put-bucket-versioning --profile my-root-profile --bucket Bucket_Name --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa “arn:aws:iam::aws_account_id:mfa/root-account-mfa-device passcode”\n```", + AuditProcedure='Perform the steps below to confirm MFA delete is configured on an S3 Bucket\n\n**From Console:**\n\n1. Login to the S3 console at `https://console.aws.amazon.com/s3/`\n\n2. Click the `Check` box next to the Bucket name you want to confirm\n\n3. In the window under `Properties`\n\n4. Confirm that Versioning is `Enabled`\n\n5. Confirm that MFA Delete is `Enabled`\n\n**From Command Line:**\n\n1. 
Run the `get-bucket-versioning`\n```\naws s3api get-bucket-versioning --bucket my-bucket\n```\n\nOutput example:\n```\n \n Enabled\n Enabled \n\n```\n\nIf the Console or the CLI output does not show Versioning and MFA Delete `enabled` refer to the remediation below.', + AdditionalInformation="", + References="https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html#MultiFactorAuthenticationDelete:https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMFADelete.html:https://aws.amazon.com/blogs/security/securing-access-to-aws-using-mfa-part-3/:https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_lost-or-broken.html", + ) + ], + ) + ], +) +CIS_1_5_AWS_NAME = "cis_1.5_aws" +CIS_1_5_AWS = Compliance_Base_Model( + Framework="CIS", + Provider="AWS", + Version="1.5", + Description="The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Amazon Web Services with an emphasis on foundational, testable, and architecture agnostic settings.", + Requirements=[ + Compliance_Requirement( + Checks=[], + Id="2.1.3", + Description="Ensure MFA Delete is enabled on S3 buckets", + Attributes=[ + CIS_Requirement_Attribute( + Section="2.1. Simple Storage Service (S3)", + Profile="Level 1", + AssessmentStatus="Automated", + Description="Once MFA Delete is enabled on your sensitive and classified S3 bucket it requires the user to have two forms of authentication.", + RationaleStatement="Adding MFA delete to an S3 bucket, requires additional authentication when you change the version state of your bucket or you delete and object version adding another layer of security in the event your security credentials are compromised or unauthorized access is granted.", + ImpactStatement="", + RemediationProcedure="Perform the steps below to enable MFA delete on an S3 bucket.\n\nNote:\n-You cannot enable MFA Delete using the AWS Management Console. 
You must use the AWS CLI or API.\n-You must use your 'root' account to enable MFA Delete on S3 buckets.\n\n**From Command line:**\n\n1. Run the s3api put-bucket-versioning command\n\n```\naws s3api put-bucket-versioning --profile my-root-profile --bucket Bucket_Name --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa “arn:aws:iam::aws_account_id:mfa/root-account-mfa-device passcode”\n```", + AuditProcedure='Perform the steps below to confirm MFA delete is configured on an S3 Bucket\n\n**From Console:**\n\n1. Login to the S3 console at `https://console.aws.amazon.com/s3/`\n\n2. Click the `Check` box next to the Bucket name you want to confirm\n\n3. In the window under `Properties`\n\n4. Confirm that Versioning is `Enabled`\n\n5. Confirm that MFA Delete is `Enabled`\n\n**From Command Line:**\n\n1. Run the `get-bucket-versioning`\n```\naws s3api get-bucket-versioning --bucket my-bucket\n```\n\nOutput example:\n```\n \n Enabled\n Enabled \n\n```\n\nIf the Console or the CLI output does not show Versioning and MFA Delete `enabled` refer to the remediation below.', + AdditionalInformation="", + References="https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html#MultiFactorAuthenticationDelete:https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMFADelete.html:https://aws.amazon.com/blogs/security/securing-access-to-aws-using-mfa-part-3/:https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_lost-or-broken.html", + ) + ], + ) + ], +) + +NOT_PRESENT_COMPLIANCE_NAME = "not_present_compliance_name" +NOT_PRESENT_COMPLIANCE = Compliance_Base_Model( + Framework="NOT_EXISTENT", + Provider="NOT_EXISTENT", + Version="NOT_EXISTENT", + Description="NOT_EXISTENT", + Requirements=[], +) + + +class TestCompliance: + def test_get_check_compliance_frameworks_all_none(self): + check_id = None + bulk_checks_metadata = None + input_compliance_frameworks = None + assert ( + get_check_compliance_frameworks_in_input( + check_id, bulk_checks_metadata, 
input_compliance_frameworks + ) + == [] + ) + + def test_get_check_compliance_frameworks_all(self): + check_id = "test-check" + bulk_check_metadata = [CIS_1_4_AWS, CIS_1_5_AWS] + bulk_checks_metadata = {} + bulk_checks_metadata[check_id] = MagicMock() + bulk_checks_metadata[check_id].Compliance = bulk_check_metadata + input_compliance_frameworks = [CIS_1_4_AWS_NAME, CIS_1_5_AWS_NAME] + assert get_check_compliance_frameworks_in_input( + check_id, bulk_checks_metadata, input_compliance_frameworks + ) == [CIS_1_4_AWS, CIS_1_5_AWS] + + def test_get_check_compliance_frameworks_two_of_three(self): + check_id = "test-check" + bulk_check_metadata = [CIS_1_4_AWS, CIS_1_5_AWS, NOT_PRESENT_COMPLIANCE] + bulk_checks_metadata = {} + bulk_checks_metadata[check_id] = MagicMock() + bulk_checks_metadata[check_id].Compliance = bulk_check_metadata + input_compliance_frameworks = [CIS_1_4_AWS_NAME, CIS_1_5_AWS_NAME] + assert get_check_compliance_frameworks_in_input( + check_id, bulk_checks_metadata, input_compliance_frameworks + ) == [CIS_1_4_AWS, CIS_1_5_AWS] From 0ef85b3dee36b9e2f0ba1f433e44a000dc6b27b7 Mon Sep 17 00:00:00 2001 From: Sergio Garcia <38561120+sergargar@users.noreply.github.com> Date: Mon, 18 Dec 2023 12:10:58 +0100 Subject: [PATCH 06/10] fix(gcp): fix error in generating compliance (#3201) --- prowler/lib/outputs/compliance/cis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prowler/lib/outputs/compliance/cis.py b/prowler/lib/outputs/compliance/cis.py index 5cdfa250f9..4c2cbf73e5 100644 --- a/prowler/lib/outputs/compliance/cis.py +++ b/prowler/lib/outputs/compliance/cis.py @@ -28,7 +28,7 @@ def write_compliance_row_cis( ) elif compliance.Provider == "GCP": (compliance_row, csv_header) = generate_compliance_row_cis_gcp( - finding, compliance, output_options + finding, compliance, requirement, attribute, output_options ) write_csv( From c2f8980f1fa518aab97ead695872dc62c3ff9149 Mon Sep 17 00:00:00 2001 From: Sergio Garcia 
<38561120+sergargar@users.noreply.github.com> Date: Tue, 9 Jan 2024 10:31:51 +0100 Subject: [PATCH 07/10] feat(kubernetes): add Kubernetes provider (#3226) Co-authored-by: Pepe Fagoaga --- poetry.lock | 33 ++++- prowler/compliance/kubernetes/__init__.py | 0 prowler/config/config.yaml | 3 + .../custom_checks_metadata_example.yaml | 4 + prowler/lib/check/models.py | 16 +++ prowler/lib/outputs/html.py | 49 ++++++++ prowler/lib/outputs/models.py | 37 +++++- prowler/lib/outputs/outputs.py | 2 + prowler/lib/outputs/summary_table.py | 3 + prowler/providers/common/audit_info.py | 37 ++++++ prowler/providers/common/outputs.py | 17 +++ prowler/providers/kubernetes/__init__.py | 0 .../kubernetes/kubernetes_provider.py | 90 ++++++++++++++ .../kubernetes/kubernetes_provider_new.py | 113 ++++++++++++++++++ prowler/providers/kubernetes/lib/__init__.py | 0 .../kubernetes/lib/arguments/__init__.py | 0 .../kubernetes/lib/arguments/arguments.py | 21 ++++ .../kubernetes/lib/audit_info/__init__.py | 0 .../kubernetes/lib/audit_info/audit_info.py | 9 ++ .../kubernetes/lib/audit_info/models.py | 27 +++++ .../kubernetes/lib/service/__init__.py | 0 .../kubernetes/lib/service/service.py | 40 +++++++ .../kubernetes/services/apiserver/__init__.py | 0 .../apiserver_anonymous_requests/__init__.py | 0 ...apiserver_anonymous_requests.metadata.json | 36 ++++++ .../apiserver_anonymous_requests.py | 23 ++++ .../services/apiserver/apiserver_client.py | 4 + .../services/apiserver/apiserver_service.py | 26 ++++ .../kubernetes/services/core/__init__.py | 0 .../kubernetes/services/core/core_client.py | 4 + .../kubernetes/services/core/core_service.py | 80 +++++++++++++ .../kubernetes/services/scheduler/__init__.py | 0 .../services/scheduler/scheduler_client.py | 4 + .../scheduler/scheduler_profiling/__init__.py | 0 .../scheduler_profiling.metadata.json | 36 ++++++ .../scheduler_profiling.py | 23 ++++ .../services/scheduler/scheduler_service.py | 26 ++++ pyproject.toml | 1 + tests/config/config_test.py | 9 
+- tests/config/fixtures/config.yaml | 3 + .../lib/check/custom_checks_metadata_test.py | 6 + .../custom_checks_metadata_example.yaml | 4 + tests/lib/cli/parser_test.py | 62 +++++++++- tests/providers/common/audit_info_test.py | 34 ++++++ tests/providers/common/common_outputs_test.py | 92 +++++++++++++- 45 files changed, 963 insertions(+), 11 deletions(-) create mode 100644 prowler/compliance/kubernetes/__init__.py create mode 100644 prowler/providers/kubernetes/__init__.py create mode 100644 prowler/providers/kubernetes/kubernetes_provider.py create mode 100644 prowler/providers/kubernetes/kubernetes_provider_new.py create mode 100644 prowler/providers/kubernetes/lib/__init__.py create mode 100644 prowler/providers/kubernetes/lib/arguments/__init__.py create mode 100644 prowler/providers/kubernetes/lib/arguments/arguments.py create mode 100644 prowler/providers/kubernetes/lib/audit_info/__init__.py create mode 100644 prowler/providers/kubernetes/lib/audit_info/audit_info.py create mode 100644 prowler/providers/kubernetes/lib/audit_info/models.py create mode 100644 prowler/providers/kubernetes/lib/service/__init__.py create mode 100644 prowler/providers/kubernetes/lib/service/service.py create mode 100644 prowler/providers/kubernetes/services/apiserver/__init__.py create mode 100644 prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/__init__.py create mode 100644 prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/apiserver_anonymous_requests.metadata.json create mode 100644 prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/apiserver_anonymous_requests.py create mode 100644 prowler/providers/kubernetes/services/apiserver/apiserver_client.py create mode 100644 prowler/providers/kubernetes/services/apiserver/apiserver_service.py create mode 100644 prowler/providers/kubernetes/services/core/__init__.py create mode 100644 prowler/providers/kubernetes/services/core/core_client.py create mode 
100644 prowler/providers/kubernetes/services/core/core_service.py create mode 100644 prowler/providers/kubernetes/services/scheduler/__init__.py create mode 100644 prowler/providers/kubernetes/services/scheduler/scheduler_client.py create mode 100644 prowler/providers/kubernetes/services/scheduler/scheduler_profiling/__init__.py create mode 100644 prowler/providers/kubernetes/services/scheduler/scheduler_profiling/scheduler_profiling.metadata.json create mode 100644 prowler/providers/kubernetes/services/scheduler/scheduler_profiling/scheduler_profiling.py create mode 100644 prowler/providers/kubernetes/services/scheduler/scheduler_service.py diff --git a/poetry.lock b/poetry.lock index 667e7f6659..0dcf2a6c7f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "about-time" @@ -1339,6 +1339,32 @@ files = [ [package.dependencies] six = "*" +[[package]] +name = "kubernetes" +version = "28.1.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +files = [ + {file = "kubernetes-28.1.0-py2.py3-none-any.whl", hash = "sha256:10f56f8160dcb73647f15fafda268e7f60cf7dbc9f8e46d52fcd46d3beb0c18d"}, + {file = "kubernetes-28.1.0.tar.gz", hash = "sha256:1468069a573430fb1cb5ad22876868f57977930f80a6749405da31cd6086a7e9"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2,<2.0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + [[package]] name = "lazy-object-proxy" version = "1.9.0" @@ -2773,8 +2799,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = 
"sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, @@ -3312,4 +3337,4 @@ docs = ["mkdocs", "mkdocs-material"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.12" -content-hash = "653c89aa68d9924b75c01a3dd894fdffc57fb899d46dcc23728e0543a3fc24e9" +content-hash = "5eeda2c0549c1a40ebedefe766f0d7e27e78ed123aaacb3e42d242271774b1da" diff --git a/prowler/compliance/kubernetes/__init__.py b/prowler/compliance/kubernetes/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/config/config.yaml b/prowler/config/config.yaml index d9cf87c787..b1b8d6af35 100644 --- a/prowler/config/config.yaml +++ b/prowler/config/config.yaml @@ -92,3 +92,6 @@ azure: # GCP Configuration gcp: + +# Kubernetes 
Configuration +kubernetes: diff --git a/prowler/config/custom_checks_metadata_example.yaml b/prowler/config/custom_checks_metadata_example.yaml index ed11065333..20228af8ce 100644 --- a/prowler/config/custom_checks_metadata_example.yaml +++ b/prowler/config/custom_checks_metadata_example.yaml @@ -13,3 +13,7 @@ CustomChecksMetadata: Checks: compute_instance_public_ip: Severity: critical + kubernetes: + Checks: + apiserver_anonymous_requests: + Severity: low diff --git a/prowler/lib/check/models.py b/prowler/lib/check/models.py index ce4a6d028d..274f1004a6 100644 --- a/prowler/lib/check/models.py +++ b/prowler/lib/check/models.py @@ -146,6 +146,22 @@ class Check_Report_GCP(Check_Report): self.location = "" +@dataclass +class Check_Report_Kubernetes(Check_Report): + # TODO change class name to CheckReportKubernetes + """Contains the Kubernetes Check's finding information.""" + + resource_name: str + resource_id: str + namespace: str + + def __init__(self, metadata): + super().__init__(metadata) + self.resource_name = "" + self.resource_id = "" + self.namespace = "" + + # Testing Pending def load_check_metadata(metadata_file: str) -> Check_Metadata_Model: """load_check_metadata loads and parse a Check's metadata file""" diff --git a/prowler/lib/outputs/html.py b/prowler/lib/outputs/html.py index 5a7f1cf2fc..99d76aa117 100644 --- a/prowler/lib/outputs/html.py +++ b/prowler/lib/outputs/html.py @@ -21,6 +21,7 @@ from prowler.lib.utils.utils import open_file from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info +from prowler.providers.kubernetes.lib.audit_info.models import Kubernetes_Audit_Info def add_html_header(file_descriptor, audit_info): @@ -522,6 +523,53 @@ def get_gcp_html_assessment_summary(audit_info): sys.exit(1) +def get_kubernetes_html_assessment_summary(audit_info): + try: + if 
isinstance(audit_info, Kubernetes_Audit_Info): + return ( + """ +

    +
    +
    + Kubernetes Assessment Summary +
    +
      +
    • + Kubernetes Context: """ + + audit_info.context["name"] + + """ +
    • +
    +
    +
    +
    +
    +
    + Kubernetes Credentials +
    +
      +
    • + Kubernetes Cluster: """ + + audit_info.context["context"]["cluster"] + + """ +
    • +
    • + Kubernetes User: """ + + audit_info.context["context"]["user"] + + """ +
    • +
    +
    +
    + """ + ) + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}" + ) + sys.exit(1) + + def get_assessment_summary(audit_info): """ get_assessment_summary gets the HTML assessment summary for the provider @@ -532,6 +580,7 @@ def get_assessment_summary(audit_info): # AWS_Audit_Info --> aws # GCP_Audit_Info --> gcp # Azure_Audit_Info --> azure + # Kubernetes_Audit_Info --> kubernetes provider = audit_info.__class__.__name__.split("_")[0].lower() # Dynamically get the Provider quick inventory handler diff --git a/prowler/lib/outputs/models.py b/prowler/lib/outputs/models.py index faddd07f54..db3b8db8b2 100644 --- a/prowler/lib/outputs/models.py +++ b/prowler/lib/outputs/models.py @@ -85,6 +85,18 @@ def generate_provider_output_csv( ) finding_output = output_model(**data) + if provider == "kubernetes": + data["resource_id"] = finding.resource_id + data["resource_name"] = finding.resource_name + data["namespace"] = finding.namespace + data[ + "finding_unique_id" + ] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.namespace}-{finding.resource_id}" + data["compliance"] = unroll_dict( + get_check_compliance(finding, provider, output_options) + ) + finding_output = output_model(**data) + if provider == "aws": data["profile"] = audit_info.profile data["account_id"] = audit_info.audited_account @@ -357,6 +369,16 @@ class Gcp_Check_Output_CSV(Check_Output_CSV): resource_name: str = "" +class Kubernetes_Check_Output_CSV(Check_Output_CSV): + """ + Kubernetes_Check_Output_CSV generates a finding's output in CSV format for the Kubernetes provider. 
+ """ + + namespace: str = "" + resource_id: str = "" + resource_name: str = "" + + def generate_provider_output_json( provider: str, finding, audit_info, mode: str, output_options ): @@ -487,7 +509,7 @@ class Azure_Check_Output_JSON(Check_Output_JSON): class Gcp_Check_Output_JSON(Check_Output_JSON): """ - Gcp_Check_Output_JSON generates a finding's output in JSON format for the AWS provider. + Gcp_Check_Output_JSON generates a finding's output in JSON format for the GCP provider. """ ProjectId: str = "" @@ -499,6 +521,19 @@ class Gcp_Check_Output_JSON(Check_Output_JSON): super().__init__(**metadata) +class Kubernetes_Check_Output_JSON(Check_Output_JSON): + """ + Kubernetes_Check_Output_JSON generates a finding's output in JSON format for the Kubernetes provider. + """ + + ResourceId: str = "" + ResourceName: str = "" + Namespace: str = "" + + def __init__(self, **metadata): + super().__init__(**metadata) + + class Check_Output_MITRE_ATTACK(BaseModel): """ Check_Output_MITRE_ATTACK generates a finding's output in CSV MITRE ATTACK format. 
diff --git a/prowler/lib/outputs/outputs.py b/prowler/lib/outputs/outputs.py index f0666516aa..091a21e866 100644 --- a/prowler/lib/outputs/outputs.py +++ b/prowler/lib/outputs/outputs.py @@ -27,6 +27,8 @@ def stdout_report(finding, color, verbose, is_quiet): details = finding.check_metadata.ServiceName if finding.check_metadata.Provider == "gcp": details = finding.location.lower() + if finding.check_metadata.Provider == "kubernetes": + details = finding.namespace.lower() if verbose and not (is_quiet and finding.status != "FAIL"): print( diff --git a/prowler/lib/outputs/summary_table.py b/prowler/lib/outputs/summary_table.py index 8e917bbec1..6e121018ad 100644 --- a/prowler/lib/outputs/summary_table.py +++ b/prowler/lib/outputs/summary_table.py @@ -39,6 +39,9 @@ def display_summary_table( elif provider == "gcp": entity_type = "Project ID/s" audited_entities = ", ".join(audit_info.project_ids) + elif provider == "kubernetes": + entity_type = "Context" + audited_entities = audit_info.context["name"] if findings: current = { diff --git a/prowler/providers/common/audit_info.py b/prowler/providers/common/audit_info.py index a8ba2a900c..5ba28ab6af 100644 --- a/prowler/providers/common/audit_info.py +++ b/prowler/providers/common/audit_info.py @@ -34,6 +34,9 @@ from prowler.providers.azure.lib.exception.exception import AzureException from prowler.providers.gcp.gcp_provider import GCP_Provider from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info +from prowler.providers.kubernetes.kubernetes_provider import Kubernetes_Provider +from prowler.providers.kubernetes.lib.audit_info.audit_info import kubernetes_audit_info +from prowler.providers.kubernetes.lib.audit_info.models import Kubernetes_Audit_Info class Audit_Info: @@ -56,6 +59,21 @@ class Audit_Info: This report is being generated using credentials below: GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: 
{Fore.YELLOW}[{", ".join(audit_info.project_ids)}]{Style.RESET_ALL} +""" + print(report) + + def print_kubernetes_credentials(self, audit_info: Kubernetes_Audit_Info): + # Get the current context + cluster_name = self.context.get("context").get("cluster") + user_name = self.context.get("context").get("user") + namespace = self.context.get("namespace", "default") + roles = self.get_context_user_roles() + roles_str = ", ".join(roles) if roles else "No associated Roles" + + report = f""" +This report is being generated using the Kubernetes configuration below: + +Kubernetes Cluster: {Fore.YELLOW}[{cluster_name}]{Style.RESET_ALL} User: {Fore.YELLOW}[{user_name}]{Style.RESET_ALL} Namespace: {Fore.YELLOW}[{namespace}]{Style.RESET_ALL} Roles: {Fore.YELLOW}[{roles_str}]{Style.RESET_ALL} """ print(report) @@ -358,6 +376,25 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE return gcp_audit_info + def set_kubernetes_audit_info(self, arguments) -> Kubernetes_Audit_Info: + """ + set_kubernetes_audit_info returns the Kubernetes_Audit_Info + """ + logger.info("Setting Kubernetes session ...") + kubeconfig_file = arguments.get("kubeconfig_file") + + logger.info("Checking if any context is set ...") + context = arguments.get("context") + + kubernetes_provider = Kubernetes_Provider(kubeconfig_file, context) + + ( + kubernetes_audit_info.api_client, + kubernetes_audit_info.context, + ) = kubernetes_provider.get_credentials() + + return kubernetes_audit_info + def set_provider_audit_info(provider: str, arguments: dict): """ diff --git a/prowler/providers/common/outputs.py b/prowler/providers/common/outputs.py index adcddbcbee..8221e1345f 100644 --- a/prowler/providers/common/outputs.py +++ b/prowler/providers/common/outputs.py @@ -111,6 +111,23 @@ class Gcp_Output_Options(Provider_Output_Options): self.output_filename = arguments.output_filename +class Kubernetes_Output_Options(Provider_Output_Options): + def __init__(self, arguments, audit_info, 
mutelist_file, bulk_checks_metadata): + # First call Provider_Output_Options init + super().__init__(arguments, mutelist_file, bulk_checks_metadata) + # TODO move the below if to Provider_Output_Options + # Check if custom output filename was input, if not, set the default + if ( + not hasattr(arguments, "output_filename") + or arguments.output_filename is None + ): + self.output_filename = ( + f"prowler-output-{audit_info.context['name']}-{output_file_timestamp}" + ) + else: + self.output_filename = arguments.output_filename + + class Aws_Output_Options(Provider_Output_Options): security_hub_enabled: bool diff --git a/prowler/providers/kubernetes/__init__.py b/prowler/providers/kubernetes/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/kubernetes_provider.py b/prowler/providers/kubernetes/kubernetes_provider.py new file mode 100644 index 0000000000..10400b4585 --- /dev/null +++ b/prowler/providers/kubernetes/kubernetes_provider.py @@ -0,0 +1,90 @@ +import os +import sys + +from kubernetes import client, config + +from prowler.lib.logger import logger + + +class Kubernetes_Provider: + def __init__( + self, + kubeconfig_file: str, + context: str, + ): + logger.info("Instantiating Kubernetes Provider ...") + self.api_client, self.context = self.__set_credentials__( + kubeconfig_file, context + ) + if not self.api_client: + logger.critical("Failed to set up a Kubernetes session.") + sys.exit(1) + + def __set_credentials__(self, kubeconfig_file, context): + try: + if kubeconfig_file: + # Use kubeconfig file if provided + config.load_kube_config( + config_file=os.path.abspath(kubeconfig_file), context=context + ) + else: + # Otherwise try to load in-cluster config + config.load_incluster_config() + context = config.list_kube_config_contexts()[0][0] + return client.ApiClient(), context + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + 
sys.exit(1) + + def get_credentials(self): + return self.api_client, self.context + + def search_and_save_roles( + self, roles: list, role_bindings, context_user: str, role_binding_type: str + ): + try: + for rb in role_bindings: + if rb.subjects: + for subject in rb.subjects: + if subject.kind == "User" and subject.name == context_user: + if role_binding_type == "ClusterRole": + roles.append(f"{role_binding_type}: {rb.role_ref.name}") + elif role_binding_type == "Role": + roles.append( + f"{role_binding_type} ({rb.metadata.namespace}): {rb.role_ref.name}" + ) + return roles + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + sys.exit(1) + + def get_context_user_roles(self): + try: + rbac_api = client.RbacAuthorizationV1Api() + context_user = self.context.get("context", {}).get("user", "") + roles = [] + # Search in ClusterRoleBindings + roles = self.search_and_save_roles( + roles, + rbac_api.list_cluster_role_binding().items, + context_user, + "ClusterRole", + ) + + # Search in RoleBindings for all namespaces + roles = self.search_and_save_roles( + roles, + rbac_api.list_role_binding_for_all_namespaces().items, + context_user, + "Role", + ) + return roles + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + sys.exit(1) diff --git a/prowler/providers/kubernetes/kubernetes_provider_new.py b/prowler/providers/kubernetes/kubernetes_provider_new.py new file mode 100644 index 0000000000..0ab765ff78 --- /dev/null +++ b/prowler/providers/kubernetes/kubernetes_provider_new.py @@ -0,0 +1,113 @@ +import os +import sys +from typing import Any, Optional + +from colorama import Fore, Style +from kubernetes import client, config + +from prowler.lib.logger import logger +from prowler.providers.common.provider import CloudProvider + + +class KubernetesProvider(CloudProvider): + # TODO change class name from CloudProvider to Provider 
+ api_client: Any + context: dict + audit_resources: Optional[Any] + audit_metadata: Optional[Any] + audit_config: Optional[dict] + + def __init__(self, arguments: dict): + logger.info("Instantiating Kubernetes Provider ...") + + self.api_client, self.context = self.setup_session( + arguments.kubeconfig_file, arguments.context + ) + + if not self.api_client: + logger.critical("Failed to set up a Kubernetes session.") + sys.exit(1) + if not arguments.only_logs: + self.print_credentials() + + def setup_session(self, kubeconfig_file, context): + try: + if kubeconfig_file: + # Use kubeconfig file if provided + config.load_kube_config( + config_file=os.path.abspath(kubeconfig_file), context=context + ) + else: + # Otherwise try to load in-cluster config + config.load_incluster_config() + context = config.list_kube_config_contexts()[0][0] + return client.ApiClient(), context + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + sys.exit(1) + + def search_and_save_roles( + self, roles: list, role_bindings, context_user: str, role_binding_type: str + ): + try: + for rb in role_bindings: + if rb.subjects: + for subject in rb.subjects: + if subject.kind == "User" and subject.name == context_user: + if role_binding_type == "ClusterRole": + roles.append(f"{role_binding_type}: {rb.role_ref.name}") + elif role_binding_type == "Role": + roles.append( + f"{role_binding_type} ({rb.metadata.namespace}): {rb.role_ref.name}" + ) + return roles + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + sys.exit(1) + + def get_context_user_roles(self): + try: + rbac_api = client.RbacAuthorizationV1Api() + context_user = self.context.get("context", {}).get("user", "") + roles = [] + # Search in ClusterRoleBindings + roles = self.search_and_save_roles( + roles, + rbac_api.list_cluster_role_binding().items, + context_user, + "ClusterRole", + ) + + # 
Search in RoleBindings for all namespaces + roles = self.search_and_save_roles( + roles, + rbac_api.list_role_binding_for_all_namespaces().items, + context_user, + "Role", + ) + return roles + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + sys.exit(1) + + def print_credentials(self): + + # Get the current context + cluster_name = self.context.get("context").get("cluster") + user_name = self.context.get("context").get("user") + namespace = self.context.get("namespace", "default") + roles = self.get_context_user_roles() + roles_str = ", ".join(roles) if roles else "No associated Roles" + + report = f""" +This report is being generated using the Kubernetes configuration below: + +Kubernetes Cluster: {Fore.YELLOW}[{cluster_name}]{Style.RESET_ALL} User: {Fore.YELLOW}[{user_name}]{Style.RESET_ALL} Namespace: {Fore.YELLOW}[{namespace}]{Style.RESET_ALL} Roles: {Fore.YELLOW}[{roles_str}]{Style.RESET_ALL} +""" + print(report) diff --git a/prowler/providers/kubernetes/lib/__init__.py b/prowler/providers/kubernetes/lib/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/lib/arguments/__init__.py b/prowler/providers/kubernetes/lib/arguments/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/lib/arguments/arguments.py b/prowler/providers/kubernetes/lib/arguments/arguments.py new file mode 100644 index 0000000000..6ad0b09e13 --- /dev/null +++ b/prowler/providers/kubernetes/lib/arguments/arguments.py @@ -0,0 +1,21 @@ +def init_parser(self): + """Init the Kubernetes Provider CLI parser""" + k8s_parser = self.subparsers.add_parser( + "kubernetes", parents=[self.common_providers_parser], help="Kubernetes Provider" + ) + # Authentication and Configuration + k8s_auth_subparser = k8s_parser.add_argument_group( + "Authentication and Configuration" + ) + k8s_auth_subparser.add_argument( + 
"--kubeconfig-file", + nargs="?", + metavar="FILE_PATH", + help="Path to the kubeconfig file to use for CLI requests. Not necessary for in-cluster execution.", + ) + k8s_auth_subparser.add_argument( + "--context", + nargs="?", + metavar="CONTEXT_NAME", + help="The name of the kubeconfig context to use. By default, current_context from config file will be used.", + ) diff --git a/prowler/providers/kubernetes/lib/audit_info/__init__.py b/prowler/providers/kubernetes/lib/audit_info/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/lib/audit_info/audit_info.py b/prowler/providers/kubernetes/lib/audit_info/audit_info.py new file mode 100644 index 0000000000..ce9c977e83 --- /dev/null +++ b/prowler/providers/kubernetes/lib/audit_info/audit_info.py @@ -0,0 +1,9 @@ +from prowler.providers.kubernetes.lib.audit_info.models import Kubernetes_Audit_Info + +kubernetes_audit_info = Kubernetes_Audit_Info( + api_client=None, + context=None, + audit_resources=None, + audit_metadata=None, + audit_config=None, +) diff --git a/prowler/providers/kubernetes/lib/audit_info/models.py b/prowler/providers/kubernetes/lib/audit_info/models.py new file mode 100644 index 0000000000..7ef5ff7a2f --- /dev/null +++ b/prowler/providers/kubernetes/lib/audit_info/models.py @@ -0,0 +1,27 @@ +from dataclasses import dataclass +from typing import Any, Optional + +from kubernetes import client + + +@dataclass +class Kubernetes_Audit_Info: + api_client: client.ApiClient + context: Optional[str] + audit_resources: Optional[Any] + audit_metadata: Optional[Any] + audit_config: Optional[dict] + + def __init__( + self, + api_client, + context, + audit_metadata, + audit_resources, + audit_config, + ): + self.api_client = api_client + self.context = context + self.audit_metadata = audit_metadata + self.audit_resources = audit_resources + self.audit_config = audit_config diff --git a/prowler/providers/kubernetes/lib/service/__init__.py 
b/prowler/providers/kubernetes/lib/service/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/lib/service/service.py b/prowler/providers/kubernetes/lib/service/service.py new file mode 100644 index 0000000000..c4d242b7bf --- /dev/null +++ b/prowler/providers/kubernetes/lib/service/service.py @@ -0,0 +1,40 @@ +from concurrent.futures import ThreadPoolExecutor, as_completed + +from prowler.lib.logger import logger +from prowler.providers.kubernetes.kubernetes_provider_new import KubernetesProvider + +MAX_WORKERS = 10 + + +class KubernetesService: + def __init__(self, provider: KubernetesProvider): + self.context = provider.context + self.api_client = provider.api_client + + # Thread pool for __threading_call__ + self.thread_pool = ThreadPoolExecutor(max_workers=MAX_WORKERS) + + def __threading_call__(self, call, iterator): + items = iterator + # Determine the total count for logging + item_count = len(items) + + # Trim leading and trailing underscores from the call's name + call_name = call.__name__.strip("_") + # Add Capitalization + call_name = " ".join([x.capitalize() for x in call_name.split("_")]) + + logger.info( + f"{self.__class__.__name__.upper()} - Starting threads for '{call_name}' function to process {item_count} items..." + ) + + # Submit tasks to the thread pool + futures = [self.thread_pool.submit(call, item) for item in items] + + # Wait for all tasks to complete + for future in as_completed(futures): + try: + future.result() # Raises exceptions from the thread, if any + except Exception: + # Handle exceptions if necessary + pass # Replace 'pass' with any additional exception handling logic.
Currently handled within the called function diff --git a/prowler/providers/kubernetes/services/apiserver/__init__.py b/prowler/providers/kubernetes/services/apiserver/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/__init__.py b/prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/apiserver_anonymous_requests.metadata.json b/prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/apiserver_anonymous_requests.metadata.json new file mode 100644 index 0000000000..626ceea41c --- /dev/null +++ b/prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/apiserver_anonymous_requests.metadata.json @@ -0,0 +1,36 @@ +{ + "Provider": "kubernetes", + "CheckID": "apiserver_anonymous_requests", + "CheckTitle": "Ensure that the --anonymous-auth argument is set to false", + "CheckType": [ + "Cluster Security", + "Authentication and Authorization" + ], + "ServiceName": "apiserver", + "SubServiceName": "", + "ResourceIdTemplate": "", + "Severity": "high", + "ResourceType": "KubernetesAPIServer", + "Description": "Disable anonymous requests to the API server. When enabled, requests that are not rejected by other configured authentication methods are treated as anonymous requests, which are then served by the API server. 
Disallowing anonymous requests strengthens security by ensuring all access is authenticated.", + "Risk": "Enabling anonymous access to the API server can expose the cluster to unauthorized access and potential security vulnerabilities.", + "RelatedUrl": "https://kubernetes.io/docs/admin/authentication/#anonymous-requests", + "Remediation": { + "Code": { + "CLI": "Edit the API server pod specification file /etc/kubernetes/manifests/kube-apiserver.yaml and set --anonymous-auth=false", + "NativeIaC": "", + "Other": "", + "Terraform": "" + }, + "Recommendation": { + "Text": "Ensure the --anonymous-auth argument in the API server is set to false. This will reject all anonymous requests, enforcing authenticated access to the server.", + "Url": "https://kubernetes.io/docs/reference/command-line-tools-reference/kube-apiserver/" + } + }, + "Categories": [ + "Security Best Practices", + "Compliance" + ], + "DependsOn": [], + "RelatedTo": [], + "Notes": "While anonymous access can be useful for health checks and discovery, consider the security implications for your specific environment." 
+} diff --git a/prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/apiserver_anonymous_requests.py b/prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/apiserver_anonymous_requests.py new file mode 100644 index 0000000000..65b3893eb2 --- /dev/null +++ b/prowler/providers/kubernetes/services/apiserver/apiserver_anonymous_requests/apiserver_anonymous_requests.py @@ -0,0 +1,23 @@ +from prowler.lib.check.models import Check, Check_Report_Kubernetes +from prowler.providers.kubernetes.services.apiserver.apiserver_client import ( + apiserver_client, +) + + +class apiserver_anonymous_requests(Check): + def execute(self) -> Check_Report_Kubernetes: + findings = [] + for pod in apiserver_client.apiserver_pods: + report = Check_Report_Kubernetes(self.metadata()) + report.namespace = pod.namespace + report.resource_name = pod.name + report.resource_id = pod.uid + report.status = "PASS" + report.status_extended = "API Server does not have anonymous-auth enabled." + for container in pod.containers.values(): + if container.command and "--anonymous-auth=true" in container.command: + report.resource_id = container.name + report.status = "FAIL" + report.status_extended = f"API Server has anonymous-auth enabled in container {container.name}."
+ findings.append(report) + return findings diff --git a/prowler/providers/kubernetes/services/apiserver/apiserver_client.py b/prowler/providers/kubernetes/services/apiserver/apiserver_client.py new file mode 100644 index 0000000000..870a2f43fb --- /dev/null +++ b/prowler/providers/kubernetes/services/apiserver/apiserver_client.py @@ -0,0 +1,4 @@ +from prowler.providers.common.common import global_provider +from prowler.providers.kubernetes.services.apiserver.apiserver_service import APIServer + +apiserver_client = APIServer(global_provider) diff --git a/prowler/providers/kubernetes/services/apiserver/apiserver_service.py b/prowler/providers/kubernetes/services/apiserver/apiserver_service.py new file mode 100644 index 0000000000..c85704157f --- /dev/null +++ b/prowler/providers/kubernetes/services/apiserver/apiserver_service.py @@ -0,0 +1,26 @@ +from prowler.lib.logger import logger +from prowler.providers.kubernetes.lib.service.service import KubernetesService +from prowler.providers.kubernetes.services.core.core_client import core_client + + +################## APIServer ################## +class APIServer(KubernetesService): + def __init__(self, audit_info): + super().__init__(audit_info) + self.client = core_client + + self.apiserver_pods = self.__get_apiserver_pod__() + + def __get_apiserver_pod__(self): + try: + apiserver_pods = [] + for pod in self.client.pods.values(): + if pod.namespace == "kube-system" and pod.name.startswith( + "kube-apiserver" + ): + apiserver_pods.append(pod) + return apiserver_pods + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) diff --git a/prowler/providers/kubernetes/services/core/__init__.py b/prowler/providers/kubernetes/services/core/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/services/core/core_client.py b/prowler/providers/kubernetes/services/core/core_client.py new file mode 100644 index 
0000000000..a4886daa73 --- /dev/null +++ b/prowler/providers/kubernetes/services/core/core_client.py @@ -0,0 +1,4 @@ +from prowler.providers.common.common import global_provider +from prowler.providers.kubernetes.services.core.core_service import Core + +core_client = Core(global_provider) diff --git a/prowler/providers/kubernetes/services/core/core_service.py b/prowler/providers/kubernetes/services/core/core_service.py new file mode 100644 index 0000000000..1a57fca7f5 --- /dev/null +++ b/prowler/providers/kubernetes/services/core/core_service.py @@ -0,0 +1,80 @@ +from typing import List, Optional + +from kubernetes import client +from pydantic import BaseModel + +from prowler.lib.logger import logger +from prowler.providers.kubernetes.lib.service.service import KubernetesService + + +################## Core ################## +class Core(KubernetesService): + def __init__(self, audit_info): + super().__init__(audit_info) + self.client = client.CoreV1Api(self.api_client) + + self.pods = {} + self.__get_pods__() + + def __get_pods__(self): + try: + pods = self.client.list_pod_for_all_namespaces() + for pod in pods.items: + pod_containers = {} + for container in pod.spec.containers: + pod_containers[container.name] = Container( + name=container.name, + image=container.image, + command=container.command if container.command else None, + ports=[ + {"containerPort": port.container_port} + for port in container.ports + ] + if container.ports + else None, + env=[ + {"name": env.name, "value": env.value} + for env in container.env + ] + if container.env + else None, + ) + self.pods[pod.metadata.uid] = Pod( + name=pod.metadata.name, + uid=pod.metadata.uid, + namespace=pod.metadata.namespace, + labels=pod.metadata.labels, + annotations=pod.metadata.annotations, + node_name=pod.spec.node_name, + service_account=pod.spec.service_account_name, + status_phase=pod.status.phase, + pod_ip=pod.status.pod_ip, + host_ip=pod.status.host_ip, + containers=pod_containers, + ) + except 
Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + +class Container(BaseModel): + name: str + image: str + command: Optional[List[str]] + ports: Optional[List[dict]] + env: Optional[List[dict]] + + +class Pod(BaseModel): + name: str + uid: str + namespace: str + labels: Optional[dict] + annotations: Optional[dict] + node_name: Optional[str] + service_account: Optional[str] + status_phase: Optional[str] + pod_ip: Optional[str] + host_ip: Optional[str] + containers: Optional[dict] diff --git a/prowler/providers/kubernetes/services/scheduler/__init__.py b/prowler/providers/kubernetes/services/scheduler/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/services/scheduler/scheduler_client.py b/prowler/providers/kubernetes/services/scheduler/scheduler_client.py new file mode 100644 index 0000000000..9a0c62c080 --- /dev/null +++ b/prowler/providers/kubernetes/services/scheduler/scheduler_client.py @@ -0,0 +1,4 @@ +from prowler.providers.common.common import global_provider +from prowler.providers.kubernetes.services.scheduler.scheduler_service import Scheduler + +scheduler_client = Scheduler(global_provider) diff --git a/prowler/providers/kubernetes/services/scheduler/scheduler_profiling/__init__.py b/prowler/providers/kubernetes/services/scheduler/scheduler_profiling/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/prowler/providers/kubernetes/services/scheduler/scheduler_profiling/scheduler_profiling.metadata.json b/prowler/providers/kubernetes/services/scheduler/scheduler_profiling/scheduler_profiling.metadata.json new file mode 100644 index 0000000000..a692c50e11 --- /dev/null +++ b/prowler/providers/kubernetes/services/scheduler/scheduler_profiling/scheduler_profiling.metadata.json @@ -0,0 +1,36 @@ +{ + "Provider": "kubernetes", + "CheckID": "scheduler_profiling", + "CheckTitle": "Ensure that the --profiling argument is 
set to false", + "CheckType": [ + "Cluster Performance", + "Cluster Security" + ], + "ServiceName": "kube-scheduler", + "SubServiceName": "", + "ResourceIdTemplate": "", + "Severity": "medium", + "ResourceType": "KubernetesScheduler", + "Description": "Disable profiling in the Kubernetes Scheduler unless it is needed for troubleshooting. Profiling can reveal detailed system and application performance data, which might be exploited if exposed. Turning off profiling reduces the potential attack surface and performance overhead.", + "Risk": "While profiling is useful for identifying performance issues, it generates detailed data that could potentially expose sensitive information about the system and its performance characteristics.", + "RelatedUrl": "https://github.com/kubernetes/community/blob/master/contributors/devel/profiling.md", + "Remediation": { + "Code": { + "CLI": "Edit the Scheduler pod specification file /etc/kubernetes/manifests/kube-scheduler.yaml and set --profiling=false", + "NativeIaC": "", + "Other": "", + "Terraform": "" + }, + "Recommendation": { + "Text": "To minimize exposure to performance data and potential vulnerabilities, ensure the --profiling argument in the Kubernetes Scheduler is set to false.", + "Url": "https://kubernetes.io/docs/admin/kube-scheduler/" + } + }, + "Categories": [ + "Performance Optimization", + "Security Best Practices" + ], + "DependsOn": [], + "RelatedTo": [], + "Notes": "By default, profiling is enabled in Kubernetes Scheduler. Disabling it is a good security practice if profiling data is not needed for regular operations." 
+} diff --git a/prowler/providers/kubernetes/services/scheduler/scheduler_profiling/scheduler_profiling.py b/prowler/providers/kubernetes/services/scheduler/scheduler_profiling/scheduler_profiling.py new file mode 100644 index 0000000000..b591dc3af7 --- /dev/null +++ b/prowler/providers/kubernetes/services/scheduler/scheduler_profiling/scheduler_profiling.py @@ -0,0 +1,23 @@ +from prowler.lib.check.models import Check, Check_Report_Kubernetes +from prowler.providers.kubernetes.services.scheduler.scheduler_client import ( + scheduler_client, +) + + +class scheduler_profiling(Check): + def execute(self) -> Check_Report_Kubernetes: + findings = [] + for pod in scheduler_client.scheduler_pods: + report = Check_Report_Kubernetes(self.metadata()) + report.namespace = pod.namespace + report.resource_name = pod.name + report.resource_id = pod.uid + report.status = "PASS" + report.status_extended = "Scheduler does not have profiling enabled." + for container in pod.containers.values(): + if container.command and "--profiling=true" in container.command: + report.resource_id = container.name + report.status = "FAIL" + report.status_extended = f"Scheduler has profiling enabled in container {container.name}."
+ findings.append(report) + return findings diff --git a/prowler/providers/kubernetes/services/scheduler/scheduler_service.py b/prowler/providers/kubernetes/services/scheduler/scheduler_service.py new file mode 100644 index 0000000000..edd8455c3d --- /dev/null +++ b/prowler/providers/kubernetes/services/scheduler/scheduler_service.py @@ -0,0 +1,26 @@ +from prowler.lib.logger import logger +from prowler.providers.kubernetes.lib.service.service import KubernetesService +from prowler.providers.kubernetes.services.core.core_client import core_client + + +################## Scheduler ################## +class Scheduler(KubernetesService): + def __init__(self, audit_info): + super().__init__(audit_info) + self.client = core_client + + self.scheduler_pods = self.__get_scheduler_pod__() + + def __get_scheduler_pod__(self): + try: + scheduler_pods = [] + for pod in self.client.pods.values(): + if pod.namespace == "kube-system" and pod.name.startswith( + "kube-scheduler" + ): + scheduler_pods.append(pod) + return scheduler_pods + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) diff --git a/pyproject.toml b/pyproject.toml index 99a7318c81..e1509e9c63 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,7 @@ detect-secrets = "1.4.0" google-api-python-client = "2.108.0" google-auth-httplib2 = "^0.1.0" jsonschema = "4.18.0" +kubernetes = "^28.1.0" mkdocs = {version = "1.5.3", optional = true} mkdocs-material = {version = "9.4.10", optional = true} msgraph-core = "0.2.2" diff --git a/tests/config/config_test.py b/tests/config/config_test.py index 7f2c0d5652..570b4800a3 100644 --- a/tests/config/config_test.py +++ b/tests/config/config_test.py @@ -54,7 +54,7 @@ config_aws = { class Test_Config: def test_get_aws_available_regions(self): - assert len(get_aws_available_regions()) == 32 + assert len(get_aws_available_regions()) == 33 @mock.patch( "prowler.config.config.requests.get", 
new=mock_prowler_get_latest_release @@ -179,6 +179,13 @@ class Test_Config: assert load_and_validate_config_file(provider, config_test_file) is None + def test_load_and_validate_config_file_kubernetes(self): + path = pathlib.Path(os.path.dirname(os.path.realpath(__file__))) + config_test_file = f"{path}/fixtures/config.yaml" + provider = "kubernetes" + + assert load_and_validate_config_file(provider, config_test_file) is None + def test_load_and_validate_config_file_azure(self): path = pathlib.Path(os.path.dirname(os.path.realpath(__file__))) config_test_file = f"{path}/fixtures/config.yaml" diff --git a/tests/config/fixtures/config.yaml b/tests/config/fixtures/config.yaml index 246731004a..d55503e962 100644 --- a/tests/config/fixtures/config.yaml +++ b/tests/config/fixtures/config.yaml @@ -59,3 +59,6 @@ azure: # GCP Configuration gcp: + +# Kubernetes Configuration +kubernetes: diff --git a/tests/lib/check/custom_checks_metadata_test.py b/tests/lib/check/custom_checks_metadata_test.py index c7f12459d9..5530ce8b9e 100644 --- a/tests/lib/check/custom_checks_metadata_test.py +++ b/tests/lib/check/custom_checks_metadata_test.py @@ -21,6 +21,7 @@ CUSTOM_CHECKS_METADATA_FIXTURE_FILE_NOT_VALID = f"{os.path.dirname(os.path.realp AWS_PROVIDER = "aws" AZURE_PROVIDER = "azure" GCP_PROVIDER = "gcp" +KUBERNETES_PROVIDER = "kubernetes" S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_NAME = "s3_bucket_level_public_access_block" S3_BUCKET_LEVEL_PUBLIC_ACCESS_BLOCK_SEVERITY = "medium" @@ -81,6 +82,11 @@ class TestCustomChecksMetadata: GCP_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE ) == {"Checks": {"bigquery_dataset_cmk_encryption": {"Severity": "low"}}} + def test_parse_custom_checks_metadata_file_for_kubernetes(self): + assert parse_custom_checks_metadata_file( + KUBERNETES_PROVIDER, CUSTOM_CHECKS_METADATA_FIXTURE_FILE + ) == {"Checks": {"bigquery_dataset_cmk_encryption": {"Severity": "low"}}} + def test_parse_custom_checks_metadata_file_for_aws_validation_error(self, caplog): 
caplog.set_level(logging.CRITICAL) diff --git a/tests/lib/check/fixtures/custom_checks_metadata_example.yaml b/tests/lib/check/fixtures/custom_checks_metadata_example.yaml index 744051e4b2..b9b3a65261 100644 --- a/tests/lib/check/fixtures/custom_checks_metadata_example.yaml +++ b/tests/lib/check/fixtures/custom_checks_metadata_example.yaml @@ -13,3 +13,7 @@ CustomChecksMetadata: Checks: bigquery_dataset_cmk_encryption: Severity: low + kubernetes: + Checks: + apiserver_anonymous_requests: + Severity: low diff --git a/tests/lib/cli/parser_test.py b/tests/lib/cli/parser_test.py index a660f7d13d..396a7fadec 100644 --- a/tests/lib/cli/parser_test.py +++ b/tests/lib/cli/parser_test.py @@ -11,11 +11,11 @@ prowler_command = "prowler" # capsys # https://docs.pytest.org/en/7.1.x/how-to/capture-stdout-stderr.html -prowler_default_usage_error = "usage: prowler [-h] [-v] {aws,azure,gcp} ..." +prowler_default_usage_error = "usage: prowler [-h] [-v] {aws,azure,gcp,kubernetes} ..." def mock_get_available_providers(): - return ["aws", "azure", "gcp"] + return ["aws", "azure", "gcp", "kubernetes"] class Test_Parser: @@ -153,6 +153,41 @@ class Test_Parser: assert not parsed.list_categories assert not parsed.credentials_file + def test_default_parser_no_arguments_kubernetes(self): + provider = "kubernetes" + command = [prowler_command, provider] + parsed = self.parser.parse(command) + assert parsed.provider == provider + assert not parsed.quiet + assert len(parsed.output_modes) == 4 + assert "csv" in parsed.output_modes + assert "html" in parsed.output_modes + assert "json" in parsed.output_modes + assert not parsed.output_filename + assert "output" in parsed.output_directory + assert not parsed.verbose + assert not parsed.no_banner + assert not parsed.slack + assert not parsed.unix_timestamp + assert parsed.log_level == "CRITICAL" + assert not parsed.log_file + assert not parsed.only_logs + assert not parsed.checks + assert not parsed.checks_file + assert not parsed.checks_folder + 
assert not parsed.services + assert not parsed.severity + assert not parsed.compliance + assert len(parsed.categories) == 0 + assert not parsed.excluded_checks + assert not parsed.excluded_services + assert not parsed.list_checks + assert not parsed.list_services + assert not parsed.list_compliance + assert not parsed.list_compliance_requirements + assert not parsed.list_categories + assert not parsed.credentials_file + def test_root_parser_version_short(self): command = [prowler_command, "-v"] with pytest.raises(SystemExit) as wrapped_exit: @@ -194,15 +229,18 @@ class Test_Parser: def test_root_parser_azure_provider(self): command = [prowler_command, "azure"] parsed = self.parser.parse(command) - print(parsed) assert parsed.provider == "azure" def test_root_parser_gcp_provider(self): command = [prowler_command, "gcp"] parsed = self.parser.parse(command) - print(parsed) assert parsed.provider == "gcp" + def test_root_parser_kubernetes_provider(self): + command = [prowler_command, "kubernetes"] + parsed = self.parser.parse(command) + assert parsed.provider == "kubernetes" + def test_root_parser_quiet_short(self): command = [prowler_command, "-q"] parsed = self.parser.parse(command) @@ -1096,6 +1134,22 @@ class Test_Parser: assert parsed.project_ids[0] == project_1 assert parsed.project_ids[1] == project_2 + def test_parser_kubernetes_auth_kubeconfig_file(self): + argument = "--kubeconfig-file" + file = "config" + command = [prowler_command, "kubernetes", argument, file] + parsed = self.parser.parse(command) + assert parsed.provider == "kubernetes" + assert parsed.kubeconfig_file == file + + def test_parser_kubernetes_auth_context(self): + argument = "--context" + context = "default" + command = [prowler_command, "kubernetes", argument, context] + parsed = self.parser.parse(command) + assert parsed.provider == "kubernetes" + assert parsed.context == context + def test_validate_azure_region_valid_regions(self): expected_regions = [ "AzureChinaCloud", diff --git 
a/tests/providers/common/audit_info_test.py b/tests/providers/common/audit_info_test.py index 14a9256882..fb9e7ba4d8 100644 --- a/tests/providers/common/audit_info_test.py +++ b/tests/providers/common/audit_info_test.py @@ -22,6 +22,8 @@ from prowler.providers.common.audit_info import ( from prowler.providers.common.models import Audit_Metadata from prowler.providers.gcp.gcp_provider import GCP_Provider from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info +from prowler.providers.kubernetes.kubernetes_provider import Kubernetes_Provider +from prowler.providers.kubernetes.lib.audit_info.models import Kubernetes_Audit_Info EXAMPLE_AMI_ID = "ami-12c6146b" AWS_ACCOUNT_NUMBER = "123456789012" @@ -93,6 +95,14 @@ def mock_get_project_ids(*_): return ["project"] +def mock_set_kubernetes_credentials(*_): + return ("apiclient", "context") + + +def mock_get_context_user_roles(*_): + return [] + + class Test_Set_Audit_Info: # Mocked Audit Info def set_mocked_audit_info(self): @@ -278,6 +288,30 @@ class Test_Set_Audit_Info: audit_info = set_provider_audit_info(provider, arguments) assert isinstance(audit_info, GCP_Audit_Info) + @patch.object( + Kubernetes_Provider, "__set_credentials__", new=mock_set_kubernetes_credentials + ) + @patch.object( + Kubernetes_Provider, "get_context_user_roles", new=mock_get_context_user_roles + ) + def test_set_audit_info_kubernetes(self): + provider = "kubernetes" + arguments = { + "profile": None, + "role": None, + "session_duration": None, + "external_id": None, + "regions": None, + "organizations_role": None, + "subscriptions": None, + "context": "default", + "kubeconfig_file": "config", + "config_file": default_config_file_path, + } + + audit_info = set_provider_audit_info(provider, arguments) + assert isinstance(audit_info, Kubernetes_Audit_Info) + @mock_resourcegroupstaggingapi @mock_ec2 def test_get_tagged_resources(self): diff --git a/tests/providers/common/common_outputs_test.py 
b/tests/providers/common/common_outputs_test.py index 311f0e839f..b06020fb3b 100644 --- a/tests/providers/common/common_outputs_test.py +++ b/tests/providers/common/common_outputs_test.py @@ -16,10 +16,12 @@ from prowler.providers.common.outputs import ( Aws_Output_Options, Azure_Output_Options, Gcp_Output_Options, + Kubernetes_Output_Options, get_provider_output_model, set_provider_output_options, ) from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info +from prowler.providers.kubernetes.lib.audit_info.models import Kubernetes_Audit_Info AWS_ACCOUNT_NUMBER = "012345678912" DATETIME = "20230101120000" @@ -51,6 +53,20 @@ class Test_Common_Output_Options: ) return audit_info + # Mocked Kubernetes Audit Info + def set_mocked_kubernetes_audit_info(self): + audit_info = Kubernetes_Audit_Info( + api_client=None, + context={ + "name": "test-context", + "context": {"cluster": "test-cluster", "user": "XXXXXXXXX"}, + }, + audit_resources=None, + audit_metadata=None, + audit_config=None, + ) + return audit_info + # Mocked AWS Audit Info def set_mocked_aws_audit_info(self): audit_info = AWS_Audit_Info( @@ -147,6 +163,37 @@ class Test_Common_Output_Options: # Delete testing directory rmdir(arguments.output_directory) + def test_set_provider_output_options_kubernetes(self): + # Set the cloud provider + provider = "kubernetes" + # Set the arguments passed + arguments = Namespace() + arguments.quiet = True + arguments.output_modes = ["html", "csv", "json"] + arguments.output_directory = "output_test_directory" + arguments.verbose = True + arguments.output_filename = "output_test_filename" + arguments.only_logs = False + arguments.unix_timestamp = False + + audit_info = self.set_mocked_kubernetes_audit_info() + mutelist_file = "" + bulk_checks_metadata = {} + output_options = set_provider_output_options( + provider, arguments, audit_info, mutelist_file, bulk_checks_metadata + ) + assert isinstance(output_options, Kubernetes_Output_Options) + assert 
output_options.is_quiet + assert output_options.output_modes == ["html", "csv", "json"] + assert output_options.output_directory == arguments.output_directory + assert output_options.mutelist_file == "" + assert output_options.bulk_checks_metadata == {} + assert output_options.verbose + assert output_options.output_filename == arguments.output_filename + + # Delete testing directory + rmdir(arguments.output_directory) + def test_set_provider_output_options_aws_no_output_filename(self): # Set the cloud provider provider = "aws" @@ -362,7 +409,7 @@ class Test_Common_Output_Options: ) def test_gcp_get_assessment_summary(self): - # Mock Azure Audit Info + # Mock GCP Audit Info audit_info = self.set_mocked_gcp_audit_info() profile = "default" assert ( @@ -395,11 +442,54 @@ class Test_Common_Output_Options: """ ) + def test_kubernetes_get_assessment_summary(self): + # Mock Kubernetes Audit Info + audit_info = self.set_mocked_kubernetes_audit_info() + assert ( + get_assessment_summary(audit_info) + == """ +
    +
    +
    + Kubernetes Assessment Summary +
    +
      +
    • + Kubernetes Context: """ + + audit_info.context["name"] + + """ +
    • +
    +
    +
    +
    +
    +
    + Kubernetes Credentials +
    +
      +
    • + Kubernetes Cluster: """ + + audit_info.context["context"]["cluster"] + + """ +
    • +
    • + Kubernetes User: """ + + audit_info.context["context"]["user"] + + """ +
    • +
    +
    +
    + """ + ) + def test_get_provider_output_model(self): audit_info_class_names = [ "AWS_Audit_Info", "GCP_Audit_Info", "Azure_Audit_Info", + "Kubernetes_Audit_Info", ] for class_name in audit_info_class_names: provider_prefix = class_name.split("_", 1)[0].lower().capitalize() From c08e244c958357ec13be76a69d6fba7a123f73c7 Mon Sep 17 00:00:00 2001 From: Sergio Garcia <38561120+sergargar@users.noreply.github.com> Date: Tue, 9 Jan 2024 11:35:44 +0100 Subject: [PATCH 08/10] feat(status): add --status flag (#3238) --- docs/tutorials/misc.md | 6 +++--- prowler/config/config.py | 3 ++- prowler/lib/banner.py | 2 +- prowler/lib/cli/parser.py | 9 +++++---- prowler/lib/outputs/outputs.py | 13 ++++++++----- .../providers/aws/lib/security_hub/security_hub.py | 4 ++-- prowler/providers/common/outputs.py | 4 ++-- tests/lib/cli/parser_test.py | 11 +++-------- 8 files changed, 26 insertions(+), 26 deletions(-) diff --git a/docs/tutorials/misc.md b/docs/tutorials/misc.md index 1d42e8b073..c12c94d87e 100644 --- a/docs/tutorials/misc.md +++ b/docs/tutorials/misc.md @@ -9,10 +9,10 @@ Execute Prowler in verbose mode (like in Version 2): ```console prowler --verbose ``` -## Show only Fails -Prowler can only display the failed findings: +## Filter findings by status +Prowler can filter the findings by their status: ```console -prowler -q/--quiet +prowler --status [PASS, FAIL, INFO] ``` ## Disable Exit Code 3 Prowler does not trigger exit code 3 with failed checks: diff --git a/prowler/config/config.py b/prowler/config/config.py index 62d41b6758..73b11da450 100644 --- a/prowler/config/config.py +++ b/prowler/config/config.py @@ -22,6 +22,8 @@ gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accd orange_color = "\033[38;5;208m" banner_color = "\033[1;92m" +finding_statuses = ["PASS", "FAIL", "INFO"] + # Compliance actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__))) @@ -50,7 +52,6 @@ aws_services_json_file = "aws_regions_by_service.json" # 
gcp_zones_json_file = "gcp_zones.json" default_output_directory = getcwd() + "/output" - output_file_timestamp = timestamp.strftime("%Y%m%d%H%M%S") timestamp_iso = timestamp.isoformat(sep=" ", timespec="seconds") csv_file_suffix = ".csv" diff --git a/prowler/lib/banner.py b/prowler/lib/banner.py index f99f092404..6989dce2f1 100644 --- a/prowler/lib/banner.py +++ b/prowler/lib/banner.py @@ -15,7 +15,7 @@ def print_banner(args): """ print(banner) - if args.verbose or args.quiet: + if args.verbose: print( f""" Color code for results: diff --git a/prowler/lib/cli/parser.py b/prowler/lib/cli/parser.py index 8db0782174..d13b63ed1e 100644 --- a/prowler/lib/cli/parser.py +++ b/prowler/lib/cli/parser.py @@ -7,6 +7,7 @@ from prowler.config.config import ( check_current_version, default_config_file_path, default_output_directory, + finding_statuses, ) from prowler.providers.common.arguments import ( init_providers_parser, @@ -115,10 +116,10 @@ Detailed documentation at https://docs.prowler.cloud "Outputs" ) common_outputs_parser.add_argument( - "-q", - "--quiet", - action="store_true", - help="Store or send only Prowler failed findings", + "--status", + nargs="+", + help=f"Filter by the status of the findings {finding_statuses}", + choices=finding_statuses, ) common_outputs_parser.add_argument( "-M", diff --git a/prowler/lib/outputs/outputs.py b/prowler/lib/outputs/outputs.py index 091a21e866..e31b9b1f3c 100644 --- a/prowler/lib/outputs/outputs.py +++ b/prowler/lib/outputs/outputs.py @@ -20,7 +20,7 @@ from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info -def stdout_report(finding, color, verbose, is_quiet): +def stdout_report(finding, color, verbose, status): if finding.check_metadata.Provider == "aws": details = finding.region if finding.check_metadata.Provider == "azure": @@ -30,7 +30,7 @@ def stdout_report(finding, color, verbose, is_quiet): if finding.check_metadata.Provider == 
"kubernetes": details = finding.namespace.lower() - if verbose and not (is_quiet and finding.status != "FAIL"): + if verbose and (not status or finding.status in status): print( f"\t{color}{finding.status}{Style.RESET_ALL} {details}: {finding.status_extended}" ) @@ -62,12 +62,15 @@ def report(check_findings, output_options, audit_info): # Print findings by stdout color = set_report_color(finding.status) stdout_report( - finding, color, output_options.verbose, output_options.is_quiet + finding, color, output_options.verbose, output_options.status ) if file_descriptors: - # Check if --quiet to only add fails to outputs - if not (finding.status != "FAIL" and output_options.is_quiet): + # Check if --status is enabled and if the filter applies + if ( + not output_options.status + or finding.status in output_options.status + ): input_compliance_frameworks = list( set(output_options.output_modes).intersection( available_compliance_frameworks diff --git a/prowler/providers/aws/lib/security_hub/security_hub.py b/prowler/providers/aws/lib/security_hub/security_hub.py index c8aa716843..b301ae90ea 100644 --- a/prowler/providers/aws/lib/security_hub/security_hub.py +++ b/prowler/providers/aws/lib/security_hub/security_hub.py @@ -28,8 +28,8 @@ def prepare_security_hub_findings( if finding.region not in enabled_regions: continue - # Handle quiet mode - if output_options.is_quiet and finding.status != "FAIL": + # Handle status filters, if any + if not output_options.status or finding.status in output_options.status: continue # Get the finding region diff --git a/prowler/providers/common/outputs.py b/prowler/providers/common/outputs.py index 8221e1345f..0b1a4c6f8c 100644 --- a/prowler/providers/common/outputs.py +++ b/prowler/providers/common/outputs.py @@ -46,7 +46,7 @@ def get_provider_output_model(audit_info_class_name): @dataclass class Provider_Output_Options: - is_quiet: bool + status: bool output_modes: list output_directory: str mutelist_file: str @@ -57,7 +57,7 @@ class 
Provider_Output_Options: unix_timestamp: bool def __init__(self, arguments, mutelist_file, bulk_checks_metadata): - self.is_quiet = arguments.quiet + self.status = arguments.status self.output_modes = arguments.output_modes self.output_directory = arguments.output_directory self.verbose = arguments.verbose diff --git a/tests/lib/cli/parser_test.py b/tests/lib/cli/parser_test.py index 396a7fadec..77364501fd 100644 --- a/tests/lib/cli/parser_test.py +++ b/tests/lib/cli/parser_test.py @@ -241,15 +241,10 @@ class Test_Parser: parsed = self.parser.parse(command) assert parsed.provider == "kubernetes" - def test_root_parser_quiet_short(self): - command = [prowler_command, "-q"] + def test_root_parser_status(self): + command = [prowler_command, "--status"] parsed = self.parser.parse(command) - assert parsed.quiet - - def test_root_parser_quiet_long(self): - command = [prowler_command, "--quiet"] - parsed = self.parser.parse(command) - assert parsed.quiet + assert parsed.status def test_root_parser_exit_code_3_short(self): command = [prowler_command, "-z"] From 24efb34d9119ba2a80a1abec1c64e14373dec90e Mon Sep 17 00:00:00 2001 From: Sergio Garcia <38561120+sergargar@users.noreply.github.com> Date: Tue, 9 Jan 2024 18:08:00 +0100 Subject: [PATCH 09/10] chore(manual status): change INFO to MANUAL status (#3254) --- docs/developer-guide/checks.md | 2 +- docs/tutorials/misc.md | 2 +- docs/tutorials/mutelist.md | 2 +- prowler/config/config.py | 2 +- prowler/lib/banner.py | 2 +- prowler/lib/outputs/compliance/compliance.py | 2 +- prowler/lib/outputs/html.py | 2 +- prowler/lib/outputs/outputs.py | 4 ++-- prowler/providers/aws/lib/security_hub/security_hub.py | 4 ++-- .../account_maintain_current_contact_details.py | 4 ++-- .../account_security_contact_information_is_registered.py | 4 ++-- ...nt_security_questions_are_registered_in_the_aws_account.py | 4 ++-- .../cloudtrail_bucket_requires_mfa_delete.py | 2 +- .../cloudtrail_logs_s3_bucket_access_logging_enabled.py | 2 +- 
.../cloudtrail_logs_s3_bucket_is_not_publicly_accessible.py | 2 +- .../trustedadvisor_errors_and_warnings.py | 2 +- tests/lib/outputs/outputs_test.py | 2 +- tests/providers/aws/lib/security_hub/security_hub_test.py | 4 ++-- .../cloudtrail_bucket_requires_mfa_delete_test.py | 4 ++-- .../cloudtrail_logs_s3_bucket_access_logging_enabled_test.py | 2 +- ...oudtrail_logs_s3_bucket_is_not_publicly_accessible_test.py | 2 +- .../trustedadvisor_errors_and_warnings_test.py | 2 +- 22 files changed, 29 insertions(+), 29 deletions(-) diff --git a/docs/developer-guide/checks.md b/docs/developer-guide/checks.md index affa6a1d16..9f72221daf 100644 --- a/docs/developer-guide/checks.md +++ b/docs/developer-guide/checks.md @@ -102,7 +102,7 @@ All the checks MUST fill the `report.status` and `report.status_extended` with t - Status -- `report.status` - `PASS` --> If the check is passing against the configured value. - `FAIL` --> If the check is passing against the configured value. - - `INFO` --> This value cannot be used unless a manual operation is required in order to determine if the `report.status` is whether `PASS` or `FAIL`. + - `MANUAL` --> This value cannot be used unless a manual operation is required in order to determine if the `report.status` is whether `PASS` or `FAIL`. 
- Status Extended -- `report.status_extended` - MUST end in a dot `.` - MUST include the service audited with the resource and a brief explanation of the result generated, e.g.: `EC2 AMI ami-0123456789 is not public.` diff --git a/docs/tutorials/misc.md b/docs/tutorials/misc.md index c12c94d87e..74fa48dbd6 100644 --- a/docs/tutorials/misc.md +++ b/docs/tutorials/misc.md @@ -12,7 +12,7 @@ prowler --verbose ## Filter findings by status Prowler can filter the findings by their status: ```console -prowler --status [PASS, FAIL, INFO] +prowler --status [PASS, FAIL, MANUAL] ``` ## Disable Exit Code 3 Prowler does not trigger exit code 3 with failed checks: diff --git a/docs/tutorials/mutelist.md b/docs/tutorials/mutelist.md index b14b265028..8320a4550d 100644 --- a/docs/tutorials/mutelist.md +++ b/docs/tutorials/mutelist.md @@ -1,7 +1,7 @@ # Mute Listing Sometimes you may find resources that are intentionally configured in a certain way that may be a bad practice but it is all right with it, for example an AWS S3 Bucket open to the internet hosting a web site, or an AWS Security Group with an open port needed in your use case. -Mute List option works along with other options and adds a `MUTED` instead of `INFO`, `PASS` or `FAIL` to any output format. +Mute List option works along with other options and adds a `MUTED` instead of `MANUAL`, `PASS` or `FAIL` to any output format. You can use `-w`/`--mutelist-file` with the path of your mutelist yaml file, but first, let's review the syntax. 
diff --git a/prowler/config/config.py b/prowler/config/config.py index 73b11da450..b7e66ecf94 100644 --- a/prowler/config/config.py +++ b/prowler/config/config.py @@ -22,7 +22,7 @@ gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accd orange_color = "\033[38;5;208m" banner_color = "\033[1;92m" -finding_statuses = ["PASS", "FAIL", "INFO"] +finding_statuses = ["PASS", "FAIL", "MANUAL"] # Compliance actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__))) diff --git a/prowler/lib/banner.py b/prowler/lib/banner.py index 6989dce2f1..a60edc9801 100644 --- a/prowler/lib/banner.py +++ b/prowler/lib/banner.py @@ -19,7 +19,7 @@ def print_banner(args): print( f""" Color code for results: -- {Fore.YELLOW}INFO (Information){Style.RESET_ALL} +- {Fore.YELLOW}MANUAL (Manual check){Style.RESET_ALL} - {Fore.GREEN}PASS (Recommended value){Style.RESET_ALL} - {orange_color}MUTED (Muted by muted list){Style.RESET_ALL} - {Fore.RED}FAIL (Fix required){Style.RESET_ALL} diff --git a/prowler/lib/outputs/compliance/compliance.py b/prowler/lib/outputs/compliance/compliance.py index b567546590..b019d8f7e3 100644 --- a/prowler/lib/outputs/compliance/compliance.py +++ b/prowler/lib/outputs/compliance/compliance.py @@ -31,7 +31,7 @@ def add_manual_controls( manual_finding = Check_Report( output_options.bulk_checks_metadata["manual_check"].json() ) - manual_finding.status = "INFO" + manual_finding.status = "MANUAL" manual_finding.status_extended = "Manual check" manual_finding.resource_id = "manual_check" manual_finding.resource_name = "Manual check" diff --git a/prowler/lib/outputs/html.py b/prowler/lib/outputs/html.py index 99d76aa117..104f0c12f8 100644 --- a/prowler/lib/outputs/html.py +++ b/prowler/lib/outputs/html.py @@ -170,7 +170,7 @@ def add_html_header(file_descriptor, audit_info): def fill_html(file_descriptor, finding, output_options): try: row_class = "p-3 mb-2 bg-success-custom" - if finding.status == "INFO": + if finding.status == 
"MANUAL": row_class = "table-info" elif finding.status == "FAIL": row_class = "table-danger" diff --git a/prowler/lib/outputs/outputs.py b/prowler/lib/outputs/outputs.py index e31b9b1f3c..0886632563 100644 --- a/prowler/lib/outputs/outputs.py +++ b/prowler/lib/outputs/outputs.py @@ -152,7 +152,7 @@ def report(check_findings, output_options, audit_info): file_descriptors["json-ocsf"].write(",") else: # No service resources in the whole account - color = set_report_color("INFO") + color = set_report_color("MANUAL") if output_options.verbose: print(f"\t{color}INFO{Style.RESET_ALL} There are no resources") # Separator between findings and bar @@ -179,7 +179,7 @@ def set_report_color(status: str) -> str: color = Fore.BLACK elif status == "MUTED": color = orange_color - elif status == "INFO": + elif status == "MANUAL": color = Fore.YELLOW else: raise Exception("Invalid Report Status. Must be PASS, FAIL, ERROR or MUTED") diff --git a/prowler/providers/aws/lib/security_hub/security_hub.py b/prowler/providers/aws/lib/security_hub/security_hub.py index b301ae90ea..4c7f682a09 100644 --- a/prowler/providers/aws/lib/security_hub/security_hub.py +++ b/prowler/providers/aws/lib/security_hub/security_hub.py @@ -20,8 +20,8 @@ def prepare_security_hub_findings( security_hub_findings_per_region[region] = [] for finding in findings: - # We don't send the INFO findings to AWS Security Hub - if finding.status == "INFO": + # We don't send the MANUAL findings to AWS Security Hub + if finding.status == "MANUAL": continue # We don't send findings to not enabled regions diff --git a/prowler/providers/aws/services/account/account_maintain_current_contact_details/account_maintain_current_contact_details.py b/prowler/providers/aws/services/account/account_maintain_current_contact_details/account_maintain_current_contact_details.py index 19f1810822..fd9eff4db7 100644 --- a/prowler/providers/aws/services/account/account_maintain_current_contact_details/account_maintain_current_contact_details.py 
+++ b/prowler/providers/aws/services/account/account_maintain_current_contact_details/account_maintain_current_contact_details.py @@ -10,6 +10,6 @@ class account_maintain_current_contact_details(Check): report.region = account_client.region report.resource_id = account_client.audited_account report.resource_arn = account_client.audited_account_arn - report.status = "INFO" - report.status_extended = "Manual check: Login to the AWS Console. Choose your account name on the top right of the window -> My Account -> Contact Information." + report.status = "MANUAL" + report.status_extended = "Login to the AWS Console. Choose your account name on the top right of the window -> My Account -> Contact Information." return [report] diff --git a/prowler/providers/aws/services/account/account_security_contact_information_is_registered/account_security_contact_information_is_registered.py b/prowler/providers/aws/services/account/account_security_contact_information_is_registered/account_security_contact_information_is_registered.py index 5e41e8cd6b..f2ac5dd5c5 100644 --- a/prowler/providers/aws/services/account/account_security_contact_information_is_registered/account_security_contact_information_is_registered.py +++ b/prowler/providers/aws/services/account/account_security_contact_information_is_registered/account_security_contact_information_is_registered.py @@ -10,6 +10,6 @@ class account_security_contact_information_is_registered(Check): report.region = account_client.region report.resource_id = account_client.audited_account report.resource_arn = account_client.audited_account_arn - report.status = "INFO" - report.status_extended = "Manual check: Login to the AWS Console. Choose your account name on the top right of the window -> My Account -> Alternate Contacts -> Security Section." + report.status = "MANUAL" + report.status_extended = "Login to the AWS Console. 
Choose your account name on the top right of the window -> My Account -> Alternate Contacts -> Security Section." return [report] diff --git a/prowler/providers/aws/services/account/account_security_questions_are_registered_in_the_aws_account/account_security_questions_are_registered_in_the_aws_account.py b/prowler/providers/aws/services/account/account_security_questions_are_registered_in_the_aws_account/account_security_questions_are_registered_in_the_aws_account.py index 5b51f7ffdf..1ec4d53723 100644 --- a/prowler/providers/aws/services/account/account_security_questions_are_registered_in_the_aws_account/account_security_questions_are_registered_in_the_aws_account.py +++ b/prowler/providers/aws/services/account/account_security_questions_are_registered_in_the_aws_account/account_security_questions_are_registered_in_the_aws_account.py @@ -10,6 +10,6 @@ class account_security_questions_are_registered_in_the_aws_account(Check): report.region = account_client.region report.resource_id = account_client.audited_account report.resource_arn = account_client.audited_account_arn - report.status = "INFO" - report.status_extended = "Manual check: Login to the AWS Console as root. Choose your account name on the top right of the window -> My Account -> Configure Security Challenge Questions." + report.status = "MANUAL" + report.status_extended = "Login to the AWS Console as root. Choose your account name on the top right of the window -> My Account -> Configure Security Challenge Questions." 
return [report] diff --git a/prowler/providers/aws/services/cloudtrail/cloudtrail_bucket_requires_mfa_delete/cloudtrail_bucket_requires_mfa_delete.py b/prowler/providers/aws/services/cloudtrail/cloudtrail_bucket_requires_mfa_delete/cloudtrail_bucket_requires_mfa_delete.py index 21582011de..92b0edc1b1 100644 --- a/prowler/providers/aws/services/cloudtrail/cloudtrail_bucket_requires_mfa_delete/cloudtrail_bucket_requires_mfa_delete.py +++ b/prowler/providers/aws/services/cloudtrail/cloudtrail_bucket_requires_mfa_delete/cloudtrail_bucket_requires_mfa_delete.py @@ -27,7 +27,7 @@ class cloudtrail_bucket_requires_mfa_delete(Check): report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) has MFA delete enabled." # check if trail bucket is a cross account bucket if not trail_bucket_is_in_account: - report.status = "INFO" + report.status = "MANUAL" report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually." 
findings.append(report) diff --git a/prowler/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_access_logging_enabled/cloudtrail_logs_s3_bucket_access_logging_enabled.py b/prowler/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_access_logging_enabled/cloudtrail_logs_s3_bucket_access_logging_enabled.py index d7ee50e0a2..74120b782a 100644 --- a/prowler/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_access_logging_enabled/cloudtrail_logs_s3_bucket_access_logging_enabled.py +++ b/prowler/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_access_logging_enabled/cloudtrail_logs_s3_bucket_access_logging_enabled.py @@ -35,7 +35,7 @@ class cloudtrail_logs_s3_bucket_access_logging_enabled(Check): # check if trail is delivering logs in a cross account bucket if not trail_bucket_is_in_account: - report.status = "INFO" + report.status = "MANUAL" report.status_extended = f"Trail {trail.name} is delivering logs in a cross-account bucket {trail_bucket} in another account out of Prowler's permissions scope, please check it manually." 
findings.append(report) diff --git a/prowler/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_is_not_publicly_accessible/cloudtrail_logs_s3_bucket_is_not_publicly_accessible.py b/prowler/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_is_not_publicly_accessible/cloudtrail_logs_s3_bucket_is_not_publicly_accessible.py index 4577944256..3b6fc76646 100644 --- a/prowler/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_is_not_publicly_accessible/cloudtrail_logs_s3_bucket_is_not_publicly_accessible.py +++ b/prowler/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_is_not_publicly_accessible/cloudtrail_logs_s3_bucket_is_not_publicly_accessible.py @@ -41,7 +41,7 @@ class cloudtrail_logs_s3_bucket_is_not_publicly_accessible(Check): break # check if trail bucket is a cross account bucket if not trail_bucket_is_in_account: - report.status = "INFO" + report.status = "MANUAL" report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually." findings.append(report) diff --git a/prowler/providers/aws/services/trustedadvisor/trustedadvisor_errors_and_warnings/trustedadvisor_errors_and_warnings.py b/prowler/providers/aws/services/trustedadvisor/trustedadvisor_errors_and_warnings/trustedadvisor_errors_and_warnings.py index 980735d8ff..459fec488c 100644 --- a/prowler/providers/aws/services/trustedadvisor/trustedadvisor_errors_and_warnings/trustedadvisor_errors_and_warnings.py +++ b/prowler/providers/aws/services/trustedadvisor/trustedadvisor_errors_and_warnings/trustedadvisor_errors_and_warnings.py @@ -23,7 +23,7 @@ class trustedadvisor_errors_and_warnings(Check): findings.append(report) else: report = Check_Report_AWS(self.metadata()) - report.status = "INFO" + report.status = "MANUAL" report.status_extended = "Amazon Web Services Premium Support Subscription is required to use this service." 
report.resource_id = trustedadvisor_client.audited_account report.resource_arn = trustedadvisor_client.audited_account_arn diff --git a/tests/lib/outputs/outputs_test.py b/tests/lib/outputs/outputs_test.py index 97f9aca566..bd00f77e55 100644 --- a/tests/lib/outputs/outputs_test.py +++ b/tests/lib/outputs/outputs_test.py @@ -1238,7 +1238,7 @@ class Test_Outputs: def test_extract_findings_statistics_info_resources(self): finding_1 = mock.MagicMock() - finding_1.status = "INFO" + finding_1.status = "MANUAL" finding_1.resource_id = "test_resource_1" finding_2 = mock.MagicMock() finding_2.status = "PASS" diff --git a/tests/providers/aws/lib/security_hub/security_hub_test.py b/tests/providers/aws/lib/security_hub/security_hub_test.py index dad3a7103e..762a418241 100644 --- a/tests/providers/aws/lib/security_hub/security_hub_test.py +++ b/tests/providers/aws/lib/security_hub/security_hub_test.py @@ -141,10 +141,10 @@ class Test_SecurityHub: ], } - def test_prepare_security_hub_findings_quiet_INFO_finding(self): + def test_prepare_security_hub_findings_quiet_MANUAL_finding(self): enabled_regions = [AWS_REGION_EU_WEST_1] output_options = self.set_mocked_output_options(is_quiet=False) - findings = [self.generate_finding("INFO", AWS_REGION_EU_WEST_1)] + findings = [self.generate_finding("MANUAL", AWS_REGION_EU_WEST_1)] audit_info = set_mocked_aws_audit_info( audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2] ) diff --git a/tests/providers/aws/services/cloudtrail/cloudtrail_bucket_requires_mfa_delete/cloudtrail_bucket_requires_mfa_delete_test.py b/tests/providers/aws/services/cloudtrail/cloudtrail_bucket_requires_mfa_delete/cloudtrail_bucket_requires_mfa_delete_test.py index d47afb2b0a..87410db089 100644 --- a/tests/providers/aws/services/cloudtrail/cloudtrail_bucket_requires_mfa_delete/cloudtrail_bucket_requires_mfa_delete_test.py +++ b/tests/providers/aws/services/cloudtrail/cloudtrail_bucket_requires_mfa_delete/cloudtrail_bucket_requires_mfa_delete_test.py @@ 
-209,7 +209,7 @@ class Test_cloudtrail_bucket_requires_mfa_delete: check = cloudtrail_bucket_requires_mfa_delete() result = check.execute() assert len(result) == 1 - assert result[0].status == "INFO" + assert result[0].status == "MANUAL" assert ( result[0].status_extended == f"Trail {trail_name_us} bucket ({bucket_name_us}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually." @@ -262,7 +262,7 @@ class Test_cloudtrail_bucket_requires_mfa_delete: check = cloudtrail_bucket_requires_mfa_delete() result = check.execute() assert len(result) == 1 - assert result[0].status == "INFO" + assert result[0].status == "MANUAL" assert ( result[0].status_extended == f"Trail {trail_name_us} bucket ({bucket_name_us}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually." diff --git a/tests/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_access_logging_enabled/cloudtrail_logs_s3_bucket_access_logging_enabled_test.py b/tests/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_access_logging_enabled/cloudtrail_logs_s3_bucket_access_logging_enabled_test.py index 80efd00474..bbbe4b4c72 100644 --- a/tests/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_access_logging_enabled/cloudtrail_logs_s3_bucket_access_logging_enabled_test.py +++ b/tests/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_access_logging_enabled/cloudtrail_logs_s3_bucket_access_logging_enabled_test.py @@ -220,7 +220,7 @@ class Test_cloudtrail_logs_s3_bucket_access_logging_enabled: result = check.execute() assert len(result) == 1 - assert result[0].status == "INFO" + assert result[0].status == "MANUAL" assert search( "in another account out of Prowler's permissions scope, please check it manually", result[0].status_extended, diff --git 
a/tests/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_is_not_publicly_accessible/cloudtrail_logs_s3_bucket_is_not_publicly_accessible_test.py b/tests/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_is_not_publicly_accessible/cloudtrail_logs_s3_bucket_is_not_publicly_accessible_test.py index d27354ee8c..e1a682fd80 100644 --- a/tests/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_is_not_publicly_accessible/cloudtrail_logs_s3_bucket_is_not_publicly_accessible_test.py +++ b/tests/providers/aws/services/cloudtrail/cloudtrail_logs_s3_bucket_is_not_publicly_accessible/cloudtrail_logs_s3_bucket_is_not_publicly_accessible_test.py @@ -284,7 +284,7 @@ class Test_cloudtrail_logs_s3_bucket_is_not_publicly_accessible: result = check.execute() assert len(result) == 1 - assert result[0].status == "INFO" + assert result[0].status == "MANUAL" assert result[0].resource_id == trail_name_us assert result[0].resource_arn == trail_us["TrailARN"] assert search( diff --git a/tests/providers/aws/services/trustedadvisor/trustedadvisor_errors_and_warnings/trustedadvisor_errors_and_warnings_test.py b/tests/providers/aws/services/trustedadvisor/trustedadvisor_errors_and_warnings/trustedadvisor_errors_and_warnings_test.py index 53250fc585..5c6f95d140 100644 --- a/tests/providers/aws/services/trustedadvisor/trustedadvisor_errors_and_warnings/trustedadvisor_errors_and_warnings_test.py +++ b/tests/providers/aws/services/trustedadvisor/trustedadvisor_errors_and_warnings/trustedadvisor_errors_and_warnings_test.py @@ -31,7 +31,7 @@ class Test_trustedadvisor_errors_and_warnings: check = trustedadvisor_errors_and_warnings() result = check.execute() assert len(result) == 1 - assert result[0].status == "INFO" + assert result[0].status == "MANUAL" assert ( result[0].status_extended == "Amazon Web Services Premium Support Subscription is required to use this service." 
From 36fc575e40a8d9c48eb659532e3001a83b707614 Mon Sep 17 00:00:00 2001 From: Nacho Rivera Date: Mon, 15 Jan 2024 16:55:53 +0100 Subject: [PATCH 10/10] feat(AwsProvider): include new structure for AWS provider (#3252) Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com> Co-authored-by: Sergio Garcia --- prowler/__main__.py | 9 +- prowler/lib/check/check.py | 12 +- prowler/lib/outputs/compliance/compliance.py | 40 +- prowler/lib/outputs/models.py | 4 +- prowler/providers/aws/aws_provider.py | 4 +- prowler/providers/aws/aws_provider_new.py | 621 ++++++++++++++++++ .../aws/lib/audit_info/audit_info.py | 4 +- .../providers/aws/lib/audit_info/models.py | 14 +- .../aws/lib/credentials/credentials.py | 4 +- .../aws/lib/organizations/organizations.py | 6 +- prowler/providers/aws/lib/service/service.py | 30 +- .../accessanalyzer/accessanalyzer_client.py | 4 +- .../accessanalyzer/accessanalyzer_service.py | 4 +- .../aws/services/account/account_client.py | 4 +- .../aws/services/account/account_service.py | 4 +- .../providers/aws/services/acm/acm_client.py | 4 +- .../providers/aws/services/acm/acm_service.py | 4 +- .../services/apigateway/apigateway_client.py | 4 +- .../services/apigateway/apigateway_service.py | 4 +- .../apigatewayv2/apigatewayv2_client.py | 4 +- .../apigatewayv2/apigatewayv2_service.py | 4 +- .../services/appstream/appstream_client.py | 4 +- .../services/appstream/appstream_service.py | 4 +- .../aws/services/athena/athena_client.py | 4 +- .../aws/services/athena/athena_service.py | 4 +- .../athena_workgroup_encryption.py | 2 +- .../athena_workgroup_enforce_configuration.py | 2 +- .../autoscaling/autoscaling_client.py | 4 +- .../autoscaling/autoscaling_service.py | 4 +- .../services/awslambda/awslambda_client.py | 4 +- .../services/awslambda/awslambda_service.py | 6 +- .../aws/services/backup/backup_client.py | 4 +- .../aws/services/backup/backup_service.py | 4 +- .../cloudformation/cloudformation_client.py | 4 +- 
.../cloudformation/cloudformation_service.py | 4 +- .../services/cloudfront/cloudfront_client.py | 4 +- .../services/cloudfront/cloudfront_service.py | 4 +- .../services/cloudtrail/cloudtrail_client.py | 4 +- .../cloudtrail_s3_dataevents_read_enabled.py | 2 +- .../cloudtrail_s3_dataevents_write_enabled.py | 2 +- .../services/cloudtrail/cloudtrail_service.py | 4 +- .../services/cloudwatch/cloudwatch_client.py | 4 +- .../services/cloudwatch/cloudwatch_service.py | 10 +- .../aws/services/cloudwatch/logs_client.py | 4 +- .../codeartifact/codeartifact_client.py | 4 +- .../codeartifact/codeartifact_service.py | 4 +- .../services/codebuild/codebuild_client.py | 4 +- .../services/codebuild/codebuild_service.py | 4 +- .../aws/services/config/config_client.py | 4 +- .../aws/services/config/config_service.py | 4 +- .../directoryservice_client.py | 4 +- .../directoryservice_service.py | 4 +- .../providers/aws/services/dlm/dlm_client.py | 4 +- .../providers/aws/services/dlm/dlm_service.py | 4 +- .../services/documentdb/documentdb_client.py | 4 +- .../services/documentdb/documentdb_service.py | 4 +- .../providers/aws/services/drs/drs_client.py | 4 +- .../providers/aws/services/drs/drs_service.py | 4 +- .../aws/services/dynamodb/dax_client.py | 4 +- .../aws/services/dynamodb/dynamodb_client.py | 4 +- .../aws/services/dynamodb/dynamodb_service.py | 8 +- .../providers/aws/services/ec2/ec2_client.py | 4 +- .../ec2_ebs_default_encryption.py | 3 +- .../ec2_networkacl_allow_ingress_any_port.py | 2 +- ...c2_networkacl_allow_ingress_tcp_port_22.py | 2 +- ..._networkacl_allow_ingress_tcp_port_3389.py | 2 +- ...allow_ingress_from_internet_to_any_port.py | 2 +- ...om_internet_to_port_mongodb_27017_27018.py | 2 +- ...ess_from_internet_to_tcp_ftp_port_20_21.py | 2 +- ...ow_ingress_from_internet_to_tcp_port_22.py | 2 +- ..._ingress_from_internet_to_tcp_port_3389.py | 2 +- ...et_to_tcp_port_cassandra_7199_9160_8888.py | 2 +- ...ort_elasticsearch_kibana_9200_9300_5601.py | 2 +- 
...ss_from_internet_to_tcp_port_kafka_9092.py | 2 +- ...om_internet_to_tcp_port_memcached_11211.py | 2 +- ...ss_from_internet_to_tcp_port_mysql_3306.py | 2 +- ...m_internet_to_tcp_port_oracle_1521_2483.py | 2 +- ...from_internet_to_tcp_port_postgres_5432.py | 2 +- ...ss_from_internet_to_tcp_port_redis_6379.py | 2 +- ...ternet_to_tcp_port_sql_server_1433_1434.py | 2 +- ...ess_from_internet_to_tcp_port_telnet_23.py | 2 +- ...curitygroup_allow_wide_open_public_ipv4.py | 2 +- .../providers/aws/services/ec2/ec2_service.py | 4 +- .../providers/aws/services/ecr/ecr_client.py | 4 +- .../providers/aws/services/ecr/ecr_service.py | 6 +- .../providers/aws/services/ecs/ecs_client.py | 4 +- .../providers/aws/services/ecs/ecs_service.py | 4 +- .../providers/aws/services/efs/efs_client.py | 4 +- .../providers/aws/services/efs/efs_service.py | 4 +- .../providers/aws/services/eks/eks_client.py | 4 +- .../providers/aws/services/eks/eks_service.py | 6 +- .../elasticache/elasticache_client.py | 4 +- .../elasticache/elasticache_service.py | 4 +- .../providers/aws/services/elb/elb_client.py | 4 +- .../providers/aws/services/elb/elb_service.py | 4 +- .../aws/services/elbv2/elbv2_client.py | 4 +- .../aws/services/elbv2/elbv2_service.py | 4 +- .../providers/aws/services/emr/emr_client.py | 4 +- .../providers/aws/services/emr/emr_service.py | 4 +- .../providers/aws/services/fms/fms_client.py | 4 +- .../providers/aws/services/fms/fms_service.py | 4 +- .../aws/services/glacier/glacier_client.py | 4 +- .../aws/services/glacier/glacier_service.py | 4 +- .../globalaccelerator_client.py | 4 +- .../globalaccelerator_service.py | 6 +- .../aws/services/glue/glue_client.py | 4 +- ...connection_passwords_encryption_enabled.py | 2 +- ...ta_catalogs_metadata_encryption_enabled.py | 2 +- .../aws/services/glue/glue_service.py | 4 +- .../services/guardduty/guardduty_client.py | 4 +- .../services/guardduty/guardduty_service.py | 4 +- .../providers/aws/services/iam/iam_client.py | 4 +- 
.../providers/aws/services/iam/iam_service.py | 4 +- .../services/inspector2/inspector2_client.py | 4 +- .../inspector2_findings_exist.py | 4 +- .../services/inspector2/inspector2_service.py | 4 +- .../providers/aws/services/kms/kms_client.py | 4 +- .../providers/aws/services/kms/kms_service.py | 4 +- .../aws/services/macie/macie_client.py | 4 +- .../macie_is_enabled/macie_is_enabled.py | 2 +- .../aws/services/macie/macie_service.py | 4 +- .../aws/services/neptune/neptune_client.py | 8 +- .../aws/services/neptune/neptune_service.py | 4 +- .../networkfirewall/networkfirewall_client.py | 4 +- .../networkfirewall_in_all_vpc.py | 2 +- .../networkfirewall_service.py | 4 +- .../services/opensearch/opensearch_client.py | 4 +- .../services/opensearch/opensearch_service.py | 4 +- .../organizations/organizations_client.py | 4 +- .../organizations/organizations_service.py | 4 +- .../providers/aws/services/rds/rds_client.py | 4 +- .../providers/aws/services/rds/rds_service.py | 4 +- .../aws/services/redshift/redshift_client.py | 4 +- .../aws/services/redshift/redshift_service.py | 4 +- .../resourceexplorer2_client.py | 4 +- .../resourceexplorer2_service.py | 4 +- .../aws/services/route53/route53_client.py | 4 +- .../aws/services/route53/route53_service.py | 10 +- .../services/route53/route53domains_client.py | 4 +- .../s3_account_level_public_access_blocks.py | 2 +- .../providers/aws/services/s3/s3_client.py | 4 +- .../providers/aws/services/s3/s3_service.py | 16 +- .../aws/services/s3/s3control_client.py | 4 +- .../services/sagemaker/sagemaker_client.py | 4 +- .../services/sagemaker/sagemaker_service.py | 4 +- .../secretsmanager/secretsmanager_client.py | 4 +- .../secretsmanager/secretsmanager_service.py | 4 +- .../securityhub/securityhub_client.py | 4 +- .../securityhub/securityhub_service.py | 4 +- .../aws/services/shield/shield_client.py | 4 +- .../aws/services/shield/shield_service.py | 4 +- .../providers/aws/services/sns/sns_client.py | 4 +- 
.../providers/aws/services/sns/sns_service.py | 4 +- .../providers/aws/services/sqs/sqs_client.py | 4 +- .../providers/aws/services/sqs/sqs_service.py | 4 +- .../providers/aws/services/ssm/ssm_client.py | 4 +- .../providers/aws/services/ssm/ssm_service.py | 4 +- .../ssmincidents/ssmincidents_client.py | 4 +- .../ssmincidents/ssmincidents_service.py | 4 +- .../trustedadvisor/trustedadvisor_client.py | 4 +- .../trustedadvisor/trustedadvisor_service.py | 10 +- .../providers/aws/services/vpc/vpc_client.py | 4 +- .../vpc_flow_logs_enabled.py | 2 +- .../providers/aws/services/vpc/vpc_service.py | 4 +- .../providers/aws/services/waf/waf_client.py | 4 +- .../providers/aws/services/waf/waf_service.py | 4 +- .../aws/services/wafv2/wafv2_client.py | 4 +- .../aws/services/wafv2/wafv2_service.py | 4 +- .../wellarchitected/wellarchitected_client.py | 4 +- .../wellarchitected_service.py | 4 +- .../services/workspaces/workspaces_client.py | 4 +- .../services/workspaces/workspaces_service.py | 4 +- prowler/providers/azure/azure_provider_new.py | 6 +- .../services/defender/defender_client.py | 4 +- .../services/defender/defender_service.py | 4 +- .../azure/services/iam/iam_client.py | 4 +- .../azure/services/iam/iam_service.py | 4 +- .../services/sqlserver/sqlserver_client.py | 4 +- .../services/sqlserver/sqlserver_service.py | 4 +- .../azure/services/storage/storage_client.py | 4 +- .../azure/services/storage/storage_service.py | 4 +- prowler/providers/common/audit_info.py | 24 +- prowler/providers/common/common.py | 5 +- prowler/providers/common/provider.py | 2 +- prowler/providers/gcp/gcp_provider_new.py | 4 +- .../gcp/services/apikeys/apikeys_client.py | 4 +- .../gcp/services/apikeys/apikeys_service.py | 4 +- .../gcp/services/bigquery/bigquery_client.py | 4 +- .../gcp/services/bigquery/bigquery_service.py | 4 +- .../cloudresourcemanager_client.py | 4 +- .../cloudresourcemanager_service.py | 4 +- .../gcp/services/cloudsql/cloudsql_client.py | 4 +- 
.../gcp/services/cloudsql/cloudsql_service.py | 4 +- .../cloudstorage/cloudstorage_client.py | 4 +- .../cloudstorage/cloudstorage_service.py | 4 +- .../gcp/services/compute/compute_client.py | 4 +- .../gcp/services/compute/compute_service.py | 4 +- .../gcp/services/dataproc/dataproc_client.py | 4 +- .../gcp/services/dataproc/dataproc_service.py | 4 +- .../providers/gcp/services/dns/dns_client.py | 4 +- .../providers/gcp/services/dns/dns_service.py | 4 +- .../gcp/services/iam/accessapproval_client.py | 4 +- .../services/iam/essentialcontacts_client.py | 4 +- .../providers/gcp/services/iam/iam_client.py | 4 +- .../providers/gcp/services/iam/iam_service.py | 12 +- .../providers/gcp/services/kms/kms_client.py | 4 +- .../providers/gcp/services/kms/kms_service.py | 4 +- .../gcp/services/logging/logging_client.py | 4 +- .../gcp/services/logging/logging_service.py | 4 +- .../services/monitoring/monitoring_client.py | 4 +- .../services/monitoring/monitoring_service.py | 4 +- .../serviceusage/serviceusage_client.py | 4 +- .../serviceusage/serviceusage_service.py | 4 +- .../kubernetes/kubernetes_provider_new.py | 6 +- 214 files changed, 1092 insertions(+), 486 deletions(-) create mode 100644 prowler/providers/aws/aws_provider_new.py diff --git a/prowler/__main__.py b/prowler/__main__.py index 7e03541ec2..53c9c7a07d 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -39,7 +39,6 @@ from prowler.lib.outputs.json import close_json from prowler.lib.outputs.outputs import extract_findings_statistics from prowler.lib.outputs.slack import send_slack_message from prowler.lib.outputs.summary_table import display_summary_table -from prowler.providers.aws.aws_provider import get_available_aws_service_regions from prowler.providers.aws.lib.s3.s3 import send_to_s3_bucket from prowler.providers.aws.lib.security_hub.security_hub import ( batch_send_to_security_hub, @@ -52,7 +51,10 @@ from prowler.providers.common.audit_info import ( set_provider_execution_parameters, ) from 
prowler.providers.common.clean import clean_provider_local_output_directories -from prowler.providers.common.common import set_global_provider_object +from prowler.providers.common.common import ( + get_global_provider, + set_global_provider_object, +) from prowler.providers.common.mutelist import set_provider_mutelist from prowler.providers.common.outputs import set_provider_output_options from prowler.providers.common.quick_inventory import run_provider_quick_inventory @@ -263,9 +265,10 @@ def prowler(): f"{Style.BRIGHT}\nSending findings to AWS Security Hub, please wait...{Style.RESET_ALL}" ) # Verify where AWS Security Hub is enabled + global_provider = get_global_provider() aws_security_enabled_regions = [] security_hub_regions = ( - get_available_aws_service_regions("securityhub", audit_info) + global_provider.get_available_aws_service_regions("securityhub") if not audit_info.audited_regions else audit_info.audited_regions ) diff --git a/prowler/lib/check/check.py b/prowler/lib/check/check.py index d8e91f18c1..58911ba620 100644 --- a/prowler/lib/check/check.py +++ b/prowler/lib/check/check.py @@ -22,6 +22,7 @@ from prowler.lib.logger import logger from prowler.lib.outputs.outputs import report from prowler.lib.utils.utils import open_file, parse_json_file from prowler.providers.aws.lib.mutelist.mutelist import mutelist_findings +from prowler.providers.common.common import get_global_provider from prowler.providers.common.models import Audit_Metadata from prowler.providers.common.outputs import Provider_Output_Options @@ -425,8 +426,10 @@ def execute_checks( services_executed = set() checks_executed = set() + global_provider = get_global_provider() + # Initialize the Audit Metadata - audit_info.audit_metadata = Audit_Metadata( + global_provider.audit_metadata = Audit_Metadata( services_scanned=0, expected_checks=checks_to_execute, completed_checks=0, @@ -537,6 +540,7 @@ def execute( checks_executed: set, custom_checks_metadata: Any, ): + global_provider = 
get_global_provider() # Import check module check_module_path = ( f"prowler.providers.{provider}.services.{service}.{check_name}.{check_name}" @@ -556,15 +560,15 @@ def execute( # Update Audit Status services_executed.add(service) checks_executed.add(check_name) - audit_info.audit_metadata = update_audit_metadata( - audit_info.audit_metadata, services_executed, checks_executed + global_provider.audit_metadata = update_audit_metadata( + global_provider.audit_metadata, services_executed, checks_executed ) # Mute List findings if audit_output_options.mutelist_file: check_findings = mutelist_findings( audit_output_options.mutelist_file, - audit_info.audited_account, + global_provider.audited_account, check_findings, ) diff --git a/prowler/lib/outputs/compliance/compliance.py b/prowler/lib/outputs/compliance/compliance.py index b019d8f7e3..5c53eb2b28 100644 --- a/prowler/lib/outputs/compliance/compliance.py +++ b/prowler/lib/outputs/compliance/compliance.py @@ -172,9 +172,6 @@ def display_compliance_table( and compliance.Provider == "AWS" and compliance.Version == "RD2022" ): - compliance_version = compliance.Version - compliance_fm = compliance.Framework - compliance_provider = compliance.Provider for requirement in compliance.Requirements: for attribute in requirement.Attributes: marco_categoria = ( @@ -222,13 +219,13 @@ def display_compliance_table( ens_compliance_table["Bajo"].append( f"{Fore.YELLOW}{marcos[marco]['Bajo']}{Style.RESET_ALL}" ) - if fail_count + pass_count < 0: + if fail_count + pass_count < 1: print( - f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}_{compliance_version}_{compliance_provider}{Style.RESET_ALL}.\n" + f"\nThere are no resources for {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}.\n" ) else: print( - f"\nEstado de Cumplimiento de {Fore.YELLOW}{compliance_fm}_{compliance_version}_{compliance_provider}{Style.RESET_ALL}:" + f"\nEstado de Cumplimiento de 
{Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}:" ) overview_table = [ [ @@ -239,7 +236,7 @@ def display_compliance_table( print(tabulate(overview_table, tablefmt="rounded_grid")) if not compliance_overview: print( - f"\nResultados de {Fore.YELLOW}{compliance_fm}_{compliance_version}_{compliance_provider}{Style.RESET_ALL}:" + f"\nResultados de {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}:" ) print( tabulate( @@ -251,7 +248,9 @@ def display_compliance_table( print( f"{Style.BRIGHT}* Solo aparece el Marco/Categoria que contiene resultados.{Style.RESET_ALL}" ) - print(f"\nResultados detallados de {compliance_fm} en:") + print( + f"\nResultados detallados de {compliance_framework.upper()} en:" + ) print( f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" ) @@ -272,8 +271,6 @@ def display_compliance_table( compliance.Framework == "CIS" and compliance.Version in compliance_framework ): - compliance_version = compliance.Version - compliance_fm = compliance.Framework for requirement in compliance.Requirements: for attribute in requirement.Attributes: section = attribute.Section @@ -322,11 +319,11 @@ def display_compliance_table( ) if fail_count + pass_count < 1: print( - f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}_{compliance_version}{Style.RESET_ALL}.\n" + f"\nThere are no resources for {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}.\n" ) else: print( - f"\nCompliance Status of {Fore.YELLOW}{compliance_fm}_{compliance_version}{Style.RESET_ALL} Framework:" + f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:" ) overview_table = [ [ @@ -337,7 +334,7 @@ def display_compliance_table( print(tabulate(overview_table, tablefmt="rounded_grid")) if not compliance_overview: print( - f"\nFramework {Fore.YELLOW}{compliance_fm}_{compliance_version}{Style.RESET_ALL} Results:" + f"\nFramework 
{Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:" ) print( tabulate( @@ -349,7 +346,9 @@ def display_compliance_table( print( f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" ) - print(f"\nDetailed results of {compliance_fm} are in:") + print( + f"\nDetailed results of {compliance_framework.upper()} are in:" + ) print( f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" ) @@ -369,7 +368,6 @@ def display_compliance_table( "MITRE-ATTACK" in compliance.Framework and compliance.Version in compliance_framework ): - compliance_fm = compliance.Framework for requirement in compliance.Requirements: for tactic in requirement.Tactics: if tactic not in tactics: @@ -396,11 +394,11 @@ def display_compliance_table( ) if fail_count + pass_count < 1: print( - f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL}.\n" + f"\nThere are no resources for {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}.\n" ) else: print( - f"\nCompliance Status of {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL} Framework:" + f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:" ) overview_table = [ [ @@ -411,7 +409,7 @@ def display_compliance_table( print(tabulate(overview_table, tablefmt="rounded_grid")) if not compliance_overview: print( - f"\nFramework {Fore.YELLOW}{compliance_fm}{Style.RESET_ALL} Results:" + f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:" ) print( tabulate( @@ -423,7 +421,9 @@ def display_compliance_table( print( f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" ) - print(f"\nDetailed results of {compliance_fm} are in:") + print( + f"\nDetailed results of {compliance_framework.upper()} are in:" + ) print( f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" ) @@ -447,7 +447,7 @@ def display_compliance_table( 
pass_count += 1 if fail_count + pass_count < 1: print( - f"\n {Style.BRIGHT}There are no resources for {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}.\n" + f"\nThere are no resources for {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL}.\n" ) else: print( diff --git a/prowler/lib/outputs/models.py b/prowler/lib/outputs/models.py index db3b8db8b2..94561830b0 100644 --- a/prowler/lib/outputs/models.py +++ b/prowler/lib/outputs/models.py @@ -10,7 +10,7 @@ from prowler.config.config import prowler_version, timestamp from prowler.lib.check.models import Remediation from prowler.lib.logger import logger from prowler.lib.utils.utils import outputs_unix_timestamp -from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info +from prowler.providers.aws.lib.audit_info.models import AWSOrganizationsInfo def get_check_compliance(finding, provider, output_options) -> dict: @@ -483,7 +483,7 @@ class Aws_Check_Output_JSON(Check_Output_JSON): Profile: str = "" AccountId: str = "" - OrganizationsInfo: Optional[AWS_Organizations_Info] + OrganizationsInfo: Optional[AWSOrganizationsInfo] Region: str = "" ResourceId: str = "" ResourceArn: str = "" diff --git a/prowler/providers/aws/aws_provider.py b/prowler/providers/aws/aws_provider.py index 0a1b5833dd..85d21d1bfa 100644 --- a/prowler/providers/aws/aws_provider.py +++ b/prowler/providers/aws/aws_provider.py @@ -11,7 +11,7 @@ from prowler.lib.check.check import list_modules, recover_checks_from_service from prowler.lib.logger import logger from prowler.lib.utils.utils import open_file, parse_json_file from prowler.providers.aws.config import AWS_STS_GLOBAL_ENDPOINT_REGION -from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info +from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info, AWSAssumeRole from prowler.providers.aws.lib.credentials.credentials import create_sts_session @@ -109,7 +109,7 @@ class AWS_Provider: def assume_role( session: 
session.Session, - assumed_role_info: AWS_Assume_Role, + assumed_role_info: AWSAssumeRole, sts_endpoint_region: str = None, ) -> dict: try: diff --git a/prowler/providers/aws/aws_provider_new.py b/prowler/providers/aws/aws_provider_new.py new file mode 100644 index 0000000000..cbd4afb635 --- /dev/null +++ b/prowler/providers/aws/aws_provider_new.py @@ -0,0 +1,621 @@ +import os +import pathlib +import sys +from argparse import Namespace +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Optional + +from boto3 import client, session +from botocore.config import Config +from botocore.credentials import RefreshableCredentials +from botocore.session import get_session +from colorama import Fore, Style + +from prowler.config.config import aws_services_json_file +from prowler.lib.check.check import list_modules, recover_checks_from_service +from prowler.lib.logger import logger +from prowler.lib.utils.utils import open_file, parse_json_file +from prowler.providers.aws.config import ( + AWS_STS_GLOBAL_ENDPOINT_REGION, + BOTO3_USER_AGENT_EXTRA, +) +from prowler.providers.aws.lib.arn.arn import parse_iam_credentials_arn +from prowler.providers.aws.lib.credentials.credentials import ( + create_sts_session, + validate_AWSCredentials, +) +from prowler.providers.aws.lib.organizations.organizations import ( + get_organizations_metadata, +) +from prowler.providers.common.provider import Provider + + +@dataclass +class AWSOrganizationsInfo: + account_details_email: str + account_details_name: str + account_details_arn: str + account_details_org: str + account_details_tags: str + + +@dataclass +class AWSCredentials: + aws_access_key_id: str + aws_session_token: str + aws_secret_access_key: str + expiration: datetime + + +@dataclass +class AWSAssumeRole: + role_arn: str + session_duration: int + external_id: str + mfa_enabled: bool + + +@dataclass +class AWSAssumeRoleConfiguration: + assumed_role_info: AWSAssumeRole + assumed_role_credentials: 
AWSCredentials + + +@dataclass +class AWSIdentityInfo: + account: str + account_arn: str + user_id: str + partition: str + identity_arn: str + profile: str + profile_region: str + audited_regions: list + + +@dataclass +class AWSSession: + session: session.Session + session_config: Config + original_session: None + + +class AwsProvider(Provider): + session: AWSSession = AWSSession( + session=None, session_config=None, original_session=None + ) + identity: AWSIdentityInfo = AWSIdentityInfo( + account=None, + account_arn=None, + user_id=None, + partition=None, + identity_arn=None, + profile=None, + profile_region=None, + audited_regions=[], + ) + assumed_role: AWSAssumeRoleConfiguration = AWSAssumeRoleConfiguration( + assumed_role_info=AWSAssumeRole( + role_arn=None, + session_duration=None, + external_id=None, + mfa_enabled=False, + ), + assumed_role_credentials=AWSCredentials( + aws_access_key_id=None, + aws_session_token=None, + aws_secret_access_key=None, + expiration=None, + ), + ) + organizations_metadata: AWSOrganizationsInfo = AWSOrganizationsInfo( + account_details_email=None, + account_details_name=None, + account_details_arn=None, + account_details_org=None, + account_details_tags=None, + ) + audit_resources: Optional[Any] + audit_metadata: Optional[Any] + audit_config: dict = {} + mfa_enabled: bool = False + ignore_unused_services: bool = False + + def __init__(self, arguments: Namespace): + logger.info("Setting AWS provider ...") + # Parse input arguments + # Assume Role Options + input_role = getattr(arguments, "role", None) + input_session_duration = getattr(arguments, "session_duration", None) + input_external_id = getattr(arguments, "external_id", None) + + # STS Endpoint Region + sts_endpoint_region = getattr(arguments, "sts_endpoint_region", None) + + # MFA Configuration (false by default) + input_mfa = getattr(arguments, "mfa", None) + + input_profile = getattr(arguments, "profile", None) + input_regions = getattr(arguments, "region", None) + 
organizations_role_arn = getattr(arguments, "organizations_role", None) + + # Set the maximum retries for the standard retrier config + aws_retries_max_attempts = getattr(arguments, "aws_retries_max_attempts", None) + + # Set if unused services must be ignored + ignore_unused_services = getattr(arguments, "ignore_unused_services", None) + + # Set the maximum retries for the standard retrier config + self.session.session_config = self.__set_session_config__( + aws_retries_max_attempts + ) + + # Set ignore unused services + self.ignore_unused_services = ignore_unused_services + + # Start populating AWS identity object + self.identity.profile = input_profile + self.identity.audited_regions = input_regions + + # We need to create an original sessions using regular auth path (creds, profile, etc) + logger.info("Generating original session ...") + self.session.session = self.setup_session(input_mfa) + + # After the session is created, validate it + logger.info("Validating credentials ...") + caller_identity = validate_AWSCredentials( + self.session.session, input_regions, sts_endpoint_region + ) + + logger.info("Credentials validated") + logger.info(f"Original caller identity UserId: {caller_identity['UserId']}") + logger.info(f"Original caller identity ARN: {caller_identity['Arn']}") + # Set values of AWS identity object + self.identity.account = caller_identity["Account"] + self.identity.identity_arn = caller_identity["Arn"] + self.identity.user_id = caller_identity["UserId"] + self.identity.partition = parse_iam_credentials_arn( + caller_identity["Arn"] + ).partition + self.identity.account_arn = ( + f"arn:{self.identity.partition}:iam::{self.identity.account}:root" + ) + + # save original session + self.session.original_session = self.session.session + # time for checking role assumption + if input_role: + # session will be the assumed one + self.session.session = self.setup_assumed_session( + input_role, + input_external_id, + input_mfa, + input_session_duration, + 
sts_endpoint_region, + ) + logger.info("Audit session is the new session created assuming role") + # check if organizations info is gonna be retrieved + if organizations_role_arn: + logger.info( + f"Getting organizations metadata for account {organizations_role_arn}" + ) + # session will be the assumed one with organizations permissions + self.session.session = self.setup_assumed_session( + organizations_role_arn, + input_external_id, + input_mfa, + input_session_duration, + sts_endpoint_region, + ) + self.organizations_metadata = get_organizations_metadata( + self.identity.account, self.assumed_role.assumed_role_credentials + ) + logger.info("Organizations metadata retrieved") + if self.session.session.region_name: + self.identity.profile_region = self.session.session.region_name + else: + self.identity.profile_region = "us-east-1" + + if not getattr(arguments, "only_logs", None): + self.print_credentials() + + # Parse Scan Tags + if getattr(arguments, "resource_tags", None): + input_resource_tags = arguments.resource_tags + self.audit_resources = self.get_tagged_resources(input_resource_tags) + + # Parse Input Resource ARNs + self.audit_resources = getattr(arguments, "resource_arn", None) + + def setup_session(self, input_mfa: bool): + logger.info("Creating regular session ...") + # Input MFA only if a role is not going to be assumed + if input_mfa and not self.assumed_role.assumed_role_info.role_arn: + mfa_ARN, mfa_TOTP = self.__input_role_mfa_token_and_code__() + get_session_token_arguments = { + "SerialNumber": mfa_ARN, + "TokenCode": mfa_TOTP, + } + sts_client = client("sts") + session_credentials = sts_client.get_session_token( + **get_session_token_arguments + ) + return session.Session( + aws_access_key_id=session_credentials["Credentials"]["AccessKeyId"], + aws_secret_access_key=session_credentials["Credentials"][ + "SecretAccessKey" + ], + aws_session_token=session_credentials["Credentials"]["SessionToken"], + profile_name=self.identity.profile, + ) + 
else: + return session.Session( + profile_name=self.identity.profile, + ) + + def setup_assumed_session( + self, + input_role: str, + input_external_id: str, + input_mfa: str, + session_duration: int, + sts_endpoint_region: str, + ): + logger.info("Creating assumed session ...") + # store information about the role is gonna be assumed + self.assumed_role.assumed_role_info.role_arn = input_role + self.assumed_role.assumed_role_info.session_duration = session_duration + self.assumed_role.assumed_role_info.external_id = input_external_id + self.assumed_role.assumed_role_info.mfa_enabled = input_mfa + # Check if role arn is valid + try: + # this returns the arn already parsed into a dict to be used when it is needed to access its fields + role_arn_parsed = parse_iam_credentials_arn( + self.assumed_role.assumed_role_info.role_arn + ) + + except Exception as error: + logger.critical(f"{error.__class__.__name__} -- {error}") + sys.exit(1) + + else: + logger.info(f"Assuming role {self.assumed_role.assumed_role_info.role_arn}") + # Assume the role + assumed_role_response = self.__assume_role__( + self.session.session, + sts_endpoint_region, + ) + logger.info("Role assumed") + # Set the info needed to create a session with an assumed role + self.assumed_role.assumed_role_credentials = AWSCredentials( + aws_access_key_id=assumed_role_response["Credentials"]["AccessKeyId"], + aws_session_token=assumed_role_response["Credentials"]["SessionToken"], + aws_secret_access_key=assumed_role_response["Credentials"][ + "SecretAccessKey" + ], + expiration=assumed_role_response["Credentials"]["Expiration"], + ) + # Set identity parameters + self.identity.account = role_arn_parsed.account_id + self.identity.partition = role_arn_parsed.partition + self.identity.account_arn = ( + f"arn:{self.identity.partition}:iam::{self.identity.account}:root" + ) + # From botocore we can use RefreshableCredentials class, which has an attribute (refresh_using) + # that needs to be a method without 
arguments that retrieves a new set of fresh credentials + # asuming the role again. -> https://github.com/boto/botocore/blob/098cc255f81a25b852e1ecdeb7adebd94c7b1b73/botocore/credentials.py#L395 + assumed_refreshable_credentials = RefreshableCredentials( + access_key=self.assumed_role.assumed_role_credentials.aws_access_key_id, + secret_key=self.assumed_role.assumed_role_credentials.aws_secret_access_key, + token=self.assumed_role.assumed_role_credentials.aws_session_token, + expiry_time=self.assumed_role.assumed_role_credentials.expiration, + refresh_using=self.refresh_credentials, + method="sts-assume-role", + ) + # Here we need the botocore session since it needs to use refreshable credentials + assumed_botocore_session = get_session() + assumed_botocore_session._credentials = assumed_refreshable_credentials + assumed_botocore_session.set_config_variable( + "region", self.identity.profile_region + ) + return session.Session( + profile_name=self.identity.profile, + botocore_session=assumed_botocore_session, + ) + + # Refresh credentials method using assume role + # This method is called "adding ()" to the name, so it cannot accept arguments + # https://github.com/boto/botocore/blob/098cc255f81a25b852e1ecdeb7adebd94c7b1b73/botocore/credentials.py#L570 + def refresh_credentials(self): + logger.info("Refreshing assumed credentials...") + + response = self.__assume_role__(self.aws_session, self.role_info) + refreshed_credentials = dict( + # Keys of the dict has to be the same as those that are being searched in the parent class + # https://github.com/boto/botocore/blob/098cc255f81a25b852e1ecdeb7adebd94c7b1b73/botocore/credentials.py#L609 + access_key=response["Credentials"]["AccessKeyId"], + secret_key=response["Credentials"]["SecretAccessKey"], + token=response["Credentials"]["SessionToken"], + expiry_time=response["Credentials"]["Expiration"].isoformat(), + ) + logger.info("Refreshed Credentials:") + logger.info(refreshed_credentials) + return refreshed_credentials 
+ + def print_credentials(self): + # Beautify audited regions, set "all" if there is no filter region + regions = ( + ", ".join(self.identity.audited_regions) + if self.identity.audited_regions is not None + else "all" + ) + # Beautify audited profile, set "default" if there is no profile set + profile = ( + self.identity.profile if self.identity.profile is not None else "default" + ) + + report = f""" +This report is being generated using credentials below: + +AWS-CLI Profile: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} AWS Filter Region: {Fore.YELLOW}[{regions}]{Style.RESET_ALL} +AWS Account: {Fore.YELLOW}[{self.identity.account}]{Style.RESET_ALL} UserId: {Fore.YELLOW}[{self.identity.user_id}]{Style.RESET_ALL} +Caller Identity ARN: {Fore.YELLOW}[{ self.identity.identity_arn}]{Style.RESET_ALL} +""" + # If -A is set, print Assumed Role ARN + if self.assumed_role.assumed_role_info.role_arn is not None: + report += f"""Assumed Role ARN: {Fore.YELLOW}[{self.assumed_role.assumed_role_info.role_arn}]{Style.RESET_ALL} + """ + print(report) + + def generate_regional_clients( + self, service: str, global_service: bool = False + ) -> dict: + try: + regional_clients = {} + service_regions = self.get_available_aws_service_regions(service) + # Check if it is global service to gather only one region + if global_service: + if service_regions: + if self.identity.profile_region in service_regions: + service_regions = [self.identity.profile_region] + service_regions = service_regions[:1] + for region in service_regions: + regional_client = self.session.session.client( + service, region_name=region, config=self.session.session_config + ) + regional_client.region = region + regional_clients[region] = regional_client + return regional_clients + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + + def get_available_aws_service_regions(self, service: str) -> list: + # Get json locally + actual_directory = 
pathlib.Path(os.path.dirname(os.path.realpath(__file__))) + with open_file(f"{actual_directory}/{aws_services_json_file}") as f: + data = parse_json_file(f) + # Check if it is a subservice + json_regions = data["services"][service]["regions"][self.identity.partition] + if ( + self.identity.audited_regions + ): # Check for input aws audit_info.audited_regions + regions = list( + set(json_regions).intersection(self.identity.audited_regions) + ) # Get common regions between input and json + else: # Get all regions from json of the service and partition + regions = json_regions + return regions + + def get_aws_available_regions(): + try: + actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__))) + with open_file(f"{actual_directory}/{aws_services_json_file}") as f: + data = parse_json_file(f) + + regions = set() + for service in data["services"].values(): + for partition in service["regions"]: + for item in service["regions"][partition]: + regions.add(item) + return list(regions) + except Exception as error: + logger.error(f"{error.__class__.__name__}: {error}") + return [] + + def get_checks_from_input_arn(audit_resources: list, provider: str) -> set: + """get_checks_from_input_arn gets the list of checks from the input arns""" + checks_from_arn = set() + is_subservice_in_checks = False + # Handle if there are audit resources so only their services are executed + if audit_resources: + services_without_subservices = ["guardduty", "kms", "s3", "elb", "efs"] + service_list = set() + sub_service_list = set() + for resource in audit_resources: + service = resource.split(":")[2] + sub_service = resource.split(":")[5].split("/")[0].replace("-", "_") + # WAF Services does not have checks + if service != "wafv2" and service != "waf": + # Parse services when they are different in the ARNs + if service == "lambda": + service = "awslambda" + elif service == "elasticloadbalancing": + service = "elb" + elif service == "elasticfilesystem": + service = "efs" + 
elif service == "logs": + service = "cloudwatch" + # Check if Prowler has checks in service + try: + list_modules(provider, service) + except ModuleNotFoundError: + # Service is not supported + pass + else: + service_list.add(service) + + # Get subservices to execute only applicable checks + if service not in services_without_subservices: + # Parse some specific subservices + if service == "ec2": + if sub_service == "security_group": + sub_service = "securitygroup" + if sub_service == "network_acl": + sub_service = "networkacl" + if sub_service == "image": + sub_service = "ami" + if service == "rds": + if sub_service == "cluster_snapshot": + sub_service = "snapshot" + sub_service_list.add(sub_service) + else: + sub_service_list.add(service) + checks = recover_checks_from_service(service_list, provider) + + # Filter only checks with audited subservices + for check in checks: + if any(sub_service in check for sub_service in sub_service_list): + if not (sub_service == "policy" and "password_policy" in check): + checks_from_arn.add(check) + is_subservice_in_checks = True + + if not is_subservice_in_checks: + checks_from_arn = checks + + # Return final checks list + return sorted(checks_from_arn) + + def get_regions_from_audit_resources(audit_resources: list) -> set: + """get_regions_from_audit_resources gets the regions from the audit resources arns""" + audited_regions = set() + for resource in audit_resources: + region = resource.split(":")[3] + if region: + audited_regions.add(region) + return audited_regions + + def get_tagged_resources(self, input_resource_tags: list): + """ + get_tagged_resources returns a list of the resources that are going to be scanned based on the given input tags + """ + try: + resource_tags = [] + tagged_resources = [] + for tag in input_resource_tags: + key = tag.split("=")[0] + value = tag.split("=")[1] + resource_tags.append({"Key": key, "Values": [value]}) + # Get Resources with resource_tags for all regions + for regional_client in 
self.generate_regional_clients( + "resourcegroupstaggingapi" + ).values(): + try: + get_resources_paginator = regional_client.get_paginator( + "get_resources" + ) + for page in get_resources_paginator.paginate( + TagFilters=resource_tags + ): + for resource in page["ResourceTagMappingList"]: + tagged_resources.append(resource["ResourceARN"]) + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) + sys.exit(1) + else: + return tagged_resources + + def get_default_region(self, service: str) -> str: + """get_default_region gets the default region based on the profile and audited service regions""" + service_regions = self.get_available_aws_service_regions(service) + default_region = ( + self.get_global_region() + ) # global region of the partition when all regions are audited and there is no profile region + if self.identity.profile_region in service_regions: + # return profile region only if it is audited + default_region = self.identity.profile_region + # return first audited region if specific regions are audited + elif self.identity.audited_regions: + default_region = self.identity.audited_regions[0] + return default_region + + def get_global_region(self) -> str: + """get_global_region gets the global region based on the audited partition""" + global_region = "us-east-1" + if self.identity.partition == "aws-cn": + global_region = "cn-north-1" + elif self.identity.partition == "aws-us-gov": + global_region = "us-gov-east-1" + elif "aws-iso" in self.identity.partition: + global_region = "aws-iso-global" + return global_region + + def __input_role_mfa_token_and_code__() -> tuple[str]: + """input_role_mfa_token_and_code ask for the AWS MFA ARN and TOTP and returns it.""" + mfa_ARN = input("Enter ARN of MFA: ") + mfa_TOTP = input("Enter MFA code: ") + return (mfa_ARN.strip(), 
mfa_TOTP.strip()) + + def __set_session_config__(self, aws_retries_max_attempts: bool): + session_config = Config( + retries={"max_attempts": 3, "mode": "standard"}, + user_agent_extra=BOTO3_USER_AGENT_EXTRA, + ) + if aws_retries_max_attempts: + # Create the new config + config = Config( + retries={ + "max_attempts": aws_retries_max_attempts, + "mode": "standard", + }, + ) + # Merge the new configuration + session_config = self.session.session_config.merge(config) + + return session_config + + def __assume_role__( + self, + session, + sts_endpoint_region: str, + ) -> dict: + try: + assume_role_arguments = { + "RoleArn": self.assumed_role.assumed_role_info.role_arn, + "RoleSessionName": "ProwlerAsessmentSession", + "DurationSeconds": self.assumed_role.assumed_role_info.session_duration, + } + + # Set the info to assume the role from the partition, account and role name + if self.assumed_role.assumed_role_info.external_id: + assume_role_arguments[ + "ExternalId" + ] = self.assumed_role.assumed_role_info.external_id + + if self.assumed_role.assumed_role_info.mfa_enabled: + mfa_ARN, mfa_TOTP = self.__input_role_mfa_token_and_code__() + assume_role_arguments["SerialNumber"] = mfa_ARN + assume_role_arguments["TokenCode"] = mfa_TOTP + + # Set the STS Endpoint Region + if sts_endpoint_region is None: + sts_endpoint_region = AWS_STS_GLOBAL_ENDPOINT_REGION + + sts_client = create_sts_session(session, sts_endpoint_region) + assumed_credentials = sts_client.assume_role(**assume_role_arguments) + except Exception as error: + logger.critical( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}" + ) + sys.exit(1) + + else: + return assumed_credentials diff --git a/prowler/providers/aws/lib/audit_info/audit_info.py b/prowler/providers/aws/lib/audit_info/audit_info.py index 908936c02b..2030c5bb56 100644 --- a/prowler/providers/aws/lib/audit_info/audit_info.py +++ b/prowler/providers/aws/lib/audit_info/audit_info.py @@ -2,7 +2,7 @@ from boto3 import session 
from botocore.config import Config from prowler.providers.aws.config import BOTO3_USER_AGENT_EXTRA -from prowler.providers.aws.lib.audit_info.models import AWS_Assume_Role, AWS_Audit_Info +from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info, AWSAssumeRole # Default Current Audit Info current_audit_info = AWS_Audit_Info( @@ -25,7 +25,7 @@ current_audit_info = AWS_Audit_Info( profile=None, profile_region=None, credentials=None, - assumed_role_info=AWS_Assume_Role( + assumed_role_info=AWSAssumeRole( role_arn=None, session_duration=None, external_id=None, diff --git a/prowler/providers/aws/lib/audit_info/models.py b/prowler/providers/aws/lib/audit_info/models.py index 838982e31e..20538a048f 100644 --- a/prowler/providers/aws/lib/audit_info/models.py +++ b/prowler/providers/aws/lib/audit_info/models.py @@ -7,7 +7,7 @@ from botocore.config import Config @dataclass -class AWS_Credentials: +class AWSCredentials: aws_access_key_id: str aws_session_token: str aws_secret_access_key: str @@ -15,7 +15,7 @@ class AWS_Credentials: @dataclass -class AWS_Assume_Role: +class AWSAssumeRole: role_arn: str session_duration: int external_id: str @@ -23,7 +23,7 @@ class AWS_Assume_Role: @dataclass -class AWS_Organizations_Info: +class AWSOrganizationsInfo: account_details_email: str account_details_name: str account_details_arn: str @@ -44,12 +44,12 @@ class AWS_Audit_Info: audited_partition: str profile: str profile_region: str - credentials: AWS_Credentials + credentials: AWSCredentials mfa_enabled: bool - assumed_role_info: AWS_Assume_Role + assumed_role_info: AWSAssumeRole audited_regions: list audit_resources: list - organizations_metadata: AWS_Organizations_Info - audit_metadata: Optional[Any] = None + organizations_metadata: AWSOrganizationsInfo + audit_metadata: Optional[Any] audit_config: Optional[dict] = None ignore_unused_services: bool = False diff --git a/prowler/providers/aws/lib/credentials/credentials.py 
b/prowler/providers/aws/lib/credentials/credentials.py index f0865d8f5d..431cf0b706 100644 --- a/prowler/providers/aws/lib/credentials/credentials.py +++ b/prowler/providers/aws/lib/credentials/credentials.py @@ -8,7 +8,7 @@ from prowler.providers.aws.config import AWS_STS_GLOBAL_ENDPOINT_REGION from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info -def validate_aws_credentials( +def validate_AWSCredentials( session: session, input_regions: list, sts_endpoint_region: str = None ) -> dict: try: @@ -38,7 +38,7 @@ def validate_aws_credentials( return caller_identity -def print_aws_credentials(audit_info: AWS_Audit_Info): +def print_AWSCredentials(audit_info: AWS_Audit_Info): # Beautify audited regions, set "all" if there is no filter region regions = ( ", ".join(audit_info.audited_regions) diff --git a/prowler/providers/aws/lib/organizations/organizations.py b/prowler/providers/aws/lib/organizations/organizations.py index fc5f13a7a8..130b7166b8 100644 --- a/prowler/providers/aws/lib/organizations/organizations.py +++ b/prowler/providers/aws/lib/organizations/organizations.py @@ -3,12 +3,12 @@ import sys from boto3 import client from prowler.lib.logger import logger -from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info +from prowler.providers.aws.lib.audit_info.models import AWSOrganizationsInfo def get_organizations_metadata( metadata_account: str, assumed_credentials: dict -) -> AWS_Organizations_Info: +) -> AWSOrganizationsInfo: try: organizations_client = client( "organizations", @@ -30,7 +30,7 @@ def get_organizations_metadata( account_details_tags = "" for tag in list_tags_for_resource["Tags"]: account_details_tags += tag["Key"] + ":" + tag["Value"] + "," - organizations_info = AWS_Organizations_Info( + organizations_info = AWSOrganizationsInfo( account_details_email=organizations_metadata["Account"]["Email"], account_details_name=organizations_metadata["Account"]["Name"], 
account_details_arn=organizations_metadata["Account"]["Arn"], diff --git a/prowler/providers/aws/lib/service/service.py b/prowler/providers/aws/lib/service/service.py index 0badb1a88d..a3dd71f08e 100644 --- a/prowler/providers/aws/lib/service/service.py +++ b/prowler/providers/aws/lib/service/service.py @@ -1,10 +1,6 @@ import threading -from prowler.providers.aws.aws_provider import ( - generate_regional_clients, - get_default_region, -) -from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info +from prowler.providers.aws.aws_provider_new import AwsProvider class AWSService: @@ -15,18 +11,18 @@ class AWSService: - Also handles if the AWS Service is Global """ - def __init__(self, service: str, audit_info: AWS_Audit_Info, global_service=False): + def __init__(self, service: str, provider: AwsProvider, global_service=False): # Audit Information - self.audit_info = audit_info - self.audited_account = audit_info.audited_account - self.audited_account_arn = audit_info.audited_account_arn - self.audited_partition = audit_info.audited_partition - self.audit_resources = audit_info.audit_resources - self.audited_checks = audit_info.audit_metadata.expected_checks - self.audit_config = audit_info.audit_config + self.provider = provider + self.audited_account = provider.identity.account + self.audited_account_arn = provider.identity.account_arn + self.audited_partition = provider.identity.partition + self.audit_resources = provider.audit_resources + self.audited_checks = provider.audit_metadata.expected_checks + self.audit_config = provider.audit_config # AWS Session - self.session = audit_info.audit_session + self.session = provider.session.session # We receive the service using __class__.__name__ or the service name in lowercase # e.g.: AccessAnalyzer --> we need a lowercase string, so service.lower() @@ -34,14 +30,14 @@ class AWSService: # Generate Regional Clients if not global_service: - self.regional_clients = generate_regional_clients( - self.service, 
audit_info, global_service + self.regional_clients = provider.generate_regional_clients( + self.service, global_service ) # Get a single region and client if the service needs it (e.g. AWS Global Service) # We cannot include this within an else because some services needs both the regional_clients # and a single client like S3 - self.region = get_default_region(self.service, audit_info) + self.region = provider.get_default_region(self.service) self.client = self.session.client(self.service, self.region) def __get_session__(self): diff --git a/prowler/providers/aws/services/accessanalyzer/accessanalyzer_client.py b/prowler/providers/aws/services/accessanalyzer/accessanalyzer_client.py index ad5e94ddec..a60319cdbe 100644 --- a/prowler/providers/aws/services/accessanalyzer/accessanalyzer_client.py +++ b/prowler/providers/aws/services/accessanalyzer/accessanalyzer_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.accessanalyzer.accessanalyzer_service import ( AccessAnalyzer, ) +from prowler.providers.common.common import get_global_provider -accessanalyzer_client = AccessAnalyzer(current_audit_info) +accessanalyzer_client = AccessAnalyzer(get_global_provider()) diff --git a/prowler/providers/aws/services/accessanalyzer/accessanalyzer_service.py b/prowler/providers/aws/services/accessanalyzer/accessanalyzer_service.py index 64c818009b..54ee4abd00 100644 --- a/prowler/providers/aws/services/accessanalyzer/accessanalyzer_service.py +++ b/prowler/providers/aws/services/accessanalyzer/accessanalyzer_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## AccessAnalyzer class AccessAnalyzer(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.analyzers = [] 
self.__threading_call__(self.__list_analyzers__) self.__list_findings__() diff --git a/prowler/providers/aws/services/account/account_client.py b/prowler/providers/aws/services/account/account_client.py index 2bf199668d..d9639878e4 100644 --- a/prowler/providers/aws/services/account/account_client.py +++ b/prowler/providers/aws/services/account/account_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.account.account_service import Account +from prowler.providers.common.common import get_global_provider -account_client = Account(current_audit_info) +account_client = Account(get_global_provider()) diff --git a/prowler/providers/aws/services/account/account_service.py b/prowler/providers/aws/services/account/account_service.py index 16331fbe8a..c623bef658 100644 --- a/prowler/providers/aws/services/account/account_service.py +++ b/prowler/providers/aws/services/account/account_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService class Account(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.number_of_contacts = 4 self.contact_base = self.__get_contact_information__() self.contacts_billing = self.__get_alternate_contact__("BILLING") diff --git a/prowler/providers/aws/services/acm/acm_client.py b/prowler/providers/aws/services/acm/acm_client.py index 05a3b8bccb..61ddf2d75c 100644 --- a/prowler/providers/aws/services/acm/acm_client.py +++ b/prowler/providers/aws/services/acm/acm_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.acm.acm_service import ACM +from prowler.providers.common.common import get_global_provider -acm_client = ACM(current_audit_info) +acm_client = 
ACM(get_global_provider()) diff --git a/prowler/providers/aws/services/acm/acm_service.py b/prowler/providers/aws/services/acm/acm_service.py index 4f0e5de42e..8fd3196c75 100644 --- a/prowler/providers/aws/services/acm/acm_service.py +++ b/prowler/providers/aws/services/acm/acm_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## ACM class ACM(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.certificates = [] self.__threading_call__(self.__list_certificates__) self.__describe_certificates__() diff --git a/prowler/providers/aws/services/apigateway/apigateway_client.py b/prowler/providers/aws/services/apigateway/apigateway_client.py index aafa31cf7f..b36272fefd 100644 --- a/prowler/providers/aws/services/apigateway/apigateway_client.py +++ b/prowler/providers/aws/services/apigateway/apigateway_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.apigateway.apigateway_service import APIGateway +from prowler.providers.common.common import get_global_provider -apigateway_client = APIGateway(current_audit_info) +apigateway_client = APIGateway(get_global_provider()) diff --git a/prowler/providers/aws/services/apigateway/apigateway_service.py b/prowler/providers/aws/services/apigateway/apigateway_service.py index 41c14b4a40..73ea8cce0b 100644 --- a/prowler/providers/aws/services/apigateway/apigateway_service.py +++ b/prowler/providers/aws/services/apigateway/apigateway_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## APIGateway class APIGateway(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + 
super().__init__(__class__.__name__, provider) self.rest_apis = [] self.__threading_call__(self.__get_rest_apis__) self.__get_authorizers__() diff --git a/prowler/providers/aws/services/apigatewayv2/apigatewayv2_client.py b/prowler/providers/aws/services/apigatewayv2/apigatewayv2_client.py index 3d88a315e5..187f6ce57f 100644 --- a/prowler/providers/aws/services/apigatewayv2/apigatewayv2_client.py +++ b/prowler/providers/aws/services/apigatewayv2/apigatewayv2_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.apigatewayv2.apigatewayv2_service import ( ApiGatewayV2, ) +from prowler.providers.common.common import get_global_provider -apigatewayv2_client = ApiGatewayV2(current_audit_info) +apigatewayv2_client = ApiGatewayV2(get_global_provider()) diff --git a/prowler/providers/aws/services/apigatewayv2/apigatewayv2_service.py b/prowler/providers/aws/services/apigatewayv2/apigatewayv2_service.py index 4b19d6e041..7c6faf196e 100644 --- a/prowler/providers/aws/services/apigatewayv2/apigatewayv2_service.py +++ b/prowler/providers/aws/services/apigatewayv2/apigatewayv2_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## ApiGatewayV2 class ApiGatewayV2(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.apis = [] self.__threading_call__(self.__get_apis__) self.__get_authorizers__() diff --git a/prowler/providers/aws/services/appstream/appstream_client.py b/prowler/providers/aws/services/appstream/appstream_client.py index 4f379f281a..e4f97b2333 100644 --- a/prowler/providers/aws/services/appstream/appstream_client.py +++ b/prowler/providers/aws/services/appstream/appstream_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import 
current_audit_info from prowler.providers.aws.services.appstream.appstream_service import AppStream +from prowler.providers.common.common import get_global_provider -appstream_client = AppStream(current_audit_info) +appstream_client = AppStream(get_global_provider()) diff --git a/prowler/providers/aws/services/appstream/appstream_service.py b/prowler/providers/aws/services/appstream/appstream_service.py index 9bebb630c8..4761e70a2d 100644 --- a/prowler/providers/aws/services/appstream/appstream_service.py +++ b/prowler/providers/aws/services/appstream/appstream_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## AppStream class AppStream(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.fleets = [] self.__threading_call__(self.__describe_fleets__) self.__list_tags_for_resource__() diff --git a/prowler/providers/aws/services/athena/athena_client.py b/prowler/providers/aws/services/athena/athena_client.py index 23d3d4ad32..6de41a95b1 100644 --- a/prowler/providers/aws/services/athena/athena_client.py +++ b/prowler/providers/aws/services/athena/athena_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.athena.athena_service import Athena +from prowler.providers.common.common import get_global_provider -athena_client = Athena(current_audit_info) +athena_client = Athena(get_global_provider()) diff --git a/prowler/providers/aws/services/athena/athena_service.py b/prowler/providers/aws/services/athena/athena_service.py index ec91dc1b63..fa0a946bc6 100644 --- a/prowler/providers/aws/services/athena/athena_service.py +++ b/prowler/providers/aws/services/athena/athena_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService 
################## Athena class Athena(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.workgroups = {} self.__threading_call__(self.__list_workgroups__) self.__get_workgroups__() diff --git a/prowler/providers/aws/services/athena/athena_workgroup_encryption/athena_workgroup_encryption.py b/prowler/providers/aws/services/athena/athena_workgroup_encryption/athena_workgroup_encryption.py index 8b67d2d6fc..d9d498c45f 100644 --- a/prowler/providers/aws/services/athena/athena_workgroup_encryption/athena_workgroup_encryption.py +++ b/prowler/providers/aws/services/athena/athena_workgroup_encryption/athena_workgroup_encryption.py @@ -12,7 +12,7 @@ class athena_workgroup_encryption(Check): # Only check for enabled and used workgroups (has recent queries) if ( workgroup.state == "ENABLED" and workgroup.queries - ) or not athena_client.audit_info.ignore_unused_services: + ) or not athena_client.provider.ignore_unused_services: report = Check_Report_AWS(self.metadata()) report.region = workgroup.region report.resource_id = workgroup.name diff --git a/prowler/providers/aws/services/athena/athena_workgroup_enforce_configuration/athena_workgroup_enforce_configuration.py b/prowler/providers/aws/services/athena/athena_workgroup_enforce_configuration/athena_workgroup_enforce_configuration.py index 3fa94a5797..d0043edfc2 100644 --- a/prowler/providers/aws/services/athena/athena_workgroup_enforce_configuration/athena_workgroup_enforce_configuration.py +++ b/prowler/providers/aws/services/athena/athena_workgroup_enforce_configuration/athena_workgroup_enforce_configuration.py @@ -12,7 +12,7 @@ class athena_workgroup_enforce_configuration(Check): # Only check for enabled and used workgroups (has recent queries) if ( workgroup.state == "ENABLED" and workgroup.queries - ) or not athena_client.audit_info.ignore_unused_services: 
+ ) or not athena_client.provider.ignore_unused_services: report = Check_Report_AWS(self.metadata()) report.region = workgroup.region report.resource_id = workgroup.name diff --git a/prowler/providers/aws/services/autoscaling/autoscaling_client.py b/prowler/providers/aws/services/autoscaling/autoscaling_client.py index 9f4d933f62..c267dfdf7f 100644 --- a/prowler/providers/aws/services/autoscaling/autoscaling_client.py +++ b/prowler/providers/aws/services/autoscaling/autoscaling_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.autoscaling.autoscaling_service import AutoScaling +from prowler.providers.common.common import get_global_provider -autoscaling_client = AutoScaling(current_audit_info) +autoscaling_client = AutoScaling(get_global_provider()) diff --git a/prowler/providers/aws/services/autoscaling/autoscaling_service.py b/prowler/providers/aws/services/autoscaling/autoscaling_service.py index 8f37bb5d99..221477f520 100644 --- a/prowler/providers/aws/services/autoscaling/autoscaling_service.py +++ b/prowler/providers/aws/services/autoscaling/autoscaling_service.py @@ -7,9 +7,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## AutoScaling class AutoScaling(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.launch_configurations = [] self.__threading_call__(self.__describe_launch_configurations__) self.groups = [] diff --git a/prowler/providers/aws/services/awslambda/awslambda_client.py b/prowler/providers/aws/services/awslambda/awslambda_client.py index 355bda1fc2..64f8779f4e 100644 --- a/prowler/providers/aws/services/awslambda/awslambda_client.py +++ b/prowler/providers/aws/services/awslambda/awslambda_client.py @@ -1,4 +1,4 @@ -from 
prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.awslambda.awslambda_service import Lambda +from prowler.providers.common.common import get_global_provider -awslambda_client = Lambda(current_audit_info) +awslambda_client = Lambda(get_global_provider()) diff --git a/prowler/providers/aws/services/awslambda/awslambda_service.py b/prowler/providers/aws/services/awslambda/awslambda_service.py index 3b9d307e1c..78faad6066 100644 --- a/prowler/providers/aws/services/awslambda/awslambda_service.py +++ b/prowler/providers/aws/services/awslambda/awslambda_service.py @@ -15,9 +15,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## Lambda class Lambda(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.functions = {} self.__threading_call__(self.__list_functions__) self.__list_tags_for_resource__() @@ -26,7 +26,7 @@ class Lambda(AWSService): # awslambda_function_no_secrets_in_code check is set if ( "awslambda_function_no_secrets_in_code" - in audit_info.audit_metadata.expected_checks + in provider.audit_metadata.expected_checks ): self.__threading_call__(self.__get_function__) diff --git a/prowler/providers/aws/services/backup/backup_client.py b/prowler/providers/aws/services/backup/backup_client.py index db6fdfe67e..371dbb4591 100644 --- a/prowler/providers/aws/services/backup/backup_client.py +++ b/prowler/providers/aws/services/backup/backup_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.backup.backup_service import Backup +from prowler.providers.common.common import get_global_provider -backup_client = Backup(current_audit_info) +backup_client = Backup(get_global_provider()) diff --git 
a/prowler/providers/aws/services/backup/backup_service.py b/prowler/providers/aws/services/backup/backup_service.py index 8e77349af1..fec72fc3ca 100644 --- a/prowler/providers/aws/services/backup/backup_service.py +++ b/prowler/providers/aws/services/backup/backup_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## Backup class Backup(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.backup_vaults = [] self.__threading_call__(self.__list_backup_vaults__) self.backup_plans = [] diff --git a/prowler/providers/aws/services/cloudformation/cloudformation_client.py b/prowler/providers/aws/services/cloudformation/cloudformation_client.py index 47461271ad..b48434a7a4 100644 --- a/prowler/providers/aws/services/cloudformation/cloudformation_client.py +++ b/prowler/providers/aws/services/cloudformation/cloudformation_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.cloudformation.cloudformation_service import ( CloudFormation, ) +from prowler.providers.common.common import get_global_provider -cloudformation_client = CloudFormation(current_audit_info) +cloudformation_client = CloudFormation(get_global_provider()) diff --git a/prowler/providers/aws/services/cloudformation/cloudformation_service.py b/prowler/providers/aws/services/cloudformation/cloudformation_service.py index f994db9065..76f76a2205 100644 --- a/prowler/providers/aws/services/cloudformation/cloudformation_service.py +++ b/prowler/providers/aws/services/cloudformation/cloudformation_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## CloudFormation class CloudFormation(AWSService): - def __init__(self, audit_info): + def __init__(self, 
provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.stacks = [] self.__threading_call__(self.__describe_stacks__) self.__describe_stack__() diff --git a/prowler/providers/aws/services/cloudfront/cloudfront_client.py b/prowler/providers/aws/services/cloudfront/cloudfront_client.py index d33af5dfdf..1a2430876e 100644 --- a/prowler/providers/aws/services/cloudfront/cloudfront_client.py +++ b/prowler/providers/aws/services/cloudfront/cloudfront_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.cloudfront.cloudfront_service import CloudFront +from prowler.providers.common.common import get_global_provider -cloudfront_client = CloudFront(current_audit_info) +cloudfront_client = CloudFront(get_global_provider()) diff --git a/prowler/providers/aws/services/cloudfront/cloudfront_service.py b/prowler/providers/aws/services/cloudfront/cloudfront_service.py index bbab537925..05e60138d8 100644 --- a/prowler/providers/aws/services/cloudfront/cloudfront_service.py +++ b/prowler/providers/aws/services/cloudfront/cloudfront_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## CloudFront class CloudFront(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info, global_service=True) + super().__init__(__class__.__name__, provider, global_service=True) self.distributions = {} self.__list_distributions__(self.client, self.region) self.__get_distribution_config__(self.client, self.distributions, self.region) diff --git a/prowler/providers/aws/services/cloudtrail/cloudtrail_client.py b/prowler/providers/aws/services/cloudtrail/cloudtrail_client.py index 0c22cb6ba8..8c969b8664 100644 --- 
a/prowler/providers/aws/services/cloudtrail/cloudtrail_client.py +++ b/prowler/providers/aws/services/cloudtrail/cloudtrail_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.cloudtrail.cloudtrail_service import Cloudtrail +from prowler.providers.common.common import get_global_provider -cloudtrail_client = Cloudtrail(current_audit_info) +cloudtrail_client = Cloudtrail(get_global_provider()) diff --git a/prowler/providers/aws/services/cloudtrail/cloudtrail_s3_dataevents_read_enabled/cloudtrail_s3_dataevents_read_enabled.py b/prowler/providers/aws/services/cloudtrail/cloudtrail_s3_dataevents_read_enabled/cloudtrail_s3_dataevents_read_enabled.py index 6b63bd496c..e37d038c4b 100644 --- a/prowler/providers/aws/services/cloudtrail/cloudtrail_s3_dataevents_read_enabled/cloudtrail_s3_dataevents_read_enabled.py +++ b/prowler/providers/aws/services/cloudtrail/cloudtrail_s3_dataevents_read_enabled/cloudtrail_s3_dataevents_read_enabled.py @@ -50,7 +50,7 @@ class cloudtrail_s3_dataevents_read_enabled(Check): report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has an advanced data event selector to record all S3 object-level API operations." 
findings.append(report) if not findings and ( - s3_client.buckets or not cloudtrail_client.audit_info.ignore_unused_services + s3_client.buckets or not cloudtrail_client.provider.ignore_unused_services ): report = Check_Report_AWS(self.metadata()) report.region = cloudtrail_client.region diff --git a/prowler/providers/aws/services/cloudtrail/cloudtrail_s3_dataevents_write_enabled/cloudtrail_s3_dataevents_write_enabled.py b/prowler/providers/aws/services/cloudtrail/cloudtrail_s3_dataevents_write_enabled/cloudtrail_s3_dataevents_write_enabled.py index 425f78f86e..58d6313414 100644 --- a/prowler/providers/aws/services/cloudtrail/cloudtrail_s3_dataevents_write_enabled/cloudtrail_s3_dataevents_write_enabled.py +++ b/prowler/providers/aws/services/cloudtrail/cloudtrail_s3_dataevents_write_enabled/cloudtrail_s3_dataevents_write_enabled.py @@ -50,7 +50,7 @@ class cloudtrail_s3_dataevents_write_enabled(Check): report.status_extended = f"Trail {trail.name} from home region {trail.home_region} has an advanced data event selector to record all S3 object-level API operations." 
findings.append(report) if not findings and ( - s3_client.buckets or not cloudtrail_client.audit_info.ignore_unused_services + s3_client.buckets or not cloudtrail_client.provider.ignore_unused_services ): report = Check_Report_AWS(self.metadata()) report.region = cloudtrail_client.region diff --git a/prowler/providers/aws/services/cloudtrail/cloudtrail_service.py b/prowler/providers/aws/services/cloudtrail/cloudtrail_service.py index 2e6bc0288c..06de17737b 100644 --- a/prowler/providers/aws/services/cloudtrail/cloudtrail_service.py +++ b/prowler/providers/aws/services/cloudtrail/cloudtrail_service.py @@ -11,9 +11,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################### CLOUDTRAIL class Cloudtrail(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.trails = [] self.__threading_call__(self.__get_trails__) self.__get_trail_status__() diff --git a/prowler/providers/aws/services/cloudwatch/cloudwatch_client.py b/prowler/providers/aws/services/cloudwatch/cloudwatch_client.py index 33a8c85e66..d5ca262147 100644 --- a/prowler/providers/aws/services/cloudwatch/cloudwatch_client.py +++ b/prowler/providers/aws/services/cloudwatch/cloudwatch_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.cloudwatch.cloudwatch_service import CloudWatch +from prowler.providers.common.common import get_global_provider -cloudwatch_client = CloudWatch(current_audit_info) +cloudwatch_client = CloudWatch(get_global_provider()) diff --git a/prowler/providers/aws/services/cloudwatch/cloudwatch_service.py b/prowler/providers/aws/services/cloudwatch/cloudwatch_service.py index f213c81646..c765c8e0bb 100644 --- a/prowler/providers/aws/services/cloudwatch/cloudwatch_service.py +++ 
b/prowler/providers/aws/services/cloudwatch/cloudwatch_service.py @@ -11,9 +11,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## CloudWatch class CloudWatch(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.metric_alarms = [] self.__threading_call__(self.__describe_alarms__) self.__list_tags_for_resource__() @@ -64,16 +64,16 @@ class CloudWatch(AWSService): ################## CloudWatch Logs class Logs(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.metric_filters = [] self.log_groups = [] self.__threading_call__(self.__describe_metric_filters__) self.__threading_call__(self.__describe_log_groups__) if ( "cloudwatch_log_group_no_secrets_in_logs" - in audit_info.audit_metadata.expected_checks + in provider.audit_metadata.expected_checks ): self.events_per_log_group_threshold = ( 1000 # The threshold for number of events to return per log group. 
diff --git a/prowler/providers/aws/services/cloudwatch/logs_client.py b/prowler/providers/aws/services/cloudwatch/logs_client.py index a937c9036d..a2901b8862 100644 --- a/prowler/providers/aws/services/cloudwatch/logs_client.py +++ b/prowler/providers/aws/services/cloudwatch/logs_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.cloudwatch.cloudwatch_service import Logs +from prowler.providers.common.common import get_global_provider -logs_client = Logs(current_audit_info) +logs_client = Logs(get_global_provider()) diff --git a/prowler/providers/aws/services/codeartifact/codeartifact_client.py b/prowler/providers/aws/services/codeartifact/codeartifact_client.py index 3c22918d0f..59cbdc0c5d 100644 --- a/prowler/providers/aws/services/codeartifact/codeartifact_client.py +++ b/prowler/providers/aws/services/codeartifact/codeartifact_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.codeartifact.codeartifact_service import ( CodeArtifact, ) +from prowler.providers.common.common import get_global_provider -codeartifact_client = CodeArtifact(current_audit_info) +codeartifact_client = CodeArtifact(get_global_provider()) diff --git a/prowler/providers/aws/services/codeartifact/codeartifact_service.py b/prowler/providers/aws/services/codeartifact/codeartifact_service.py index d0d6a65701..039faa804a 100644 --- a/prowler/providers/aws/services/codeartifact/codeartifact_service.py +++ b/prowler/providers/aws/services/codeartifact/codeartifact_service.py @@ -11,9 +11,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## CodeArtifact class CodeArtifact(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) # 
repositories is a dictionary containing all the codeartifact service information self.repositories = {} self.__threading_call__(self.__list_repositories__) diff --git a/prowler/providers/aws/services/codebuild/codebuild_client.py b/prowler/providers/aws/services/codebuild/codebuild_client.py index bfc1138491..39621c1ac4 100644 --- a/prowler/providers/aws/services/codebuild/codebuild_client.py +++ b/prowler/providers/aws/services/codebuild/codebuild_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.codebuild.codebuild_service import Codebuild +from prowler.providers.common.common import get_global_provider -codebuild_client = Codebuild(current_audit_info) +codebuild_client = Codebuild(get_global_provider()) diff --git a/prowler/providers/aws/services/codebuild/codebuild_service.py b/prowler/providers/aws/services/codebuild/codebuild_service.py index 9b0ab4191b..ff65734eb9 100644 --- a/prowler/providers/aws/services/codebuild/codebuild_service.py +++ b/prowler/providers/aws/services/codebuild/codebuild_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################### Codebuild class Codebuild(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.projects = [] self.__threading_call__(self.__list_projects__) self.__list_builds_for_project__() diff --git a/prowler/providers/aws/services/config/config_client.py b/prowler/providers/aws/services/config/config_client.py index 05f0fc02f2..30ccfa89f9 100644 --- a/prowler/providers/aws/services/config/config_client.py +++ b/prowler/providers/aws/services/config/config_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.config.config_service import 
Config +from prowler.providers.common.common import get_global_provider -config_client = Config(current_audit_info) +config_client = Config(get_global_provider()) diff --git a/prowler/providers/aws/services/config/config_service.py b/prowler/providers/aws/services/config/config_service.py index 5e6df4c030..49b742153a 100644 --- a/prowler/providers/aws/services/config/config_service.py +++ b/prowler/providers/aws/services/config/config_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## Config class Config(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.recorders = [] self.__threading_call__(self.__describe_configuration_recorder_status__) diff --git a/prowler/providers/aws/services/directoryservice/directoryservice_client.py b/prowler/providers/aws/services/directoryservice/directoryservice_client.py index 42c42b6316..b522373a71 100644 --- a/prowler/providers/aws/services/directoryservice/directoryservice_client.py +++ b/prowler/providers/aws/services/directoryservice/directoryservice_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.directoryservice.directoryservice_service import ( DirectoryService, ) +from prowler.providers.common.common import get_global_provider -directoryservice_client = DirectoryService(current_audit_info) +directoryservice_client = DirectoryService(get_global_provider()) diff --git a/prowler/providers/aws/services/directoryservice/directoryservice_service.py b/prowler/providers/aws/services/directoryservice/directoryservice_service.py index 116619b135..9e107092ba 100644 --- a/prowler/providers/aws/services/directoryservice/directoryservice_service.py +++ b/prowler/providers/aws/services/directoryservice/directoryservice_service.py 
@@ -12,9 +12,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## DirectoryService class DirectoryService(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__("ds", audit_info) + super().__init__("ds", provider) self.directories = {} self.__threading_call__(self.__describe_directories__) self.__threading_call__(self.__list_log_subscriptions__) diff --git a/prowler/providers/aws/services/dlm/dlm_client.py b/prowler/providers/aws/services/dlm/dlm_client.py index 877b198912..553cfb5e14 100644 --- a/prowler/providers/aws/services/dlm/dlm_client.py +++ b/prowler/providers/aws/services/dlm/dlm_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.dlm.dlm_service import DLM +from prowler.providers.common.common import get_global_provider -dlm_client = DLM(current_audit_info) +dlm_client = DLM(get_global_provider()) diff --git a/prowler/providers/aws/services/dlm/dlm_service.py b/prowler/providers/aws/services/dlm/dlm_service.py index 1c620a4405..4f176eafaa 100644 --- a/prowler/providers/aws/services/dlm/dlm_service.py +++ b/prowler/providers/aws/services/dlm/dlm_service.py @@ -6,9 +6,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## Data Lifecycle Manager class DLM(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.lifecycle_policies = {} self.__threading_call__(self.__get_lifecycle_policies__) diff --git a/prowler/providers/aws/services/documentdb/documentdb_client.py b/prowler/providers/aws/services/documentdb/documentdb_client.py index b1477ae0e8..6998b857a2 100644 --- a/prowler/providers/aws/services/documentdb/documentdb_client.py +++ 
b/prowler/providers/aws/services/documentdb/documentdb_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.documentdb.documentdb_service import DocumentDB +from prowler.providers.common.common import get_global_provider -documentdb_client = DocumentDB(current_audit_info) +documentdb_client = DocumentDB(get_global_provider()) diff --git a/prowler/providers/aws/services/documentdb/documentdb_service.py b/prowler/providers/aws/services/documentdb/documentdb_service.py index 9560c2face..bc004b207a 100644 --- a/prowler/providers/aws/services/documentdb/documentdb_service.py +++ b/prowler/providers/aws/services/documentdb/documentdb_service.py @@ -9,10 +9,10 @@ from prowler.providers.aws.lib.service.service import AWSService ################## DocumentDB class DocumentDB(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ self.service_name = "docdb" - super().__init__(self.service_name, audit_info) + super().__init__(self.service_name, provider) self.db_instances = {} self.__threading_call__(self.__describe_db_instances__) self.__list_tags_for_resource__() diff --git a/prowler/providers/aws/services/drs/drs_client.py b/prowler/providers/aws/services/drs/drs_client.py index 0ab6efb356..8e0132467c 100644 --- a/prowler/providers/aws/services/drs/drs_client.py +++ b/prowler/providers/aws/services/drs/drs_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.drs.drs_service import DRS +from prowler.providers.common.common import get_global_provider -drs_client = DRS(current_audit_info) +drs_client = DRS(get_global_provider()) diff --git a/prowler/providers/aws/services/drs/drs_service.py b/prowler/providers/aws/services/drs/drs_service.py index 3fe4ad1b33..76f4818e47 100644 --- a/prowler/providers/aws/services/drs/drs_service.py +++ 
b/prowler/providers/aws/services/drs/drs_service.py @@ -8,9 +8,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## DRS (Elastic Disaster Recovery Service) class DRS(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.drs_services = [] self.__threading_call__(self.__describe_jobs__) diff --git a/prowler/providers/aws/services/dynamodb/dax_client.py b/prowler/providers/aws/services/dynamodb/dax_client.py index ff3927ab92..afe3dea814 100644 --- a/prowler/providers/aws/services/dynamodb/dax_client.py +++ b/prowler/providers/aws/services/dynamodb/dax_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.dynamodb.dynamodb_service import DAX +from prowler.providers.common.common import get_global_provider -dax_client = DAX(current_audit_info) +dax_client = DAX(get_global_provider()) diff --git a/prowler/providers/aws/services/dynamodb/dynamodb_client.py b/prowler/providers/aws/services/dynamodb/dynamodb_client.py index a79f6ba558..639fba991f 100644 --- a/prowler/providers/aws/services/dynamodb/dynamodb_client.py +++ b/prowler/providers/aws/services/dynamodb/dynamodb_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.dynamodb.dynamodb_service import DynamoDB +from prowler.providers.common.common import get_global_provider -dynamodb_client = DynamoDB(current_audit_info) +dynamodb_client = DynamoDB(get_global_provider()) diff --git a/prowler/providers/aws/services/dynamodb/dynamodb_service.py b/prowler/providers/aws/services/dynamodb/dynamodb_service.py index ef2349a4e9..626a916c61 100644 --- a/prowler/providers/aws/services/dynamodb/dynamodb_service.py +++ 
b/prowler/providers/aws/services/dynamodb/dynamodb_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## DynamoDB class DynamoDB(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.tables = [] self.__threading_call__(self.__list_tables__) self.__describe_table__() @@ -121,9 +121,9 @@ class DynamoDB(AWSService): ################## DynamoDB DAX class DAX(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.clusters = [] self.__threading_call__(self.__describe_clusters__) self.__list_tags_for_resource__() diff --git a/prowler/providers/aws/services/ec2/ec2_client.py b/prowler/providers/aws/services/ec2/ec2_client.py index 2541c7e614..4eada699aa 100644 --- a/prowler/providers/aws/services/ec2/ec2_client.py +++ b/prowler/providers/aws/services/ec2/ec2_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.ec2.ec2_service import EC2 +from prowler.providers.common.common import get_global_provider -ec2_client = EC2(current_audit_info) +ec2_client = EC2(get_global_provider()) diff --git a/prowler/providers/aws/services/ec2/ec2_ebs_default_encryption/ec2_ebs_default_encryption.py b/prowler/providers/aws/services/ec2/ec2_ebs_default_encryption/ec2_ebs_default_encryption.py index a208d1ffa4..e81a87088f 100644 --- a/prowler/providers/aws/services/ec2/ec2_ebs_default_encryption/ec2_ebs_default_encryption.py +++ b/prowler/providers/aws/services/ec2/ec2_ebs_default_encryption/ec2_ebs_default_encryption.py @@ -15,8 +15,7 @@ class ec2_ebs_default_encryption(Check): report.status_extended = "EBS Default Encryption is 
activated." findings.append(report) elif ( - not ec2_client.audit_info.ignore_unused_services - or ebs_encryption.volumes + not ec2_client.provider.ignore_unused_services or ebs_encryption.volumes ): report.status = "FAIL" report.status_extended = "EBS Default Encryption is not activated." diff --git a/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_any_port/ec2_networkacl_allow_ingress_any_port.py b/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_any_port/ec2_networkacl_allow_ingress_any_port.py index 224b1e8d8e..55c4c796ef 100644 --- a/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_any_port/ec2_networkacl_allow_ingress_any_port.py +++ b/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_any_port/ec2_networkacl_allow_ingress_any_port.py @@ -10,7 +10,7 @@ class ec2_networkacl_allow_ingress_any_port(Check): check_port = 0 for network_acl in ec2_client.network_acls: if ( - not ec2_client.audit_info.ignore_unused_services + not ec2_client.provider.ignore_unused_services or network_acl.region in ec2_client.regions_with_sgs ): # If some entry allows it, that ACL is not securely configured diff --git a/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_tcp_port_22/ec2_networkacl_allow_ingress_tcp_port_22.py b/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_tcp_port_22/ec2_networkacl_allow_ingress_tcp_port_22.py index 272b07deea..f8b8526632 100644 --- a/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_tcp_port_22/ec2_networkacl_allow_ingress_tcp_port_22.py +++ b/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_tcp_port_22/ec2_networkacl_allow_ingress_tcp_port_22.py @@ -10,7 +10,7 @@ class ec2_networkacl_allow_ingress_tcp_port_22(Check): check_port = 22 for network_acl in ec2_client.network_acls: if ( - not ec2_client.audit_info.ignore_unused_services + not ec2_client.provider.ignore_unused_services or network_acl.region in ec2_client.regions_with_sgs ): 
# If some entry allows it, that ACL is not securely configured diff --git a/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_tcp_port_3389/ec2_networkacl_allow_ingress_tcp_port_3389.py b/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_tcp_port_3389/ec2_networkacl_allow_ingress_tcp_port_3389.py index 148e3c89e7..42b873b62f 100644 --- a/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_tcp_port_3389/ec2_networkacl_allow_ingress_tcp_port_3389.py +++ b/prowler/providers/aws/services/ec2/ec2_networkacl_allow_ingress_tcp_port_3389/ec2_networkacl_allow_ingress_tcp_port_3389.py @@ -10,7 +10,7 @@ class ec2_networkacl_allow_ingress_tcp_port_3389(Check): check_port = 3389 for network_acl in ec2_client.network_acls: if ( - not ec2_client.audit_info.ignore_unused_services + not ec2_client.provider.ignore_unused_services or network_acl.region in ec2_client.regions_with_sgs ): # If some entry allows it, that ACL is not securely configured diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_any_port/ec2_securitygroup_allow_ingress_from_internet_to_any_port.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_any_port/ec2_securitygroup_allow_ingress_from_internet_to_any_port.py index 309caef361..88c1f770c0 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_any_port/ec2_securitygroup_allow_ingress_from_internet_to_any_port.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_any_port/ec2_securitygroup_allow_ingress_from_internet_to_any_port.py @@ -8,7 +8,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_any_port(Check): findings = [] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( 
security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018/ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018/ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018.py index 160a6eaaf5..f07d561db2 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018/ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018/ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018( check_ports = [27017, 27018] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21/ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21/ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21.py index 80c2677beb..531cd0e6da 100644 --- 
a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21/ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21/ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21(Check) check_ports = [20, 21] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22.py index 0a2ecc35e5..98e4707db6 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22(Check): check_ports = [22] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and 
len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389.py index b9ae1fa267..4ba73e2ec8 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_3389(Check): check_ports = [3389] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888.py index 67b6d10ff9..3aa513d1a5 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888.py +++ 
b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9160_8888.py @@ -12,7 +12,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_cassandra_7199_9 check_ports = [7199, 9160, 8888] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601.py index f726df6a41..a4b105d9e5 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_kibana_9200_9300_5601.py @@ -12,7 +12,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_elasticsearch_ki check_ports = [9200, 9300, 5601] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not 
ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092.py index 7ce02df92d..d2e8632354 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_kafka_9092(Check check_ports = [9092] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211.py index 6266d2d2ff..215eb0c3b6 100644 --- 
a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_memcached_11211( check_ports = [11211] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306.py index eaca5d6af4..6678aa04e4 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_mysql_3306(Check check_ports = [3306] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( 
security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483.py index f51709a073..c9da15e8c6 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_oracle_1521_2483 check_ports = [1521, 2483] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432.py index b1043d8b83..c9a6786865 100644 --- 
a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_postgres_5432(Ch check_ports = [5432] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379.py index e9c7a3f1ab..6a5701b6d1 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_redis_6379(Check check_ports = [6379] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( 
security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434.py index 564af09055..18cab0fb9e 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_1434.py @@ -12,7 +12,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_sql_server_1433_ check_ports = [1433, 1434] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23.py index e1fabd4508..e8b173fbfd 100644 --- 
a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23/ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23.py @@ -10,7 +10,7 @@ class ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_telnet_23(Check) check_ports = [23] for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_wide_open_public_ipv4/ec2_securitygroup_allow_wide_open_public_ipv4.py b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_wide_open_public_ipv4/ec2_securitygroup_allow_wide_open_public_ipv4.py index 625da3a758..4a90f4e95b 100644 --- a/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_wide_open_public_ipv4/ec2_securitygroup_allow_wide_open_public_ipv4.py +++ b/prowler/providers/aws/services/ec2/ec2_securitygroup_allow_wide_open_public_ipv4/ec2_securitygroup_allow_wide_open_public_ipv4.py @@ -11,7 +11,7 @@ class ec2_securitygroup_allow_wide_open_public_ipv4(Check): cidr_treshold = 24 for security_group in ec2_client.security_groups: # Check if ignoring flag is set and if the VPC and the SG is in use - if not ec2_client.audit_info.ignore_unused_services or ( + if not ec2_client.provider.ignore_unused_services or ( security_group.vpc_id in vpc_client.vpcs and vpc_client.vpcs[security_group.vpc_id].in_use and len(security_group.network_interfaces) > 0 diff --git a/prowler/providers/aws/services/ec2/ec2_service.py 
b/prowler/providers/aws/services/ec2/ec2_service.py index 4838f04115..b08ca8aa3c 100644 --- a/prowler/providers/aws/services/ec2/ec2_service.py +++ b/prowler/providers/aws/services/ec2/ec2_service.py @@ -12,9 +12,9 @@ from prowler.providers.aws.services.ec2.lib.security_groups import check_securit ################## EC2 class EC2(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.instances = [] self.__threading_call__(self.__describe_instances__) self.__get_instance_user_data__() diff --git a/prowler/providers/aws/services/ecr/ecr_client.py b/prowler/providers/aws/services/ecr/ecr_client.py index d9775a9c36..8314806474 100644 --- a/prowler/providers/aws/services/ecr/ecr_client.py +++ b/prowler/providers/aws/services/ecr/ecr_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.ecr.ecr_service import ECR +from prowler.providers.common.common import get_global_provider -ecr_client = ECR(current_audit_info) +ecr_client = ECR(get_global_provider()) diff --git a/prowler/providers/aws/services/ecr/ecr_service.py b/prowler/providers/aws/services/ecr/ecr_service.py index 00bd72bdcb..c0d5f3d7cb 100644 --- a/prowler/providers/aws/services/ecr/ecr_service.py +++ b/prowler/providers/aws/services/ecr/ecr_service.py @@ -12,10 +12,10 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ ECR class ECR(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) - self.registry_id = audit_info.audited_account + super().__init__(__class__.__name__, provider) + self.registry_id = self.audited_account self.registries = {} self.__threading_call__(self.__describe_registries_and_repositories__) 
self.__threading_call__(self.__describe_repository_policies__) diff --git a/prowler/providers/aws/services/ecs/ecs_client.py b/prowler/providers/aws/services/ecs/ecs_client.py index 5f9d59c1be..f0bc85b6cb 100644 --- a/prowler/providers/aws/services/ecs/ecs_client.py +++ b/prowler/providers/aws/services/ecs/ecs_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.ecs.ecs_service import ECS +from prowler.providers.common.common import get_global_provider -ecs_client = ECS(current_audit_info) +ecs_client = ECS(get_global_provider()) diff --git a/prowler/providers/aws/services/ecs/ecs_service.py b/prowler/providers/aws/services/ecs/ecs_service.py index 1dad47524c..ed222e04f1 100644 --- a/prowler/providers/aws/services/ecs/ecs_service.py +++ b/prowler/providers/aws/services/ecs/ecs_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ ECS class ECS(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.task_definitions = [] self.__threading_call__(self.__list_task_definitions__) self.__describe_task_definition__() diff --git a/prowler/providers/aws/services/efs/efs_client.py b/prowler/providers/aws/services/efs/efs_client.py index 00e71cdc8f..30d4f6ddb1 100644 --- a/prowler/providers/aws/services/efs/efs_client.py +++ b/prowler/providers/aws/services/efs/efs_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.efs.efs_service import EFS +from prowler.providers.common.common import get_global_provider -efs_client = EFS(current_audit_info) +efs_client = EFS(get_global_provider()) diff --git a/prowler/providers/aws/services/efs/efs_service.py 
b/prowler/providers/aws/services/efs/efs_service.py index f4d9104df6..8465623995 100644 --- a/prowler/providers/aws/services/efs/efs_service.py +++ b/prowler/providers/aws/services/efs/efs_service.py @@ -11,9 +11,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################### EFS class EFS(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.filesystems = [] self.__threading_call__(self.__describe_file_systems__) self.__describe_file_system_policies__() diff --git a/prowler/providers/aws/services/eks/eks_client.py b/prowler/providers/aws/services/eks/eks_client.py index 4eaad65208..e5dfd326cd 100644 --- a/prowler/providers/aws/services/eks/eks_client.py +++ b/prowler/providers/aws/services/eks/eks_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.eks.eks_service import EKS +from prowler.providers.common.common import get_global_provider -eks_client = EKS(current_audit_info) +eks_client = EKS(get_global_provider()) diff --git a/prowler/providers/aws/services/eks/eks_service.py b/prowler/providers/aws/services/eks/eks_service.py index f5f9cdf539..70ac050e75 100644 --- a/prowler/providers/aws/services/eks/eks_service.py +++ b/prowler/providers/aws/services/eks/eks_service.py @@ -4,16 +4,14 @@ from pydantic import BaseModel from prowler.lib.logger import logger from prowler.lib.scan_filters.scan_filters import is_resource_filtered -from prowler.providers.aws.aws_provider import generate_regional_clients from prowler.providers.aws.lib.service.service import AWSService ################################ EKS class EKS(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) - self.regional_clients = 
generate_regional_clients(self.service, audit_info) + super().__init__(__class__.__name__, provider) self.clusters = [] self.__threading_call__(self.__list_clusters__) self.__describe_cluster__(self.regional_clients) diff --git a/prowler/providers/aws/services/elasticache/elasticache_client.py b/prowler/providers/aws/services/elasticache/elasticache_client.py index 5b6669bdc7..fed5adab5b 100644 --- a/prowler/providers/aws/services/elasticache/elasticache_client.py +++ b/prowler/providers/aws/services/elasticache/elasticache_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.elasticache.elasticache_service import ElastiCache +from prowler.providers.common.common import get_global_provider -elasticache_client = ElastiCache(current_audit_info) +elasticache_client = ElastiCache(get_global_provider()) diff --git a/prowler/providers/aws/services/elasticache/elasticache_service.py b/prowler/providers/aws/services/elasticache/elasticache_service.py index d41d2cac6d..353ad4ff8c 100644 --- a/prowler/providers/aws/services/elasticache/elasticache_service.py +++ b/prowler/providers/aws/services/elasticache/elasticache_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ Elasticache class ElastiCache(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.clusters = {} self.__threading_call__(self.__describe_cache_clusters__) self.__threading_call__(self.__describe_cache_subnet_groups__) diff --git a/prowler/providers/aws/services/elb/elb_client.py b/prowler/providers/aws/services/elb/elb_client.py index 25c4695640..bac9a3ba87 100644 --- a/prowler/providers/aws/services/elb/elb_client.py +++ b/prowler/providers/aws/services/elb/elb_client.py @@ -1,4 +1,4 @@ -from 
prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.elb.elb_service import ELB +from prowler.providers.common.common import get_global_provider -elb_client = ELB(current_audit_info) +elb_client = ELB(get_global_provider()) diff --git a/prowler/providers/aws/services/elb/elb_service.py b/prowler/providers/aws/services/elb/elb_service.py index 31de0c4fbf..877ae780a9 100644 --- a/prowler/providers/aws/services/elb/elb_service.py +++ b/prowler/providers/aws/services/elb/elb_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################### ELB class ELB(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.loadbalancers = [] self.__threading_call__(self.__describe_load_balancers__) self.__threading_call__(self.__describe_load_balancer_attributes__) diff --git a/prowler/providers/aws/services/elbv2/elbv2_client.py b/prowler/providers/aws/services/elbv2/elbv2_client.py index 4b4cfe6e7a..3e22247f49 100644 --- a/prowler/providers/aws/services/elbv2/elbv2_client.py +++ b/prowler/providers/aws/services/elbv2/elbv2_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.elbv2.elbv2_service import ELBv2 +from prowler.providers.common.common import get_global_provider -elbv2_client = ELBv2(current_audit_info) +elbv2_client = ELBv2(get_global_provider()) diff --git a/prowler/providers/aws/services/elbv2/elbv2_service.py b/prowler/providers/aws/services/elbv2/elbv2_service.py index 4964a8f980..0588ae9455 100644 --- a/prowler/providers/aws/services/elbv2/elbv2_service.py +++ b/prowler/providers/aws/services/elbv2/elbv2_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################### ELBv2 
class ELBv2(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.loadbalancersv2 = [] self.__threading_call__(self.__describe_load_balancers__) self.listeners = [] diff --git a/prowler/providers/aws/services/emr/emr_client.py b/prowler/providers/aws/services/emr/emr_client.py index ce022feea5..e01c20803a 100644 --- a/prowler/providers/aws/services/emr/emr_client.py +++ b/prowler/providers/aws/services/emr/emr_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.emr.emr_service import EMR +from prowler.providers.common.common import get_global_provider -emr_client = EMR(current_audit_info) +emr_client = EMR(get_global_provider()) diff --git a/prowler/providers/aws/services/emr/emr_service.py b/prowler/providers/aws/services/emr/emr_service.py index 897b7bcfee..287025daba 100644 --- a/prowler/providers/aws/services/emr/emr_service.py +++ b/prowler/providers/aws/services/emr/emr_service.py @@ -11,9 +11,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## EMR class EMR(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.clusters = {} self.block_public_access_configuration = {} self.__threading_call__(self.__list_clusters__) diff --git a/prowler/providers/aws/services/fms/fms_client.py b/prowler/providers/aws/services/fms/fms_client.py index 0bd41b0c4e..e20a950736 100644 --- a/prowler/providers/aws/services/fms/fms_client.py +++ b/prowler/providers/aws/services/fms/fms_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.fms.fms_service import 
FMS +from prowler.providers.common.common import get_global_provider -fms_client = FMS(current_audit_info) +fms_client = FMS(get_global_provider()) diff --git a/prowler/providers/aws/services/fms/fms_service.py b/prowler/providers/aws/services/fms/fms_service.py index 6c781a6701..5e645ae544 100644 --- a/prowler/providers/aws/services/fms/fms_service.py +++ b/prowler/providers/aws/services/fms/fms_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## FMS class FMS(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info, global_service=True) + super().__init__(__class__.__name__, provider, global_service=True) self.fms_admin_account = True self.fms_policies = [] self.__list_policies__() diff --git a/prowler/providers/aws/services/glacier/glacier_client.py b/prowler/providers/aws/services/glacier/glacier_client.py index a099cd9ce2..2c50e41b7b 100644 --- a/prowler/providers/aws/services/glacier/glacier_client.py +++ b/prowler/providers/aws/services/glacier/glacier_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.glacier.glacier_service import Glacier +from prowler.providers.common.common import get_global_provider -glacier_client = Glacier(current_audit_info) +glacier_client = Glacier(get_global_provider()) diff --git a/prowler/providers/aws/services/glacier/glacier_service.py b/prowler/providers/aws/services/glacier/glacier_service.py index a74e3dbc3a..756c210ea9 100644 --- a/prowler/providers/aws/services/glacier/glacier_service.py +++ b/prowler/providers/aws/services/glacier/glacier_service.py @@ -11,9 +11,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## Glacier class Glacier(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's 
__init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.vaults = {} self.__threading_call__(self.__list_vaults__) self.__threading_call__(self.__get_vault_access_policy__) diff --git a/prowler/providers/aws/services/globalaccelerator/globalaccelerator_client.py b/prowler/providers/aws/services/globalaccelerator/globalaccelerator_client.py index 58991d16b6..36f901ab90 100644 --- a/prowler/providers/aws/services/globalaccelerator/globalaccelerator_client.py +++ b/prowler/providers/aws/services/globalaccelerator/globalaccelerator_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.globalaccelerator.globalaccelerator_service import ( GlobalAccelerator, ) +from prowler.providers.common.common import get_global_provider -globalaccelerator_client = GlobalAccelerator(current_audit_info) +globalaccelerator_client = GlobalAccelerator(get_global_provider()) diff --git a/prowler/providers/aws/services/globalaccelerator/globalaccelerator_service.py b/prowler/providers/aws/services/globalaccelerator/globalaccelerator_service.py index 6e79eaaf62..c394a3c095 100644 --- a/prowler/providers/aws/services/globalaccelerator/globalaccelerator_service.py +++ b/prowler/providers/aws/services/globalaccelerator/globalaccelerator_service.py @@ -7,11 +7,11 @@ from prowler.providers.aws.lib.service.service import AWSService ################### GlobalAccelerator class GlobalAccelerator(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.accelerators = {} - if audit_info.audited_partition == "aws": + if provider.audited_partition == "aws": # Global Accelerator is a global service that supports endpoints in multiple AWS Regions # but you must specify the US West (Oregon) Region to create, 
update, or otherwise work with accelerators. # That is, for example, specify --region us-west-2 on AWS CLI commands. diff --git a/prowler/providers/aws/services/glue/glue_client.py b/prowler/providers/aws/services/glue/glue_client.py index 4647967fec..dfdb6af066 100644 --- a/prowler/providers/aws/services/glue/glue_client.py +++ b/prowler/providers/aws/services/glue/glue_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.glue.glue_service import Glue +from prowler.providers.common.common import get_global_provider -glue_client = Glue(current_audit_info) +glue_client = Glue(get_global_provider()) diff --git a/prowler/providers/aws/services/glue/glue_data_catalogs_connection_passwords_encryption_enabled/glue_data_catalogs_connection_passwords_encryption_enabled.py b/prowler/providers/aws/services/glue/glue_data_catalogs_connection_passwords_encryption_enabled/glue_data_catalogs_connection_passwords_encryption_enabled.py index f71b4a0bf7..b339790127 100644 --- a/prowler/providers/aws/services/glue/glue_data_catalogs_connection_passwords_encryption_enabled/glue_data_catalogs_connection_passwords_encryption_enabled.py +++ b/prowler/providers/aws/services/glue/glue_data_catalogs_connection_passwords_encryption_enabled/glue_data_catalogs_connection_passwords_encryption_enabled.py @@ -7,7 +7,7 @@ class glue_data_catalogs_connection_passwords_encryption_enabled(Check): findings = [] for encryption in glue_client.catalog_encryption_settings: # Check only if there are Glue Tables - if encryption.tables or not glue_client.audit_info.ignore_unused_services: + if encryption.tables or not glue_client.provider.ignore_unused_services: report = Check_Report_AWS(self.metadata()) report.resource_id = glue_client.audited_account report.resource_arn = glue_client.audited_account_arn diff --git 
a/prowler/providers/aws/services/glue/glue_data_catalogs_metadata_encryption_enabled/glue_data_catalogs_metadata_encryption_enabled.py b/prowler/providers/aws/services/glue/glue_data_catalogs_metadata_encryption_enabled/glue_data_catalogs_metadata_encryption_enabled.py index 6ed906cc21..a8ac2105ec 100644 --- a/prowler/providers/aws/services/glue/glue_data_catalogs_metadata_encryption_enabled/glue_data_catalogs_metadata_encryption_enabled.py +++ b/prowler/providers/aws/services/glue/glue_data_catalogs_metadata_encryption_enabled/glue_data_catalogs_metadata_encryption_enabled.py @@ -7,7 +7,7 @@ class glue_data_catalogs_metadata_encryption_enabled(Check): findings = [] for encryption in glue_client.catalog_encryption_settings: # Check only if there are Glue Tables - if encryption.tables or not glue_client.audit_info.ignore_unused_services: + if encryption.tables or not glue_client.provider.ignore_unused_services: report = Check_Report_AWS(self.metadata()) report.resource_id = glue_client.audited_account report.resource_arn = glue_client.audited_account_arn diff --git a/prowler/providers/aws/services/glue/glue_service.py b/prowler/providers/aws/services/glue/glue_service.py index 24be4f5928..4c5cdd1469 100644 --- a/prowler/providers/aws/services/glue/glue_service.py +++ b/prowler/providers/aws/services/glue/glue_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## Glue class Glue(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.connections = [] self.__threading_call__(self.__get_connections__) self.tables = [] diff --git a/prowler/providers/aws/services/guardduty/guardduty_client.py b/prowler/providers/aws/services/guardduty/guardduty_client.py index 74320efcb2..36a41c52f7 100644 --- a/prowler/providers/aws/services/guardduty/guardduty_client.py +++ 
b/prowler/providers/aws/services/guardduty/guardduty_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.guardduty.guardduty_service import GuardDuty +from prowler.providers.common.common import get_global_provider -guardduty_client = GuardDuty(current_audit_info) +guardduty_client = GuardDuty(get_global_provider()) diff --git a/prowler/providers/aws/services/guardduty/guardduty_service.py b/prowler/providers/aws/services/guardduty/guardduty_service.py index f666a342f0..04afe17ee1 100644 --- a/prowler/providers/aws/services/guardduty/guardduty_service.py +++ b/prowler/providers/aws/services/guardduty/guardduty_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ GuardDuty class GuardDuty(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.detectors = [] self.__threading_call__(self.__list_detectors__) self.__get_detector__() diff --git a/prowler/providers/aws/services/iam/iam_client.py b/prowler/providers/aws/services/iam/iam_client.py index 2e855bb21d..8c92ca0204 100644 --- a/prowler/providers/aws/services/iam/iam_client.py +++ b/prowler/providers/aws/services/iam/iam_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.iam.iam_service import IAM +from prowler.providers.common.common import get_global_provider -iam_client = IAM(current_audit_info) +iam_client = IAM(get_global_provider()) diff --git a/prowler/providers/aws/services/iam/iam_service.py b/prowler/providers/aws/services/iam/iam_service.py index ca31b14ac5..21bf63ac28 100644 --- a/prowler/providers/aws/services/iam/iam_service.py +++ b/prowler/providers/aws/services/iam/iam_service.py @@ -48,9 +48,9 
@@ def is_service_role(role): ################## IAM class IAM(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.users = self.__get_users__() self.roles = self.__get_roles__() self.account_summary = self.__get_account_summary__() diff --git a/prowler/providers/aws/services/inspector2/inspector2_client.py b/prowler/providers/aws/services/inspector2/inspector2_client.py index b243227cd7..13846315fc 100644 --- a/prowler/providers/aws/services/inspector2/inspector2_client.py +++ b/prowler/providers/aws/services/inspector2/inspector2_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.inspector2.inspector2_service import Inspector2 +from prowler.providers.common.common import get_global_provider -inspector2_client = Inspector2(current_audit_info) +inspector2_client = Inspector2(get_global_provider()) diff --git a/prowler/providers/aws/services/inspector2/inspector2_findings_exist/inspector2_findings_exist.py b/prowler/providers/aws/services/inspector2/inspector2_findings_exist/inspector2_findings_exist.py index e8fd5d2fa7..fad57d213c 100644 --- a/prowler/providers/aws/services/inspector2/inspector2_findings_exist/inspector2_findings_exist.py +++ b/prowler/providers/aws/services/inspector2/inspector2_findings_exist/inspector2_findings_exist.py @@ -33,7 +33,7 @@ class inspector2_findings_exist(Check): ) findings.append(report) else: - if inspector2_client.audit_info.ignore_unused_services: + if inspector2_client.provider.ignore_unused_services: funtions_in_region = False ec2_in_region = False for function in awslambda_client.functions.values(): @@ -42,7 +42,7 @@ class inspector2_findings_exist(Check): for instance in ec2_client.instances: if instance == inspector.region: ec2_in_region = True - if not 
inspector2_client.audit_info.ignore_unused_services or ( + if not inspector2_client.provider.ignore_unused_services or ( funtions_in_region or ecr_client.registries[inspector.region].repositories or ec2_in_region diff --git a/prowler/providers/aws/services/inspector2/inspector2_service.py b/prowler/providers/aws/services/inspector2/inspector2_service.py index b984e40f37..d5b7ff4d0e 100644 --- a/prowler/providers/aws/services/inspector2/inspector2_service.py +++ b/prowler/providers/aws/services/inspector2/inspector2_service.py @@ -7,9 +7,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ Inspector2 class Inspector2(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.inspectors = [] self.__threading_call__(self.__batch_get_account_status__) self.__list_findings__() diff --git a/prowler/providers/aws/services/kms/kms_client.py b/prowler/providers/aws/services/kms/kms_client.py index 4d65227a28..9890674bc9 100644 --- a/prowler/providers/aws/services/kms/kms_client.py +++ b/prowler/providers/aws/services/kms/kms_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.kms.kms_service import KMS +from prowler.providers.common.common import get_global_provider -kms_client = KMS(current_audit_info) +kms_client = KMS(get_global_provider()) diff --git a/prowler/providers/aws/services/kms/kms_service.py b/prowler/providers/aws/services/kms/kms_service.py index 4dc7fafba3..b0c17b20f8 100644 --- a/prowler/providers/aws/services/kms/kms_service.py +++ b/prowler/providers/aws/services/kms/kms_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## KMS class KMS(AWSService): - def __init__(self, audit_info): + def __init__(self, 
provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.keys = [] self.__threading_call__(self.__list_keys__) if self.keys: diff --git a/prowler/providers/aws/services/macie/macie_client.py b/prowler/providers/aws/services/macie/macie_client.py index d1cd34a0bf..3d63ba74bb 100644 --- a/prowler/providers/aws/services/macie/macie_client.py +++ b/prowler/providers/aws/services/macie/macie_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.macie.macie_service import Macie +from prowler.providers.common.common import get_global_provider -macie_client = Macie(current_audit_info) +macie_client = Macie(get_global_provider()) diff --git a/prowler/providers/aws/services/macie/macie_is_enabled/macie_is_enabled.py b/prowler/providers/aws/services/macie/macie_is_enabled/macie_is_enabled.py index eeb1d13491..b1e94e0d79 100644 --- a/prowler/providers/aws/services/macie/macie_is_enabled/macie_is_enabled.py +++ b/prowler/providers/aws/services/macie/macie_is_enabled/macie_is_enabled.py @@ -17,7 +17,7 @@ class macie_is_enabled(Check): findings.append(report) else: if ( - not macie_client.audit_info.ignore_unused_services + not macie_client.provider.ignore_unused_services or session.region in s3_client.regions_with_buckets ): if session.status == "PAUSED": diff --git a/prowler/providers/aws/services/macie/macie_service.py b/prowler/providers/aws/services/macie/macie_service.py index 4831aaf858..b644e1d73b 100644 --- a/prowler/providers/aws/services/macie/macie_service.py +++ b/prowler/providers/aws/services/macie/macie_service.py @@ -6,9 +6,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## Macie class Macie(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__("macie2", audit_info) + 
super().__init__("macie2", provider) self.sessions = [] self.__threading_call__(self.__get_macie_session__) diff --git a/prowler/providers/aws/services/neptune/neptune_client.py b/prowler/providers/aws/services/neptune/neptune_client.py index 1708be453e..5c14060077 100644 --- a/prowler/providers/aws/services/neptune/neptune_client.py +++ b/prowler/providers/aws/services/neptune/neptune_client.py @@ -1,6 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info -from prowler.providers.aws.services.neptune.neptune_service import ( - Neptune, -) +from prowler.providers.aws.services.neptune.neptune_service import Neptune +from prowler.providers.common.common import get_global_provider -neptune_client = Neptune(current_audit_info) +neptune_client = Neptune(get_global_provider()) diff --git a/prowler/providers/aws/services/neptune/neptune_service.py b/prowler/providers/aws/services/neptune/neptune_service.py index aabbbcf334..b850a7b0e1 100644 --- a/prowler/providers/aws/services/neptune/neptune_service.py +++ b/prowler/providers/aws/services/neptune/neptune_service.py @@ -9,10 +9,10 @@ from prowler.providers.aws.lib.service.service import AWSService ################## Neptune class Neptune(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ self.service_name = "neptune" - super().__init__(self.service_name, audit_info) + super().__init__(self.service_name, provider) self.clusters = {} self.__threading_call__(self.__describe_clusters__) self.__threading_call__(self.__describe_db_subnet_groups__) diff --git a/prowler/providers/aws/services/networkfirewall/networkfirewall_client.py b/prowler/providers/aws/services/networkfirewall/networkfirewall_client.py index 685947ac4b..1859622570 100644 --- a/prowler/providers/aws/services/networkfirewall/networkfirewall_client.py +++ b/prowler/providers/aws/services/networkfirewall/networkfirewall_client.py @@ -1,6 +1,6 @@ -from 
prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.networkfirewall.networkfirewall_service import ( NetworkFirewall, ) +from prowler.providers.common.common import get_global_provider -networkfirewall_client = NetworkFirewall(current_audit_info) +networkfirewall_client = NetworkFirewall(get_global_provider()) diff --git a/prowler/providers/aws/services/networkfirewall/networkfirewall_in_all_vpc/networkfirewall_in_all_vpc.py b/prowler/providers/aws/services/networkfirewall/networkfirewall_in_all_vpc/networkfirewall_in_all_vpc.py index 692a93b2ce..f28883efae 100644 --- a/prowler/providers/aws/services/networkfirewall/networkfirewall_in_all_vpc/networkfirewall_in_all_vpc.py +++ b/prowler/providers/aws/services/networkfirewall/networkfirewall_in_all_vpc/networkfirewall_in_all_vpc.py @@ -9,7 +9,7 @@ class networkfirewall_in_all_vpc(Check): def execute(self): findings = [] for vpc in vpc_client.vpcs.values(): - if not vpc_client.audit_info.ignore_unused_services or vpc.in_use: + if not vpc_client.provider.ignore_unused_services or vpc.in_use: report = Check_Report_AWS(self.metadata()) report.region = vpc.region report.resource_id = vpc.id diff --git a/prowler/providers/aws/services/networkfirewall/networkfirewall_service.py b/prowler/providers/aws/services/networkfirewall/networkfirewall_service.py index 9e32bf2be9..1f095df4e1 100644 --- a/prowler/providers/aws/services/networkfirewall/networkfirewall_service.py +++ b/prowler/providers/aws/services/networkfirewall/networkfirewall_service.py @@ -7,9 +7,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## NetworkFirewall class NetworkFirewall(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__("network-firewall", audit_info) + super().__init__("network-firewall", provider) self.network_firewalls = [] self.__threading_call__(self.__list_firewalls__) 
self.__describe_firewall__() diff --git a/prowler/providers/aws/services/opensearch/opensearch_client.py b/prowler/providers/aws/services/opensearch/opensearch_client.py index fdd7f8f46b..1f4d229c6f 100644 --- a/prowler/providers/aws/services/opensearch/opensearch_client.py +++ b/prowler/providers/aws/services/opensearch/opensearch_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.opensearch.opensearch_service import ( OpenSearchService, ) +from prowler.providers.common.common import get_global_provider -opensearch_client = OpenSearchService(current_audit_info) +opensearch_client = OpenSearchService(get_global_provider()) diff --git a/prowler/providers/aws/services/opensearch/opensearch_service.py b/prowler/providers/aws/services/opensearch/opensearch_service.py index 609589c159..231df1ae53 100644 --- a/prowler/providers/aws/services/opensearch/opensearch_service.py +++ b/prowler/providers/aws/services/opensearch/opensearch_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ OpenSearch class OpenSearchService(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__("opensearch", audit_info) + super().__init__("opensearch", provider) self.opensearch_domains = [] self.__threading_call__(self.__list_domain_names__) self.__describe_domain_config__(self.regional_clients) diff --git a/prowler/providers/aws/services/organizations/organizations_client.py b/prowler/providers/aws/services/organizations/organizations_client.py index f065b4934a..1637d1c17e 100644 --- a/prowler/providers/aws/services/organizations/organizations_client.py +++ b/prowler/providers/aws/services/organizations/organizations_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from 
prowler.providers.aws.services.organizations.organizations_service import ( Organizations, ) +from prowler.providers.common.common import get_global_provider -organizations_client = Organizations(current_audit_info) +organizations_client = Organizations(get_global_provider()) diff --git a/prowler/providers/aws/services/organizations/organizations_service.py b/prowler/providers/aws/services/organizations/organizations_service.py index 871da04622..472efff23e 100644 --- a/prowler/providers/aws/services/organizations/organizations_service.py +++ b/prowler/providers/aws/services/organizations/organizations_service.py @@ -18,9 +18,9 @@ available_organizations_policies = [ ################## Organizations class Organizations(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.organizations = [] self.policies = [] self.delegated_administrators = [] diff --git a/prowler/providers/aws/services/rds/rds_client.py b/prowler/providers/aws/services/rds/rds_client.py index 6911ab5fb2..d5cba59b34 100644 --- a/prowler/providers/aws/services/rds/rds_client.py +++ b/prowler/providers/aws/services/rds/rds_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.rds.rds_service import RDS +from prowler.providers.common.common import get_global_provider -rds_client = RDS(current_audit_info) +rds_client = RDS(get_global_provider()) diff --git a/prowler/providers/aws/services/rds/rds_service.py b/prowler/providers/aws/services/rds/rds_service.py index 50e5bcf7ac..1a5f88a6d7 100644 --- a/prowler/providers/aws/services/rds/rds_service.py +++ b/prowler/providers/aws/services/rds/rds_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## RDS class RDS(AWSService): - def __init__(self, 
audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.db_instances = [] self.db_clusters = {} self.db_snapshots = [] diff --git a/prowler/providers/aws/services/redshift/redshift_client.py b/prowler/providers/aws/services/redshift/redshift_client.py index 4743ddb4d9..6fb9659483 100644 --- a/prowler/providers/aws/services/redshift/redshift_client.py +++ b/prowler/providers/aws/services/redshift/redshift_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.redshift.redshift_service import Redshift +from prowler.providers.common.common import get_global_provider -redshift_client = Redshift(current_audit_info) +redshift_client = Redshift(get_global_provider()) diff --git a/prowler/providers/aws/services/redshift/redshift_service.py b/prowler/providers/aws/services/redshift/redshift_service.py index 083ed80a8b..0b0906fe2b 100644 --- a/prowler/providers/aws/services/redshift/redshift_service.py +++ b/prowler/providers/aws/services/redshift/redshift_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ Redshift class Redshift(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.clusters = [] self.__threading_call__(self.__describe_clusters__) self.__describe_logging_status__(self.regional_clients) diff --git a/prowler/providers/aws/services/resourceexplorer2/resourceexplorer2_client.py b/prowler/providers/aws/services/resourceexplorer2/resourceexplorer2_client.py index 78a873b430..8ac9923af5 100644 --- a/prowler/providers/aws/services/resourceexplorer2/resourceexplorer2_client.py +++ 
b/prowler/providers/aws/services/resourceexplorer2/resourceexplorer2_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.resourceexplorer2.resourceexplorer2_service import ( ResourceExplorer2, ) +from prowler.providers.common.common import get_global_provider -resource_explorer_2_client = ResourceExplorer2(current_audit_info) +resource_explorer_2_client = ResourceExplorer2(get_global_provider()) diff --git a/prowler/providers/aws/services/resourceexplorer2/resourceexplorer2_service.py b/prowler/providers/aws/services/resourceexplorer2/resourceexplorer2_service.py index f81b7144bf..e14506f3d0 100644 --- a/prowler/providers/aws/services/resourceexplorer2/resourceexplorer2_service.py +++ b/prowler/providers/aws/services/resourceexplorer2/resourceexplorer2_service.py @@ -7,9 +7,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ ResourceExplorer2 class ResourceExplorer2(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__("resource-explorer-2", audit_info) + super().__init__("resource-explorer-2", provider) self.indexes = [] self.__threading_call__(self.__list_indexes__) diff --git a/prowler/providers/aws/services/route53/route53_client.py b/prowler/providers/aws/services/route53/route53_client.py index 321a6000fe..47e5f1e0b6 100644 --- a/prowler/providers/aws/services/route53/route53_client.py +++ b/prowler/providers/aws/services/route53/route53_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.route53.route53_service import Route53 +from prowler.providers.common.common import get_global_provider -route53_client = Route53(current_audit_info) +route53_client = Route53(get_global_provider()) diff --git a/prowler/providers/aws/services/route53/route53_service.py 
b/prowler/providers/aws/services/route53/route53_service.py index 1562c681d6..c715a7b137 100644 --- a/prowler/providers/aws/services/route53/route53_service.py +++ b/prowler/providers/aws/services/route53/route53_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## Route53 class Route53(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info, global_service=True) + super().__init__(__class__.__name__, provider, global_service=True) self.hosted_zones = {} self.record_sets = [] self.__list_hosted_zones__() @@ -139,11 +139,11 @@ class RecordSet(BaseModel): ################## Route53Domains class Route53Domains(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.domains = {} - if audit_info.audited_partition == "aws": + if self.audited_partition == "aws": # Route53Domains is a global service that supports endpoints in multiple AWS Regions # but you must specify the US East (N. Virginia) Region to create, update, or otherwise work with domains. 
self.region = "us-east-1" diff --git a/prowler/providers/aws/services/route53/route53domains_client.py b/prowler/providers/aws/services/route53/route53domains_client.py index d05e7ed8cc..8f0d3621c8 100644 --- a/prowler/providers/aws/services/route53/route53domains_client.py +++ b/prowler/providers/aws/services/route53/route53domains_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.route53.route53_service import Route53Domains +from prowler.providers.common.common import get_global_provider -route53domains_client = Route53Domains(current_audit_info) +route53domains_client = Route53Domains(get_global_provider()) diff --git a/prowler/providers/aws/services/s3/s3_account_level_public_access_blocks/s3_account_level_public_access_blocks.py b/prowler/providers/aws/services/s3/s3_account_level_public_access_blocks/s3_account_level_public_access_blocks.py index 1374c7279f..03453b782d 100644 --- a/prowler/providers/aws/services/s3/s3_account_level_public_access_blocks/s3_account_level_public_access_blocks.py +++ b/prowler/providers/aws/services/s3/s3_account_level_public_access_blocks/s3_account_level_public_access_blocks.py @@ -18,7 +18,7 @@ class s3_account_level_public_access_blocks(Check): report.resource_id = s3control_client.audited_account report.resource_arn = s3control_client.audited_account_arn findings.append(report) - elif s3_client.buckets or not s3_client.audit_info.ignore_unused_services: + elif s3_client.buckets or not s3_client.provider.ignore_unused_services: report.status = "FAIL" report.status_extended = f"Block Public Access is not configured for the account {s3control_client.audited_account}." 
report.region = s3control_client.region diff --git a/prowler/providers/aws/services/s3/s3_client.py b/prowler/providers/aws/services/s3/s3_client.py index 2e714f5e1c..e6ca7256b2 100644 --- a/prowler/providers/aws/services/s3/s3_client.py +++ b/prowler/providers/aws/services/s3/s3_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.s3.s3_service import S3 +from prowler.providers.common.common import get_global_provider -s3_client = S3(current_audit_info) +s3_client = S3(get_global_provider()) diff --git a/prowler/providers/aws/services/s3/s3_service.py b/prowler/providers/aws/services/s3/s3_service.py index 841f152cde..f7578c990a 100644 --- a/prowler/providers/aws/services/s3/s3_service.py +++ b/prowler/providers/aws/services/s3/s3_service.py @@ -12,11 +12,11 @@ from prowler.providers.aws.lib.service.service import AWSService ################## S3 class S3(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.regions_with_buckets = [] - self.buckets = self.__list_buckets__(audit_info) + self.buckets = self.__list_buckets__(provider) self.__threading_call__(self.__get_bucket_versioning__) self.__threading_call__(self.__get_bucket_logging__) self.__threading_call__(self.__get_bucket_policy__) @@ -37,7 +37,7 @@ class S3(AWSService): for t in threads: t.join() - def __list_buckets__(self, audit_info): + def __list_buckets__(self, provider): logger.info("S3 - Listing buckets...") buckets = [] try: @@ -58,8 +58,8 @@ class S3(AWSService): ): self.regions_with_buckets.append(bucket_region) # Check if there are filter regions - if audit_info.audited_regions: - if bucket_region in audit_info.audited_regions: + if provider.identity.audited_regions: + if bucket_region in provider.identity.audited_regions: buckets.append( Bucket( 
name=bucket["Name"], @@ -344,9 +344,9 @@ class S3(AWSService): ################## S3Control class S3Control(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info, global_service=True) + super().__init__(__class__.__name__, provider, global_service=True) self.account_public_access_block = self.__get_public_access_block__() def __get_public_access_block__(self): diff --git a/prowler/providers/aws/services/s3/s3control_client.py b/prowler/providers/aws/services/s3/s3control_client.py index 596ef2b27f..c7499e5081 100644 --- a/prowler/providers/aws/services/s3/s3control_client.py +++ b/prowler/providers/aws/services/s3/s3control_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.s3.s3_service import S3Control +from prowler.providers.common.common import get_global_provider -s3control_client = S3Control(current_audit_info) +s3control_client = S3Control(get_global_provider()) diff --git a/prowler/providers/aws/services/sagemaker/sagemaker_client.py b/prowler/providers/aws/services/sagemaker/sagemaker_client.py index d883f58778..8b0cfe5433 100644 --- a/prowler/providers/aws/services/sagemaker/sagemaker_client.py +++ b/prowler/providers/aws/services/sagemaker/sagemaker_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.sagemaker.sagemaker_service import SageMaker +from prowler.providers.common.common import get_global_provider -sagemaker_client = SageMaker(current_audit_info) +sagemaker_client = SageMaker(get_global_provider()) diff --git a/prowler/providers/aws/services/sagemaker/sagemaker_service.py b/prowler/providers/aws/services/sagemaker/sagemaker_service.py index 4ebd66d90b..8a2370a7ec 100644 --- a/prowler/providers/aws/services/sagemaker/sagemaker_service.py +++ 
b/prowler/providers/aws/services/sagemaker/sagemaker_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ SageMaker class SageMaker(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.sagemaker_notebook_instances = [] self.sagemaker_models = [] self.sagemaker_training_jobs = [] diff --git a/prowler/providers/aws/services/secretsmanager/secretsmanager_client.py b/prowler/providers/aws/services/secretsmanager/secretsmanager_client.py index 09475758e1..c3403a26b4 100644 --- a/prowler/providers/aws/services/secretsmanager/secretsmanager_client.py +++ b/prowler/providers/aws/services/secretsmanager/secretsmanager_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.secretsmanager.secretsmanager_service import ( SecretsManager, ) +from prowler.providers.common.common import get_global_provider -secretsmanager_client = SecretsManager(current_audit_info) +secretsmanager_client = SecretsManager(get_global_provider()) diff --git a/prowler/providers/aws/services/secretsmanager/secretsmanager_service.py b/prowler/providers/aws/services/secretsmanager/secretsmanager_service.py index abcaeac7a1..8ed0ea126b 100644 --- a/prowler/providers/aws/services/secretsmanager/secretsmanager_service.py +++ b/prowler/providers/aws/services/secretsmanager/secretsmanager_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## SecretsManager class SecretsManager(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.secrets = {} 
self.__threading_call__(self.__list_secrets__) diff --git a/prowler/providers/aws/services/securityhub/securityhub_client.py b/prowler/providers/aws/services/securityhub/securityhub_client.py index d7c7c7fe3f..6832cfbb47 100644 --- a/prowler/providers/aws/services/securityhub/securityhub_client.py +++ b/prowler/providers/aws/services/securityhub/securityhub_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.securityhub.securityhub_service import SecurityHub +from prowler.providers.common.common import get_global_provider -securityhub_client = SecurityHub(current_audit_info) +securityhub_client = SecurityHub(get_global_provider()) diff --git a/prowler/providers/aws/services/securityhub/securityhub_service.py b/prowler/providers/aws/services/securityhub/securityhub_service.py index 0a38d4b3f4..14a1490af8 100644 --- a/prowler/providers/aws/services/securityhub/securityhub_service.py +++ b/prowler/providers/aws/services/securityhub/securityhub_service.py @@ -8,9 +8,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## SecurityHub class SecurityHub(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.securityhubs = [] self.__threading_call__(self.__describe_hub__) diff --git a/prowler/providers/aws/services/shield/shield_client.py b/prowler/providers/aws/services/shield/shield_client.py index 277247132d..f5149f2b13 100644 --- a/prowler/providers/aws/services/shield/shield_client.py +++ b/prowler/providers/aws/services/shield/shield_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.shield.shield_service import Shield +from prowler.providers.common.common import get_global_provider -shield_client = 
Shield(current_audit_info) +shield_client = Shield(get_global_provider()) diff --git a/prowler/providers/aws/services/shield/shield_service.py b/prowler/providers/aws/services/shield/shield_service.py index 5b0b2cd184..cb1eb5db71 100644 --- a/prowler/providers/aws/services/shield/shield_service.py +++ b/prowler/providers/aws/services/shield/shield_service.py @@ -6,9 +6,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################### Shield class Shield(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info, global_service=True) + super().__init__(__class__.__name__, provider, global_service=True) self.protections = {} self.enabled = False self.enabled = self.__get_subscription_state__() diff --git a/prowler/providers/aws/services/sns/sns_client.py b/prowler/providers/aws/services/sns/sns_client.py index 1568449589..7b56c4e7fb 100644 --- a/prowler/providers/aws/services/sns/sns_client.py +++ b/prowler/providers/aws/services/sns/sns_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.sns.sns_service import SNS +from prowler.providers.common.common import get_global_provider -sns_client = SNS(current_audit_info) +sns_client = SNS(get_global_provider()) diff --git a/prowler/providers/aws/services/sns/sns_service.py b/prowler/providers/aws/services/sns/sns_service.py index 5bf180978a..4a28c4bd8f 100644 --- a/prowler/providers/aws/services/sns/sns_service.py +++ b/prowler/providers/aws/services/sns/sns_service.py @@ -10,9 +10,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ SNS class SNS(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.topics 
= [] self.__threading_call__(self.__list_topics__) self.__get_topic_attributes__(self.regional_clients) diff --git a/prowler/providers/aws/services/sqs/sqs_client.py b/prowler/providers/aws/services/sqs/sqs_client.py index ffbc3977f5..b10754b2e0 100644 --- a/prowler/providers/aws/services/sqs/sqs_client.py +++ b/prowler/providers/aws/services/sqs/sqs_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.sqs.sqs_service import SQS +from prowler.providers.common.common import get_global_provider -sqs_client = SQS(current_audit_info) +sqs_client = SQS(get_global_provider()) diff --git a/prowler/providers/aws/services/sqs/sqs_service.py b/prowler/providers/aws/services/sqs/sqs_service.py index bdc3e6a92d..4dd0b91eea 100644 --- a/prowler/providers/aws/services/sqs/sqs_service.py +++ b/prowler/providers/aws/services/sqs/sqs_service.py @@ -11,9 +11,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ SQS class SQS(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.queues = [] self.__threading_call__(self.__list_queues__) self.__get_queue_attributes__() diff --git a/prowler/providers/aws/services/ssm/ssm_client.py b/prowler/providers/aws/services/ssm/ssm_client.py index cce00a8cab..acf89d5c29 100644 --- a/prowler/providers/aws/services/ssm/ssm_client.py +++ b/prowler/providers/aws/services/ssm/ssm_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.ssm.ssm_service import SSM +from prowler.providers.common.common import get_global_provider -ssm_client = SSM(current_audit_info) +ssm_client = SSM(get_global_provider()) diff --git a/prowler/providers/aws/services/ssm/ssm_service.py 
b/prowler/providers/aws/services/ssm/ssm_service.py index 8aaea837f5..7c50c1ab89 100644 --- a/prowler/providers/aws/services/ssm/ssm_service.py +++ b/prowler/providers/aws/services/ssm/ssm_service.py @@ -12,9 +12,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## SSM class SSM(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.documents = {} self.compliance_resources = {} self.managed_instances = {} diff --git a/prowler/providers/aws/services/ssmincidents/ssmincidents_client.py b/prowler/providers/aws/services/ssmincidents/ssmincidents_client.py index d71616fde4..efaab4e39b 100644 --- a/prowler/providers/aws/services/ssmincidents/ssmincidents_client.py +++ b/prowler/providers/aws/services/ssmincidents/ssmincidents_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.ssmincidents.ssmincidents_service import ( SSMIncidents, ) +from prowler.providers.common.common import get_global_provider -ssmincidents_client = SSMIncidents(current_audit_info) +ssmincidents_client = SSMIncidents(get_global_provider()) diff --git a/prowler/providers/aws/services/ssmincidents/ssmincidents_service.py b/prowler/providers/aws/services/ssmincidents/ssmincidents_service.py index 1be0c5463b..da3ad41785 100644 --- a/prowler/providers/aws/services/ssmincidents/ssmincidents_service.py +++ b/prowler/providers/aws/services/ssmincidents/ssmincidents_service.py @@ -14,9 +14,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## SSMIncidents class SSMIncidents(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__("ssm-incidents", audit_info) + super().__init__("ssm-incidents", provider) self.replication_set = 
[] self.__list_replication_sets__() self.__get_replication_set__() diff --git a/prowler/providers/aws/services/trustedadvisor/trustedadvisor_client.py b/prowler/providers/aws/services/trustedadvisor/trustedadvisor_client.py index b037a84100..ff36e179ae 100644 --- a/prowler/providers/aws/services/trustedadvisor/trustedadvisor_client.py +++ b/prowler/providers/aws/services/trustedadvisor/trustedadvisor_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.trustedadvisor.trustedadvisor_service import ( TrustedAdvisor, ) +from prowler.providers.common.common import get_global_provider -trustedadvisor_client = TrustedAdvisor(current_audit_info) +trustedadvisor_client = TrustedAdvisor(get_global_provider()) diff --git a/prowler/providers/aws/services/trustedadvisor/trustedadvisor_service.py b/prowler/providers/aws/services/trustedadvisor/trustedadvisor_service.py index c5fc591dee..a09acc3f0a 100644 --- a/prowler/providers/aws/services/trustedadvisor/trustedadvisor_service.py +++ b/prowler/providers/aws/services/trustedadvisor/trustedadvisor_service.py @@ -9,20 +9,20 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ TrustedAdvisor class TrustedAdvisor(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__("support", audit_info) + super().__init__("support", provider) self.checks = [] self.premium_support = PremiumSupport(enabled=False) # Support API is not available in China Partition # But only in us-east-1 or us-gov-west-1 https://docs.aws.amazon.com/general/latest/gr/awssupport.html - if audit_info.audited_partition != "aws-cn": - if audit_info.audited_partition == "aws": + if provider.audited_partition != "aws-cn": + if provider.audited_partition == "aws": support_region = "us-east-1" else: support_region = "us-gov-west-1" - self.client = 
audit_info.audit_session.client( + self.client = provider.audit_session.client( self.service, region_name=support_region ) self.client.region = support_region diff --git a/prowler/providers/aws/services/vpc/vpc_client.py b/prowler/providers/aws/services/vpc/vpc_client.py index 22d241c4e4..506bb5c16b 100644 --- a/prowler/providers/aws/services/vpc/vpc_client.py +++ b/prowler/providers/aws/services/vpc/vpc_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.vpc.vpc_service import VPC +from prowler.providers.common.common import get_global_provider -vpc_client = VPC(current_audit_info) +vpc_client = VPC(get_global_provider()) diff --git a/prowler/providers/aws/services/vpc/vpc_flow_logs_enabled/vpc_flow_logs_enabled.py b/prowler/providers/aws/services/vpc/vpc_flow_logs_enabled/vpc_flow_logs_enabled.py index 9821d7f145..2177d11300 100644 --- a/prowler/providers/aws/services/vpc/vpc_flow_logs_enabled/vpc_flow_logs_enabled.py +++ b/prowler/providers/aws/services/vpc/vpc_flow_logs_enabled/vpc_flow_logs_enabled.py @@ -6,7 +6,7 @@ class vpc_flow_logs_enabled(Check): def execute(self): findings = [] for vpc in vpc_client.vpcs.values(): - if not vpc_client.audit_info.ignore_unused_services or vpc.in_use: + if not vpc_client.provider.ignore_unused_services or vpc.in_use: report = Check_Report_AWS(self.metadata()) report.region = vpc.region report.resource_tags = vpc.tags diff --git a/prowler/providers/aws/services/vpc/vpc_service.py b/prowler/providers/aws/services/vpc/vpc_service.py index 7affa7ee4b..54bae2695d 100644 --- a/prowler/providers/aws/services/vpc/vpc_service.py +++ b/prowler/providers/aws/services/vpc/vpc_service.py @@ -11,9 +11,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################## VPC class VPC(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__("ec2", audit_info) + 
super().__init__("ec2", provider) self.vpcs = {} self.vpc_peering_connections = [] self.vpc_endpoints = [] diff --git a/prowler/providers/aws/services/waf/waf_client.py b/prowler/providers/aws/services/waf/waf_client.py index 7e30b16239..ae39d10857 100644 --- a/prowler/providers/aws/services/waf/waf_client.py +++ b/prowler/providers/aws/services/waf/waf_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.waf.waf_service import WAF +from prowler.providers.common.common import get_global_provider -waf_client = WAF(current_audit_info) +waf_client = WAF(get_global_provider()) diff --git a/prowler/providers/aws/services/waf/waf_service.py b/prowler/providers/aws/services/waf/waf_service.py index be68083310..1d34a48969 100644 --- a/prowler/providers/aws/services/waf/waf_service.py +++ b/prowler/providers/aws/services/waf/waf_service.py @@ -7,9 +7,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################### WAF class WAF(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__("waf-regional", audit_info) + super().__init__("waf-regional", provider) self.web_acls = [] self.__threading_call__(self.__list_web_acls__) self.__threading_call__(self.__list_resources_for_web_acl__) diff --git a/prowler/providers/aws/services/wafv2/wafv2_client.py b/prowler/providers/aws/services/wafv2/wafv2_client.py index 76832a0efe..7996becb55 100644 --- a/prowler/providers/aws/services/wafv2/wafv2_client.py +++ b/prowler/providers/aws/services/wafv2/wafv2_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.wafv2.wafv2_service import WAFv2 +from prowler.providers.common.common import get_global_provider -wafv2_client = WAFv2(current_audit_info) +wafv2_client = WAFv2(get_global_provider()) diff --git 
a/prowler/providers/aws/services/wafv2/wafv2_service.py b/prowler/providers/aws/services/wafv2/wafv2_service.py index 7ab640f815..df8ac63f8d 100644 --- a/prowler/providers/aws/services/wafv2/wafv2_service.py +++ b/prowler/providers/aws/services/wafv2/wafv2_service.py @@ -7,9 +7,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################### WAFv2 class WAFv2(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.web_acls = [] self.__threading_call__(self.__list_web_acls__) self.__threading_call__(self.__list_resources_for_web_acl__) diff --git a/prowler/providers/aws/services/wellarchitected/wellarchitected_client.py b/prowler/providers/aws/services/wellarchitected/wellarchitected_client.py index 0967f6720d..1ace96e587 100644 --- a/prowler/providers/aws/services/wellarchitected/wellarchitected_client.py +++ b/prowler/providers/aws/services/wellarchitected/wellarchitected_client.py @@ -1,6 +1,6 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.wellarchitected.wellarchitected_service import ( WellArchitected, ) +from prowler.providers.common.common import get_global_provider -wellarchitected_client = WellArchitected(current_audit_info) +wellarchitected_client = WellArchitected(get_global_provider()) diff --git a/prowler/providers/aws/services/wellarchitected/wellarchitected_service.py b/prowler/providers/aws/services/wellarchitected/wellarchitected_service.py index 1bf8f50ba8..c4ad76950a 100644 --- a/prowler/providers/aws/services/wellarchitected/wellarchitected_service.py +++ b/prowler/providers/aws/services/wellarchitected/wellarchitected_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ WellArchitected class WellArchitected(AWSService): - def 
__init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.workloads = [] self.__threading_call__(self.__list_workloads__) self.__list_tags_for_resource__() diff --git a/prowler/providers/aws/services/workspaces/workspaces_client.py b/prowler/providers/aws/services/workspaces/workspaces_client.py index 9fea90781b..0a8a281293 100644 --- a/prowler/providers/aws/services/workspaces/workspaces_client.py +++ b/prowler/providers/aws/services/workspaces/workspaces_client.py @@ -1,4 +1,4 @@ -from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info from prowler.providers.aws.services.workspaces.workspaces_service import WorkSpaces +from prowler.providers.common.common import get_global_provider -workspaces_client = WorkSpaces(current_audit_info) +workspaces_client = WorkSpaces(get_global_provider()) diff --git a/prowler/providers/aws/services/workspaces/workspaces_service.py b/prowler/providers/aws/services/workspaces/workspaces_service.py index 7637cc5101..9d76ce8a5a 100644 --- a/prowler/providers/aws/services/workspaces/workspaces_service.py +++ b/prowler/providers/aws/services/workspaces/workspaces_service.py @@ -9,9 +9,9 @@ from prowler.providers.aws.lib.service.service import AWSService ################################ WorkSpaces class WorkSpaces(AWSService): - def __init__(self, audit_info): + def __init__(self, provider): # Call AWSService's __init__ - super().__init__(__class__.__name__, audit_info) + super().__init__(__class__.__name__, provider) self.workspaces = [] self.__threading_call__(self.__describe_workspaces__) self.__describe_tags__() diff --git a/prowler/providers/azure/azure_provider_new.py b/prowler/providers/azure/azure_provider_new.py index 0b45fe7567..d52e24f749 100644 --- a/prowler/providers/azure/azure_provider_new.py +++ b/prowler/providers/azure/azure_provider_new.py @@ -10,7 +10,7 @@ from 
pydantic import BaseModel from prowler.lib.logger import logger from prowler.providers.azure.lib.regions.regions import get_regions_config -from prowler.providers.common.provider import CloudProvider +from prowler.providers.common.provider import Provider class AzureIdentityInfo(BaseModel): @@ -28,7 +28,7 @@ class AzureRegionConfig(BaseModel): credential_scopes: list = [] -class AzureProvider(CloudProvider): +class AzureProvider(Provider): session: DefaultAzureCredential identity: AzureIdentityInfo audit_resources: Optional[Any] @@ -37,7 +37,7 @@ class AzureProvider(CloudProvider): region_config: AzureRegionConfig def __init__(self, arguments): - logger.info("Setting Azure session ...") + logger.info("Setting Azure provider ...") subscription_ids = arguments.subscription_ids logger.info("Checking if any credentials mode is set ...") diff --git a/prowler/providers/azure/services/defender/defender_client.py b/prowler/providers/azure/services/defender/defender_client.py index 9135315775..0552505183 100644 --- a/prowler/providers/azure/services/defender/defender_client.py +++ b/prowler/providers/azure/services/defender/defender_client.py @@ -1,4 +1,4 @@ from prowler.providers.azure.services.defender.defender_service import Defender -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider -defender_client = Defender(global_provider) +defender_client = Defender(get_global_provider()) diff --git a/prowler/providers/azure/services/defender/defender_service.py b/prowler/providers/azure/services/defender/defender_service.py index d234c8c06a..0609ec599d 100644 --- a/prowler/providers/azure/services/defender/defender_service.py +++ b/prowler/providers/azure/services/defender/defender_service.py @@ -9,8 +9,8 @@ from prowler.providers.azure.lib.service.service import AzureService ########################## Defender class Defender(AzureService): - def __init__(self, audit_info): - 
super().__init__(SecurityCenter, audit_info) + def __init__(self, provider): + super().__init__(SecurityCenter, provider) self.pricings = self.__get_pricings__() diff --git a/prowler/providers/azure/services/iam/iam_client.py b/prowler/providers/azure/services/iam/iam_client.py index 1ef0fd36b7..778ffb5cb9 100644 --- a/prowler/providers/azure/services/iam/iam_client.py +++ b/prowler/providers/azure/services/iam/iam_client.py @@ -1,4 +1,4 @@ from prowler.providers.azure.services.iam.iam_service import IAM -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider -iam_client = IAM(global_provider) +iam_client = IAM(get_global_provider()) diff --git a/prowler/providers/azure/services/iam/iam_service.py b/prowler/providers/azure/services/iam/iam_service.py index b282222135..abae5e9970 100644 --- a/prowler/providers/azure/services/iam/iam_service.py +++ b/prowler/providers/azure/services/iam/iam_service.py @@ -9,8 +9,8 @@ from prowler.providers.azure.lib.service.service import AzureService ########################## IAM class IAM(AzureService): - def __init__(self, audit_info): - super().__init__(AuthorizationManagementClient, audit_info) + def __init__(self, provider): + super().__init__(AuthorizationManagementClient, provider) self.roles = self.__get_roles__() def __get_roles__(self): diff --git a/prowler/providers/azure/services/sqlserver/sqlserver_client.py b/prowler/providers/azure/services/sqlserver/sqlserver_client.py index 9132cf97ab..c3c99e8172 100644 --- a/prowler/providers/azure/services/sqlserver/sqlserver_client.py +++ b/prowler/providers/azure/services/sqlserver/sqlserver_client.py @@ -1,4 +1,4 @@ from prowler.providers.azure.services.sqlserver.sqlserver_service import SQLServer -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider -sqlserver_client = SQLServer(global_provider) +sqlserver_client = 
SQLServer(get_global_provider()) diff --git a/prowler/providers/azure/services/sqlserver/sqlserver_service.py b/prowler/providers/azure/services/sqlserver/sqlserver_service.py index 165593cbef..99ef440462 100644 --- a/prowler/providers/azure/services/sqlserver/sqlserver_service.py +++ b/prowler/providers/azure/services/sqlserver/sqlserver_service.py @@ -13,8 +13,8 @@ from prowler.providers.azure.lib.service.service import AzureService ########################## SQLServer class SQLServer(AzureService): - def __init__(self, audit_info): - super().__init__(SqlManagementClient, audit_info) + def __init__(self, provider): + super().__init__(SqlManagementClient, provider) self.sql_servers = self.__get_sql_servers__() def __get_sql_servers__(self): diff --git a/prowler/providers/azure/services/storage/storage_client.py b/prowler/providers/azure/services/storage/storage_client.py index 58651e2cb3..36a48f98ac 100644 --- a/prowler/providers/azure/services/storage/storage_client.py +++ b/prowler/providers/azure/services/storage/storage_client.py @@ -1,4 +1,4 @@ from prowler.providers.azure.services.storage.storage_service import Storage -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider -storage_client = Storage(global_provider) +storage_client = Storage(get_global_provider()) diff --git a/prowler/providers/azure/services/storage/storage_service.py b/prowler/providers/azure/services/storage/storage_service.py index b74d988de8..ffaf857748 100644 --- a/prowler/providers/azure/services/storage/storage_service.py +++ b/prowler/providers/azure/services/storage/storage_service.py @@ -9,8 +9,8 @@ from prowler.providers.azure.lib.service.service import AzureService ########################## Storage class Storage(AzureService): - def __init__(self, audit_info): - super().__init__(StorageManagementClient, audit_info) + def __init__(self, provider): + super().__init__(StorageManagementClient, provider) 
self.storage_accounts = self.__get_storage_accounts__() def __get_storage_accounts__(self): diff --git a/prowler/providers/common/audit_info.py b/prowler/providers/common/audit_info.py index 5ba28ab6af..52c31f97c9 100644 --- a/prowler/providers/common/audit_info.py +++ b/prowler/providers/common/audit_info.py @@ -13,11 +13,8 @@ from prowler.providers.aws.aws_provider import ( ) from prowler.providers.aws.lib.arn.arn import parse_iam_credentials_arn from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info -from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info, AWS_Credentials -from prowler.providers.aws.lib.credentials.credentials import ( - print_aws_credentials, - validate_aws_credentials, -) +from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info, AWSCredentials +from prowler.providers.aws.lib.credentials.credentials import validate_AWSCredentials from prowler.providers.aws.lib.organizations.organizations import ( get_organizations_metadata, ) @@ -146,7 +143,7 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE current_audit_info.original_session = aws_provider.aws_session logger.info("Validating credentials ...") # Verificate if we have valid credentials - caller_identity = validate_aws_credentials( + caller_identity = validate_AWSCredentials( current_audit_info.original_session, input_regions, sts_endpoint_region ) @@ -194,7 +191,7 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE ) logger.info("Role assumed") # Set the info needed to create a session with an assumed role - current_audit_info.credentials = AWS_Credentials( + current_audit_info.credentials = AWSCredentials( aws_access_key_id=assumed_role_response["Credentials"][ "AccessKeyId" ], @@ -261,9 +258,6 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE else: current_audit_info.profile_region = "us-east-1" - if not arguments.get("only_logs"): - 
print_aws_credentials(current_audit_info) - # Parse Scan Tags if arguments.get("resource_tags"): input_resource_tags = arguments.get("resource_tags") @@ -341,11 +335,6 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE credential_scopes=region_config["credential_scopes"], ) - # TODO - remove it - # this logic is being processed in general provider - # if not arguments.get("only_logs"): - # self.print_azure_credentials(azure_audit_info) - return azure_audit_info def set_gcp_audit_info(self, arguments) -> GCP_Audit_Info: @@ -369,11 +358,6 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE gcp_audit_info.project_ids, ) = gcp_provider.get_credentials() - # TODO - remove it - # this logic is being processed in general provider - # if not arguments.get("only_logs"): - # self.print_gcp_credentials(gcp_audit_info) - return gcp_audit_info def set_kubernetes_audit_info(self, arguments) -> Kubernetes_Audit_Info: diff --git a/prowler/providers/common/common.py b/prowler/providers/common/common.py index d553de05d0..6abcee09c8 100644 --- a/prowler/providers/common/common.py +++ b/prowler/providers/common/common.py @@ -3,10 +3,13 @@ from typing import Any providers_prowler_lib_path = "prowler.providers" -# SHARED PROVIDER OBJECT ACROSS ALL PROWLER CODE global_provider = None +def get_global_provider(): + return global_provider + + def set_provider(provider, arguments) -> Any: provider_class_name = f"{provider.capitalize()}Provider" import_module_path = f"prowler.providers.{provider}.azure_provider_testing" diff --git a/prowler/providers/common/provider.py b/prowler/providers/common/provider.py index fd6eec8c4f..80924db7b9 100644 --- a/prowler/providers/common/provider.py +++ b/prowler/providers/common/provider.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod -class CloudProvider(ABC): +class Provider(ABC): @abstractmethod def setup_session(self): pass diff --git a/prowler/providers/gcp/gcp_provider_new.py 
b/prowler/providers/gcp/gcp_provider_new.py index 9cbd38ae25..d1031b5c13 100644 --- a/prowler/providers/gcp/gcp_provider_new.py +++ b/prowler/providers/gcp/gcp_provider_new.py @@ -8,10 +8,10 @@ from google.oauth2.credentials import Credentials from googleapiclient import discovery from prowler.lib.logger import logger -from prowler.providers.common.provider import CloudProvider +from prowler.providers.common.provider import Provider -class GcpProvider(CloudProvider): +class GcpProvider(Provider): session: Credentials default_project_id: str project_ids: list diff --git a/prowler/providers/gcp/services/apikeys/apikeys_client.py b/prowler/providers/gcp/services/apikeys/apikeys_client.py index 92de567c3a..1ff3ace813 100644 --- a/prowler/providers/gcp/services/apikeys/apikeys_client.py +++ b/prowler/providers/gcp/services/apikeys/apikeys_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.apikeys.apikeys_service import APIKeys -apikeys_client = APIKeys(global_provider) +apikeys_client = APIKeys(get_global_provider()) diff --git a/prowler/providers/gcp/services/apikeys/apikeys_service.py b/prowler/providers/gcp/services/apikeys/apikeys_service.py index 5f70a1157d..e152d337cd 100644 --- a/prowler/providers/gcp/services/apikeys/apikeys_service.py +++ b/prowler/providers/gcp/services/apikeys/apikeys_service.py @@ -6,8 +6,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## API Keys class APIKeys(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info, api_version="v2") + def __init__(self, provider): + super().__init__(__class__.__name__, provider, api_version="v2") self.keys = [] self.__get_keys__() diff --git a/prowler/providers/gcp/services/bigquery/bigquery_client.py b/prowler/providers/gcp/services/bigquery/bigquery_client.py index ea788aba97..24dd292502 
100644 --- a/prowler/providers/gcp/services/bigquery/bigquery_client.py +++ b/prowler/providers/gcp/services/bigquery/bigquery_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.bigquery.bigquery_service import BigQuery -bigquery_client = BigQuery(global_provider) +bigquery_client = BigQuery(get_global_provider()) diff --git a/prowler/providers/gcp/services/bigquery/bigquery_service.py b/prowler/providers/gcp/services/bigquery/bigquery_service.py index b0cb35dbba..ee7bbba625 100644 --- a/prowler/providers/gcp/services/bigquery/bigquery_service.py +++ b/prowler/providers/gcp/services/bigquery/bigquery_service.py @@ -6,8 +6,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## BigQuery class BigQuery(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info, api_version="v2") + def __init__(self, provider): + super().__init__(__class__.__name__, provider, api_version="v2") self.datasets = [] self.tables = [] diff --git a/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_client.py b/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_client.py index a9aa26b509..b5d43a960d 100644 --- a/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_client.py +++ b/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_client.py @@ -1,6 +1,6 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.cloudresourcemanager.cloudresourcemanager_service import ( CloudResourceManager, ) -cloudresourcemanager_client = CloudResourceManager(global_provider) +cloudresourcemanager_client = CloudResourceManager(get_global_provider()) diff --git 
a/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_service.py b/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_service.py index 55b6cf1f1e..690cbe404e 100644 --- a/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_service.py +++ b/prowler/providers/gcp/services/cloudresourcemanager/cloudresourcemanager_service.py @@ -6,8 +6,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## CloudResourceManager class CloudResourceManager(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info) + def __init__(self, provider): + super().__init__(__class__.__name__, provider) self.bindings = [] self.projects = [] diff --git a/prowler/providers/gcp/services/cloudsql/cloudsql_client.py b/prowler/providers/gcp/services/cloudsql/cloudsql_client.py index 96169972f7..3411439611 100644 --- a/prowler/providers/gcp/services/cloudsql/cloudsql_client.py +++ b/prowler/providers/gcp/services/cloudsql/cloudsql_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.cloudsql.cloudsql_service import CloudSQL -cloudsql_client = CloudSQL(global_provider) +cloudsql_client = CloudSQL(get_global_provider()) diff --git a/prowler/providers/gcp/services/cloudsql/cloudsql_service.py b/prowler/providers/gcp/services/cloudsql/cloudsql_service.py index d7528d1ec7..7b66cf4da1 100644 --- a/prowler/providers/gcp/services/cloudsql/cloudsql_service.py +++ b/prowler/providers/gcp/services/cloudsql/cloudsql_service.py @@ -6,8 +6,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## CloudSQL class CloudSQL(GCPService): - def __init__(self, audit_info): - super().__init__("sqladmin", audit_info) + def __init__(self, provider): + super().__init__("sqladmin", provider) self.instances = [] 
self.__get_instances__() diff --git a/prowler/providers/gcp/services/cloudstorage/cloudstorage_client.py b/prowler/providers/gcp/services/cloudstorage/cloudstorage_client.py index 7cfa837dc9..8321d704ef 100644 --- a/prowler/providers/gcp/services/cloudstorage/cloudstorage_client.py +++ b/prowler/providers/gcp/services/cloudstorage/cloudstorage_client.py @@ -1,6 +1,6 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.cloudstorage.cloudstorage_service import ( CloudStorage, ) -cloudstorage_client = CloudStorage(global_provider) +cloudstorage_client = CloudStorage(get_global_provider()) diff --git a/prowler/providers/gcp/services/cloudstorage/cloudstorage_service.py b/prowler/providers/gcp/services/cloudstorage/cloudstorage_service.py index 6775633e55..9e77541bf3 100644 --- a/prowler/providers/gcp/services/cloudstorage/cloudstorage_service.py +++ b/prowler/providers/gcp/services/cloudstorage/cloudstorage_service.py @@ -8,8 +8,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## CloudStorage class CloudStorage(GCPService): - def __init__(self, audit_info): - super().__init__("storage", audit_info) + def __init__(self, provider): + super().__init__("storage", provider) self.buckets = [] self.__get_buckets__() diff --git a/prowler/providers/gcp/services/compute/compute_client.py b/prowler/providers/gcp/services/compute/compute_client.py index 3fc985d1de..7b15c08219 100644 --- a/prowler/providers/gcp/services/compute/compute_client.py +++ b/prowler/providers/gcp/services/compute/compute_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.compute.compute_service import Compute -compute_client = Compute(global_provider) +compute_client = Compute(get_global_provider()) diff --git 
a/prowler/providers/gcp/services/compute/compute_service.py b/prowler/providers/gcp/services/compute/compute_service.py index 88e927c4c0..eeb3a0945b 100644 --- a/prowler/providers/gcp/services/compute/compute_service.py +++ b/prowler/providers/gcp/services/compute/compute_service.py @@ -6,8 +6,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## Compute class Compute(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info) + def __init__(self, provider): + super().__init__(__class__.__name__, provider) self.regions = set() self.zones = set() self.instances = [] diff --git a/prowler/providers/gcp/services/dataproc/dataproc_client.py b/prowler/providers/gcp/services/dataproc/dataproc_client.py index 848314f533..dbc9d089f0 100644 --- a/prowler/providers/gcp/services/dataproc/dataproc_client.py +++ b/prowler/providers/gcp/services/dataproc/dataproc_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.dataproc.dataproc_service import Dataproc -dataproc_client = Dataproc(global_provider) +dataproc_client = Dataproc(get_global_provider()) diff --git a/prowler/providers/gcp/services/dataproc/dataproc_service.py b/prowler/providers/gcp/services/dataproc/dataproc_service.py index 507f870e94..5eff989a39 100644 --- a/prowler/providers/gcp/services/dataproc/dataproc_service.py +++ b/prowler/providers/gcp/services/dataproc/dataproc_service.py @@ -7,8 +7,8 @@ from prowler.providers.gcp.services.compute.compute_client import compute_client ################## Dataproc class Dataproc(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info) + def __init__(self, provider): + super().__init__(__class__.__name__, provider) self.regions = compute_client.regions self.clusters = [] self.__threading_call__(self.__get_clusters__, self.regions) 
diff --git a/prowler/providers/gcp/services/dns/dns_client.py b/prowler/providers/gcp/services/dns/dns_client.py index 6d290ed84c..62fabff427 100644 --- a/prowler/providers/gcp/services/dns/dns_client.py +++ b/prowler/providers/gcp/services/dns/dns_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.dns.dns_service import DNS -dns_client = DNS(global_provider) +dns_client = DNS(get_global_provider()) diff --git a/prowler/providers/gcp/services/dns/dns_service.py b/prowler/providers/gcp/services/dns/dns_service.py index 7c58f95532..7082220a37 100644 --- a/prowler/providers/gcp/services/dns/dns_service.py +++ b/prowler/providers/gcp/services/dns/dns_service.py @@ -6,8 +6,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## DNS class DNS(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info) + def __init__(self, provider): + super().__init__(__class__.__name__, provider) self.managed_zones = [] self.__get_managed_zones__() self.policies = [] diff --git a/prowler/providers/gcp/services/iam/accessapproval_client.py b/prowler/providers/gcp/services/iam/accessapproval_client.py index 8e4c0944cf..4eaf884069 100644 --- a/prowler/providers/gcp/services/iam/accessapproval_client.py +++ b/prowler/providers/gcp/services/iam/accessapproval_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.iam.iam_service import AccessApproval -accessapproval_client = AccessApproval(global_provider) +accessapproval_client = AccessApproval(get_global_provider()) diff --git a/prowler/providers/gcp/services/iam/essentialcontacts_client.py b/prowler/providers/gcp/services/iam/essentialcontacts_client.py index 23add3e86c..47cae383c0 100644 --- 
a/prowler/providers/gcp/services/iam/essentialcontacts_client.py +++ b/prowler/providers/gcp/services/iam/essentialcontacts_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.iam.iam_service import EssentialContacts -essentialcontacts_client = EssentialContacts(global_provider) +essentialcontacts_client = EssentialContacts(get_global_provider()) diff --git a/prowler/providers/gcp/services/iam/iam_client.py b/prowler/providers/gcp/services/iam/iam_client.py index fd36d5bade..a9367d70b7 100644 --- a/prowler/providers/gcp/services/iam/iam_client.py +++ b/prowler/providers/gcp/services/iam/iam_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.iam.iam_service import IAM -iam_client = IAM(global_provider) +iam_client = IAM(get_global_provider()) diff --git a/prowler/providers/gcp/services/iam/iam_service.py b/prowler/providers/gcp/services/iam/iam_service.py index d9690ff396..89d170c7a9 100644 --- a/prowler/providers/gcp/services/iam/iam_service.py +++ b/prowler/providers/gcp/services/iam/iam_service.py @@ -11,8 +11,8 @@ from prowler.providers.gcp.services.cloudresourcemanager.cloudresourcemanager_cl ################## IAM class IAM(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info) + def __init__(self, provider): + super().__init__(__class__.__name__, provider) self.service_accounts = [] self.__get_service_accounts__() self.__get_service_accounts_keys__() @@ -103,8 +103,8 @@ class ServiceAccount(BaseModel): ################## AccessApproval class AccessApproval(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info) + def __init__(self, provider): + super().__init__(__class__.__name__, provider) self.settings = 
{} self.__get_settings__() @@ -135,8 +135,8 @@ class Setting(BaseModel): ################## EssentialContacts class EssentialContacts(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info) + def __init__(self, provider): + super().__init__(__class__.__name__, provider) self.organizations = [] self.__get_contacts__() diff --git a/prowler/providers/gcp/services/kms/kms_client.py b/prowler/providers/gcp/services/kms/kms_client.py index 0bc861cbdb..b5eb4573cb 100644 --- a/prowler/providers/gcp/services/kms/kms_client.py +++ b/prowler/providers/gcp/services/kms/kms_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.kms.kms_service import KMS -kms_client = KMS(global_provider) +kms_client = KMS(get_global_provider()) diff --git a/prowler/providers/gcp/services/kms/kms_service.py b/prowler/providers/gcp/services/kms/kms_service.py index f5c30ba3f2..f6f620c1bd 100644 --- a/prowler/providers/gcp/services/kms/kms_service.py +++ b/prowler/providers/gcp/services/kms/kms_service.py @@ -8,8 +8,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## KMS class KMS(GCPService): - def __init__(self, audit_info): - super().__init__("cloudkms", audit_info) + def __init__(self, provider): + super().__init__("cloudkms", provider) self.locations = [] self.key_rings = [] self.crypto_keys = [] diff --git a/prowler/providers/gcp/services/logging/logging_client.py b/prowler/providers/gcp/services/logging/logging_client.py index 83dbdf34aa..3f031b5e97 100644 --- a/prowler/providers/gcp/services/logging/logging_client.py +++ b/prowler/providers/gcp/services/logging/logging_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.logging.logging_service 
import Logging -logging_client = Logging(global_provider) +logging_client = Logging(get_global_provider()) diff --git a/prowler/providers/gcp/services/logging/logging_service.py b/prowler/providers/gcp/services/logging/logging_service.py index 7936291111..bb93c3783a 100644 --- a/prowler/providers/gcp/services/logging/logging_service.py +++ b/prowler/providers/gcp/services/logging/logging_service.py @@ -6,8 +6,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## Logging class Logging(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info, api_version="v2") + def __init__(self, provider): + super().__init__(__class__.__name__, provider, api_version="v2") self.sinks = [] self.metrics = [] self.__get_sinks__() diff --git a/prowler/providers/gcp/services/monitoring/monitoring_client.py b/prowler/providers/gcp/services/monitoring/monitoring_client.py index d30491226a..3c25f9832f 100644 --- a/prowler/providers/gcp/services/monitoring/monitoring_client.py +++ b/prowler/providers/gcp/services/monitoring/monitoring_client.py @@ -1,4 +1,4 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.monitoring.monitoring_service import Monitoring -monitoring_client = Monitoring(global_provider) +monitoring_client = Monitoring(get_global_provider()) diff --git a/prowler/providers/gcp/services/monitoring/monitoring_service.py b/prowler/providers/gcp/services/monitoring/monitoring_service.py index 1ffb6ed7af..b605c90324 100644 --- a/prowler/providers/gcp/services/monitoring/monitoring_service.py +++ b/prowler/providers/gcp/services/monitoring/monitoring_service.py @@ -6,8 +6,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## Monitoring class Monitoring(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info, 
api_version="v3") + def __init__(self, provider): + super().__init__(__class__.__name__, provider, api_version="v3") self.alert_policies = [] self.__get_alert_policies__() diff --git a/prowler/providers/gcp/services/serviceusage/serviceusage_client.py b/prowler/providers/gcp/services/serviceusage/serviceusage_client.py index cf146dd336..36d5a19ccf 100644 --- a/prowler/providers/gcp/services/serviceusage/serviceusage_client.py +++ b/prowler/providers/gcp/services/serviceusage/serviceusage_client.py @@ -1,6 +1,6 @@ -from prowler.providers.common.common import global_provider +from prowler.providers.common.common import get_global_provider from prowler.providers.gcp.services.serviceusage.serviceusage_service import ( ServiceUsage, ) -serviceusage_client = ServiceUsage(global_provider) +serviceusage_client = ServiceUsage(get_global_provider()) diff --git a/prowler/providers/gcp/services/serviceusage/serviceusage_service.py b/prowler/providers/gcp/services/serviceusage/serviceusage_service.py index fb62219c08..d79ee8e0b8 100644 --- a/prowler/providers/gcp/services/serviceusage/serviceusage_service.py +++ b/prowler/providers/gcp/services/serviceusage/serviceusage_service.py @@ -6,8 +6,8 @@ from prowler.providers.gcp.lib.service.service import GCPService ################## ServiceUsage class ServiceUsage(GCPService): - def __init__(self, audit_info): - super().__init__(__class__.__name__, audit_info) + def __init__(self, provider): + super().__init__(__class__.__name__, provider) self.active_services = {} self.__get_active_services__() diff --git a/prowler/providers/kubernetes/kubernetes_provider_new.py b/prowler/providers/kubernetes/kubernetes_provider_new.py index 0ab765ff78..ea5552b24c 100644 --- a/prowler/providers/kubernetes/kubernetes_provider_new.py +++ b/prowler/providers/kubernetes/kubernetes_provider_new.py @@ -6,11 +6,11 @@ from colorama import Fore, Style from kubernetes import client, config from prowler.lib.logger import logger -from 
prowler.providers.common.provider import CloudProvider +from prowler.providers.common.provider import Provider -class KubernetesProvider(CloudProvider): -    # TODO change class name from CloudProvider to Provider +class KubernetesProvider(Provider): +    # NOTE: base class renamed from CloudProvider to Provider api_client: Any context: dict audit_resources: Optional[Any]