Compare commits

...

33 Commits

Author SHA1 Message Date
github-actions
c36bd6d067 chore(release): 3.11.3 2023-11-16 12:54:03 +00:00
Sergio Garcia
3d563356e5 fix(json): check if profile is None (#3043) 2023-11-16 13:52:07 +01:00
Johnny Lu
9205ef30f8 fix(securityhub): findings not being imported or archived in non-aws partitions (#3040)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-11-16 11:27:28 +01:00
Sergio Garcia
19c2dccc6d chore(regions_update): Changes in regions for AWS services. (#3042)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-16 11:09:41 +01:00
Sergio Garcia
8f819048ed chore(release): update Prowler Version to 3.11.2 (#3037)
Co-authored-by: github-actions <noreply@github.com>
2023-11-15 09:07:57 +01:00
Sergio Garcia
3a3bb44f11 fix(GuardDuty): only execute checks if GuardDuty enabled (#3028) 2023-11-14 14:14:05 +01:00
Nacho Rivera
f8e713a544 feat(azure regions): support non default azure region (#3013)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-11-14 13:17:48 +01:00
Pepe Fagoaga
573f1eba56 fix(securityhub): Use enabled_regions instead of audited_regions (#3029) 2023-11-14 12:57:54 +01:00
simone ragonesi
a36be258d8 chore: modify latest version msg (#3036)
Signed-off-by: r3drun3 <simone.ragonesi@sighup.io>
2023-11-14 12:11:55 +01:00
Sergio Garcia
690ec057c3 fix(ec2_securitygroup_not_used): check if security group is associated (#3026) 2023-11-14 12:03:01 +01:00
dependabot[bot]
2681feb1f6 build(deps): bump azure-storage-blob from 12.18.3 to 12.19.0 (#3034)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 11:47:42 +01:00
Sergio Garcia
e662adb8c5 chore(regions_update): Changes in regions for AWS services. (#3035)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-14 11:47:24 +01:00
Sergio Garcia
c94bd96c93 chore(args): make compatible severity and services arguments (#3024) 2023-11-14 11:26:53 +01:00
dependabot[bot]
6d85433194 build(deps): bump alive-progress from 3.1.4 to 3.1.5 (#3033)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 09:41:32 +01:00
dependabot[bot]
7a6092a779 build(deps): bump google-api-python-client from 2.106.0 to 2.107.0 (#3032)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 09:16:00 +01:00
dependabot[bot]
4c84529aed build(deps-dev): bump pytest-xdist from 3.3.1 to 3.4.0 (#3031)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 08:48:02 +01:00
Sergio Garcia
512d3e018f chore(accessanalyzer): include service in allowlist_non_default_regions (#3025) 2023-11-14 08:00:17 +01:00
dependabot[bot]
c6aff985c9 build(deps-dev): bump moto from 4.2.7 to 4.2.8 (#3030)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 07:54:34 +01:00
Sergio Garcia
7fadf31a2b chore(release): update Prowler Version to 3.11.1 (#3021)
Co-authored-by: github-actions <noreply@github.com>
2023-11-10 12:53:07 +01:00
Sergio Garcia
e7d098ed1e chore(regions_update): Changes in regions for AWS services. (#3020)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-10 11:34:44 +01:00
Sergio Garcia
21fba27355 fix(iam): do not list tags for inline policies (#3014) 2023-11-10 09:51:19 +01:00
John Mastron
74e37307f7 fix(SQS): fix invalid SQS ARNs (#3016)
Co-authored-by: John Mastron <jmastron@jpl.nasa.gov>
2023-11-10 09:33:18 +01:00
Sergio Garcia
d9d7c009a5 fix(rds): check if engines exist in region (#3012) 2023-11-10 09:20:36 +01:00
Pepe Fagoaga
2220cf9733 refactor(allowlist): Simplify and handle corner cases (#3019) 2023-11-10 09:11:52 +01:00
Pepe Fagoaga
3325b72b86 fix(iam-sqs): Handle exceptions for non-existent resources (#3010) 2023-11-08 14:06:45 +01:00
Sergio Garcia
9182d56246 chore(regions_update): Changes in regions for AWS services. (#3011)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-08 10:42:23 +01:00
Nacho Rivera
299ece19a8 fix(clean local output dirs): clean dirs when output to s3 (#2997) 2023-11-08 10:05:24 +01:00
Sergio Garcia
0a0732d7c0 docs(gcp): update GCP permissions (#3008) 2023-11-07 14:06:22 +01:00
Sergio Garcia
28011d97a9 chore(regions_update): Changes in regions for AWS services. (#3007)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-07 11:04:45 +01:00
Sergio Garcia
e71b0d1b6a chore(regions_update): Changes in regions for AWS services. (#3001)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-07 11:04:36 +01:00
John Mastron
ec01b62a82 fix(aws): check all conditions in IAM policy parser (#3006)
Co-authored-by: John Mastron <jmastron@jpl.nasa.gov>
Co-authored-by: Sergio Garcia <sergargar1@gmail.com>
2023-11-07 10:40:34 +01:00
dependabot[bot]
12b45c6896 build(deps): bump google-api-python-client from 2.105.0 to 2.106.0 (#3005)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-07 09:45:51 +01:00
dependabot[bot]
51c60dd4ee build(deps): bump mkdocs-material from 9.4.7 to 9.4.8 (#3004)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-07 09:02:02 +01:00
53 changed files with 1589 additions and 480 deletions

View File

@@ -178,11 +178,7 @@ Prowler will follow the same credentials search as [Google authentication librar
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated with a user or service account that has the proper permissions to run all checks. To ensure this, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer
Those credentials must be associated with a user or service account that has the proper permissions to run all checks. To ensure this, add the `Viewer` role to the member associated with the credentials.
> By default, `prowler` scans all accessible GCP projects; use the `--project-ids` flag to specify the projects to be scanned.
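For example, to restrict the scan to two projects (the project IDs below are placeholders):

```console
prowler gcp --project-ids project-id-1 project-id-2
```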

View File

@@ -97,10 +97,6 @@ Prowler will follow the same credentials search as [Google authentication librar
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated with a user or service account that has the proper permissions to run all checks. To ensure this, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer
Those credentials must be associated with a user or service account that has the proper permissions to run all checks. To ensure this, add the `Viewer` role to the member associated with the credentials.
> By default, `prowler` scans all accessible GCP projects; use the `--project-ids` flag to specify the projects to be scanned.

View File

@@ -0,0 +1,16 @@
# Use non default Azure regions
Microsoft provides separate clouds for compliance with regional laws, which are available for your use.
By default, Prowler uses the `AzureCloud` cloud, which is the commercial one (you can list all available clouds with `az cloud list --output table`).
At the time of writing, the available Azure clouds are the following:
- AzureCloud
- AzureChinaCloud
- AzureUSGovernment
- AzureGermanCloud
If you want to change the default, include the `--azure-region` flag, e.g.:
```console
prowler azure --az-cli-auth --azure-region AzureChinaCloud
```

View File

@@ -22,8 +22,4 @@ Prowler will follow the same credentials search as [Google authentication librar
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated with a user or service account that has the proper permissions to run all checks. To ensure this, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer
Those credentials must be associated with a user or service account that has the proper permissions to run all checks. To ensure this, add the `Viewer` role to the member associated with the credentials.

View File

@@ -56,6 +56,7 @@ nav:
- Boto3 Configuration: tutorials/aws/boto3-configuration.md
- Azure:
- Authentication: tutorials/azure/authentication.md
- Non default clouds: tutorials/azure/use-non-default-cloud.md
- Subscriptions: tutorials/azure/subscriptions.md
- Google Cloud:
- Authentication: tutorials/gcp/authentication.md

poetry.lock generated
View File

@@ -11,15 +11,32 @@ files = [
{file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"},
]
[[package]]
name = "adal"
version = "1.2.7"
description = "Note: This library is already replaced by MSAL Python, available here: https://pypi.org/project/msal/ .ADAL Python remains available here as a legacy. The ADAL for Python library makes it easy for python application to authenticate to Azure Active Directory (AAD) in order to access AAD protected web resources."
optional = false
python-versions = "*"
files = [
{file = "adal-1.2.7-py2.py3-none-any.whl", hash = "sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d"},
{file = "adal-1.2.7.tar.gz", hash = "sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1"},
]
[package.dependencies]
cryptography = ">=1.1.0"
PyJWT = ">=1.0.0,<3"
python-dateutil = ">=2.1.0,<3"
requests = ">=2.0.0,<3"
[[package]]
name = "alive-progress"
version = "3.1.4"
version = "3.1.5"
description = "A new kind of Progress Bar, with real-time throughput, ETA, and very cool animations!"
optional = false
python-versions = ">=3.7, <4"
files = [
{file = "alive-progress-3.1.4.tar.gz", hash = "sha256:74a95d8d0d42bc99d3a3725dbd06ebb852245f1b64e301a7c375b92b22663f7b"},
{file = "alive_progress-3.1.4-py3-none-any.whl", hash = "sha256:c80ad87ce9c1054b01135a87fae69ecebbfc2107497ae87cbe6aec7e534903db"},
{file = "alive-progress-3.1.5.tar.gz", hash = "sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98"},
{file = "alive_progress-3.1.5-py3-none-any.whl", hash = "sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5"},
]
[package.dependencies]
@@ -212,13 +229,13 @@ msrest = ">=0.7.1"
[[package]]
name = "azure-storage-blob"
version = "12.18.3"
version = "12.19.0"
description = "Microsoft Azure Blob Storage Client Library for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "azure-storage-blob-12.18.3.tar.gz", hash = "sha256:d8ced0deee3367fa3d4f3d1a03cd9edadf4440c0a371f503d623fa6c807554ee"},
{file = "azure_storage_blob-12.18.3-py3-none-any.whl", hash = "sha256:c278dde2ac41857a68d615c9f2b36d894ba877a7e84d62795603c7e79d0bb5e9"},
{file = "azure-storage-blob-12.19.0.tar.gz", hash = "sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897"},
{file = "azure_storage_blob-12.19.0-py3-none-any.whl", hash = "sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b"},
]
[package.dependencies]
@@ -902,13 +919,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"]
[[package]]
name = "google-api-python-client"
version = "2.105.0"
version = "2.107.0"
description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-api-python-client-2.105.0.tar.gz", hash = "sha256:0a8b32cfc2d9b3c1868ae6faef7ee1ab9c89a6cec30be709ea9c97f9a3e5902d"},
{file = "google_api_python_client-2.105.0-py2.py3-none-any.whl", hash = "sha256:571ce7c41e53415e385aab5a955725f71780550683ffcb71596f5809677d40b7"},
{file = "google-api-python-client-2.107.0.tar.gz", hash = "sha256:ef6d4c1a17fe9ec0894fc6d4f61e751c4b859fb33f2ab5b881ceb0b80ba442ba"},
{file = "google_api_python_client-2.107.0-py2.py3-none-any.whl", hash = "sha256:51d7bf676f41a77b00b7b9c72ace0c1db3dd5a4dd392a13ae897cf4f571a3539"},
]
[package.dependencies]
@@ -1365,13 +1382,13 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp
[[package]]
name = "mkdocs-material"
version = "9.4.7"
version = "9.4.8"
description = "Documentation that simply works"
optional = true
python-versions = ">=3.8"
files = [
{file = "mkdocs_material-9.4.7-py3-none-any.whl", hash = "sha256:4d698d52bb6a6a3c452ab854481c4cdb68453a0420956a6aee2de55fe15fe610"},
{file = "mkdocs_material-9.4.7.tar.gz", hash = "sha256:e704e001c9ef17291e1d3462c202425217601653e18f68f85d28eff4690e662b"},
{file = "mkdocs_material-9.4.8-py3-none-any.whl", hash = "sha256:8b20f6851bddeef37dced903893cd176cf13a21a482e97705a103c45f06ce9b9"},
{file = "mkdocs_material-9.4.8.tar.gz", hash = "sha256:f0c101453e8bc12b040e8b64ca39a405d950d8402609b1378cc2b98976e74b5f"},
]
[package.dependencies]
@@ -1421,13 +1438,13 @@ test = ["pytest", "pytest-cov"]
[[package]]
name = "moto"
version = "4.2.7"
version = "4.2.8"
description = ""
optional = false
python-versions = ">=3.7"
files = [
{file = "moto-4.2.7-py2.py3-none-any.whl", hash = "sha256:3e0ef388900448485cd6eff18e9f7fcaa6cf4560b6fb536ba2e2e1278a5ecc59"},
{file = "moto-4.2.7.tar.gz", hash = "sha256:1298006aaa6996b886658eb194cac0e3a5679c9fcce6cb13e741ccc5a7247abb"},
{file = "moto-4.2.8-py2.py3-none-any.whl", hash = "sha256:e78b49ae8acee06a865e4963174bdf974dd66398fb3bb831a7428498506c0c56"},
{file = "moto-4.2.8.tar.gz", hash = "sha256:9b5a363f36f8c3fb36388764e7b8c01c615da2f2cba7da3e681680de14bfc769"},
]
[package.dependencies]
@@ -1442,29 +1459,29 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1"
xmltodict = "*"
[package.extras]
all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
apigatewayv2 = ["PyYAML (>=5.1)"]
appsync = ["graphql-core"]
awslambda = ["docker (>=3.0.0)"]
batch = ["docker (>=3.0.0)"]
cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
ds = ["sshpubkeys (>=3.1.0)"]
dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.1)"]
dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.1)"]
dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.2)"]
dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.2)"]
ebs = ["sshpubkeys (>=3.1.0)"]
ec2 = ["sshpubkeys (>=3.1.0)"]
efs = ["sshpubkeys (>=3.1.0)"]
eks = ["sshpubkeys (>=3.1.0)"]
glue = ["pyparsing (>=3.0.7)"]
iotdata = ["jsondiff (>=1.1.2)"]
proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"]
proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"]
route53resolver = ["sshpubkeys (>=3.1.0)"]
s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.4.1)"]
s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.4.1)"]
server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.4.2)"]
s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.4.2)"]
server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
ssm = ["PyYAML (>=5.1)"]
xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"]
@@ -1537,6 +1554,22 @@ requests-oauthlib = ">=0.5.0"
[package.extras]
async = ["aiodns", "aiohttp (>=3.0)"]
[[package]]
name = "msrestazure"
version = "0.6.4"
description = "AutoRest swagger generator Python client runtime. Azure-specific module."
optional = false
python-versions = "*"
files = [
{file = "msrestazure-0.6.4-py2.py3-none-any.whl", hash = "sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9"},
{file = "msrestazure-0.6.4.tar.gz", hash = "sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189"},
]
[package.dependencies]
adal = ">=0.6.0,<2.0.0"
msrest = ">=0.6.0,<2.0.0"
six = "*"
[[package]]
name = "mypy-extensions"
version = "1.0.0"
@@ -1988,13 +2021,13 @@ pytest = "*"
[[package]]
name = "pytest-xdist"
version = "3.3.1"
version = "3.4.0"
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"},
{file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"},
{file = "pytest-xdist-3.4.0.tar.gz", hash = "sha256:3a94a931dd9e268e0b871a877d09fe2efb6175c2c23d60d56a6001359002b832"},
{file = "pytest_xdist-3.4.0-py3-none-any.whl", hash = "sha256:e513118bf787677a427e025606f55e95937565e06dfaac8d87f55301e57ae607"},
]
[package.dependencies]
@@ -2889,4 +2922,4 @@ docs = ["mkdocs", "mkdocs-material"]
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "6ed432b0310655c247da3b4f542b9410842fb46de838408b99b6e61fb367cf38"
content-hash = "594dc3dc4952b294042203c3338b6959fed04eb6eb181796a4ae8c27cde5bf32"

View File

@@ -47,6 +47,7 @@ from prowler.providers.common.audit_info import (
set_provider_audit_info,
set_provider_execution_parameters,
)
from prowler.providers.common.clean import clean_provider_local_output_directories
from prowler.providers.common.outputs import set_provider_output_options
from prowler.providers.common.quick_inventory import run_provider_quick_inventory
@@ -246,7 +247,10 @@ def prowler():
for region in security_hub_regions:
# Save the regions where AWS Security Hub is enabled
if verify_security_hub_integration_enabled_per_region(
region, audit_info.audit_session
audit_info.audited_partition,
region,
audit_info.audit_session,
audit_info.audited_account,
):
aws_security_enabled_regions.append(region)
@@ -301,6 +305,9 @@ def prowler():
if checks_folder:
remove_custom_checks_module(checks_folder, provider)
# clean local directories
clean_provider_local_output_directories(args)
# If there are failed findings exit code 3, except if -z is input
if not args.ignore_exit_code_3 and stats["total_fail"] > 0:
sys.exit(3)

View File

@@ -11,7 +11,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "3.11.0"
prowler_version = "3.11.3"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png"
@@ -70,7 +70,9 @@ def check_current_version():
if latest_version != prowler_version:
return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
else:
return f"{prowler_version_string} (it is the latest version, yay!)"
return (
f"{prowler_version_string} (You are running the latest version, yay!)"
)
except requests.RequestException:
return f"{prowler_version_string}"
except Exception:

View File

@@ -2,7 +2,7 @@
aws:
# AWS Global Configuration
# aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
# aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
allowlist_non_default_regions: False
# If you want to allowlist/mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w allowlist.yaml`:
# Allowlist:
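The example syntax is cut off above; the sketch below is inferred from the allowlist-matching code later in this changeset (`Accounts` → `Checks` → `Regions`/`Resources`/`Tags`/`Exceptions`), with placeholder check and resource names:

```yaml
Allowlist:
  Accounts:
    "*":                            # or a specific account ID
      Checks:
        "s3_bucket_public_access":  # placeholder check name; entries are regex-matched
          Regions: ["us-east-1"]    # "*" matches any region
          Resources: ["my-bucket"]  # evaluated together with Regions and Tags
          Tags: []                  # optional
          Exceptions:               # optional; an excepted finding is never allowlisted
            Accounts: []
            Regions: []
            Resources: []
            Tags: []
```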

View File

@@ -36,6 +36,10 @@ def load_checks_to_execute(
# Check check's severity
if bulk_checks_metadata[check].Severity in severities:
checks_to_execute.add(check)
if service_list:
checks_to_execute = (
recover_checks_from_service(service_list, provider) & checks_to_execute
)
# Handle if there are checks passed using -C/--checks-file
elif checks_file:

View File

@@ -220,7 +220,7 @@ Detailed documentation at https://docs.prowler.cloud
group.add_argument(
"-s", "--services", nargs="+", help="List of services to be executed."
)
group.add_argument(
common_checks_parser.add_argument(
"--severity",
nargs="+",
help="List of severities to be executed [informational, low, medium, high, critical]",

View File

@@ -338,8 +338,9 @@ def add_html_footer(output_filename, output_directory):
def get_aws_html_assessment_summary(audit_info):
try:
if isinstance(audit_info, AWS_Audit_Info):
if not audit_info.profile:
audit_info.profile = "ENV"
profile = (
audit_info.profile if audit_info.profile is not None else "default"
)
if isinstance(audit_info.audited_regions, list):
audited_regions = " ".join(audit_info.audited_regions)
elif not audit_info.audited_regions:
@@ -361,7 +362,7 @@ def get_aws_html_assessment_summary(audit_info):
</li>
<li class="list-group-item">
<b>AWS-CLI Profile:</b> """
+ audit_info.profile
+ profile
+ """
</li>
<li class="list-group-item">

View File

@@ -155,6 +155,7 @@ def fill_json_ocsf(audit_info, finding, output_options) -> Check_Output_JSON_OCS
aws_org_uid = ""
account = None
org = None
profile = audit_info.profile if audit_info.profile is not None else "default"
if (
hasattr(audit_info, "organizations_metadata")
and audit_info.organizations_metadata
@@ -249,9 +250,7 @@ def fill_json_ocsf(audit_info, finding, output_options) -> Check_Output_JSON_OCS
original_time=outputs_unix_timestamp(
output_options.unix_timestamp, timestamp
),
profiles=[audit_info.profile]
if hasattr(audit_info, "organizations_metadata")
else [],
profiles=[profile],
)
compliance = Compliance_OCSF(
status=generate_json_ocsf_status(finding.status),

View File

@@ -498,17 +498,6 @@
]
}
},
"appfabric": {
"regions": {
"aws": [
"ap-northeast-1",
"eu-west-1",
"us-east-1"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"appflow": {
"regions": {
"aws": [
@@ -674,10 +663,13 @@
"regions": {
"aws": [
"ap-northeast-1",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"eu-central-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"us-east-1",
"us-east-2",
"us-west-2"
@@ -1013,6 +1005,17 @@
]
}
},
"aws-appfabric": {
"regions": {
"aws": [
"ap-northeast-1",
"eu-west-1",
"us-east-1"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"awshealthdashboard": {
"regions": {
"aws": [
@@ -2068,17 +2071,24 @@
"ap-east-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
"eu-south-1",
"eu-south-2",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",
"us-east-1",
@@ -2299,15 +2309,22 @@
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
"eu-south-1",
"eu-south-2",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",
"us-east-1",
@@ -2467,6 +2484,7 @@
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-central-2",
@@ -3642,15 +3660,19 @@
"emr-serverless": {
"regions": {
"aws": [
"af-south-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-south-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
@@ -4336,16 +4358,6 @@
"aws-us-gov": []
}
},
"gamesparks": {
"regions": {
"aws": [
"ap-northeast-1",
"us-east-1"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"glacier": {
"regions": {
"aws": [
@@ -5605,6 +5617,44 @@
]
}
},
"launch-wizard": {
"regions": {
"aws": [
"af-south-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-south-1",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"me-central-1",
"me-south-1",
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2"
],
"aws-cn": [
"cn-north-1",
"cn-northwest-1"
],
"aws-us-gov": [
"us-gov-east-1",
"us-gov-west-1"
]
}
},
"launchwizard": {
"regions": {
"aws": [
@@ -6062,6 +6112,15 @@
]
}
},
"managedblockchain-query": {
"regions": {
"aws": [
"us-east-1"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"managedservices": {
"regions": {
"aws": [
@@ -6388,11 +6447,18 @@
"aws": [
"af-south-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-north-1",
"eu-west-1",
"eu-west-3",
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-2"
@@ -6703,6 +6769,7 @@
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",
@@ -7152,6 +7219,41 @@
"aws-us-gov": []
}
},
"pca-connector-ad": {
"regions": {
"aws": [
"af-south-1",
"ap-east-1",
"ap-northeast-1",
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
"eu-south-1",
"eu-south-2",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"me-central-1",
"me-south-1",
"sa-east-1",
"us-east-1",
"us-east-2",
"us-west-1",
"us-west-2"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"personalize": {
"regions": {
"aws": [
@@ -9928,6 +10030,7 @@
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-4",

View File

@@ -135,7 +135,12 @@ def allowlist_findings(
def is_allowlisted(
allowlist: dict, audited_account: str, check: str, region: str, resource: str, tags
allowlist: dict,
audited_account: str,
check: str,
finding_region: str,
finding_resource: str,
finding_tags,
):
try:
allowlisted_checks = {}
@@ -150,15 +155,15 @@ def is_allowlisted(
if "*" in allowlist["Accounts"]:
checks_multi_account = allowlist["Accounts"]["*"]["Checks"]
allowlisted_checks.update(checks_multi_account)
# Test if it is allowlisted
if is_allowlisted_in_check(
allowlisted_checks,
audited_account,
audited_account,
check,
region,
resource,
tags,
finding_region,
finding_resource,
finding_tags,
):
is_finding_allowlisted = True
@@ -171,23 +176,29 @@ def is_allowlisted(
def is_allowlisted_in_check(
allowlisted_checks, audited_account, account, check, region, resource, tags
allowlisted_checks,
audited_account,
check,
finding_region,
finding_resource,
finding_tags,
):
try:
# Default value is not allowlisted
is_check_allowlisted = False
for allowlisted_check, allowlisted_check_info in allowlisted_checks.items():
# map lambda to awslambda
allowlisted_check = re.sub("^lambda", "awslambda", allowlisted_check)
# extract the exceptions
# Check if the finding is excepted
exceptions = allowlisted_check_info.get("Exceptions")
# Check if there are exceptions
if is_excepted(
exceptions,
audited_account,
region,
resource,
tags,
finding_region,
finding_resource,
finding_tags,
):
# Break loop and return default value since is excepted
break
@@ -201,13 +212,27 @@ def is_allowlisted_in_check(
or check == allowlisted_check
or re.search(allowlisted_check, check)
):
if is_allowlisted_in_region(
allowlisted_regions,
allowlisted_resources,
allowlisted_tags,
region,
resource,
tags,
allowlisted_in_check = True
allowlisted_in_region = is_allowlisted_in_region(
allowlisted_regions, finding_region
)
allowlisted_in_resource = is_allowlisted_in_resource(
allowlisted_resources, finding_resource
)
allowlisted_in_tags = is_allowlisted_in_tags(
allowlisted_tags, finding_tags
)
# For a finding to be allowlisted requires the following set to True:
# - allowlisted_in_check -> True
# - allowlisted_in_region -> True
# - allowlisted_in_tags -> True or allowlisted_in_resource -> True
# - excepted -> False
if (
allowlisted_in_check
and allowlisted_in_region
and (allowlisted_in_tags or allowlisted_in_resource)
):
is_check_allowlisted = True
@@ -220,25 +245,11 @@ def is_allowlisted_in_check(
def is_allowlisted_in_region(
allowlist_regions, allowlist_resources, allowlisted_tags, region, resource, tags
allowlisted_regions,
finding_region,
):
try:
# By default is not allowlisted
is_region_allowlisted = False
# If there is a *, it affects to all regions
if "*" in allowlist_regions or region in allowlist_regions:
for elem in allowlist_resources:
if is_allowlisted_in_tags(
allowlisted_tags,
elem,
resource,
tags,
):
is_region_allowlisted = True
# if we find the element there is no point in continuing with the loop
break
return is_region_allowlisted
return __is_item_matched__(allowlisted_regions, finding_region)
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
@@ -246,25 +257,9 @@ def is_allowlisted_in_region(
sys.exit(1)
def is_allowlisted_in_tags(allowlisted_tags, elem, resource, tags):
def is_allowlisted_in_tags(allowlisted_tags, finding_tags):
try:
# By default is not allowlisted
is_tag_allowlisted = False
# Check if it is an *
if elem == "*":
elem = ".*"
# Check if there are allowlisted tags
if allowlisted_tags:
for allowlisted_tag in allowlisted_tags:
if re.search(allowlisted_tag, tags):
is_tag_allowlisted = True
break
else:
if re.search(elem, resource):
is_tag_allowlisted = True
return is_tag_allowlisted
return __is_item_matched__(allowlisted_tags, finding_tags)
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
@@ -272,7 +267,25 @@ def is_allowlisted_in_tags(allowlisted_tags, elem, resource, tags):
sys.exit(1)
def is_excepted(exceptions, audited_account, region, resource, tags):
def is_allowlisted_in_resource(allowlisted_resources, finding_resource):
try:
return __is_item_matched__(allowlisted_resources, finding_resource)
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
)
sys.exit(1)
def is_excepted(
exceptions,
audited_account,
finding_region,
finding_resource,
finding_tags,
):
"""is_excepted returns True if the account, region, resource and tags are excepted"""
try:
excepted = False
is_account_excepted = False
@@ -281,39 +294,50 @@ def is_excepted(exceptions, audited_account, region, resource, tags):
is_tag_excepted = False
if exceptions:
excepted_accounts = exceptions.get("Accounts", [])
is_account_excepted = __is_item_matched__(
excepted_accounts, audited_account
)
excepted_regions = exceptions.get("Regions", [])
is_region_excepted = __is_item_matched__(excepted_regions, finding_region)
excepted_resources = exceptions.get("Resources", [])
is_resource_excepted = __is_item_matched__(
excepted_resources, finding_resource
)
excepted_tags = exceptions.get("Tags", [])
if exceptions:
if audited_account in excepted_accounts:
is_account_excepted = True
if region in excepted_regions:
is_region_excepted = True
for excepted_resource in excepted_resources:
if re.search(excepted_resource, resource):
is_resource_excepted = True
for tag in excepted_tags:
if tag in tags:
is_tag_excepted = True
if (
(
(excepted_accounts and is_account_excepted)
or not excepted_accounts
)
and (
(excepted_regions and is_region_excepted)
or not excepted_regions
)
and (
(excepted_resources and is_resource_excepted)
or not excepted_resources
)
and ((excepted_tags and is_tag_excepted) or not excepted_tags)
):
excepted = True
is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags)
if (
is_account_excepted
and is_region_excepted
and is_resource_excepted
and is_tag_excepted
):
excepted = True
return excepted
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
)
sys.exit(1)
def __is_item_matched__(matched_items, finding_items):
"""__is_item_matched__ return True if any of the matched_items are present in the finding_items, otherwise returns False."""
try:
is_item_matched = False
if matched_items and (finding_items or finding_items == ""):
for item in matched_items:
if item == "*":
item = ".*"
if re.search(item, finding_items):
is_item_matched = True
break
return is_item_matched
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
)
sys.exit(1)
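Each allowlist entry is treated as a regular expression, with a bare `*` normalized to `.*`; a standalone sketch of the same matching rule used by `__is_item_matched__`:

```python
import re

def is_item_matched(matched_items, finding_item):
    # Mirrors __is_item_matched__ above: the first regex hit wins
    if not matched_items or finding_item is None:
        return False
    for item in matched_items:
        if re.search(".*" if item == "*" else item, finding_item):
            return True
    return False

print(is_item_matched(["*"], "eu-west-1"))          # True: wildcard matches anything
print(is_item_matched(["us-.*"], "us-east-1"))      # True: regex match
print(is_item_matched(["us-east-1"], "eu-west-1"))  # False
```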

View File

@@ -56,12 +56,15 @@ def is_account_only_allowed_in_condition(
):
# if there is an ARN/account without the source account -> we do not consider it safe
# here, by default, we assume it is true and look for false entries
is_condition_valid = True
is_condition_key_restrictive = True
for item in condition_statement[condition_operator][value]:
if source_account not in item:
is_condition_valid = False
is_condition_key_restrictive = False
break
if is_condition_key_restrictive:
is_condition_valid = True
# value is a string
elif isinstance(
condition_statement[condition_operator][value],

View File

@@ -14,9 +14,11 @@ def prepare_security_hub_findings(
findings: [], audit_info: AWS_Audit_Info, output_options, enabled_regions: []
) -> dict:
security_hub_findings_per_region = {}
# Create a key per region
for region in audit_info.audited_regions:
# Create a key per audited region
for region in enabled_regions:
security_hub_findings_per_region[region] = []
for finding in findings:
# We don't send the INFO findings to AWS Security Hub
if finding.status == "INFO":
@@ -47,8 +49,10 @@ def prepare_security_hub_findings(
def verify_security_hub_integration_enabled_per_region(
partition: str,
region: str,
session: session.Session,
aws_account_number: str,
) -> bool:
f"""verify_security_hub_integration_enabled returns True if the {SECURITY_HUB_INTEGRATION_NAME} is enabled for the given region. Otherwise returns false."""
prowler_integration_enabled = False
@@ -62,7 +66,8 @@ def verify_security_hub_integration_enabled_per_region(
security_hub_client.describe_hub()
# Check if Prowler integration is enabled in Security Hub
if "prowler/prowler" not in str(
security_hub_prowler_integration_arn = f"arn:{partition}:securityhub:{region}:{aws_account_number}:product-subscription/{SECURITY_HUB_INTEGRATION_NAME}"
if security_hub_prowler_integration_arn not in str(
security_hub_client.list_enabled_products_for_import()
):
logger.error(
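Building the product-subscription ARN from the audited partition and account is what lets the check work outside the commercial `aws` partition; for example (account ID is a placeholder, and the integration name is assumed to be `prowler/prowler` per the substring check being replaced):

```python
SECURITY_HUB_INTEGRATION_NAME = "prowler/prowler"  # assumed from the replaced substring check

partition, region, account = "aws-us-gov", "us-gov-west-1", "111122223333"
arn = f"arn:{partition}:securityhub:{region}:{account}:product-subscription/{SECURITY_HUB_INTEGRATION_NAME}"
print(arn)
# arn:aws-us-gov:securityhub:us-gov-west-1:111122223333:product-subscription/prowler/prowler
```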

View File

@@ -19,17 +19,23 @@ class accessanalyzer_enabled(Check):
f"IAM Access Analyzer {analyzer.name} is enabled."
)
elif analyzer.status == "NOT_AVAILABLE":
report.status = "FAIL"
report.status_extended = (
f"IAM Access Analyzer in account {analyzer.name} is not enabled."
)
else:
report.status = "FAIL"
report.status_extended = (
f"IAM Access Analyzer {analyzer.name} is not active."
)
if analyzer.status == "NOT_AVAILABLE":
report.status = "FAIL"
report.status_extended = f"IAM Access Analyzer in account {analyzer.name} is not enabled."
else:
report.status = "FAIL"
report.status_extended = (
f"IAM Access Analyzer {analyzer.name} is not active."
)
if (
accessanalyzer_client.audit_config.get(
"allowlist_non_default_regions", False
)
and not analyzer.region == accessanalyzer_client.region
):
report.status = "WARNING"
findings.append(report)
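The `WARNING` downgrade only applies when the `allowlist_non_default_regions` key (documented in the config file earlier in this changeset) is enabled, e.g.:

```yaml
aws:
  allowlist_non_default_regions: True
```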

View File

@@ -18,10 +18,18 @@ class ec2_securitygroup_not_used(Check):
report.status = "PASS"
report.status_extended = f"Security group {security_group.name} ({security_group.id}) it is being used."
sg_in_lambda = False
sg_associated = False
for function in awslambda_client.functions.values():
if security_group.id in function.security_groups:
sg_in_lambda = True
if len(security_group.network_interfaces) == 0 and not sg_in_lambda:
for sg in ec2_client.security_groups:
if security_group.id in sg.associated_sgs:
sg_associated = True
if (
len(security_group.network_interfaces) == 0
and not sg_in_lambda
and not sg_associated
):
report.status = "FAIL"
report.status_extended = f"Security group {security_group.name} ({security_group.id}) it is not being used."

View File

@@ -117,6 +117,7 @@ class EC2(AWSService):
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
associated_sgs = []
# check if sg has public access to all ports
all_public_ports = False
for ingress_rule in sg["IpPermissions"]:
@@ -128,7 +129,10 @@ class EC2(AWSService):
in self.audited_checks
):
all_public_ports = True
break
# check associated security groups
for sg_group in ingress_rule.get("UserIdGroupPairs", []):
if sg_group.get("GroupId"):
associated_sgs.append(sg_group["GroupId"])
self.security_groups.append(
SecurityGroup(
name=sg["GroupName"],
@@ -138,6 +142,7 @@ class EC2(AWSService):
ingress_rules=sg["IpPermissions"],
egress_rules=sg["IpPermissionsEgress"],
public_ports=all_public_ports,
associated_sgs=associated_sgs,
vpc_id=sg["VpcId"],
tags=sg.get("Tags"),
)
@@ -464,6 +469,7 @@ class SecurityGroup(BaseModel):
id: str
vpc_id: str
public_ports: bool
associated_sgs: list
network_interfaces: list[str] = []
ingress_rules: list[dict]
egress_rules: list[dict]

View File

@@ -6,7 +6,7 @@ class guardduty_centrally_managed(Check):
def execute(self):
findings = []
for detector in guardduty_client.detectors:
if detector.id:
if detector.id and detector.enabled_in_account:
report = Check_Report_AWS(self.metadata())
report.region = detector.region
report.resource_id = detector.id

View File

@@ -6,7 +6,7 @@ class guardduty_no_high_severity_findings(Check):
def execute(self):
findings = []
for detector in guardduty_client.detectors:
if detector.id:
if detector.id and detector.enabled_in_account:
report = Check_Report_AWS(self.metadata())
report.region = detector.region
report.resource_id = detector.id

View File

@@ -139,7 +139,10 @@ class IAM(AWSService):
logger.warning(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -208,14 +211,24 @@ class IAM(AWSService):
reuse_prevention=reuse_prevention,
hard_expiry=hard_expiry,
)
except Exception as error:
if "NoSuchEntity" in str(error):
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntity":
# Password policy does not exist
stored_password_policy = None
logger.warning(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
finally:
return stored_password_policy
@@ -268,17 +281,22 @@ class IAM(AWSService):
logger.info("IAM - List Attached Group Policies...")
try:
for group in self.groups:
list_attached_group_policies_paginator = self.client.get_paginator(
"list_attached_group_policies"
)
attached_group_policies = []
for page in list_attached_group_policies_paginator.paginate(
GroupName=group.name
):
for attached_group_policy in page["AttachedPolicies"]:
attached_group_policies.append(attached_group_policy)
try:
list_attached_group_policies_paginator = self.client.get_paginator(
"list_attached_group_policies"
)
attached_group_policies = []
for page in list_attached_group_policies_paginator.paginate(
GroupName=group.name
):
for attached_group_policy in page["AttachedPolicies"]:
attached_group_policies.append(attached_group_policy)
group.attached_policies = attached_group_policies
group.attached_policies = attached_group_policies
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -337,18 +355,33 @@ class IAM(AWSService):
logger.info("IAM - List Attached User Policies...")
try:
for user in self.users:
attached_user_policies = []
get_user_attached_policies_paginator = self.client.get_paginator(
"list_attached_user_policies"
)
for page in get_user_attached_policies_paginator.paginate(
UserName=user.name
):
for policy in page["AttachedPolicies"]:
attached_user_policies.append(policy)
try:
attached_user_policies = []
get_user_attached_policies_paginator = self.client.get_paginator(
"list_attached_user_policies"
)
for page in get_user_attached_policies_paginator.paginate(
UserName=user.name
):
for policy in page["AttachedPolicies"]:
attached_user_policies.append(policy)
user.attached_policies = attached_user_policies
user.attached_policies = attached_user_policies
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntity":
logger.warning(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -371,10 +404,19 @@ class IAM(AWSService):
role.attached_policies = attached_role_policies
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntityException":
if error.response["Error"]["Code"] == "NoSuchEntity":
logger.warning(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
@@ -639,8 +681,16 @@ class IAM(AWSService):
response = self.client.list_role_tags(RoleName=role.name)["Tags"]
role.tags = response
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntityException":
if error.response["Error"]["Code"] == "NoSuchEntity":
role.tags = []
else:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
@@ -653,8 +703,12 @@ class IAM(AWSService):
response = self.client.list_user_tags(UserName=user.name)["Tags"]
user.tags = response
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntityException":
if error.response["Error"]["Code"] == "NoSuchEntity":
user.tags = []
else:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
@@ -664,13 +718,22 @@ class IAM(AWSService):
try:
for policy in self.policies:
try:
response = self.client.list_policy_tags(PolicyArn=policy.arn)[
"Tags"
]
policy.tags = response
if policy.type != "Inline":
response = self.client.list_policy_tags(PolicyArn=policy.arn)[
"Tags"
]
policy.tags = response
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntityException":
if error.response["Error"]["Code"] == "NoSuchEntity":
policy.tags = []
else:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
@@ -697,9 +760,19 @@ class IAM(AWSService):
]
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntity":
logger.warning(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -717,6 +790,15 @@ class IAM(AWSService):
"AccessKeyMetadata"
]
except ClientError as error:
if error.response["Error"]["Code"] == "NoSuchEntity":
logger.warning(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)

View File

@@ -13,9 +13,14 @@ class rds_instance_deprecated_engine_version(Check):
report.resource_arn = db_instance.arn
report.resource_tags = db_instance.tags
report.status_extended = f"RDS instance {db_instance.id} is using a deprecated engine {db_instance.engine} with version {db_instance.engine_version}."
if (
db_instance.engine_version
hasattr(
rds_client.db_engines.get(db_instance.region, {}).get(
db_instance.engine, {}
),
"engine_versions",
)
and db_instance.engine_version
in rds_client.db_engines[db_instance.region][
db_instance.engine
].engine_versions

View File

@@ -16,23 +16,30 @@ class SQS(AWSService):
super().__init__(__class__.__name__, audit_info)
self.queues = []
self.__threading_call__(self.__list_queues__)
self.__get_queue_attributes__(self.regional_clients)
self.__get_queue_attributes__()
self.__list_queue_tags__()
def __list_queues__(self, regional_client):
logger.info("SQS - describing queues...")
try:
list_queues_paginator = regional_client.get_paginator("list_queues")
for page in list_queues_paginator.paginate():
# The SQS API uses nonstandard pagination
# you must specify a PageSize if there are more than 1000 queues
for page in list_queues_paginator.paginate(
PaginationConfig={"PageSize": 1000}
):
if "QueueUrls" in page:
for queue in page["QueueUrls"]:
arn = f"arn:{self.audited_partition}:sqs:{regional_client.region}:{self.audited_account}:{queue}"
# the queue name is the last path segment of the url
queue_name = queue.split("/")[-1]
arn = f"arn:{self.audited_partition}:sqs:{regional_client.region}:{self.audited_account}:{queue_name}"
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
self.queues.append(
Queue(
arn=arn,
name=queue_name,
id=queue,
region=regional_client.region,
)
@@ -42,28 +49,46 @@ class SQS(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_queue_attributes__(self, regional_clients):
def __get_queue_attributes__(self):
try:
logger.info("SQS - describing queue attributes...")
for queue in self.queues:
regional_client = regional_clients[queue.region]
queue_attributes = regional_client.get_queue_attributes(
QueueUrl=queue.id, AttributeNames=["All"]
)
if "Attributes" in queue_attributes:
if "Policy" in queue_attributes["Attributes"]:
queue.policy = loads(queue_attributes["Attributes"]["Policy"])
if "KmsMasterKeyId" in queue_attributes["Attributes"]:
queue.kms_key_id = queue_attributes["Attributes"][
"KmsMasterKeyId"
]
if "SqsManagedSseEnabled" in queue_attributes["Attributes"]:
if (
queue_attributes["Attributes"]["SqsManagedSseEnabled"]
== "true"
):
queue.kms_key_id = "SqsManagedSseEnabled"
try:
regional_client = self.regional_clients[queue.region]
queue_attributes = regional_client.get_queue_attributes(
QueueUrl=queue.id, AttributeNames=["All"]
)
if "Attributes" in queue_attributes:
if "Policy" in queue_attributes["Attributes"]:
queue.policy = loads(
queue_attributes["Attributes"]["Policy"]
)
if "KmsMasterKeyId" in queue_attributes["Attributes"]:
queue.kms_key_id = queue_attributes["Attributes"][
"KmsMasterKeyId"
]
if "SqsManagedSseEnabled" in queue_attributes["Attributes"]:
if (
queue_attributes["Attributes"]["SqsManagedSseEnabled"]
== "true"
):
queue.kms_key_id = "SqsManagedSseEnabled"
except ClientError as error:
if (
error.response["Error"]["Code"]
== "AWS.SimpleQueueService.NonExistentQueue"
):
logger.warning(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
@@ -87,6 +112,14 @@ class SQS(AWSService):
logger.warning(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
@@ -96,6 +129,7 @@ class SQS(AWSService):
class Queue(BaseModel):
id: str
name: str
arn: str
region: str
policy: dict = None
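The ARN fix hinges on taking the queue name from the last path segment of the queue URL instead of the full URL; a standalone illustration (URL and account are placeholders):

```python
queue_url = "https://sqs.us-east-1.amazonaws.com/111122223333/my-queue"

# The queue name is the last path segment of the URL
queue_name = queue_url.split("/")[-1]
arn = f"arn:aws:sqs:us-east-1:111122223333:{queue_name}"
print(arn)  # arn:aws:sqs:us-east-1:111122223333:my-queue
```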

View File

@@ -7,6 +7,7 @@ from msgraph.core import GraphClient
from prowler.lib.logger import logger
from prowler.providers.azure.lib.audit_info.models import Azure_Identity_Info
from prowler.providers.azure.lib.regions.regions import get_regions_config
class Azure_Provider:
@@ -18,12 +19,14 @@ class Azure_Provider:
managed_entity_auth: bool,
subscription_ids: list,
tenant_id: str,
region: str,
):
logger.info("Instantiating Azure Provider ...")
self.credentials = self.__set_credentials__(
self.region_config = self.__get_region_config__(region)
self.credentials = self.__get_credentials__(
az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id
)
self.identity = self.__set_identity_info__(
self.identity = self.__get_identity_info__(
self.credentials,
az_cli_auth,
sp_env_auth,
@@ -32,7 +35,10 @@ class Azure_Provider:
subscription_ids,
)
def __set_credentials__(
def __get_region_config__(self, region):
return get_regions_config(region)
def __get_credentials__(
self, az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id
):
# Browser auth creds cannot be set with DefaultAzureCredentials()
@@ -52,6 +58,8 @@ class Azure_Provider:
exclude_shared_token_cache_credential=True,
# Azure Auth using PowerShell is not supported
exclude_powershell_credential=True,
# set Authority of a Microsoft Entra endpoint
authority=self.region_config["authority"],
)
except Exception as error:
logger.critical("Failed to retrieve azure credentials")
@@ -61,7 +69,6 @@ class Azure_Provider:
sys.exit(1)
else:
try:
print(tenant_id)
credentials = InteractiveBrowserCredential(tenant_id=tenant_id)
except Exception as error:
logger.critical("Failed to retrieve azure credentials")
@@ -83,7 +90,7 @@ class Azure_Provider:
)
sys.exit(1)
def __set_identity_info__(
def __get_identity_info__(
self,
credentials,
az_cli_auth,
@@ -153,7 +160,11 @@ class Azure_Provider:
logger.info(
"Trying to subscriptions and tenant ids to populate identity structure ..."
)
subscriptions_client = SubscriptionClient(credential=credentials)
subscriptions_client = SubscriptionClient(
credential=credentials,
base_url=self.region_config["base_url"],
credential_scopes=self.region_config["credential_scopes"],
)
if not subscription_ids:
logger.info("Scanning all the Azure subscriptions...")
for subscription in subscriptions_client.subscriptions.list():
@@ -195,3 +206,6 @@ class Azure_Provider:
def get_identity(self):
return self.identity
def get_region_config(self):
return self.region_config

View File

@@ -1,3 +1,6 @@
from argparse import ArgumentTypeError
def init_parser(self):
"""Init the Azure Provider CLI parser"""
azure_parser = self.subparsers.add_parser(
@@ -40,3 +43,27 @@ def init_parser(self):
default=None,
help="Azure Tenant ID to be used with --browser-auth option",
)
# Regions
azure_regions_subparser = azure_parser.add_argument_group("Regions")
azure_regions_subparser.add_argument(
"--azure-region",
nargs="?",
default="AzureCloud",
type=validate_azure_region,
help="Azure region from `az cloud list --output table`, by default AzureCloud",
)
def validate_azure_region(region):
"""validate_azure_region validates if the region passed as argument is valid"""
regions_allowed = [
"AzureChinaCloud",
"AzureUSGovernment",
"AzureGermanCloud",
"AzureCloud",
]
if region not in regions_allowed:
raise ArgumentTypeError(
f"Region {region} not allowed, allowed regions are {' '.join(regions_allowed)}"
)
return region

View File

@@ -1,6 +1,7 @@
from prowler.providers.azure.lib.audit_info.models import (
Azure_Audit_Info,
Azure_Identity_Info,
Azure_Region_Config,
)
azure_audit_info = Azure_Audit_Info(
@@ -9,4 +10,5 @@ azure_audit_info = Azure_Audit_Info(
audit_resources=None,
audit_metadata=None,
audit_config=None,
azure_region_config=Azure_Region_Config(),
)

View File

@@ -13,6 +13,13 @@ class Azure_Identity_Info(BaseModel):
subscriptions: dict = {}
class Azure_Region_Config(BaseModel):
name: str = ""
authority: str = None
base_url: str = ""
credential_scopes: list = []
@dataclass
class Azure_Audit_Info:
credentials: DefaultAzureCredential
@@ -20,12 +27,20 @@ class Azure_Audit_Info:
audit_resources: Optional[Any]
audit_metadata: Optional[Any]
audit_config: dict
azure_region_config: Azure_Region_Config
def __init__(
self, credentials, identity, audit_metadata, audit_resources, audit_config
self,
credentials,
identity,
audit_metadata,
audit_resources,
audit_config,
azure_region_config,
):
self.credentials = credentials
self.identity = identity
self.audit_metadata = audit_metadata
self.audit_resources = audit_resources
self.audit_config = audit_config
self.azure_region_config = azure_region_config
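A populated Azure_Region_Config for the default public cloud would look like the following (values copied from the regions mapping added in this change):

    region_config = Azure_Region_Config(
        name="AzureCloud",
        authority=None,
        base_url="https://management.azure.com",
        credential_scopes=["https://management.azure.com/.default"],
    )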

View File

@@ -0,0 +1,38 @@
from azure.identity import AzureAuthorityHosts
from msrestazure.azure_cloud import (
AZURE_CHINA_CLOUD,
AZURE_GERMAN_CLOUD,
AZURE_US_GOV_CLOUD,
)
def get_regions_config(region):
allowed_regions = {
"AzureCloud": {
"authority": None,
"base_url": "https://management.azure.com",
"credential_scopes": ["https://management.azure.com/.default"],
},
"AzureChinaCloud": {
"authority": AzureAuthorityHosts.AZURE_CHINA,
"base_url": AZURE_CHINA_CLOUD.endpoints.resource_manager,
"credential_scopes": [
AZURE_CHINA_CLOUD.endpoints.resource_manager + "/.default"
],
},
"AzureUSGovernment": {
"authority": AzureAuthorityHosts.AZURE_GOVERNMENT,
"base_url": AZURE_US_GOV_CLOUD.endpoints.resource_manager,
"credential_scopes": [
AZURE_US_GOV_CLOUD.endpoints.resource_manager + "/.default"
],
},
"AzureGermanCloud": {
"authority": AzureAuthorityHosts.AZURE_GERMANY,
"base_url": AZURE_GERMAN_CLOUD.endpoints.resource_manager,
"credential_scopes": [
AZURE_GERMAN_CLOUD.endpoints.resource_manager + "/.default"
],
},
}
return allowed_regions[region]
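Each entry carries the three pieces a client needs. A quick sanity check of the shape (no sovereign-cloud URLs asserted here, since those values come from msrestazure; the invariant below holds for all four entries because base_url and the scope are built from the same resource manager endpoint):

    config = get_regions_config("AzureUSGovernment")
    assert set(config) == {"authority", "base_url", "credential_scopes"}
    assert config["credential_scopes"] == [config["base_url"] + "/.default"]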

View File

@@ -9,17 +9,27 @@ class AzureService:
audit_info: Azure_Audit_Info,
):
self.clients = self.__set_clients__(
audit_info.identity.subscriptions, audit_info.credentials, service
audit_info.identity.subscriptions,
audit_info.credentials,
service,
audit_info.azure_region_config,
)
self.subscriptions = audit_info.identity.subscriptions
def __set_clients__(self, subscriptions, credentials, service):
def __set_clients__(self, subscriptions, credentials, service, region_config):
clients = {}
try:
for display_name, id in subscriptions.items():
clients.update(
{display_name: service(credential=credentials, subscription_id=id)}
{
display_name: service(
credential=credentials,
subscription_id=id,
base_url=region_config.base_url,
credential_scopes=region_config.credential_scopes,
)
}
)
except Exception as error:
logger.error(
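The hunk above is cut off mid-statement, but the pattern generalizes to any management-plane SDK class passed in as `service`. For instance (StorageManagementClient is just an example consumer, not part of this diff, and `region_config` is the Azure_Region_Config instance from the audit info):

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.storage import StorageManagementClient

    # One client per subscription, all pointed at the selected cloud's endpoints
    client = StorageManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
        base_url=region_config.base_url,
        credential_scopes=region_config.credential_scopes,
    )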

View File

@@ -26,7 +26,10 @@ from prowler.providers.aws.lib.resource_api_tagging.resource_api_tagging import
)
from prowler.providers.azure.azure_provider import Azure_Provider
from prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
from prowler.providers.azure.lib.audit_info.models import (
Azure_Audit_Info,
Azure_Region_Config,
)
from prowler.providers.gcp.gcp_provider import GCP_Provider
from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
@@ -63,7 +66,7 @@ GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: {Fore.Y
report = f"""
This report is being generated using the identity below:
Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL}
Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL} Azure Region: {Fore.YELLOW}[{audit_info.azure_region_config.name}]{Style.RESET_ALL}
Azure Subscriptions: {Fore.YELLOW}{printed_subscriptions}{Style.RESET_ALL}
Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RESET_ALL} Azure Identity ID: {Fore.YELLOW}[{audit_info.identity.identity_id}]{Style.RESET_ALL}
"""
@@ -282,6 +285,10 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE
browser_auth = arguments.get("browser_auth")
managed_entity_auth = arguments.get("managed_entity_auth")
tenant_id = arguments.get("tenant_id")
logger.info("Checking if region is different than default one")
region = arguments.get("azure_region")
if (
not az_cli_auth
and not sp_env_auth
@@ -303,9 +310,17 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE
managed_entity_auth,
subscription_ids,
tenant_id,
region,
)
azure_audit_info.credentials = azure_provider.get_credentials()
azure_audit_info.identity = azure_provider.get_identity()
region_config = azure_provider.get_region_config()
azure_audit_info.azure_region_config = Azure_Region_Config(
name=region,
authority=region_config["authority"],
base_url=region_config["base_url"],
credential_scopes=region_config["credential_scopes"],
)
if not arguments.get("only_logs"):
self.print_azure_credentials(azure_audit_info)

View File

@@ -0,0 +1,32 @@
import importlib
import sys
from shutil import rmtree
from prowler.config.config import default_output_directory
from prowler.lib.logger import logger
def clean_provider_local_output_directories(args):
"""
clean_provider_local_output_directories deletes local custom output directories when output is sent to a remote provider storage
"""
try:
# import provider cleaning function
provider_clean_function = f"clean_{args.provider}_local_output_directories"
getattr(importlib.import_module(__name__), provider_clean_function)(args)
except AttributeError as attribute_exception:
logger.info(
f"Cleaning local output directories not initialized for provider {args.provider}: {attribute_exception}"
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)
def clean_aws_local_output_directories(args):
"""clean_aws_provider_local_output_directories deletes local custom dirs when output is sent to remote provider storage for aws provider"""
if args.output_bucket or args.output_bucket_no_assume:
if args.output_directory != default_output_directory:
rmtree(args.output_directory)
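In other words, the custom directory is only removed when the findings were shipped to a bucket and the directory differs from the default. A hedged usage sketch (bucket name and path are made up):

    from argparse import Namespace

    from prowler.providers.common.clean import clean_provider_local_output_directories

    args = Namespace(
        provider="aws",
        output_bucket="my-findings-bucket",      # hypothetical bucket
        output_bucket_no_assume=None,
        output_directory="/tmp/prowler-custom",  # non-default local dir
    )
    clean_provider_local_output_directories(args)  # removes /tmp/prowler-custom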

View File

@@ -22,10 +22,10 @@ packages = [
{include = "prowler"}
]
readme = "README.md"
version = "3.11.0"
version = "3.11.3"
[tool.poetry.dependencies]
alive-progress = "3.1.4"
alive-progress = "3.1.5"
awsipranges = "0.3.3"
azure-identity = "1.15.0"
azure-mgmt-authorization = "4.0.0"
@@ -33,16 +33,17 @@ azure-mgmt-security = "5.0.0"
azure-mgmt-sql = "3.0.1"
azure-mgmt-storage = "21.1.0"
azure-mgmt-subscription = "3.1.1"
azure-storage-blob = "12.18.3"
azure-storage-blob = "12.19.0"
boto3 = "1.26.165"
botocore = "1.29.165"
colorama = "0.4.6"
detect-secrets = "1.4.0"
google-api-python-client = "2.105.0"
google-api-python-client = "2.107.0"
google-auth-httplib2 = "^0.1.0"
mkdocs = {version = "1.5.3", optional = true}
mkdocs-material = {version = "9.4.7", optional = true}
mkdocs-material = {version = "9.4.8", optional = true}
msgraph-core = "0.2.2"
msrestazure = "^0.6.4"
pydantic = "1.10.13"
python = "^3.9"
schema = "0.7.5"
@@ -61,13 +62,13 @@ docker = "6.1.3"
flake8 = "6.1.0"
freezegun = "1.2.2"
mock = "5.1.0"
moto = "4.2.7"
moto = "4.2.8"
openapi-spec-validator = "0.7.1"
pylint = "3.0.2"
pytest = "7.4.3"
pytest-cov = "4.1.0"
pytest-randomly = "3.15.0"
pytest-xdist = "3.3.1"
pytest-xdist = "3.4.0"
safety = "2.3.5"
vulture = "2.10"

View File

@@ -63,7 +63,7 @@ class Test_Config:
def test_check_current_version_with_latest(self):
assert (
check_current_version()
== f"Prowler {MOCK_PROWLER_VERSION} (it is the latest version, yay!)"
== f"Prowler {MOCK_PROWLER_VERSION} (You are running the latest version, yay!)"
)
@mock.patch(

View File

@@ -1,9 +1,11 @@
import uuid
from argparse import ArgumentTypeError
import pytest
from mock import patch
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.providers.azure.lib.arguments.arguments import validate_azure_region
prowler_command = "prowler"
@@ -502,6 +504,18 @@ class Test_Parser:
assert service_1 in parsed.services
assert service_2 in parsed.services
def test_checks_parser_services_with_severity(self):
argument1 = "--services"
service_1 = "iam"
argument2 = "--severity"
severity = "low"
command = [prowler_command, argument1, service_1, argument2, severity]
parsed = self.parser.parse(command)
assert len(parsed.services) == 1
assert service_1 in parsed.services
assert len(parsed.severity) == 1
assert severity in parsed.severity
def test_checks_parser_informational_severity(self):
argument = "--severity"
severity = "informational"
@@ -1038,6 +1052,14 @@ class Test_Parser:
assert parsed.subscription_ids[0] == subscription_1
assert parsed.subscription_ids[1] == subscription_2
def test_parser_azure_region(self):
argument = "--azure-region"
region = "AzureChinaCloud"
command = [prowler_command, "azure", argument, region]
parsed = self.parser.parse(command)
assert parsed.provider == "azure"
assert parsed.azure_region == region
# Test AWS flags with Azure provider
def test_parser_azure_with_aws_flag(self, capsys):
command = [prowler_command, "azure", "-p"]
@@ -1080,3 +1102,33 @@ class Test_Parser:
assert len(parsed.project_ids) == 2
assert parsed.project_ids[0] == project_1
assert parsed.project_ids[1] == project_2
def test_validate_azure_region_valid_regions(self):
expected_regions = [
"AzureChinaCloud",
"AzureUSGovernment",
"AzureGermanCloud",
"AzureCloud",
]
input_regions = [
"AzureChinaCloud",
"AzureUSGovernment",
"AzureGermanCloud",
"AzureCloud",
]
for region in input_regions:
assert validate_azure_region(region) in expected_regions
def test_validate_azure_region_invalid_regions(self):
expected_regions = [
"AzureChinaCloud",
"AzureUSGovernment",
"AzureGermanCloud",
"AzureCloud",
]
invalid_region = "non-valid-region"
with pytest.raises(
ArgumentTypeError,
match=f"Region {invalid_region} not allowed, allowed regions are {' '.join(expected_regions)}",
):
validate_azure_region(invalid_region)

View File

@@ -11,6 +11,7 @@ from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.azure.lib.audit_info.models import (
Azure_Audit_Info,
Azure_Identity_Info,
Azure_Region_Config,
)
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
@@ -76,6 +77,7 @@ class Test_Slack_Integration:
audit_resources=None,
audit_metadata=None,
audit_config=None,
azure_region_config=Azure_Region_Config(),
)
assert create_message_identity("aws", aws_audit_info) == (
f"AWS Account *{aws_audit_info.audited_account}*",

View File

@@ -5,9 +5,11 @@ from prowler.providers.common.models import Audit_Metadata
AWS_REGION_US_EAST_1 = "us-east-1"
AWS_REGION_EU_WEST_1 = "eu-west-1"
AWS_REGION_EU_WEST_2 = "eu-west-2"
AWS_PARTITION = "aws"
AWS_ACCOUNT_NUMBER = "123456789012"
AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"
AWS_COMMERCIAL_PARTITION = "aws"
# Mocked AWS Audit Info
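The hunk ends before the shared fixture itself; below is a plausible sketch of `set_mocked_aws_audit_info`, reconstructed from the inline fixture this refactor deletes from the Security Hub tests further down (the exact signature and defaults are assumptions):

    def set_mocked_aws_audit_info(audited_regions=None):
        # Mirrors the per-test fixture removed below; fields not exercised by
        # the tests (session, role, organizations metadata) are assumptions
        return AWS_Audit_Info(
            session_config=None,
            original_session=None,
            audit_session=None,
            audited_account=AWS_ACCOUNT_NUMBER,
            audited_account_arn=AWS_ACCOUNT_ARN,
            audited_identity_arn="test-arn",
            audited_user_id="test",
            audited_partition=AWS_COMMERCIAL_PARTITION,
            profile="default",
            profile_region=AWS_REGION_US_EAST_1,
            credentials=None,
            assumed_role_info=None,
            audited_regions=audited_regions,
            organizations_metadata=None,
            audit_resources=None,
            mfa_enabled=False,
            audit_metadata=Audit_Metadata(
                services_scanned=0,
                expected_checks=[],
                completed_checks=0,
                audit_progress=0,
            ),
        )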

View File

@@ -8,15 +8,18 @@ from prowler.providers.aws.lib.allowlist.allowlist import (
is_allowlisted,
is_allowlisted_in_check,
is_allowlisted_in_region,
is_allowlisted_in_resource,
is_allowlisted_in_tags,
is_excepted,
parse_allowlist_file,
)
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.common.models import Audit_Metadata
AWS_ACCOUNT_NUMBER = "123456789012"
AWS_REGION = "us-east-1"
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_WEST_1,
AWS_REGION_US_EAST_1,
)
class Test_Allowlist:
@@ -56,7 +59,7 @@ class Test_Allowlist:
def test_s3_allowlist(self):
audit_info = self.set_mocked_audit_info()
# Create bucket and upload allowlist yaml
s3_resource = resource("s3", region_name=AWS_REGION)
s3_resource = resource("s3", region_name=AWS_REGION_US_EAST_1)
s3_resource.create_bucket(Bucket="test-allowlist")
s3_resource.Object("test-allowlist", "allowlist.yaml").put(
Body=open(
@@ -75,7 +78,7 @@ class Test_Allowlist:
def test_dynamo_allowlist(self):
audit_info = self.set_mocked_audit_info()
# Create table and put item
dynamodb_resource = resource("dynamodb", region_name=AWS_REGION)
dynamodb_resource = resource("dynamodb", region_name=AWS_REGION_US_EAST_1)
table_name = "test-allowlist"
params = {
"TableName": table_name,
@@ -97,7 +100,7 @@ class Test_Allowlist:
Item={
"Accounts": "*",
"Checks": "iam_user_hardware_mfa_enabled",
"Regions": ["eu-west-1", AWS_REGION],
"Regions": [AWS_REGION_EU_WEST_1, AWS_REGION_US_EAST_1],
"Resources": ["keyword"],
}
)
@@ -107,7 +110,7 @@ class Test_Allowlist:
in parse_allowlist_file(
audit_info,
"arn:aws:dynamodb:"
+ AWS_REGION
+ AWS_REGION_US_EAST_1
+ ":"
+ str(AWS_ACCOUNT_NUMBER)
+ ":table/"
@@ -119,7 +122,7 @@ class Test_Allowlist:
def test_dynamo_allowlist_with_tags(self):
audit_info = self.set_mocked_audit_info()
# Create table and put item
dynamodb_resource = resource("dynamodb", region_name=AWS_REGION)
dynamodb_resource = resource("dynamodb", region_name=AWS_REGION_US_EAST_1)
table_name = "test-allowlist"
params = {
"TableName": table_name,
@@ -152,7 +155,7 @@ class Test_Allowlist:
in parse_allowlist_file(
audit_info,
"arn:aws:dynamodb:"
+ AWS_REGION
+ AWS_REGION_US_EAST_1
+ ":"
+ str(AWS_ACCOUNT_NUMBER)
+ ":table/"
@@ -169,7 +172,7 @@ class Test_Allowlist:
"*": {
"Checks": {
"check_test": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": ["prowler", "^test", "prowler-pro"],
}
}
@@ -183,7 +186,7 @@ class Test_Allowlist:
finding_1.check_metadata = MagicMock
finding_1.check_metadata.CheckID = "check_test"
finding_1.status = "FAIL"
finding_1.region = AWS_REGION
finding_1.region = AWS_REGION_US_EAST_1
finding_1.resource_id = "prowler"
finding_1.resource_tags = []
@@ -195,6 +198,66 @@ class Test_Allowlist:
assert len(allowlisted_findings) == 1
assert allowlisted_findings[0].status == "WARNING"
def test_is_allowlisted_with_everything_excepted(self):
allowlist = {
"Accounts": {
"*": {
"Checks": {
"athena_*": {
"Regions": "*",
"Resources": "*",
"Tags": "*",
"Exceptions": {
"Accounts": ["*"],
"Regions": ["*"],
"Resources": ["*"],
"Tags": ["*"],
},
}
}
}
}
}
assert not is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"athena_1",
AWS_REGION_US_EAST_1,
"prowler",
"",
)
def test_is_allowlisted_with_default_allowlist(self):
allowlist = {
"Accounts": {
"*": {
"Checks": {
"*": {
"Tags": ["*"],
"Regions": ["*"],
"Resources": ["*"],
"Exceptions": {
"Tags": [],
"Regions": [],
"Accounts": [],
"Resources": [],
},
}
}
}
}
}
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"athena_1",
AWS_REGION_US_EAST_1,
"prowler",
"",
)
def test_is_allowlisted(self):
# Allowlist example
allowlist = {
@@ -202,7 +265,7 @@ class Test_Allowlist:
"*": {
"Checks": {
"check_test": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": ["prowler", "^test", "prowler-pro"],
}
}
@@ -211,22 +274,37 @@ class Test_Allowlist:
}
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", ""
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"prowler",
"",
)
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler-test",
"",
)
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"test-prowler",
"",
)
assert is_allowlisted(
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"prowler-pro-test",
"",
)
@@ -244,7 +322,7 @@ class Test_Allowlist:
"*": {
"Checks": {
"check_test": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": [".*"],
}
}
@@ -253,15 +331,30 @@ class Test_Allowlist:
}
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"prowler",
"",
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"prowler-test",
"",
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"test-prowler",
"",
)
assert not (
@@ -277,7 +370,7 @@ class Test_Allowlist:
"*": {
"Checks": {
"check_test": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": ["*"],
}
}
@@ -286,15 +379,30 @@ class Test_Allowlist:
}
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"prowler",
"",
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"prowler-test",
"",
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"test-prowler",
"",
)
assert not (
@@ -310,7 +418,7 @@ class Test_Allowlist:
"*": {
"Checks": {
"check_test_2": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": ["*"],
}
}
@@ -318,7 +426,7 @@ class Test_Allowlist:
AWS_ACCOUNT_NUMBER: {
"Checks": {
"check_test": {
"Regions": [AWS_REGION],
"Regions": [AWS_REGION_US_EAST_1],
"Resources": ["*"],
}
}
@@ -327,19 +435,39 @@ class Test_Allowlist:
}
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test_2", AWS_REGION, "prowler", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test_2",
AWS_REGION_US_EAST_1,
"prowler",
"",
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"prowler",
"",
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler-test", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"prowler-test",
"",
)
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "test-prowler", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"test-prowler",
"",
)
assert not (
@@ -354,7 +482,7 @@ class Test_Allowlist:
AWS_ACCOUNT_NUMBER: {
"Checks": {
"check_test": {
"Regions": [AWS_REGION],
"Regions": [AWS_REGION_US_EAST_1],
"Resources": ["prowler"],
}
}
@@ -363,7 +491,12 @@ class Test_Allowlist:
}
assert is_allowlisted(
allowlist, AWS_ACCOUNT_NUMBER, "check_test", AWS_REGION, "prowler", ""
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION_US_EAST_1,
"prowler",
"",
)
assert not (
@@ -373,47 +506,27 @@ class Test_Allowlist:
)
def test_is_allowlisted_in_region(self):
# Allowlist example
allowlisted_regions = [AWS_REGION, "eu-west-1"]
allowlisted_resources = ["*"]
allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1]
finding_region = AWS_REGION_US_EAST_1
assert is_allowlisted_in_region(
allowlisted_regions, allowlisted_resources, None, AWS_REGION, "prowler", ""
)
assert is_allowlisted_in_region(allowlisted_regions, finding_region)
assert is_allowlisted_in_region(
allowlisted_regions,
allowlisted_resources,
None,
AWS_REGION,
"prowler-test",
"",
)
def test_is_allowlisted_in_region_wildcard(self):
allowlisted_regions = ["*"]
finding_region = AWS_REGION_US_EAST_1
assert is_allowlisted_in_region(
allowlisted_regions,
allowlisted_resources,
None,
AWS_REGION,
"test-prowler",
"",
)
assert is_allowlisted_in_region(allowlisted_regions, finding_region)
assert not (
is_allowlisted_in_region(
allowlisted_regions,
allowlisted_resources,
None,
"us-east-2",
"test",
"",
)
)
def test_is_not_allowlisted_in_region(self):
allowlisted_regions = [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1]
finding_region = "eu-west-2"
assert not is_allowlisted_in_region(allowlisted_regions, finding_region)
def test_is_allowlisted_in_check(self):
allowlisted_checks = {
"check_test": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": ["*"],
}
}
@@ -421,9 +534,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -431,9 +543,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler-test",
"",
)
@@ -441,9 +552,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION,
AWS_REGION_US_EAST_1,
"test-prowler",
"",
)
@@ -452,7 +562,6 @@ class Test_Allowlist:
is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"check_test",
"us-east-2",
"test",
@@ -464,7 +573,7 @@ class Test_Allowlist:
# Allowlist example
allowlisted_checks = {
"s3_*": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": ["*"],
}
}
@@ -472,9 +581,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"s3_bucket_public_access",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -482,9 +590,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"s3_bucket_no_mfa_delete",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler-test",
"",
)
@@ -492,9 +599,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"s3_bucket_policy_public_write_access",
AWS_REGION,
AWS_REGION_US_EAST_1,
"test-prowler",
"",
)
@@ -503,9 +609,8 @@ class Test_Allowlist:
is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"iam_user_hardware_mfa_enabled",
AWS_REGION,
AWS_REGION_US_EAST_1,
"test",
"",
)
@@ -514,7 +619,7 @@ class Test_Allowlist:
def test_is_allowlisted_lambda_generic_check(self):
allowlisted_checks = {
"lambda_*": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": ["*"],
}
}
@@ -522,9 +627,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"awslambda_function_invoke_api_operations_cloudtrail_logging_enabled",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -532,9 +636,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"awslambda_function_no_secrets_in_code",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -542,9 +645,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"awslambda_function_no_secrets_in_variables",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -552,9 +654,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"awslambda_function_not_publicly_accessible",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -562,9 +663,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"awslambda_function_url_cors_policy",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -572,9 +672,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"awslambda_function_url_public",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -582,9 +681,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"awslambda_function_using_supported_runtimes",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -592,7 +690,7 @@ class Test_Allowlist:
def test_is_allowlisted_lambda_concrete_check(self):
allowlisted_checks = {
"lambda_function_no_secrets_in_variables": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": ["*"],
}
}
@@ -600,9 +698,8 @@ class Test_Allowlist:
assert is_allowlisted_in_check(
allowlisted_checks,
AWS_ACCOUNT_NUMBER,
AWS_ACCOUNT_NUMBER,
"awslambda_function_no_secrets_in_variables",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"",
)
@@ -614,7 +711,7 @@ class Test_Allowlist:
"*": {
"Checks": {
"check_test": {
"Regions": [AWS_REGION, "eu-west-1"],
"Regions": [AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1],
"Resources": ["*"],
"Tags": ["environment=dev", "project=.*"],
}
@@ -627,7 +724,7 @@ class Test_Allowlist:
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler",
"environment=dev",
)
@@ -636,7 +733,7 @@ class Test_Allowlist:
allowlist,
AWS_ACCOUNT_NUMBER,
"check_test",
AWS_REGION,
AWS_REGION_US_EAST_1,
"prowler-test",
"environment=dev | project=prowler",
)
@@ -654,56 +751,45 @@ class Test_Allowlist:
def test_is_allowlisted_in_tags(self):
allowlist_tags = ["environment=dev", "project=prowler"]
allowlist_resource = "*"
assert is_allowlisted_in_tags(allowlist_tags, "environment=dev")
assert is_allowlisted_in_tags(
allowlist_tags,
"*",
"prowler",
"environment=dev",
)
assert is_allowlisted_in_tags(
allowlist_tags,
allowlist_resource,
"prowler-test",
"environment=dev | project=prowler",
)
assert not (
is_allowlisted_in_tags(
allowlist_tags,
allowlist_resource,
"test",
"environment=pro",
)
)
def test_is_allowlisted_in_tags_regex(self):
allowlist_tags = ["environment=(dev|test)", ".*=prowler"]
allowlist_resource = "*"
assert is_allowlisted_in_tags(
allowlist_tags,
allowlist_resource,
"prowler-test",
"environment=test | proj=prowler",
)
assert is_allowlisted_in_tags(
allowlist_tags,
allowlist_resource,
"prowler-test",
"env=prod | project=prowler",
)
assert not is_allowlisted_in_tags(
allowlist_tags,
allowlist_resource,
"prowler-test",
"environment=prod | project=myproj",
)
def test_is_allowlisted_in_tags_with_no_tags_in_finding(self):
allowlist_tags = ["environment=(dev|test)", ".*=prowler"]
finding_tags = ""
assert not is_allowlisted_in_tags(allowlist_tags, finding_tags)
def test_is_excepted(self):
# Allowlist example
exceptions = {
@@ -737,6 +823,28 @@ class Test_Allowlist:
"environment=test",
)
def test_is_excepted_all_wildcard(self):
exceptions = {
"Accounts": ["*"],
"Regions": ["*"],
"Resources": ["*"],
"Tags": ["*"],
}
assert is_excepted(
exceptions, AWS_ACCOUNT_NUMBER, "eu-south-2", "test", "environment=test"
)
assert not is_excepted(
exceptions, AWS_ACCOUNT_NUMBER, "eu-south-2", "test", None
)
def test_is_not_excepted(self):
exceptions = {
"Accounts": [AWS_ACCOUNT_NUMBER],
"Regions": ["eu-central-1", "eu-south-3"],
"Resources": ["test"],
"Tags": ["environment=test", "project=.*"],
}
assert not is_excepted(
exceptions,
AWS_ACCOUNT_NUMBER,
@@ -760,3 +868,11 @@ class Test_Allowlist:
"test",
"environment=pro",
)
def test_is_allowlisted_in_resource(self):
allowlist_resources = ["prowler", "^test", "prowler-pro"]
assert is_allowlisted_in_resource(allowlist_resources, "prowler")
assert is_allowlisted_in_resource(allowlist_resources, "prowler-test")
assert is_allowlisted_in_resource(allowlist_resources, "test-prowler")
assert not is_allowlisted_in_resource(allowlist_resources, "random")

View File

@@ -1282,3 +1282,75 @@ class Test_policy_condition_parser:
assert not is_account_only_allowed_in_condition(
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
)
def test_condition_parser_two_lists_unrestrictive(self):
condition_statement = {
"StringLike": {
"AWS:ResourceAccount": [
TRUSTED_AWS_ACCOUNT_NUMBER,
NON_TRUSTED_AWS_ACCOUNT_NUMBER,
]
},
"ArnLike": {
"AWS:SourceArn": [
f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
f"arn:aws:cloudtrail:*:{NON_TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
]
},
}
assert not is_account_only_allowed_in_condition(
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
)
def test_condition_parser_two_lists_both_restrictive(self):
condition_statement = {
"StringLike": {
"AWS:ResourceAccount": [
TRUSTED_AWS_ACCOUNT_NUMBER,
]
},
"ArnLike": {
"AWS:SourceArn": [
f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
]
},
}
assert is_account_only_allowed_in_condition(
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
)
def test_condition_parser_two_lists_first_restrictive(self):
condition_statement = {
"StringLike": {
"AWS:ResourceAccount": [
TRUSTED_AWS_ACCOUNT_NUMBER,
]
},
"ArnLike": {
"AWS:SourceArn": [
f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
f"arn:aws:cloudtrail:*:{NON_TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
]
},
}
assert is_account_only_allowed_in_condition(
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
)
def test_condition_parser_two_lists_second_restrictive(self):
condition_statement = {
"StringLike": {
"AWS:ResourceAccount": [
TRUSTED_AWS_ACCOUNT_NUMBER,
NON_TRUSTED_AWS_ACCOUNT_NUMBER,
]
},
"ArnLike": {
"AWS:SourceArn": [
f"arn:aws:cloudtrail:*:{TRUSTED_AWS_ACCOUNT_NUMBER}:trail/*",
]
},
}
assert is_account_only_allowed_in_condition(
condition_statement, TRUSTED_AWS_ACCOUNT_NUMBER
)
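The common thread across these four cases: a statement counts as restricted as soon as at least one condition value list references only the trusted account. A behavioral sketch under that assumption (not the parser itself, which also has to handle single-string values and other operators):

    def restricted_to_account(condition_statement, account_id):
        for operator_values in condition_statement.values():
            for values in operator_values.values():
                # One fully-trusted value list is enough to consider
                # the policy scoped to the account
                if all(account_id in value for value in values):
                    return True
        return False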

View File

@@ -6,7 +6,6 @@ from mock import MagicMock, patch
from prowler.config.config import prowler_version, timestamp_utc
from prowler.lib.check.models import Check_Report, load_check_metadata
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
# from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.lib.security_hub.security_hub import (
@@ -14,11 +13,13 @@ from prowler.providers.aws.lib.security_hub.security_hub import (
prepare_security_hub_findings,
verify_security_hub_integration_enabled_per_region,
)
from prowler.providers.common.models import Audit_Metadata
AWS_ACCOUNT_ID = "123456789012"
AWS_REGION_1 = "eu-west-1"
AWS_REGION_2 = "eu-west-2"
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_COMMERCIAL_PARTITION,
AWS_REGION_EU_WEST_1,
AWS_REGION_EU_WEST_2,
set_mocked_aws_audit_info,
)
# Mocking Security Hub Get Findings
make_api_call = botocore.client.BaseClient._make_api_call
@@ -32,7 +33,7 @@ def mock_make_api_call(self, operation_name, kwarg):
}
if operation_name == "DescribeHub":
return {
"HubArn": f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:hub/default",
"HubArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:hub/default",
"SubscribedAt": "2023-02-07T09:45:43.742Z",
"AutoEnableControls": True,
"ControlFindingGenerator": "STANDARD_CONTROL",
@@ -41,7 +42,7 @@ def mock_make_api_call(self, operation_name, kwarg):
if operation_name == "ListEnabledProductsForImport":
return {
"ProductSubscriptions": [
f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:product-subscription/prowler/prowler",
f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:product-subscription/prowler/prowler",
]
}
@@ -49,32 +50,6 @@ def mock_make_api_call(self, operation_name, kwarg):
class Test_SecurityHub:
def set_mocked_audit_info(self):
return AWS_Audit_Info(
session_config=None,
original_session=None,
audit_session=None,
audited_account=AWS_ACCOUNT_ID,
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
audited_identity_arn="test-arn",
audited_user_id="test",
audited_partition="aws",
profile="default",
profile_region="eu-west-1",
credentials=None,
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
audit_resources=None,
mfa_enabled=False,
audit_metadata=Audit_Metadata(
services_scanned=0,
expected_checks=[],
completed_checks=0,
audit_progress=0,
),
)
def generate_finding(self, status, region):
finding = Check_Report(
load_check_metadata(
@@ -104,14 +79,18 @@ class Test_SecurityHub:
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
def test_verify_security_hub_integration_enabled_per_region(self):
session = self.set_mocked_session(AWS_REGION_1)
assert verify_security_hub_integration_enabled_per_region(AWS_REGION_1, session)
session = self.set_mocked_session(AWS_REGION_EU_WEST_1)
assert verify_security_hub_integration_enabled_per_region(
AWS_COMMERCIAL_PARTITION, AWS_REGION_EU_WEST_1, session, AWS_ACCOUNT_NUMBER
)
def test_prepare_security_hub_findings_enabled_region_not_quiet(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("PASS", AWS_REGION_1)]
audit_info = self.set_mocked_audit_info()
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
assert prepare_security_hub_findings(
findings,
@@ -119,11 +98,11 @@ class Test_SecurityHub:
output_options,
enabled_regions,
) == {
AWS_REGION_1: [
AWS_REGION_EU_WEST_1: [
{
"SchemaVersion": "2018-10-08",
"Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_ID}-{AWS_REGION_1}-ee26b0dd4",
"ProductArn": f"arn:aws:securityhub:{AWS_REGION_1}::product/prowler/prowler",
"Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4",
"ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler",
"RecordState": "ACTIVE",
"ProductFields": {
"ProviderName": "Prowler",
@@ -131,7 +110,7 @@ class Test_SecurityHub:
"ProwlerResourceName": "test",
},
"GeneratorId": "prowler-iam_user_accesskey_unused",
"AwsAccountId": f"{AWS_ACCOUNT_ID}",
"AwsAccountId": f"{AWS_ACCOUNT_NUMBER}",
"Types": ["Software and Configuration Checks"],
"FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
"UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
@@ -144,7 +123,7 @@ class Test_SecurityHub:
"Type": "AwsIamAccessAnalyzer",
"Id": "test",
"Partition": "aws",
"Region": f"{AWS_REGION_1}",
"Region": f"{AWS_REGION_EU_WEST_1}",
}
],
"Compliance": {
@@ -160,55 +139,117 @@ class Test_SecurityHub:
},
}
],
AWS_REGION_2: [],
}
def test_prepare_security_hub_findings_quiet_INFO_finding(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("INFO", AWS_REGION_1)]
audit_info = self.set_mocked_audit_info()
findings = [self.generate_finding("INFO", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
assert prepare_security_hub_findings(
findings,
audit_info,
output_options,
enabled_regions,
) == {AWS_REGION_1: [], AWS_REGION_2: []}
) == {AWS_REGION_EU_WEST_1: []}
def test_prepare_security_hub_findings_disabled_region(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("PASS", AWS_REGION_2)]
audit_info = self.set_mocked_audit_info()
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_2)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
assert prepare_security_hub_findings(
findings,
audit_info,
output_options,
enabled_regions,
) == {AWS_REGION_1: [], AWS_REGION_2: []}
) == {AWS_REGION_EU_WEST_1: []}
def test_prepare_security_hub_findings_quiet(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=True)
findings = [self.generate_finding("PASS", AWS_REGION_1)]
audit_info = self.set_mocked_audit_info()
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
assert prepare_security_hub_findings(
findings,
audit_info,
output_options,
enabled_regions,
) == {AWS_REGION_1: [], AWS_REGION_2: []}
) == {AWS_REGION_EU_WEST_1: []}
def test_prepare_security_hub_findings_no_audited_regions(self):
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info()
assert prepare_security_hub_findings(
findings,
audit_info,
output_options,
enabled_regions,
) == {
AWS_REGION_EU_WEST_1: [
{
"SchemaVersion": "2018-10-08",
"Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4",
"ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler",
"RecordState": "ACTIVE",
"ProductFields": {
"ProviderName": "Prowler",
"ProviderVersion": prowler_version,
"ProwlerResourceName": "test",
},
"GeneratorId": "prowler-iam_user_accesskey_unused",
"AwsAccountId": f"{AWS_ACCOUNT_NUMBER}",
"Types": ["Software and Configuration Checks"],
"FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
"UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
"CreatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
"Severity": {"Label": "LOW"},
"Title": "Ensure Access Keys unused are disabled",
"Description": "test",
"Resources": [
{
"Type": "AwsIamAccessAnalyzer",
"Id": "test",
"Partition": "aws",
"Region": f"{AWS_REGION_EU_WEST_1}",
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [],
"AssociatedStandards": [],
},
"Remediation": {
"Recommendation": {
"Text": "Run sudo yum update and cross your fingers and toes.",
"Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html",
}
},
}
],
}
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
def test_batch_send_to_security_hub_one_finding(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("PASS", AWS_REGION_1)]
audit_info = self.set_mocked_audit_info()
session = self.set_mocked_session(AWS_REGION_1)
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
session = self.set_mocked_session(AWS_REGION_EU_WEST_1)
security_hub_findings = prepare_security_hub_findings(
findings,

View File

@@ -33,6 +33,7 @@ class Test_accessanalyzer_enabled:
def test_one_analyzer_not_available(self):
# Include analyzers to check
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.region = AWS_REGION_1
accessanalyzer_client.analyzers = [
Analyzer(
arn=AWS_ACCOUNT_ARN,
@@ -65,8 +66,46 @@ class Test_accessanalyzer_enabled:
assert result[0].region == AWS_REGION_1
assert result[0].resource_tags == []
def test_one_analyzer_not_available_allowlisted(self):
# Include analyzers to check
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.region = AWS_REGION_2
accessanalyzer_client.audit_config = {"allowlist_non_default_regions": True}
accessanalyzer_client.analyzers = [
Analyzer(
arn=AWS_ACCOUNT_ARN,
name=AWS_ACCOUNT_NUMBER,
status="NOT_AVAILABLE",
tags=[],
type="",
region=AWS_REGION_1,
)
]
with mock.patch(
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.AccessAnalyzer",
accessanalyzer_client,
):
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_enabled.accessanalyzer_enabled import (
accessanalyzer_enabled,
)
check = accessanalyzer_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "WARNING"
assert (
result[0].status_extended
== f"IAM Access Analyzer in account {AWS_ACCOUNT_NUMBER} is not enabled."
)
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
assert result[0].resource_arn == AWS_ACCOUNT_ARN
assert result[0].region == AWS_REGION_1
assert result[0].resource_tags == []
def test_two_analyzers(self):
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.region = AWS_REGION_1
accessanalyzer_client.analyzers = [
Analyzer(
arn=AWS_ACCOUNT_ARN,

View File

@@ -244,3 +244,88 @@ class Test_ec2_securitygroup_not_used:
assert result[0].resource_id == sg.id
assert result[0].resource_details == sg_name
assert result[0].resource_tags == []
@mock_ec2
@mock_lambda
def test_ec2_associated_sg(self):
# Create EC2 Mocked Resources
ec2 = resource("ec2", AWS_REGION_US_EAST_1)
ec2_client = client("ec2", region_name=AWS_REGION_US_EAST_1)
vpc_id = ec2_client.create_vpc(CidrBlock="10.0.0.0/16")["Vpc"]["VpcId"]
sg_name = "test-sg"
sg_name1 = "test-sg1"
sg = ec2.create_security_group(
GroupName=sg_name, Description="test", VpcId=vpc_id
)
sg1 = ec2.create_security_group(
GroupName=sg_name1, Description="test1", VpcId=vpc_id
)
ec2_client.authorize_security_group_ingress(
GroupId=sg.id,
IpPermissions=[
{
"IpProtocol": "-1",
"UserIdGroupPairs": [
{
"GroupId": sg1.id,
"Description": "Allow traffic from source SG",
}
],
}
],
)
from prowler.providers.aws.services.awslambda.awslambda_service import Lambda
from prowler.providers.aws.services.ec2.ec2_service import EC2
current_audit_info = set_mocked_aws_audit_info(
audited_regions=["us-east-1", "eu-west-1"]
)
with mock.patch(
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
new=current_audit_info,
), mock.patch(
"prowler.providers.aws.services.ec2.ec2_securitygroup_not_used.ec2_securitygroup_not_used.ec2_client",
new=EC2(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.ec2.ec2_securitygroup_not_used.ec2_securitygroup_not_used.awslambda_client",
new=Lambda(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.ec2.ec2_securitygroup_not_used.ec2_securitygroup_not_used import (
ec2_securitygroup_not_used,
)
check = ec2_securitygroup_not_used()
result = check.execute()
# One custom sg
assert len(result) == 2
assert result[0].status == "FAIL"
assert result[0].region == AWS_REGION_US_EAST_1
assert (
result[0].status_extended
== f"Security group {sg_name} ({sg.id}) it is not being used."
)
assert (
result[0].resource_arn
== f"arn:{current_audit_info.audited_partition}:ec2:{AWS_REGION_US_EAST_1}:{current_audit_info.audited_account}:security-group/{sg.id}"
)
assert result[0].resource_id == sg.id
assert result[0].resource_details == sg_name
assert result[0].resource_tags == []
assert result[1].status == "PASS"
assert result[1].region == AWS_REGION_US_EAST_1
assert (
result[1].status_extended
== f"Security group {sg_name1} ({sg1.id}) it is being used."
)
assert (
result[1].resource_arn
== f"arn:{current_audit_info.audited_partition}:ec2:{AWS_REGION_US_EAST_1}:{current_audit_info.audited_account}:security-group/{sg1.id}"
)
assert result[1].resource_id == sg1.id
assert result[1].resource_details == sg_name1
assert result[1].resource_tags == []

View File

@@ -62,6 +62,31 @@ class Test_guardduty_centrally_managed:
assert result[0].region == AWS_REGION
assert result[0].resource_arn == DETECTOR_ARN
def test_not_enabled_account_detector(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []
guardduty_client.detectors.append(
Detector(
id=AWS_ACCOUNT_NUMBER,
region=AWS_REGION,
arn=DETECTOR_ARN,
enabled_in_account=False,
)
)
with mock.patch(
"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty",
guardduty_client,
):
# Test Check
from prowler.providers.aws.services.guardduty.guardduty_centrally_managed.guardduty_centrally_managed import (
guardduty_centrally_managed,
)
check = guardduty_centrally_managed()
result = check.execute()
assert len(result) == 0
def test_detector_centralized_managed(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []
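This check and the sibling high-severity-findings check below gain the same early exit; conceptually the execute loop now starts like this (a sketch, assuming the service model's new `enabled_in_account` flag is what gates the findings, which is why the new test expects zero results):

    def execute(self):
        findings = []
        for detector in guardduty_client.detectors:
            # Skip accounts where GuardDuty itself is not enabled
            if not detector.enabled_in_account:
                continue
            ...  # existing per-detector logic
        return findings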

View File

@@ -58,6 +58,29 @@ class Test_guardduty_no_high_severity_findings:
assert result[0].resource_arn == DETECTOR_ARN
assert result[0].region == AWS_REGION
def test_not_enabled_account_detector(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []
guardduty_client.detectors.append(
Detector(
id=AWS_ACCOUNT_NUMBER,
arn=DETECTOR_ARN,
region=AWS_REGION,
enabled_in_account=False,
)
)
with mock.patch(
"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty",
guardduty_client,
):
from prowler.providers.aws.services.guardduty.guardduty_no_high_severity_findings.guardduty_no_high_severity_findings import (
guardduty_no_high_severity_findings,
)
check = guardduty_no_high_severity_findings()
result = check.execute()
assert len(result) == 0
def test_high_findings(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []

View File

@@ -7,8 +7,11 @@ from prowler.providers.aws.services.sqs.sqs_service import Queue
AWS_REGION = "eu-west-1"
AWS_ACCOUNT_NUMBER = "123456789012"
queue_id = str(uuid4())
topic_arn = f"arn:aws:sqs:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:{queue_id}"
test_queue_name = str(uuid4())
test_queue_url = (
f"https://sqs.{AWS_REGION}.amazonaws.com/{AWS_ACCOUNT_NUMBER}/{test_queue_name}"
)
test_queue_arn = f"arn:aws:sqs:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:{test_queue_name}"
test_restricted_policy = {
"Version": "2012-10-17",
@@ -19,7 +22,7 @@ test_restricted_policy = {
"Effect": "Allow",
"Principal": {"AWS": {AWS_ACCOUNT_NUMBER}},
"Action": "sqs:ReceiveMessage",
"Resource": topic_arn,
"Resource": test_queue_arn,
}
],
}
@@ -33,7 +36,7 @@ test_public_policy = {
"Effect": "Allow",
"Principal": "*",
"Action": "sqs:ReceiveMessage",
"Resource": topic_arn,
"Resource": test_queue_arn,
}
],
}
@@ -47,7 +50,7 @@ test_public_policy_with_condition_same_account_not_valid = {
"Effect": "Allow",
"Principal": "*",
"Action": "sqs:ReceiveMessage",
"Resource": topic_arn,
"Resource": test_queue_arn,
"Condition": {
"DateGreaterThan": {"aws:CurrentTime": "2009-01-31T12:00Z"},
"DateLessThan": {"aws:CurrentTime": "2009-01-31T15:00Z"},
@@ -65,7 +68,7 @@ test_public_policy_with_condition_same_account = {
"Effect": "Allow",
"Principal": "*",
"Action": "sqs:ReceiveMessage",
"Resource": topic_arn,
"Resource": test_queue_arn,
"Condition": {
"StringEquals": {"aws:SourceAccount": f"{AWS_ACCOUNT_NUMBER}"}
},
@@ -82,7 +85,7 @@ test_public_policy_with_condition_diff_account = {
"Effect": "Allow",
"Principal": "*",
"Action": "sqs:ReceiveMessage",
"Resource": topic_arn,
"Resource": test_queue_arn,
"Condition": {"StringEquals": {"aws:SourceAccount": "111122223333"}},
}
],
@@ -110,10 +113,11 @@ class Test_sqs_queues_not_publicly_accessible:
sqs_client.queues = []
sqs_client.queues.append(
Queue(
id=queue_id,
id=test_queue_url,
name=test_queue_name,
region=AWS_REGION,
policy=test_restricted_policy,
arn="arn_test",
arn=test_queue_arn,
)
)
with mock.patch(
@@ -129,8 +133,8 @@ class Test_sqs_queues_not_publicly_accessible:
assert len(result) == 1
assert result[0].status == "PASS"
assert search("is not public", result[0].status_extended)
assert result[0].resource_id == queue_id
assert result[0].resource_arn == "arn_test"
assert result[0].resource_id == test_queue_url
assert result[0].resource_arn == test_queue_arn
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION
@@ -139,10 +143,11 @@ class Test_sqs_queues_not_publicly_accessible:
sqs_client.queues = []
sqs_client.queues.append(
Queue(
id=queue_id,
id=test_queue_url,
name=test_queue_name,
region=AWS_REGION,
policy=test_public_policy,
arn="arn_test",
arn=test_queue_arn,
)
)
with mock.patch(
@@ -161,8 +166,8 @@ class Test_sqs_queues_not_publicly_accessible:
"is public because its policy allows public access",
result[0].status_extended,
)
assert result[0].resource_id == queue_id
assert result[0].resource_arn == "arn_test"
assert result[0].resource_id == test_queue_url
assert result[0].resource_arn == test_queue_arn
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION
@@ -172,10 +177,11 @@ class Test_sqs_queues_not_publicly_accessible:
sqs_client.audited_account = AWS_ACCOUNT_NUMBER
sqs_client.queues.append(
Queue(
id=queue_id,
id=test_queue_url,
name=test_queue_name,
region=AWS_REGION,
policy=test_public_policy_with_condition_same_account_not_valid,
arn="arn_test",
arn=test_queue_arn,
)
)
with mock.patch(
@@ -194,8 +200,8 @@ class Test_sqs_queues_not_publicly_accessible:
"is public because its policy allows public access",
result[0].status_extended,
)
assert result[0].resource_id == queue_id
assert result[0].resource_arn == "arn_test"
assert result[0].resource_id == test_queue_url
assert result[0].resource_arn == test_queue_arn
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION
@@ -205,10 +211,11 @@ class Test_sqs_queues_not_publicly_accessible:
sqs_client.audited_account = AWS_ACCOUNT_NUMBER
sqs_client.queues.append(
Queue(
id=queue_id,
id=test_queue_url,
name=test_queue_name,
region=AWS_REGION,
policy=test_public_policy_with_condition_same_account,
arn="arn_test",
arn=test_queue_arn,
)
)
with mock.patch(
@@ -225,10 +232,10 @@ class Test_sqs_queues_not_publicly_accessible:
assert result[0].status == "PASS"
assert (
result[0].status_extended
== f"SQS queue {queue_id} is not public because its policy only allows access from the same account."
== f"SQS queue {test_queue_url} is not public because its policy only allows access from the same account."
)
assert result[0].resource_id == queue_id
assert result[0].resource_arn == "arn_test"
assert result[0].resource_id == test_queue_url
assert result[0].resource_arn == test_queue_arn
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION
@@ -238,10 +245,11 @@ class Test_sqs_queues_not_publicly_accessible:
sqs_client.audited_account = AWS_ACCOUNT_NUMBER
sqs_client.queues.append(
Queue(
id=queue_id,
id=test_queue_url,
name=test_queue_name,
region=AWS_REGION,
policy=test_public_policy_with_condition_diff_account,
arn="arn_test",
arn=test_queue_arn,
)
)
with mock.patch(
@@ -258,9 +266,9 @@ class Test_sqs_queues_not_publicly_accessible:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"SQS queue {queue_id} is public because its policy allows public access, and the condition does not limit access to resources within the same account."
== f"SQS queue {test_queue_url} is public because its policy allows public access, and the condition does not limit access to resources within the same account."
)
assert result[0].resource_id == queue_id
assert result[0].resource_arn == "arn_test"
assert result[0].resource_id == test_queue_url
assert result[0].resource_arn == test_queue_arn
assert result[0].resource_tags == []
assert result[0].region == AWS_REGION

View File

@@ -8,8 +8,11 @@ AWS_REGION = "eu-west-1"
AWS_ACCOUNT_NUMBER = "123456789012"
test_kms_key_id = str(uuid4())
queue_id = str(uuid4())
topic_arn = f"arn:aws:sqs:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:{queue_id}"
test_queue_name = str(uuid4())
test_queue_url = (
f"https://sqs.{AWS_REGION}.amazonaws.com/{AWS_ACCOUNT_NUMBER}/{test_queue_name}"
)
test_queue_arn = f"arn:aws:sqs:{AWS_REGION}:{AWS_ACCOUNT_NUMBER}:{test_queue_name}"
class Test_sqs_queues_server_side_encryption_enabled:
@@ -33,10 +36,11 @@ class Test_sqs_queues_server_side_encryption_enabled:
sqs_client.queues = []
sqs_client.queues.append(
Queue(
id=queue_id,
id=test_queue_url,
name=test_queue_name,
region=AWS_REGION,
kms_key_id=test_kms_key_id,
arn="arn_test",
arn=test_queue_arn,
)
)
with mock.patch(
@@ -52,17 +56,18 @@ class Test_sqs_queues_server_side_encryption_enabled:
assert len(result) == 1
assert result[0].status == "PASS"
assert search("is using Server Side Encryption", result[0].status_extended)
assert result[0].resource_id == queue_id
assert result[0].resource_arn == "arn_test"
assert result[0].resource_id == test_queue_url
assert result[0].resource_arn == test_queue_arn
def test_queues_no_encryption(self):
sqs_client = mock.MagicMock
sqs_client.queues = []
sqs_client.queues.append(
Queue(
id=queue_id,
id=test_queue_url,
name=test_queue_name,
region=AWS_REGION,
arn="arn_test",
arn=test_queue_arn,
)
)
with mock.patch(
@@ -80,5 +85,5 @@ class Test_sqs_queues_server_side_encryption_enabled:
assert search(
"is not using Server Side Encryption", result[0].status_extended
)
assert result[0].resource_id == queue_id
assert result[0].resource_arn == "arn_test"
assert result[0].resource_id == test_queue_url
assert result[0].resource_arn == test_queue_arn

View File

@@ -110,9 +110,25 @@ class Test_SQS_Service:
sqs = SQS(audit_info)
assert len(sqs.queues) == 1
assert sqs.queues[0].id == queue["QueueUrl"]
assert sqs.queues[0].name == test_queue
assert sqs.queues[0].name == sqs.queues[0].arn.split(":")[-1]
assert sqs.queues[0].name == sqs.queues[0].id.split("/")[-1]
assert sqs.queues[0].arn == test_queue_arn
assert sqs.queues[0].region == AWS_REGION
assert sqs.queues[0].tags == [{"test": "test"}]
# moto does not properly mock this and is hardcoded to return 1000 queues
# so this test currently always fails
# @mock_sqs
# # Test SQS list queues for over 1000 queues
# def test__list_queues__pagination_over_a_thousand(self):
# sqs_client = client("sqs", region_name=AWS_REGION)
# for i in range(0,1050):
# sqs_client.create_queue(QueueName=f"{test_queue}-{i}", tags={"test": "test"})
# audit_info = self.set_mocked_audit_info()
# sqs = SQS(audit_info)
# assert len(sqs.queues) > 1000
@mock_sqs
# Test SQS get queue attributes
def test__get_queue_attributes__(self):

View File

@@ -0,0 +1,50 @@
from azure.identity import AzureAuthorityHosts
from msrestazure.azure_cloud import (
AZURE_CHINA_CLOUD,
AZURE_GERMAN_CLOUD,
AZURE_US_GOV_CLOUD,
)
from prowler.providers.azure.lib.regions.regions import get_regions_config
class Test_azure_regions:
def test_get_regions_config(self):
allowed_regions = [
"AzureCloud",
"AzureChinaCloud",
"AzureUSGovernment",
"AzureGermanCloud",
]
expected_output = {
"AzureCloud": {
"authority": None,
"base_url": "https://management.azure.com",
"credential_scopes": ["https://management.azure.com/.default"],
},
"AzureChinaCloud": {
"authority": AzureAuthorityHosts.AZURE_CHINA,
"base_url": AZURE_CHINA_CLOUD.endpoints.resource_manager,
"credential_scopes": [
AZURE_CHINA_CLOUD.endpoints.resource_manager + "/.default"
],
},
"AzureUSGovernment": {
"authority": AzureAuthorityHosts.AZURE_GOVERNMENT,
"base_url": AZURE_US_GOV_CLOUD.endpoints.resource_manager,
"credential_scopes": [
AZURE_US_GOV_CLOUD.endpoints.resource_manager + "/.default"
],
},
"AzureGermanCloud": {
"authority": AzureAuthorityHosts.AZURE_GERMANY,
"base_url": AZURE_GERMAN_CLOUD.endpoints.resource_manager,
"credential_scopes": [
AZURE_GERMAN_CLOUD.endpoints.resource_manager + "/.default"
],
},
}
for region in allowed_regions:
region_config = get_regions_config(region)
assert region_config == expected_output[region]

View File

@@ -11,6 +11,7 @@ from prowler.providers.azure.azure_provider import Azure_Provider
from prowler.providers.azure.lib.audit_info.models import (
Azure_Audit_Info,
Azure_Identity_Info,
Azure_Region_Config,
)
from prowler.providers.common.audit_info import (
Audit_Info,
@@ -31,6 +32,7 @@ mock_azure_audit_info = Azure_Audit_Info(
audit_metadata=None,
audit_resources=None,
audit_config=None,
azure_region_config=Azure_Region_Config(),
)
mock_set_audit_info = Audit_Info()
@@ -132,8 +134,8 @@ class Test_Set_Audit_Info:
"prowler.providers.common.audit_info.azure_audit_info",
new=mock_azure_audit_info,
)
@patch.object(Azure_Provider, "__set_credentials__", new=mock_set_azure_credentials)
@patch.object(Azure_Provider, "__set_identity_info__", new=mock_set_identity_info)
@patch.object(Azure_Provider, "__get_credentials__", new=mock_set_azure_credentials)
@patch.object(Azure_Provider, "__get_identity_info__", new=mock_set_identity_info)
def test_set_audit_info_azure(self):
provider = "azure"
arguments = {
@@ -150,6 +152,7 @@ class Test_Set_Audit_Info:
"browser_auth": None,
"managed_entity_auth": None,
"config_file": default_config_file_path,
"azure_region": "AzureCloud",
}
audit_info = set_provider_audit_info(provider, arguments)

View File

@@ -0,0 +1,87 @@
import importlib
import logging
import tempfile
from argparse import Namespace
from os import path
from mock import patch
from prowler.providers.common.clean import clean_provider_local_output_directories
class Test_Common_Clean:
def set_provider_input_args(self, provider):
set_args_function = f"set_{provider}_input_args"
args = getattr(
getattr(importlib.import_module(__name__), __class__.__name__),
set_args_function,
)(self)
return args
def set_aws_input_args(self):
args = Namespace()
args.provider = "aws"
args.output_bucket = "test-bucket"
args.output_bucket_no_assume = None
return args
def set_azure_input_args(self):
args = Namespace()
args.provider = "azure"
return args
def test_clean_provider_local_output_directories_non_initialized(self, caplog):
provider = "azure"
input_args = self.set_provider_input_args(provider)
caplog.set_level(logging.INFO)
clean_provider_local_output_directories(input_args)
assert (
f"Cleaning local output directories not initialized for provider {provider}:"
in caplog.text
)
def test_clean_aws_local_output_directories_non_default_dir_output_bucket(self):
provider = "aws"
input_args = self.set_provider_input_args(provider)
with tempfile.TemporaryDirectory() as temp_dir:
input_args.output_directory = temp_dir
clean_provider_local_output_directories(input_args)
assert not path.exists(input_args.output_directory)
def test_clean_aws_local_output_directories_non_default_dir_output_bucket_no_assume(
self,
):
provider = "aws"
input_args = self.set_provider_input_args(provider)
input_args.output_bucket = None
input_args.output_bucket_no_assume = "test"
with tempfile.TemporaryDirectory() as temp_dir:
input_args.output_directory = temp_dir
clean_provider_local_output_directories(input_args)
assert not path.exists(input_args.output_directory)
def test_clean_aws_local_output_directories_default_dir_output_bucket(self):
provider = "aws"
input_args = self.set_provider_input_args(provider)
with tempfile.TemporaryDirectory() as temp_dir:
with patch(
"prowler.providers.common.clean.default_output_directory", new=temp_dir
):
input_args.output_directory = temp_dir
clean_provider_local_output_directories(input_args)
assert path.exists(input_args.output_directory)
def test_clean_aws_local_output_directories_default_dir_output_bucket_no_assume(
self,
):
provider = "aws"
input_args = self.set_provider_input_args(provider)
input_args.output_bucket_no_assume = "test"
input_args.output_bucket = None
with tempfile.TemporaryDirectory() as temp_dir:
with patch(
"prowler.providers.common.clean.default_output_directory", new=temp_dir
):
input_args.output_directory = temp_dir
clean_provider_local_output_directories(input_args)
assert path.exists(input_args.output_directory)

View File

@@ -9,6 +9,7 @@ from prowler.providers.aws.lib.audit_info.audit_info import AWS_Audit_Info
from prowler.providers.azure.lib.audit_info.audit_info import (
Azure_Audit_Info,
Azure_Identity_Info,
Azure_Region_Config,
)
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.outputs import (
@@ -33,6 +34,7 @@ class Test_Common_Output_Options:
audit_metadata=None,
audit_resources=None,
audit_config=None,
azure_region_config=Azure_Region_Config(),
)
return audit_info
@@ -332,7 +334,7 @@ class Test_Common_Output_Options:
<b>AWS Account:</b> {audit_info.audited_account}
</li>
<li class="list-group-item">
<b>AWS-CLI Profile:</b> {audit_info.profile}
<b>AWS-CLI Profile:</b> default
</li>
<li class="list-group-item">
<b>Audited Regions:</b> All Regions