Compare commits

..

15 Commits

Author SHA1 Message Date
github-actions
1ebf865b0c chore(release): 3.11.2 2023-11-14 13:58:26 +00:00
Sergio Garcia
3a3bb44f11 fix(GuardDuty): only execute checks if GuardDuty enabled (#3028) 2023-11-14 14:14:05 +01:00
Nacho Rivera
f8e713a544 feat(azure regions): support non default azure region (#3013)
Co-authored-by: Pepe Fagoaga <pepe@verica.io>
2023-11-14 13:17:48 +01:00
Pepe Fagoaga
573f1eba56 fix(securityhub): Use enabled_regions instead of audited_regions (#3029) 2023-11-14 12:57:54 +01:00
simone ragonesi
a36be258d8 chore: modify latest version msg (#3036)
Signed-off-by: r3drun3 <simone.ragonesi@sighup.io>
2023-11-14 12:11:55 +01:00
Sergio Garcia
690ec057c3 fix(ec2_securitygroup_not_used): check if security group is associated (#3026) 2023-11-14 12:03:01 +01:00
dependabot[bot]
2681feb1f6 build(deps): bump azure-storage-blob from 12.18.3 to 12.19.0 (#3034)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 11:47:42 +01:00
Sergio Garcia
e662adb8c5 chore(regions_update): Changes in regions for AWS services. (#3035)
Co-authored-by: sergargar <sergargar@users.noreply.github.com>
2023-11-14 11:47:24 +01:00
Sergio Garcia
c94bd96c93 chore(args): make compatible severity and services arguments (#3024) 2023-11-14 11:26:53 +01:00
dependabot[bot]
6d85433194 build(deps): bump alive-progress from 3.1.4 to 3.1.5 (#3033)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 09:41:32 +01:00
dependabot[bot]
7a6092a779 build(deps): bump google-api-python-client from 2.106.0 to 2.107.0 (#3032)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 09:16:00 +01:00
dependabot[bot]
4c84529aed build(deps-dev): bump pytest-xdist from 3.3.1 to 3.4.0 (#3031)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 08:48:02 +01:00
Sergio Garcia
512d3e018f chore(accessanalyzer): include service in allowlist_non_default_regions (#3025) 2023-11-14 08:00:17 +01:00
dependabot[bot]
c6aff985c9 build(deps-dev): bump moto from 4.2.7 to 4.2.8 (#3030)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-14 07:54:34 +01:00
Sergio Garcia
7fadf31a2b chore(release): update Prowler Version to 3.11.1 (#3021)
Co-authored-by: github-actions <noreply@github.com>
2023-11-10 12:53:07 +01:00
36 changed files with 665 additions and 140 deletions

View File

@@ -0,0 +1,16 @@
# Use non default Azure regions
Microsoft provides clouds for compliance with regional laws, which are available for your use.
By default, Prowler uses the `AzureCloud` cloud, which is the commercial one (you can list all the available clouds with `az cloud list --output table`).
At the time of writing this documentation the available Azure Clouds from different regions are the following:
- AzureCloud
- AzureChinaCloud
- AzureUSGovernment
- AzureGermanCloud
If you want to change the default one, you must include the flag `--azure-region`, e.g.:
```console
prowler azure --az-cli-auth --azure-region AzureChinaCloud
```

View File

@@ -56,6 +56,7 @@ nav:
- Boto3 Configuration: tutorials/aws/boto3-configuration.md
- Azure:
- Authentication: tutorials/azure/authentication.md
- Non default clouds: tutorials/azure/use-non-default-cloud.md
- Subscriptions: tutorials/azure/subscriptions.md
- Google Cloud:
- Authentication: tutorials/gcp/authentication.md

83
poetry.lock generated
View File

@@ -11,15 +11,32 @@ files = [
{file = "about_time-4.2.1-py3-none-any.whl", hash = "sha256:8bbf4c75fe13cbd3d72f49a03b02c5c7dca32169b6d49117c257e7eb3eaee341"},
]
[[package]]
name = "adal"
version = "1.2.7"
description = "Note: This library is already replaced by MSAL Python, available here: https://pypi.org/project/msal/ .ADAL Python remains available here as a legacy. The ADAL for Python library makes it easy for python application to authenticate to Azure Active Directory (AAD) in order to access AAD protected web resources."
optional = false
python-versions = "*"
files = [
{file = "adal-1.2.7-py2.py3-none-any.whl", hash = "sha256:2a7451ed7441ddbc57703042204a3e30ef747478eea022c70f789fc7f084bc3d"},
{file = "adal-1.2.7.tar.gz", hash = "sha256:d74f45b81317454d96e982fd1c50e6fb5c99ac2223728aea8764433a39f566f1"},
]
[package.dependencies]
cryptography = ">=1.1.0"
PyJWT = ">=1.0.0,<3"
python-dateutil = ">=2.1.0,<3"
requests = ">=2.0.0,<3"
[[package]]
name = "alive-progress"
version = "3.1.4"
version = "3.1.5"
description = "A new kind of Progress Bar, with real-time throughput, ETA, and very cool animations!"
optional = false
python-versions = ">=3.7, <4"
files = [
{file = "alive-progress-3.1.4.tar.gz", hash = "sha256:74a95d8d0d42bc99d3a3725dbd06ebb852245f1b64e301a7c375b92b22663f7b"},
{file = "alive_progress-3.1.4-py3-none-any.whl", hash = "sha256:c80ad87ce9c1054b01135a87fae69ecebbfc2107497ae87cbe6aec7e534903db"},
{file = "alive-progress-3.1.5.tar.gz", hash = "sha256:42e399a66c8150dc507602dff7b7953f105ef11faf97ddaa6d27b1cbf45c4c98"},
{file = "alive_progress-3.1.5-py3-none-any.whl", hash = "sha256:347220c1858e3abe137fa0746895668c04df09c5261a13dc03f05795e8a29be5"},
]
[package.dependencies]
@@ -212,13 +229,13 @@ msrest = ">=0.7.1"
[[package]]
name = "azure-storage-blob"
version = "12.18.3"
version = "12.19.0"
description = "Microsoft Azure Blob Storage Client Library for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "azure-storage-blob-12.18.3.tar.gz", hash = "sha256:d8ced0deee3367fa3d4f3d1a03cd9edadf4440c0a371f503d623fa6c807554ee"},
{file = "azure_storage_blob-12.18.3-py3-none-any.whl", hash = "sha256:c278dde2ac41857a68d615c9f2b36d894ba877a7e84d62795603c7e79d0bb5e9"},
{file = "azure-storage-blob-12.19.0.tar.gz", hash = "sha256:26c0a4320a34a3c2a1b74528ba6812ebcb632a04cd67b1c7377232c4b01a5897"},
{file = "azure_storage_blob-12.19.0-py3-none-any.whl", hash = "sha256:7bbc2c9c16678f7a420367fef6b172ba8730a7e66df7f4d7a55d5b3c8216615b"},
]
[package.dependencies]
@@ -902,13 +919,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"]
[[package]]
name = "google-api-python-client"
version = "2.106.0"
version = "2.107.0"
description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-api-python-client-2.106.0.tar.gz", hash = "sha256:f6a3862be2f6e5e0536d7bd47b5af3f24ac0b9147c76c830cafb3329d71d5724"},
{file = "google_api_python_client-2.106.0-py2.py3-none-any.whl", hash = "sha256:c47c0dae5dd20aa43e4ea184566fe59d0c8fd0b86dd223b29040d8ea4f7ed6ea"},
{file = "google-api-python-client-2.107.0.tar.gz", hash = "sha256:ef6d4c1a17fe9ec0894fc6d4f61e751c4b859fb33f2ab5b881ceb0b80ba442ba"},
{file = "google_api_python_client-2.107.0-py2.py3-none-any.whl", hash = "sha256:51d7bf676f41a77b00b7b9c72ace0c1db3dd5a4dd392a13ae897cf4f571a3539"},
]
[package.dependencies]
@@ -1421,13 +1438,13 @@ test = ["pytest", "pytest-cov"]
[[package]]
name = "moto"
version = "4.2.7"
version = "4.2.8"
description = ""
optional = false
python-versions = ">=3.7"
files = [
{file = "moto-4.2.7-py2.py3-none-any.whl", hash = "sha256:3e0ef388900448485cd6eff18e9f7fcaa6cf4560b6fb536ba2e2e1278a5ecc59"},
{file = "moto-4.2.7.tar.gz", hash = "sha256:1298006aaa6996b886658eb194cac0e3a5679c9fcce6cb13e741ccc5a7247abb"},
{file = "moto-4.2.8-py2.py3-none-any.whl", hash = "sha256:e78b49ae8acee06a865e4963174bdf974dd66398fb3bb831a7428498506c0c56"},
{file = "moto-4.2.8.tar.gz", hash = "sha256:9b5a363f36f8c3fb36388764e7b8c01c615da2f2cba7da3e681680de14bfc769"},
]
[package.dependencies]
@@ -1442,29 +1459,29 @@ werkzeug = ">=0.5,<2.2.0 || >2.2.0,<2.2.1 || >2.2.1"
xmltodict = "*"
[package.extras]
all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
all = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
apigateway = ["PyYAML (>=5.1)", "ecdsa (!=0.15)", "openapi-spec-validator (>=0.5.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
apigatewayv2 = ["PyYAML (>=5.1)"]
appsync = ["graphql-core"]
awslambda = ["docker (>=3.0.0)"]
batch = ["docker (>=3.0.0)"]
cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
cloudformation = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
cognitoidp = ["ecdsa (!=0.15)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"]
ds = ["sshpubkeys (>=3.1.0)"]
dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.1)"]
dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.1)"]
dynamodb = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.2)"]
dynamodbstreams = ["docker (>=3.0.0)", "py-partiql-parser (==0.4.2)"]
ebs = ["sshpubkeys (>=3.1.0)"]
ec2 = ["sshpubkeys (>=3.1.0)"]
efs = ["sshpubkeys (>=3.1.0)"]
eks = ["sshpubkeys (>=3.1.0)"]
glue = ["pyparsing (>=3.0.7)"]
iotdata = ["jsondiff (>=1.1.2)"]
proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"]
proxy = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=2.5.1)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "multipart", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
resourcegroupstaggingapi = ["PyYAML (>=5.1)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"]
route53resolver = ["sshpubkeys (>=3.1.0)"]
s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.4.1)"]
s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.4.1)"]
server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.1)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
s3 = ["PyYAML (>=5.1)", "py-partiql-parser (==0.4.2)"]
s3crc32c = ["PyYAML (>=5.1)", "crc32c", "py-partiql-parser (==0.4.2)"]
server = ["PyYAML (>=5.1)", "aws-xray-sdk (>=0.93,!=0.96)", "cfn-lint (>=0.40.0)", "docker (>=3.0.0)", "ecdsa (!=0.15)", "flask (!=2.2.0,!=2.2.1)", "flask-cors", "graphql-core", "jsondiff (>=1.1.2)", "openapi-spec-validator (>=0.5.0)", "py-partiql-parser (==0.4.2)", "pyparsing (>=3.0.7)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "setuptools", "sshpubkeys (>=3.1.0)"]
ssm = ["PyYAML (>=5.1)"]
xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"]
@@ -1537,6 +1554,22 @@ requests-oauthlib = ">=0.5.0"
[package.extras]
async = ["aiodns", "aiohttp (>=3.0)"]
[[package]]
name = "msrestazure"
version = "0.6.4"
description = "AutoRest swagger generator Python client runtime. Azure-specific module."
optional = false
python-versions = "*"
files = [
{file = "msrestazure-0.6.4-py2.py3-none-any.whl", hash = "sha256:3de50f56147ef529b31e099a982496690468ecef33f0544cb0fa0cfe1e1de5b9"},
{file = "msrestazure-0.6.4.tar.gz", hash = "sha256:a06f0dabc9a6f5efe3b6add4bd8fb623aeadacf816b7a35b0f89107e0544d189"},
]
[package.dependencies]
adal = ">=0.6.0,<2.0.0"
msrest = ">=0.6.0,<2.0.0"
six = "*"
[[package]]
name = "mypy-extensions"
version = "1.0.0"
@@ -1988,13 +2021,13 @@ pytest = "*"
[[package]]
name = "pytest-xdist"
version = "3.3.1"
version = "3.4.0"
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"},
{file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"},
{file = "pytest-xdist-3.4.0.tar.gz", hash = "sha256:3a94a931dd9e268e0b871a877d09fe2efb6175c2c23d60d56a6001359002b832"},
{file = "pytest_xdist-3.4.0-py3-none-any.whl", hash = "sha256:e513118bf787677a427e025606f55e95937565e06dfaac8d87f55301e57ae607"},
]
[package.dependencies]
@@ -2889,4 +2922,4 @@ docs = ["mkdocs", "mkdocs-material"]
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
content-hash = "d765b7a8c5d49f3a20316d575093430785a29ed5968d315b727455dea803331c"
content-hash = "594dc3dc4952b294042203c3338b6959fed04eb6eb181796a4ae8c27cde5bf32"

View File

@@ -247,7 +247,7 @@ def prowler():
for region in security_hub_regions:
# Save the regions where AWS Security Hub is enabled
if verify_security_hub_integration_enabled_per_region(
region, audit_info.audit_session
region, audit_info.audit_session, audit_info.audited_account
):
aws_security_enabled_regions.append(region)

View File

@@ -11,7 +11,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "3.11.1"
prowler_version = "3.11.2"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png"
@@ -70,7 +70,9 @@ def check_current_version():
if latest_version != prowler_version:
return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
else:
return f"{prowler_version_string} (it is the latest version, yay!)"
return (
f"{prowler_version_string} (You are running the latest version, yay!)"
)
except requests.RequestException:
return f"{prowler_version_string}"
except Exception:

View File

@@ -2,7 +2,7 @@
aws:
# AWS Global Configuration
# aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
# aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
allowlist_non_default_regions: False
# If you want to allowlist/mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w allowlist.yaml`:
# Allowlist:

View File

@@ -36,6 +36,10 @@ def load_checks_to_execute(
# Check check's severity
if bulk_checks_metadata[check].Severity in severities:
checks_to_execute.add(check)
if service_list:
checks_to_execute = (
recover_checks_from_service(service_list, provider) & checks_to_execute
)
# Handle if there are checks passed using -C/--checks-file
elif checks_file:

View File

@@ -220,7 +220,7 @@ Detailed documentation at https://docs.prowler.cloud
group.add_argument(
"-s", "--services", nargs="+", help="List of services to be executed."
)
group.add_argument(
common_checks_parser.add_argument(
"--severity",
nargs="+",
help="List of severities to be executed [informational, low, medium, high, critical]",

View File

@@ -498,17 +498,6 @@
]
}
},
"appfabric": {
"regions": {
"aws": [
"ap-northeast-1",
"eu-west-1",
"us-east-1"
],
"aws-cn": [],
"aws-us-gov": []
}
},
"appflow": {
"regions": {
"aws": [
@@ -2087,14 +2076,18 @@
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
"eu-south-1",
"eu-south-2",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",
@@ -2316,15 +2309,22 @@
"ap-northeast-2",
"ap-northeast-3",
"ap-south-1",
"ap-south-2",
"ap-southeast-1",
"ap-southeast-2",
"ap-southeast-3",
"ap-southeast-4",
"ca-central-1",
"eu-central-1",
"eu-central-2",
"eu-north-1",
"eu-south-1",
"eu-south-2",
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",
"us-east-1",
@@ -6727,6 +6727,7 @@
"eu-west-1",
"eu-west-2",
"eu-west-3",
"il-central-1",
"me-central-1",
"me-south-1",
"sa-east-1",

View File

@@ -14,9 +14,11 @@ def prepare_security_hub_findings(
findings: [], audit_info: AWS_Audit_Info, output_options, enabled_regions: []
) -> dict:
security_hub_findings_per_region = {}
# Create a key per region
for region in audit_info.audited_regions:
# Create a key per audited region
for region in enabled_regions:
security_hub_findings_per_region[region] = []
for finding in findings:
# We don't send the INFO findings to AWS Security Hub
if finding.status == "INFO":
@@ -49,6 +51,7 @@ def prepare_security_hub_findings(
def verify_security_hub_integration_enabled_per_region(
region: str,
session: session.Session,
aws_account_number: str,
) -> bool:
f"""verify_security_hub_integration_enabled returns True if the {SECURITY_HUB_INTEGRATION_NAME} is enabled for the given region. Otherwise returns false."""
prowler_integration_enabled = False
@@ -62,7 +65,8 @@ def verify_security_hub_integration_enabled_per_region(
security_hub_client.describe_hub()
# Check if Prowler integration is enabled in Security Hub
if "prowler/prowler" not in str(
security_hub_prowler_integration_arn = f"arn:aws:securityhub:{region}:{aws_account_number}:product-subscription/{SECURITY_HUB_INTEGRATION_NAME}"
if security_hub_prowler_integration_arn not in str(
security_hub_client.list_enabled_products_for_import()
):
logger.error(

View File

@@ -19,17 +19,23 @@ class accessanalyzer_enabled(Check):
f"IAM Access Analyzer {analyzer.name} is enabled."
)
elif analyzer.status == "NOT_AVAILABLE":
report.status = "FAIL"
report.status_extended = (
f"IAM Access Analyzer in account {analyzer.name} is not enabled."
)
else:
report.status = "FAIL"
report.status_extended = (
f"IAM Access Analyzer {analyzer.name} is not active."
)
if analyzer.status == "NOT_AVAILABLE":
report.status = "FAIL"
report.status_extended = f"IAM Access Analyzer in account {analyzer.name} is not enabled."
else:
report.status = "FAIL"
report.status_extended = (
f"IAM Access Analyzer {analyzer.name} is not active."
)
if (
accessanalyzer_client.audit_config.get(
"allowlist_non_default_regions", False
)
and not analyzer.region == accessanalyzer_client.region
):
report.status = "WARNING"
findings.append(report)

View File

@@ -18,10 +18,18 @@ class ec2_securitygroup_not_used(Check):
report.status = "PASS"
report.status_extended = f"Security group {security_group.name} ({security_group.id}) it is being used."
sg_in_lambda = False
sg_associated = False
for function in awslambda_client.functions.values():
if security_group.id in function.security_groups:
sg_in_lambda = True
if len(security_group.network_interfaces) == 0 and not sg_in_lambda:
for sg in ec2_client.security_groups:
if security_group.id in sg.associated_sgs:
sg_associated = True
if (
len(security_group.network_interfaces) == 0
and not sg_in_lambda
and not sg_associated
):
report.status = "FAIL"
report.status_extended = f"Security group {security_group.name} ({security_group.id}) it is not being used."

View File

@@ -117,6 +117,7 @@ class EC2(AWSService):
if not self.audit_resources or (
is_resource_filtered(arn, self.audit_resources)
):
associated_sgs = []
# check if sg has public access to all ports
all_public_ports = False
for ingress_rule in sg["IpPermissions"]:
@@ -128,7 +129,10 @@ class EC2(AWSService):
in self.audited_checks
):
all_public_ports = True
break
# check associated security groups
for sg_group in ingress_rule.get("UserIdGroupPairs", []):
if sg_group.get("GroupId"):
associated_sgs.append(sg_group["GroupId"])
self.security_groups.append(
SecurityGroup(
name=sg["GroupName"],
@@ -138,6 +142,7 @@ class EC2(AWSService):
ingress_rules=sg["IpPermissions"],
egress_rules=sg["IpPermissionsEgress"],
public_ports=all_public_ports,
associated_sgs=associated_sgs,
vpc_id=sg["VpcId"],
tags=sg.get("Tags"),
)
@@ -464,6 +469,7 @@ class SecurityGroup(BaseModel):
id: str
vpc_id: str
public_ports: bool
associated_sgs: list
network_interfaces: list[str] = []
ingress_rules: list[dict]
egress_rules: list[dict]

View File

@@ -6,7 +6,7 @@ class guardduty_centrally_managed(Check):
def execute(self):
findings = []
for detector in guardduty_client.detectors:
if detector.id:
if detector.id and detector.enabled_in_account:
report = Check_Report_AWS(self.metadata())
report.region = detector.region
report.resource_id = detector.id

View File

@@ -6,7 +6,7 @@ class guardduty_no_high_severity_findings(Check):
def execute(self):
findings = []
for detector in guardduty_client.detectors:
if detector.id:
if detector.id and detector.enabled_in_account:
report = Check_Report_AWS(self.metadata())
report.region = detector.region
report.resource_id = detector.id

View File

@@ -7,6 +7,7 @@ from msgraph.core import GraphClient
from prowler.lib.logger import logger
from prowler.providers.azure.lib.audit_info.models import Azure_Identity_Info
from prowler.providers.azure.lib.regions.regions import get_regions_config
class Azure_Provider:
@@ -18,12 +19,14 @@ class Azure_Provider:
managed_entity_auth: bool,
subscription_ids: list,
tenant_id: str,
region: str,
):
logger.info("Instantiating Azure Provider ...")
self.credentials = self.__set_credentials__(
self.region_config = self.__get_region_config__(region)
self.credentials = self.__get_credentials__(
az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id
)
self.identity = self.__set_identity_info__(
self.identity = self.__get_identity_info__(
self.credentials,
az_cli_auth,
sp_env_auth,
@@ -32,7 +35,10 @@ class Azure_Provider:
subscription_ids,
)
def __set_credentials__(
def __get_region_config__(self, region):
return get_regions_config(region)
def __get_credentials__(
self, az_cli_auth, sp_env_auth, browser_auth, managed_entity_auth, tenant_id
):
# Browser auth creds cannot be set with DefaultAzureCredentials()
@@ -52,6 +58,8 @@ class Azure_Provider:
exclude_shared_token_cache_credential=True,
# Azure Auth using PowerShell is not supported
exclude_powershell_credential=True,
# set Authority of a Microsoft Entra endpoint
authority=self.region_config["authority"],
)
except Exception as error:
logger.critical("Failed to retrieve azure credentials")
@@ -61,7 +69,6 @@ class Azure_Provider:
sys.exit(1)
else:
try:
print(tenant_id)
credentials = InteractiveBrowserCredential(tenant_id=tenant_id)
except Exception as error:
logger.critical("Failed to retrieve azure credentials")
@@ -83,7 +90,7 @@ class Azure_Provider:
)
sys.exit(1)
def __set_identity_info__(
def __get_identity_info__(
self,
credentials,
az_cli_auth,
@@ -153,7 +160,11 @@ class Azure_Provider:
logger.info(
"Trying to subscriptions and tenant ids to populate identity structure ..."
)
subscriptions_client = SubscriptionClient(credential=credentials)
subscriptions_client = SubscriptionClient(
credential=credentials,
base_url=self.region_config["base_url"],
credential_scopes=self.region_config["credential_scopes"],
)
if not subscription_ids:
logger.info("Scanning all the Azure subscriptions...")
for subscription in subscriptions_client.subscriptions.list():
@@ -195,3 +206,6 @@ class Azure_Provider:
def get_identity(self):
return self.identity
def get_region_config(self):
return self.region_config

View File

@@ -1,3 +1,6 @@
from argparse import ArgumentTypeError
def init_parser(self):
"""Init the Azure Provider CLI parser"""
azure_parser = self.subparsers.add_parser(
@@ -40,3 +43,27 @@ def init_parser(self):
default=None,
help="Azure Tenant ID to be used with --browser-auth option",
)
# Regions
azure_regions_subparser = azure_parser.add_argument_group("Regions")
azure_regions_subparser.add_argument(
"--azure-region",
nargs="?",
default="AzureCloud",
type=validate_azure_region,
help="Azure region from `az cloud list --output table`, by default AzureCloud",
)
def validate_azure_region(region):
    """Validate that *region* names a supported Azure cloud.

    Returns the region unchanged when it is one of the known Azure clouds;
    raises ``ArgumentTypeError`` otherwise so argparse reports a clean error.
    """
    # Known Azure clouds, as listed by `az cloud list --output table`.
    allowed = (
        "AzureChinaCloud",
        "AzureUSGovernment",
        "AzureGermanCloud",
        "AzureCloud",
    )
    if region in allowed:
        return region
    raise ArgumentTypeError(
        f"Region {region} not allowed, allowed regions are {' '.join(allowed)}"
    )

View File

@@ -1,6 +1,7 @@
from prowler.providers.azure.lib.audit_info.models import (
Azure_Audit_Info,
Azure_Identity_Info,
Azure_Region_Config,
)
azure_audit_info = Azure_Audit_Info(
@@ -9,4 +10,5 @@ azure_audit_info = Azure_Audit_Info(
audit_resources=None,
audit_metadata=None,
audit_config=None,
azure_region_config=Azure_Region_Config(),
)

View File

@@ -13,6 +13,13 @@ class Azure_Identity_Info(BaseModel):
subscriptions: dict = {}
class Azure_Region_Config(BaseModel):
name: str = ""
authority: str = None
base_url: str = ""
credential_scopes: list = []
@dataclass
class Azure_Audit_Info:
credentials: DefaultAzureCredential
@@ -20,12 +27,20 @@ class Azure_Audit_Info:
audit_resources: Optional[Any]
audit_metadata: Optional[Any]
audit_config: dict
azure_region_config: Azure_Region_Config
def __init__(
self, credentials, identity, audit_metadata, audit_resources, audit_config
self,
credentials,
identity,
audit_metadata,
audit_resources,
audit_config,
azure_region_config,
):
self.credentials = credentials
self.identity = identity
self.audit_metadata = audit_metadata
self.audit_resources = audit_resources
self.audit_config = audit_config
self.azure_region_config = azure_region_config

View File

@@ -0,0 +1,38 @@
from azure.identity import AzureAuthorityHosts
from msrestazure.azure_cloud import (
AZURE_CHINA_CLOUD,
AZURE_GERMAN_CLOUD,
AZURE_US_GOV_CLOUD,
)
def get_regions_config(region):
    """Return the endpoint configuration for the given Azure cloud *region*.

    The returned dict carries three keys used to build Azure SDK clients:
    ``authority`` (Microsoft Entra authority host, ``None`` for the default),
    ``base_url`` (resource-manager endpoint) and ``credential_scopes``.
    Raises ``KeyError`` for an unknown region name.
    """
    # The commercial cloud uses the public management endpoint directly.
    if region == "AzureCloud":
        return {
            "authority": None,
            "base_url": "https://management.azure.com",
            "credential_scopes": ["https://management.azure.com/.default"],
        }
    # Sovereign clouds: pair each with its authority host and cloud metadata.
    sovereign_clouds = {
        "AzureChinaCloud": (AzureAuthorityHosts.AZURE_CHINA, AZURE_CHINA_CLOUD),
        "AzureUSGovernment": (AzureAuthorityHosts.AZURE_GOVERNMENT, AZURE_US_GOV_CLOUD),
        "AzureGermanCloud": (AzureAuthorityHosts.AZURE_GERMANY, AZURE_GERMAN_CLOUD),
    }
    authority, cloud = sovereign_clouds[region]
    resource_manager = cloud.endpoints.resource_manager
    return {
        "authority": authority,
        "base_url": resource_manager,
        "credential_scopes": [resource_manager + "/.default"],
    }

View File

@@ -9,17 +9,27 @@ class AzureService:
audit_info: Azure_Audit_Info,
):
self.clients = self.__set_clients__(
audit_info.identity.subscriptions, audit_info.credentials, service
audit_info.identity.subscriptions,
audit_info.credentials,
service,
audit_info.azure_region_config,
)
self.subscriptions = audit_info.identity.subscriptions
def __set_clients__(self, subscriptions, credentials, service):
def __set_clients__(self, subscriptions, credentials, service, region_config):
clients = {}
try:
for display_name, id in subscriptions.items():
clients.update(
{display_name: service(credential=credentials, subscription_id=id)}
{
display_name: service(
credential=credentials,
subscription_id=id,
base_url=region_config.base_url,
credential_scopes=region_config.credential_scopes,
)
}
)
except Exception as error:
logger.error(

View File

@@ -26,7 +26,10 @@ from prowler.providers.aws.lib.resource_api_tagging.resource_api_tagging import
)
from prowler.providers.azure.azure_provider import Azure_Provider
from prowler.providers.azure.lib.audit_info.audit_info import azure_audit_info
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
from prowler.providers.azure.lib.audit_info.models import (
Azure_Audit_Info,
Azure_Region_Config,
)
from prowler.providers.gcp.gcp_provider import GCP_Provider
from prowler.providers.gcp.lib.audit_info.audit_info import gcp_audit_info
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
@@ -63,7 +66,7 @@ GCP Account: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} GCP Project IDs: {Fore.Y
report = f"""
This report is being generated using the identity below:
Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL}
Azure Tenant IDs: {Fore.YELLOW}[{" ".join(audit_info.identity.tenant_ids)}]{Style.RESET_ALL} Azure Tenant Domain: {Fore.YELLOW}[{audit_info.identity.domain}]{Style.RESET_ALL} Azure Region: {Fore.YELLOW}[{audit_info.azure_region_config.name}]{Style.RESET_ALL}
Azure Subscriptions: {Fore.YELLOW}{printed_subscriptions}{Style.RESET_ALL}
Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RESET_ALL} Azure Identity ID: {Fore.YELLOW}[{audit_info.identity.identity_id}]{Style.RESET_ALL}
"""
@@ -282,6 +285,10 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE
browser_auth = arguments.get("browser_auth")
managed_entity_auth = arguments.get("managed_entity_auth")
tenant_id = arguments.get("tenant_id")
logger.info("Checking if region is different than default one")
region = arguments.get("azure_region")
if (
not az_cli_auth
and not sp_env_auth
@@ -303,9 +310,17 @@ Azure Identity Type: {Fore.YELLOW}[{audit_info.identity.identity_type}]{Style.RE
managed_entity_auth,
subscription_ids,
tenant_id,
region,
)
azure_audit_info.credentials = azure_provider.get_credentials()
azure_audit_info.identity = azure_provider.get_identity()
region_config = azure_provider.get_region_config()
azure_audit_info.azure_region_config = Azure_Region_Config(
name=region,
authority=region_config["authority"],
base_url=region_config["base_url"],
credential_scopes=region_config["credential_scopes"],
)
if not arguments.get("only_logs"):
self.print_azure_credentials(azure_audit_info)

View File

@@ -22,10 +22,10 @@ packages = [
{include = "prowler"}
]
readme = "README.md"
version = "3.11.1"
version = "3.11.2"
[tool.poetry.dependencies]
alive-progress = "3.1.4"
alive-progress = "3.1.5"
awsipranges = "0.3.3"
azure-identity = "1.15.0"
azure-mgmt-authorization = "4.0.0"
@@ -33,16 +33,17 @@ azure-mgmt-security = "5.0.0"
azure-mgmt-sql = "3.0.1"
azure-mgmt-storage = "21.1.0"
azure-mgmt-subscription = "3.1.1"
azure-storage-blob = "12.18.3"
azure-storage-blob = "12.19.0"
boto3 = "1.26.165"
botocore = "1.29.165"
colorama = "0.4.6"
detect-secrets = "1.4.0"
google-api-python-client = "2.106.0"
google-api-python-client = "2.107.0"
google-auth-httplib2 = "^0.1.0"
mkdocs = {version = "1.5.3", optional = true}
mkdocs-material = {version = "9.4.8", optional = true}
msgraph-core = "0.2.2"
msrestazure = "^0.6.4"
pydantic = "1.10.13"
python = "^3.9"
schema = "0.7.5"
@@ -61,13 +62,13 @@ docker = "6.1.3"
flake8 = "6.1.0"
freezegun = "1.2.2"
mock = "5.1.0"
moto = "4.2.7"
moto = "4.2.8"
openapi-spec-validator = "0.7.1"
pylint = "3.0.2"
pytest = "7.4.3"
pytest-cov = "4.1.0"
pytest-randomly = "3.15.0"
pytest-xdist = "3.3.1"
pytest-xdist = "3.4.0"
safety = "2.3.5"
vulture = "2.10"

View File

@@ -63,7 +63,7 @@ class Test_Config:
def test_check_current_version_with_latest(self):
assert (
check_current_version()
== f"Prowler {MOCK_PROWLER_VERSION} (it is the latest version, yay!)"
== f"Prowler {MOCK_PROWLER_VERSION} (You are running the latest version, yay!)"
)
@mock.patch(

View File

@@ -1,9 +1,11 @@
import uuid
from argparse import ArgumentTypeError
import pytest
from mock import patch
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.providers.azure.lib.arguments.arguments import validate_azure_region
prowler_command = "prowler"
@@ -502,6 +504,18 @@ class Test_Parser:
assert service_1 in parsed.services
assert service_2 in parsed.services
def test_checks_parser_services_with_severity(self):
argument1 = "--services"
service_1 = "iam"
argument2 = "--severity"
severity = "low"
command = [prowler_command, argument1, service_1, argument2, severity]
parsed = self.parser.parse(command)
assert len(parsed.services) == 1
assert service_1 in parsed.services
assert len(parsed.severity) == 1
assert severity in parsed.severity
def test_checks_parser_informational_severity(self):
argument = "--severity"
severity = "informational"
@@ -1038,6 +1052,14 @@ class Test_Parser:
assert parsed.subscription_ids[0] == subscription_1
assert parsed.subscription_ids[1] == subscription_2
def test_parser_azure_region(self):
argument = "--azure-region"
region = "AzureChinaCloud"
command = [prowler_command, "azure", argument, region]
parsed = self.parser.parse(command)
assert parsed.provider == "azure"
assert parsed.azure_region == region
# Test AWS flags with Azure provider
def test_parser_azure_with_aws_flag(self, capsys):
command = [prowler_command, "azure", "-p"]
@@ -1080,3 +1102,33 @@ class Test_Parser:
assert len(parsed.project_ids) == 2
assert parsed.project_ids[0] == project_1
assert parsed.project_ids[1] == project_2
def test_validate_azure_region_valid_regions(self):
expected_regions = [
"AzureChinaCloud",
"AzureUSGovernment",
"AzureGermanCloud",
"AzureCloud",
]
input_regions = [
"AzureChinaCloud",
"AzureUSGovernment",
"AzureGermanCloud",
"AzureCloud",
]
for region in input_regions:
assert validate_azure_region(region) in expected_regions
def test_validate_azure_region_invalid_regions(self):
expected_regions = [
"AzureChinaCloud",
"AzureUSGovernment",
"AzureGermanCloud",
"AzureCloud",
]
invalid_region = "non-valid-region"
with pytest.raises(
ArgumentTypeError,
match=f"Region {invalid_region} not allowed, allowed regions are {' '.join(expected_regions)}",
):
validate_azure_region(invalid_region)

View File

@@ -11,6 +11,7 @@ from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.azure.lib.audit_info.models import (
Azure_Audit_Info,
Azure_Identity_Info,
Azure_Region_Config,
)
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
@@ -76,6 +77,7 @@ class Test_Slack_Integration:
audit_resources=None,
audit_metadata=None,
audit_config=None,
azure_region_config=Azure_Region_Config(),
)
assert create_message_identity("aws", aws_audit_info) == (
f"AWS Account *{aws_audit_info.audited_account}*",

View File

@@ -5,6 +5,7 @@ from prowler.providers.common.models import Audit_Metadata
AWS_REGION_US_EAST_1 = "us-east-1"
AWS_REGION_EU_WEST_1 = "eu-west-1"
AWS_REGION_EU_WEST_2 = "eu-west-2"
AWS_PARTITION = "aws"
AWS_ACCOUNT_NUMBER = "123456789012"
AWS_ACCOUNT_ARN = f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root"

View File

@@ -6,7 +6,6 @@ from mock import MagicMock, patch
from prowler.config.config import prowler_version, timestamp_utc
from prowler.lib.check.models import Check_Report, load_check_metadata
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
# from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.aws.lib.security_hub.security_hub import (
@@ -14,11 +13,12 @@ from prowler.providers.aws.lib.security_hub.security_hub import (
prepare_security_hub_findings,
verify_security_hub_integration_enabled_per_region,
)
from prowler.providers.common.models import Audit_Metadata
AWS_ACCOUNT_ID = "123456789012"
AWS_REGION_1 = "eu-west-1"
AWS_REGION_2 = "eu-west-2"
from tests.providers.aws.audit_info_utils import (
AWS_ACCOUNT_NUMBER,
AWS_REGION_EU_WEST_1,
AWS_REGION_EU_WEST_2,
set_mocked_aws_audit_info,
)
# Mocking Security Hub Get Findings
make_api_call = botocore.client.BaseClient._make_api_call
@@ -32,7 +32,7 @@ def mock_make_api_call(self, operation_name, kwarg):
}
if operation_name == "DescribeHub":
return {
"HubArn": f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:hub/default",
"HubArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:hub/default",
"SubscribedAt": "2023-02-07T09:45:43.742Z",
"AutoEnableControls": True,
"ControlFindingGenerator": "STANDARD_CONTROL",
@@ -41,7 +41,7 @@ def mock_make_api_call(self, operation_name, kwarg):
if operation_name == "ListEnabledProductsForImport":
return {
"ProductSubscriptions": [
f"arn:aws:securityhub:{AWS_REGION_1}:{AWS_ACCOUNT_ID}:product-subscription/prowler/prowler",
f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}:{AWS_ACCOUNT_NUMBER}:product-subscription/prowler/prowler",
]
}
@@ -49,32 +49,6 @@ def mock_make_api_call(self, operation_name, kwarg):
class Test_SecurityHub:
def set_mocked_audit_info(self):
return AWS_Audit_Info(
session_config=None,
original_session=None,
audit_session=None,
audited_account=AWS_ACCOUNT_ID,
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_ID}:root",
audited_identity_arn="test-arn",
audited_user_id="test",
audited_partition="aws",
profile="default",
profile_region="eu-west-1",
credentials=None,
assumed_role_info=None,
audited_regions=["eu-west-2", "eu-west-1"],
organizations_metadata=None,
audit_resources=None,
mfa_enabled=False,
audit_metadata=Audit_Metadata(
services_scanned=0,
expected_checks=[],
completed_checks=0,
audit_progress=0,
),
)
def generate_finding(self, status, region):
finding = Check_Report(
load_check_metadata(
@@ -104,14 +78,18 @@ class Test_SecurityHub:
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
def test_verify_security_hub_integration_enabled_per_region(self):
session = self.set_mocked_session(AWS_REGION_1)
assert verify_security_hub_integration_enabled_per_region(AWS_REGION_1, session)
session = self.set_mocked_session(AWS_REGION_EU_WEST_1)
assert verify_security_hub_integration_enabled_per_region(
AWS_REGION_EU_WEST_1, session, AWS_ACCOUNT_NUMBER
)
def test_prepare_security_hub_findings_enabled_region_not_quiet(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("PASS", AWS_REGION_1)]
audit_info = self.set_mocked_audit_info()
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
assert prepare_security_hub_findings(
findings,
@@ -119,11 +97,11 @@ class Test_SecurityHub:
output_options,
enabled_regions,
) == {
AWS_REGION_1: [
AWS_REGION_EU_WEST_1: [
{
"SchemaVersion": "2018-10-08",
"Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_ID}-{AWS_REGION_1}-ee26b0dd4",
"ProductArn": f"arn:aws:securityhub:{AWS_REGION_1}::product/prowler/prowler",
"Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4",
"ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler",
"RecordState": "ACTIVE",
"ProductFields": {
"ProviderName": "Prowler",
@@ -131,7 +109,7 @@ class Test_SecurityHub:
"ProwlerResourceName": "test",
},
"GeneratorId": "prowler-iam_user_accesskey_unused",
"AwsAccountId": f"{AWS_ACCOUNT_ID}",
"AwsAccountId": f"{AWS_ACCOUNT_NUMBER}",
"Types": ["Software and Configuration Checks"],
"FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
"UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
@@ -144,7 +122,7 @@ class Test_SecurityHub:
"Type": "AwsIamAccessAnalyzer",
"Id": "test",
"Partition": "aws",
"Region": f"{AWS_REGION_1}",
"Region": f"{AWS_REGION_EU_WEST_1}",
}
],
"Compliance": {
@@ -160,55 +138,117 @@ class Test_SecurityHub:
},
}
],
AWS_REGION_2: [],
}
def test_prepare_security_hub_findings_quiet_INFO_finding(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("INFO", AWS_REGION_1)]
audit_info = self.set_mocked_audit_info()
findings = [self.generate_finding("INFO", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
assert prepare_security_hub_findings(
findings,
audit_info,
output_options,
enabled_regions,
) == {AWS_REGION_1: [], AWS_REGION_2: []}
) == {AWS_REGION_EU_WEST_1: []}
def test_prepare_security_hub_findings_disabled_region(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("PASS", AWS_REGION_2)]
audit_info = self.set_mocked_audit_info()
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_2)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
assert prepare_security_hub_findings(
findings,
audit_info,
output_options,
enabled_regions,
) == {AWS_REGION_1: [], AWS_REGION_2: []}
) == {AWS_REGION_EU_WEST_1: []}
def test_prepare_security_hub_findings_quiet(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=True)
findings = [self.generate_finding("PASS", AWS_REGION_1)]
audit_info = self.set_mocked_audit_info()
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
assert prepare_security_hub_findings(
findings,
audit_info,
output_options,
enabled_regions,
) == {AWS_REGION_1: [], AWS_REGION_2: []}
) == {AWS_REGION_EU_WEST_1: []}
def test_prepare_security_hub_findings_no_audited_regions(self):
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info()
assert prepare_security_hub_findings(
findings,
audit_info,
output_options,
enabled_regions,
) == {
AWS_REGION_EU_WEST_1: [
{
"SchemaVersion": "2018-10-08",
"Id": f"prowler-iam_user_accesskey_unused-{AWS_ACCOUNT_NUMBER}-{AWS_REGION_EU_WEST_1}-ee26b0dd4",
"ProductArn": f"arn:aws:securityhub:{AWS_REGION_EU_WEST_1}::product/prowler/prowler",
"RecordState": "ACTIVE",
"ProductFields": {
"ProviderName": "Prowler",
"ProviderVersion": prowler_version,
"ProwlerResourceName": "test",
},
"GeneratorId": "prowler-iam_user_accesskey_unused",
"AwsAccountId": f"{AWS_ACCOUNT_NUMBER}",
"Types": ["Software and Configuration Checks"],
"FirstObservedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
"UpdatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
"CreatedAt": timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ"),
"Severity": {"Label": "LOW"},
"Title": "Ensure Access Keys unused are disabled",
"Description": "test",
"Resources": [
{
"Type": "AwsIamAccessAnalyzer",
"Id": "test",
"Partition": "aws",
"Region": f"{AWS_REGION_EU_WEST_1}",
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [],
"AssociatedStandards": [],
},
"Remediation": {
"Recommendation": {
"Text": "Run sudo yum update and cross your fingers and toes.",
"Url": "https://myfp.com/recommendations/dangerous_things_and_how_to_fix_them.html",
}
},
}
],
}
@patch("botocore.client.BaseClient._make_api_call", new=mock_make_api_call)
def test_batch_send_to_security_hub_one_finding(self):
enabled_regions = [AWS_REGION_1]
enabled_regions = [AWS_REGION_EU_WEST_1]
output_options = self.set_mocked_output_options(is_quiet=False)
findings = [self.generate_finding("PASS", AWS_REGION_1)]
audit_info = self.set_mocked_audit_info()
session = self.set_mocked_session(AWS_REGION_1)
findings = [self.generate_finding("PASS", AWS_REGION_EU_WEST_1)]
audit_info = set_mocked_aws_audit_info(
audited_regions=[AWS_REGION_EU_WEST_1, AWS_REGION_EU_WEST_2]
)
session = self.set_mocked_session(AWS_REGION_EU_WEST_1)
security_hub_findings = prepare_security_hub_findings(
findings,

View File

@@ -33,6 +33,7 @@ class Test_accessanalyzer_enabled:
def test_one_analyzer_not_available(self):
# Include analyzers to check
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.region = AWS_REGION_1
accessanalyzer_client.analyzers = [
Analyzer(
arn=AWS_ACCOUNT_ARN,
@@ -65,8 +66,46 @@ class Test_accessanalyzer_enabled:
assert result[0].region == AWS_REGION_1
assert result[0].resource_tags == []
def test_one_analyzer_not_available_allowlisted(self):
# Include analyzers to check
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.region = AWS_REGION_2
accessanalyzer_client.audit_config = {"allowlist_non_default_regions": True}
accessanalyzer_client.analyzers = [
Analyzer(
arn=AWS_ACCOUNT_ARN,
name=AWS_ACCOUNT_NUMBER,
status="NOT_AVAILABLE",
tags=[],
type="",
region=AWS_REGION_1,
)
]
with mock.patch(
"prowler.providers.aws.services.accessanalyzer.accessanalyzer_service.AccessAnalyzer",
accessanalyzer_client,
):
from prowler.providers.aws.services.accessanalyzer.accessanalyzer_enabled.accessanalyzer_enabled import (
accessanalyzer_enabled,
)
check = accessanalyzer_enabled()
result = check.execute()
assert len(result) == 1
assert result[0].status == "WARNING"
assert (
result[0].status_extended
== f"IAM Access Analyzer in account {AWS_ACCOUNT_NUMBER} is not enabled."
)
assert result[0].resource_id == AWS_ACCOUNT_NUMBER
assert result[0].resource_arn == AWS_ACCOUNT_ARN
assert result[0].region == AWS_REGION_1
assert result[0].resource_tags == []
def test_two_analyzers(self):
accessanalyzer_client = mock.MagicMock
accessanalyzer_client.region = AWS_REGION_1
accessanalyzer_client.analyzers = [
Analyzer(
arn=AWS_ACCOUNT_ARN,

View File

@@ -244,3 +244,88 @@ class Test_ec2_securitygroup_not_used:
assert result[0].resource_id == sg.id
assert result[0].resource_details == sg_name
assert result[0].resource_tags == []
@mock_ec2
@mock_lambda
def test_ec2_associated_sg(self):
# Create EC2 Mocked Resources
ec2 = resource("ec2", AWS_REGION_US_EAST_1)
ec2_client = client("ec2", region_name=AWS_REGION_US_EAST_1)
vpc_id = ec2_client.create_vpc(CidrBlock="10.0.0.0/16")["Vpc"]["VpcId"]
sg_name = "test-sg"
sg_name1 = "test-sg1"
sg = ec2.create_security_group(
GroupName=sg_name, Description="test", VpcId=vpc_id
)
sg1 = ec2.create_security_group(
GroupName=sg_name1, Description="test1", VpcId=vpc_id
)
ec2_client.authorize_security_group_ingress(
GroupId=sg.id,
IpPermissions=[
{
"IpProtocol": "-1",
"UserIdGroupPairs": [
{
"GroupId": sg1.id,
"Description": "Allow traffic from source SG",
}
],
}
],
)
from prowler.providers.aws.services.awslambda.awslambda_service import Lambda
from prowler.providers.aws.services.ec2.ec2_service import EC2
current_audit_info = set_mocked_aws_audit_info(
audited_regions=["us-east-1", "eu-west-1"]
)
with mock.patch(
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
new=current_audit_info,
), mock.patch(
"prowler.providers.aws.services.ec2.ec2_securitygroup_not_used.ec2_securitygroup_not_used.ec2_client",
new=EC2(current_audit_info),
), mock.patch(
"prowler.providers.aws.services.ec2.ec2_securitygroup_not_used.ec2_securitygroup_not_used.awslambda_client",
new=Lambda(current_audit_info),
):
# Test Check
from prowler.providers.aws.services.ec2.ec2_securitygroup_not_used.ec2_securitygroup_not_used import (
ec2_securitygroup_not_used,
)
check = ec2_securitygroup_not_used()
result = check.execute()
# One custom sg
assert len(result) == 2
assert result[0].status == "FAIL"
assert result[0].region == AWS_REGION_US_EAST_1
assert (
result[0].status_extended
== f"Security group {sg_name} ({sg.id}) it is not being used."
)
assert (
result[0].resource_arn
== f"arn:{current_audit_info.audited_partition}:ec2:{AWS_REGION_US_EAST_1}:{current_audit_info.audited_account}:security-group/{sg.id}"
)
assert result[0].resource_id == sg.id
assert result[0].resource_details == sg_name
assert result[0].resource_tags == []
assert result[1].status == "PASS"
assert result[1].region == AWS_REGION_US_EAST_1
assert (
result[1].status_extended
== f"Security group {sg_name1} ({sg1.id}) it is being used."
)
assert (
result[1].resource_arn
== f"arn:{current_audit_info.audited_partition}:ec2:{AWS_REGION_US_EAST_1}:{current_audit_info.audited_account}:security-group/{sg1.id}"
)
assert result[1].resource_id == sg1.id
assert result[1].resource_details == sg_name1
assert result[1].resource_tags == []

View File

@@ -62,6 +62,31 @@ class Test_guardduty_centrally_managed:
assert result[0].region == AWS_REGION
assert result[0].resource_arn == DETECTOR_ARN
def test_not_enabled_account_detector(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []
guardduty_client.detectors.append(
Detector(
id=AWS_ACCOUNT_NUMBER,
region=AWS_REGION,
arn=DETECTOR_ARN,
enabled_in_account=False,
)
)
with mock.patch(
"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty",
guardduty_client,
):
# Test Check
from prowler.providers.aws.services.guardduty.guardduty_centrally_managed.guardduty_centrally_managed import (
guardduty_centrally_managed,
)
check = guardduty_centrally_managed()
result = check.execute()
assert len(result) == 0
def test_detector_centralized_managed(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []

View File

@@ -58,6 +58,29 @@ class Test_guardduty_no_high_severity_findings:
assert result[0].resource_arn == DETECTOR_ARN
assert result[0].region == AWS_REGION
def test_not_enabled_account_detector(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []
guardduty_client.detectors.append(
Detector(
id=AWS_ACCOUNT_NUMBER,
arn=DETECTOR_ARN,
region=AWS_REGION,
enabled_in_account=False,
)
)
with mock.patch(
"prowler.providers.aws.services.guardduty.guardduty_service.GuardDuty",
guardduty_client,
):
from prowler.providers.aws.services.guardduty.guardduty_no_high_severity_findings.guardduty_no_high_severity_findings import (
guardduty_no_high_severity_findings,
)
check = guardduty_no_high_severity_findings()
result = check.execute()
assert len(result) == 0
def test_high_findings(self):
guardduty_client = mock.MagicMock
guardduty_client.detectors = []

View File

@@ -0,0 +1,50 @@
from azure.identity import AzureAuthorityHosts
from msrestazure.azure_cloud import (
AZURE_CHINA_CLOUD,
AZURE_GERMAN_CLOUD,
AZURE_US_GOV_CLOUD,
)
from prowler.providers.azure.lib.regions.regions import get_regions_config
class Test_azure_regions:
def test_get_regions_config(self):
allowed_regions = [
"AzureCloud",
"AzureChinaCloud",
"AzureUSGovernment",
"AzureGermanCloud",
]
expected_output = {
"AzureCloud": {
"authority": None,
"base_url": "https://management.azure.com",
"credential_scopes": ["https://management.azure.com/.default"],
},
"AzureChinaCloud": {
"authority": AzureAuthorityHosts.AZURE_CHINA,
"base_url": AZURE_CHINA_CLOUD.endpoints.resource_manager,
"credential_scopes": [
AZURE_CHINA_CLOUD.endpoints.resource_manager + "/.default"
],
},
"AzureUSGovernment": {
"authority": AzureAuthorityHosts.AZURE_GOVERNMENT,
"base_url": AZURE_US_GOV_CLOUD.endpoints.resource_manager,
"credential_scopes": [
AZURE_US_GOV_CLOUD.endpoints.resource_manager + "/.default"
],
},
"AzureGermanCloud": {
"authority": AzureAuthorityHosts.AZURE_GERMANY,
"base_url": AZURE_GERMAN_CLOUD.endpoints.resource_manager,
"credential_scopes": [
AZURE_GERMAN_CLOUD.endpoints.resource_manager + "/.default"
],
},
}
for region in allowed_regions:
region_config = get_regions_config(region)
assert region_config == expected_output[region]

View File

@@ -11,6 +11,7 @@ from prowler.providers.azure.azure_provider import Azure_Provider
from prowler.providers.azure.lib.audit_info.models import (
Azure_Audit_Info,
Azure_Identity_Info,
Azure_Region_Config,
)
from prowler.providers.common.audit_info import (
Audit_Info,
@@ -31,6 +32,7 @@ mock_azure_audit_info = Azure_Audit_Info(
audit_metadata=None,
audit_resources=None,
audit_config=None,
azure_region_config=Azure_Region_Config(),
)
mock_set_audit_info = Audit_Info()
@@ -132,8 +134,8 @@ class Test_Set_Audit_Info:
"prowler.providers.common.audit_info.azure_audit_info",
new=mock_azure_audit_info,
)
@patch.object(Azure_Provider, "__set_credentials__", new=mock_set_azure_credentials)
@patch.object(Azure_Provider, "__set_identity_info__", new=mock_set_identity_info)
@patch.object(Azure_Provider, "__get_credentials__", new=mock_set_azure_credentials)
@patch.object(Azure_Provider, "__get_identity_info__", new=mock_set_identity_info)
def test_set_audit_info_azure(self):
provider = "azure"
arguments = {
@@ -150,6 +152,7 @@ class Test_Set_Audit_Info:
"browser_auth": None,
"managed_entity_auth": None,
"config_file": default_config_file_path,
"azure_region": "AzureCloud",
}
audit_info = set_provider_audit_info(provider, arguments)

View File

@@ -9,6 +9,7 @@ from prowler.providers.aws.lib.audit_info.audit_info import AWS_Audit_Info
from prowler.providers.azure.lib.audit_info.audit_info import (
Azure_Audit_Info,
Azure_Identity_Info,
Azure_Region_Config,
)
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.outputs import (
@@ -33,6 +34,7 @@ class Test_Common_Output_Options:
audit_metadata=None,
audit_resources=None,
audit_config=None,
azure_region_config=Azure_Region_Config(),
)
return audit_info