Compare commits

..

1 Commits
3.5.1 ... 3.3.0

Author SHA1 Message Date
github-actions
0f6b46a39d chore(release): 3.3.0 2023-03-16 17:32:05 +00:00
674 changed files with 10089 additions and 32873 deletions

View File

@@ -69,7 +69,7 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Build and push container image (latest)
- name: Build container image (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@v2
with:
@@ -77,11 +77,11 @@ jobs:
tags: |
${{ secrets.DOCKER_HUB_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
${{ secrets.PUBLIC_ECR_REPOSITORY }}/${{ env.IMAGE_NAME }}:${{ env.LATEST_TAG }}
file: ${{ env.DOCKERFILE_PATH }}
file: ${{ env.DOCKERFILE_PATH }}
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and push container image (release)
- name: Build container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v2
with:

View File

@@ -6,6 +6,7 @@ on:
env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
GITHUB_BRANCH: master
jobs:
release-prowler-job:
@@ -16,7 +17,8 @@ jobs:
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
with:
ref: ${{ env.GITHUB_BRANCH }}
- name: Install dependencies
run: |
pipx install poetry
@@ -46,7 +48,6 @@ jobs:
with:
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}."
base: master
branch: release-${{ env.RELEASE_TAG }}
labels: "status/waiting-for-revision, severity/low"
title: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}"
@@ -58,6 +59,13 @@ jobs:
### License
By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license.
# Create pull request to github.com/Homebrew/homebrew-core to update prowler formula
- name: Bump Homebrew formula
uses: mislav/bump-homebrew-formula-action@v2
with:
formula-name: prowler
env:
COMMITTER_TOKEN: ${{ secrets.PROWLER_ACCESS_TOKEN }}
- name: Replicate PyPi Package
run: |
rm -rf ./dist && rm -rf ./build && rm -rf prowler.egg-info
@@ -68,11 +76,3 @@ jobs:
run: |
poetry config pypi-token.pypi ${{ secrets.PYPI_API_TOKEN }}
poetry publish
# Create pull request to github.com/Homebrew/homebrew-core to update prowler formula
- name: Bump Homebrew formula
uses: mislav/bump-homebrew-formula-action@v2
with:
formula-name: prowler
base-branch: release-${{ env.RELEASE_TAG }}
env:
COMMITTER_TOKEN: ${{ secrets.PROWLER_ACCESS_TOKEN }}

View File

@@ -19,7 +19,6 @@ repos:
hooks:
- id: pretty-format-toml
args: [--autofix]
files: pyproject.toml
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
@@ -57,11 +56,10 @@ repos:
args: ["--ignore=E266,W503,E203,E501,W605"]
- repo: https://github.com/python-poetry/poetry
rev: 1.4.0 # add version here
rev: 1.4.0 # add version here
hooks:
- id: poetry-check
- id: poetry-lock
args: ["--no-update"]
- repo: https://github.com/hadolint/hadolint
rev: v2.12.1-beta
@@ -76,15 +74,6 @@ repos:
entry: bash -c 'pylint --disable=W,C,R,E -j 0 -rn -sn prowler/'
language: system
- id: trufflehog
name: TruffleHog
description: Detect secrets in your data.
# entry: bash -c 'trufflehog git file://. --only-verified --fail'
# For running trufflehog in docker, use the following entry instead:
entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["commit", "push"]
- id: pytest-check
name: pytest-check
entry: bash -c 'pytest tests -n auto'

View File

@@ -11,10 +11,11 @@
</p>
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://pypi.org/project/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypi.org/project/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler-cloud/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
<a href="https://pypistats.org/packages/prowler-cloud"><img alt="PyPI Prowler-Cloud Downloads" src="https://img.shields.io/pypi/dw/prowler-cloud.svg?label=prowler-cloud%20downloads"></a>
<a href="https://formulae.brew.sh/formula/prowler#default"><img alt="Brew Prowler Downloads" src="https://img.shields.io/homebrew/installs/dm/prowler?label=brew%20downloads"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/cloud/build/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/image-size/toniblyx/prowler"></a>
@@ -33,14 +34,14 @@
# Description
`Prowler` is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
`Prowler` is an Open Source security tool to perform AWS and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
# 📖 Documentation
The full documentation can now be found at [https://docs.prowler.cloud](https://docs.prowler.cloud)
## Looking for Prowler v2 documentation?
For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prowler/tree/2.12.1.
@@ -53,7 +54,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
pip install prowler
prowler -v
```
More details at https://docs.prowler.cloud
More details at https://docs.prowler.cloud
## Containers
@@ -62,7 +63,7 @@ The available versions of Prowler are the following:
- `latest`: in sync with master branch (bear in mind that it is not a stable version)
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases), those are stable releases.
- `stable`: this tag always point to the latest release.
The container images are available here:
- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
@@ -84,7 +85,7 @@ python prowler.py -v
You can run Prowler from your workstation, an EC2 instance, Fargate or any other container, Codebuild, CloudShell and Cloud9.
![Architecture](https://github.com/prowler-cloud/prowler/assets/38561120/080261d9-773d-4af1-af79-217a273e3176)
![Architecture](https://github.com/prowler-cloud/prowler/blob/62c1ce73bbcdd6b9e5ba03dfcae26dfd165defd9/docs/img/architecture.png?raw=True)
# 📝 Requirements
@@ -163,22 +164,6 @@ Regarding the subscription scope, Prowler by default scans all the subscriptions
- `Reader`
## Google Cloud Platform
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer
> `prowler` will scan the project associated with the credentials.
# 💻 Basic Usage
To run prowler, you will need to specify the provider (e.g aws or azure):
@@ -253,14 +238,12 @@ prowler azure [--sp-env-auth, --az-cli-auth, --browser-auth, --managed-identity-
```
> By default, `prowler` will scan all Azure subscriptions.
## Google Cloud Platform
Optionally, you can provide the location of an application credential JSON file with the following argument:
```console
prowler gcp --credentials-file path
```
# 🎉 New Features
- Python: we got rid of all bash and it is now all in Python.
- Faster: huge performance improvements (same account from 2.5 hours to 4 minutes).
- Developers and community: we have made it easier to contribute with new checks and new compliance frameworks. We also included unit tests.
- Multi-cloud: in addition to AWS, we have added Azure, we plan to include GCP and OCI soon, let us know if you want to contribute!
# 📃 License

View File

@@ -79,21 +79,3 @@ Regarding the subscription scope, Prowler by default scans all the subscriptions
- `Security Reader`
- `Reader`
## Google Cloud
### GCP Authentication
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer
> `prowler` will scan the project associated with the credentials.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 283 KiB

After

Width:  |  Height:  |  Size: 258 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 631 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 320 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 220 KiB

View File

@@ -16,7 +16,7 @@ For **Prowler v2 Documentation**, please go [here](https://github.com/prowler-cl
## About Prowler
**Prowler** is an Open Source security tool to perform AWS, Azure and Google Cloud security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
**Prowler** is an Open Source security tool to perform AWS and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
@@ -40,7 +40,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
* `Python >= 3.9`
* `Python pip >= 3.9`
* AWS, GCP and/or Azure credentials
* AWS and/or Azure credentials
_Commands_:
@@ -54,7 +54,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
_Requirements_:
* Have `docker` installed: https://docs.docker.com/get-docker/.
* AWS, GCP and/or Azure credentials
* AWS and/or Azure credentials
* In the command below, change `-v` to your local directory path in order to access the reports.
_Commands_:
@@ -71,7 +71,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
_Requirements for Ubuntu 20.04.3 LTS_:
* AWS, GCP and/or Azure credentials
* AWS and/or Azure credentials
* Install python 3.9 with: `sudo apt-get install python3.9`
* Remove python 3.8 to avoid conflicts if you can: `sudo apt-get remove python3.8`
* Make sure you have the python3 distutils package installed: `sudo apt-get install python3-distutils`
@@ -91,7 +91,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
_Requirements for Developers_:
* AWS, GCP and/or Azure credentials
* AWS and/or Azure credentials
* `git`, `Python >= 3.9`, `pip` and `poetry` installed (`pip install poetry`)
_Commands_:
@@ -108,7 +108,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
_Requirements_:
* AWS, GCP and/or Azure credentials
* AWS and/or Azure credentials
* Latest Amazon Linux 2 should come with Python 3.9 already installed however it may need pip. Install Python pip 3.9 with: `sudo dnf install -y python3-pip`.
* Make sure setuptools for python is already installed with: `pip3 install setuptools`
@@ -125,7 +125,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
_Requirements_:
* `Brew` installed in your Mac or Linux
* AWS, GCP and/or Azure credentials
* AWS and/or Azure credentials
_Commands_:
@@ -194,7 +194,7 @@ You can run Prowler from your workstation, an EC2 instance, Fargate or any other
![Architecture](img/architecture.png)
## Basic Usage
To run Prowler, you will need to specify the provider (e.g aws, gcp or azure):
To run Prowler, you will need to specify the provider (e.g aws or azure):
> If no provider specified, AWS will be used for backward compatibility with most of v2 options.
```console
@@ -226,7 +226,6 @@ For executing specific checks or services you can use options `-c`/`checks` or `
```console
prowler azure --checks storage_blob_public_access_level_is_disabled
prowler aws --services s3 ec2
prowler gcp --services iam compute
```
Also, checks and services can be excluded with options `-e`/`--excluded-checks` or `--excluded-services`:
@@ -234,7 +233,6 @@ Also, checks and services can be excluded with options `-e`/`--excluded-checks`
```console
prowler aws --excluded-checks s3_bucket_public_access
prowler azure --excluded-services defender iam
prowler gcp --excluded-services kms
```
More options and executions methods that will save your time in [Miscelaneous](tutorials/misc.md).
@@ -254,8 +252,6 @@ prowler aws --profile custom-profile -f us-east-1 eu-south-2
```
> By default, `prowler` will scan all AWS regions.
See more details about AWS Authentication in [Requirements](getting-started/requirements.md)
### Azure
With Azure you need to specify which auth method is going to be used:
@@ -274,28 +270,9 @@ prowler azure --browser-auth
prowler azure --managed-identity-auth
```
See more details about Azure Authentication in [Requirements](getting-started/requirements.md)
More details in [Requirements](getting-started/requirements.md)
Prowler by default scans all the subscriptions that is allowed to scan, if you want to scan a single subscription or various concrete subscriptions you can use the following flag (using az cli auth as example):
```console
prowler azure --az-cli-auth --subscription-ids <subscription ID 1> <subscription ID 2> ... <subscription ID N>
```
### Google Cloud
Prowler will use by default your User Account credentials, you can configure it using:
- `gcloud init` to use a new account
- `gcloud config set account <account>` to use an existing account
Then, obtain your access credentials using: `gcloud auth application-default login`
Otherwise, you can generate and download Service Account keys in JSON format (refer to https://cloud.google.com/iam/docs/creating-managing-service-account-keys) and provide the location of the file with the following argument:
```console
prowler gcp --credentials-file path
```
> `prowler` will scan the GCP project associated with the credentials.
See more details about GCP Authentication in [Requirements](getting-started/requirements.md)

View File

@@ -7,52 +7,35 @@ You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, b
## Allowlist Yaml File Syntax
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### Account, Check and/or Region can be * to apply for all the cases
### Resources is a list that can have either Regex or Keywords:
########################### ALLOWLIST EXAMPLE ###########################
Allowlist:
Accounts:
"123456789012":
Checks:
Checks:
"iam_user_hardware_mfa_enabled":
Regions:
Regions:
- "us-east-1"
Resources:
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will ignore every EC2 check in every account and region
"*":
Regions:
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
Resources:
- "test" # Will ignore every resource containing the string "test" in every account and region
"*":
Checks:
Checks:
"s3_bucket_object_versioning":
Regions:
Regions:
- "eu-west-1"
- "us-east-1"
Resources:
Resources:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- "[[:alnum:]]+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"*":
Regions:
- "*"
Resources:
- "*"
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
## Supported Allowlist Locations
@@ -87,7 +70,6 @@ prowler aws -w arn:aws:dynamodb:<region_name>:<account_id>:table/<table_name>
- Checks (String): This field can contain either a Prowler Check Name or an `*` (which applies to all the scanned checks).
- Regions (List): This field contains a list of regions where this allowlist rule is applied (it can also contains an `*` to apply all scanned regions).
- Resources (List): This field contains a list of regex expressions that applies to the resources that are wanted to be allowlisted.
- Tags (List): -Optional- This field contains a list of tuples in the form of 'key=value' that applies to the resources tags that are wanted to be allowlisted.
<img src="../img/allowlist-row.png"/>
@@ -119,7 +101,7 @@ generates an Allowlist:
```
def handler(event, context):
checks = {}
checks["vpc_flow_logs_enabled"] = { "Regions": [ "*" ], "Resources": [ "" ], Optional("Tags"): [ "key:value" ] }
checks["vpc_flow_logs_enabled"] = { "Regions": [ "*" ], "Resources": [ "" ] }
al = { "Allowlist": { "Accounts": { "*": { "Checks": checks } } } }
return al

View File

@@ -13,7 +13,7 @@ Before sending findings to Prowler, you will need to perform next steps:
- Using the AWS Management Console:
![Screenshot 2020-10-29 at 10 26 02 PM](https://user-images.githubusercontent.com/3985464/97634660-5ade3400-1a36-11eb-9a92-4a45cc98c158.png)
3. Allow Prowler to import its findings to AWS Security Hub by adding the policy below to the role or user running Prowler:
- [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
- [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/iam/prowler-security-hub.json)
Once it is enabled, it is as simple as running the command below (for all regions):
@@ -29,34 +29,14 @@ prowler -S -f eu-west-1
> **Note 1**: It is recommended to send only fails to Security Hub and that is possible adding `-q` to the command.
> **Note 2**: Since Prowler perform checks to all regions by default you may need to filter by region when runing Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f <region>` (if no region is used it will try to push findings in all regions hubs). Prowler will send findings to the Security Hub on the region where the scanned resource is located.
> **Note 2**: Since Prowler perform checks to all regions by defauls you may need to filter by region when runing Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f <region>` (if no region is used it will try to push findings in all regions hubs).
> **Note 3**: To have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours.
> **Note 3** to have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours.
Once you run findings for first time you will be able to see Prowler findings in Findings section:
![Screenshot 2020-10-29 at 10 29 05 PM](https://user-images.githubusercontent.com/3985464/97634676-66c9f600-1a36-11eb-9341-70feb06f6331.png)
## Send findings to Security Hub assuming an IAM Role
When you are auditing a multi-account AWS environment, you can send findings to a Security Hub of another account by assuming an IAM role from that account using the `-R` flag in the Prowler command:
```sh
prowler -S -R arn:aws:iam::123456789012:role/ProwlerExecRole
```
> Remember that the used role needs to have permissions to send findings to Security Hub. To get more information about the permissions required, please refer to the following IAM policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
## Send only failed findings to Security Hub
When using Security Hub it is recommended to send only the failed findings generated. To follow that recommendation you could add the `-q` flag to the Prowler command:
```sh
prowler -S -q
```
## Skip sending updates of findings to Security Hub
By default, Prowler archives all its findings in Security Hub that have not appeared in the last scan.

View File

@@ -31,7 +31,7 @@ checks_v3_to_v2_mapping = {
"awslambda_function_url_cors_policy": "extra7180",
"awslambda_function_url_public": "extra7179",
"awslambda_function_using_supported_runtimes": "extra762",
"cloudformation_stack_outputs_find_secrets": "extra742",
"cloudformation_outputs_find_secrets": "extra742",
"cloudformation_stacks_termination_protection_enabled": "extra7154",
"cloudfront_distributions_field_level_encryption_enabled": "extra767",
"cloudfront_distributions_geo_restrictions_enabled": "extra732",
@@ -113,6 +113,7 @@ checks_v3_to_v2_mapping = {
"ec2_securitygroup_allow_wide_open_public_ipv4": "extra778",
"ec2_securitygroup_default_restrict_traffic": "check43",
"ec2_securitygroup_from_launch_wizard": "extra7173",
"ec2_securitygroup_in_use_without_ingress_filtering": "extra74",
"ec2_securitygroup_not_used": "extra75",
"ec2_securitygroup_with_many_ingress_egress_rules": "extra777",
"ecr_repositories_lifecycle_policy_enabled": "extra7194",
@@ -137,6 +138,7 @@ checks_v3_to_v2_mapping = {
"elbv2_internet_facing": "extra79",
"elbv2_listeners_underneath": "extra7158",
"elbv2_logging_enabled": "extra717",
"elbv2_request_smugling": "extra7142",
"elbv2_ssl_listeners": "extra793",
"elbv2_waf_acl_attached": "extra7129",
"emr_cluster_account_public_block_enabled": "extra7178",

View File

@@ -81,4 +81,36 @@ Standard results will be shown and additionally the framework information as the
## Create and contribute adding other Security Frameworks
This information is part of the Developer Guide and can be found here: https://docs.prowler.cloud/en/latest/tutorials/developer-guide/.
If you want to create or contribute with your own security frameworks or add public ones to Prowler you need to make sure the checks are available if not you have to create your own. Then create a compliance file per provider like in `prowler/compliance/aws/` and name it as `<framework>_<version>_<provider>.json` then follow the following format to create yours.
Each file version of a framework will have the following structure at high level with the case that each framework needs to be generally identified), one requirement can be also called one control but one requirement can be linked to multiple prowler checks.:
- `Framework`: string. Indistiguish name of the framework, like CIS
- `Provider`: string. Provider where the framework applies, such as AWS, Azure, OCI,...
- `Version`: string. Version of the framework itself, like 1.4 for CIS.
- `Requirements`: array of objects. Include all requirements or controls with the mapping to Prowler.
- `Requirements_Id`: string. Unique identifier per each requirement in the specific framework
- `Requirements_Description`: string. Description as in the framework.
- `Requirements_Attributes`: array of objects. Includes all needed attributes per each requirement, like levels, sections, etc. Whatever helps to create a dedicated report with the result of the findings. Attributes would be taken as closely as possible from the framework's own terminology directly.
- `Requirements_Checks`: array. Prowler checks that are needed to prove this requirement. It can be one or multiple checks. In case of no automation possible this can be empty.
```
{
"Framework": "<framework>-<provider>",
"Version": "<version>",
"Requirements": [
{
"Id": "<unique-id>",
"Description": "Requiemente full description",
"Checks": [
"Here is the prowler check or checks that is going to be executed"
],
"Attributes": [
{
<Add here your custom attributes.>
}
]
}
```
Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`.

View File

@@ -1,281 +0,0 @@
# Developer Guide
You can extend Prowler in many different ways, in most cases you will want to create your own checks and compliance security frameworks, here is where you can learn about how to get started with it. We also include how to create custom outputs, integrations and more.
## Get the code and install all dependencies
First of all, you need a version of Python 3.9 or higher and also pip installed to be able to install all dependencies requred. Once that is satisfied go a head and clone the repo:
```
git clone https://github.com/prowler-cloud/prowler
cd prowler
```
For isolation and avoid conflicts with other environments, we recommend usage of `poetry`:
```
pip install poetry
```
Then install all dependencies including the ones for developers:
```
poetry install
poetry shell
```
## Contributing with your code or fixes to Prowler
This repo has git pre-commit hooks managed via the pre-commit tool. Install it how ever you like, then in the root of this repo run:
```
pre-commit install
```
You should get an output like the following:
```
pre-commit installed at .git/hooks/pre-commit
```
Before we merge any of your pull requests we pass checks to the code, we use the following tools and automation to make sure the code is secure and dependencies up-to-dated (these should have been already installed if you ran `pipenv install -d`):
- `bandit` for code security review.
- `safety` and `dependabot` for dependencies.
- `hadolint` and `dockle` for our containers security.
- `snyk` in Docker Hub.
- `clair` in Amazon ECR.
- `vulture`, `flake8`, `black` and `pylint` for formatting and best practices.
You can see all dependencies in file `Pipfile`.
## Create a new check for a Provider
### If the check you want to create belongs to an existing service
To create a new check, you will need to create a folder inside the specific service, i.e. `prowler/providers/<provider>/services/<service>/<check_name>/`, with the name of check following the pattern: `service_subservice_action`.
Inside that folder, create the following files:
- An empty `__init__.py`: to make Python treat this check folder as a package.
- A `check_name.py` containing the check's logic, for example:
```
# Import the Check_Report of the specific provider
from prowler.lib.check.models import Check, Check_Report_AWS
# Import the client of the specific service
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
# Create the class for the check
class ec2_ebs_volume_encryption(Check):
def execute(self):
findings = []
# Iterate the service's asset that want to be analyzed
for volume in ec2_client.volumes:
# Initialize a Check Report for each item and assign the region, resource_id, resource_arn and resource_tags
report = Check_Report_AWS(self.metadata())
report.region = volume.region
report.resource_id = volume.id
report.resource_arn = volume.arn
report.resource_tags = volume.tags
# Make the logic with conditions and create a PASS and a FAIL with a status and a status_extended
if volume.encrypted:
report.status = "PASS"
report.status_extended = f"EBS Snapshot {volume.id} is encrypted."
else:
report.status = "FAIL"
report.status_extended = f"EBS Snapshot {volume.id} is unencrypted."
findings.append(report) # Append a report for each item
return findings
```
- A `check_name.metadata.json` containing the check's metadata, for example:
```
{
"Provider": "aws",
"CheckID": "ec2_ebs_volume_encryption",
"CheckTitle": "Ensure there are no EBS Volumes unencrypted.",
"CheckType": [
"Data Protection"
],
"ServiceName": "ec2",
"SubServiceName": "volume",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "medium",
"ResourceType": "AwsEc2Volume",
"Description": "Ensure there are no EBS Volumes unencrypted.",
"Risk": "Data encryption at rest prevents data visibility in the event of its unauthorized access or theft.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Encrypt all EBS volumes and Enable Encryption by default You can configure your AWS account to enforce the encryption of the new EBS volumes and snapshot copies that you create. For example; Amazon EBS encrypts the EBS volumes created when you launch an instance and the snapshots that you copy from an unencrypted snapshot.",
"Url": "https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html"
}
},
"Categories": [
"encryption"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}
```
### If the check you want to create belongs to a service not supported already by Prowler you will need to create a new service first
To create a new service, you will need to create a folder inside the specific provider, i.e. `prowler/providers/<provider>/services/<service>/`.
Inside that folder, create the following files:
- An empty `__init__.py`: to make Python treat this service folder as a package.
- A `<service>_service.py`, containing all the service's logic and API Calls:
```
# You must import the following libraries
import threading
from typing import Optional
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
# Create a class for the Service
################## <Service>
class <Service>:
def __init__(self, audit_info):
self.service = "<service>" # The name of the service boto3 client
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.<items> = [] # Create an empty list of the items to be gathered, e.g., instances
self.__threading_call__(self.__describe_<items>__)
self.__describe_<item>__() # Optionally you can create another function to retrieve more data about each item
def __get_session__(self):
return self.session
def __threading_call__(self, call):
threads = []
for regional_client in self.regional_clients.values():
threads.append(threading.Thread(target=call, args=(regional_client,)))
for t in threads:
t.start()
for t in threads:
t.join()
def __describe_<items>__(self, regional_client):
"""Get ALL <Service> <Items>"""
logger.info("<Service> - Describing <Items>...")
try:
describe_<items>_paginator = regional_client.get_paginator("describe_<items>") # Paginator to get every item
for page in describe_<items>_paginator.paginate():
for <item> in page["<Items>"]:
if not self.audit_resources or (
is_resource_filtered(<item>["<item_arn>"], self.audit_resources)
):
self.<items>.append(
<Item>(
arn=<item>["<item_arn>"],
name=<item>["<item_name>"],
tags=<item>.get("Tags", []),
region=regional_client.region,
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_<item>__(self):
"""Get Details for a <Service> <Item>"""
logger.info("<Service> - Describing <Item> to get specific details...")
try:
for <item> in self.<items>:
<item>_details = self.regional_clients[<item>.region].describe_<item>(
<Attribute>=<item>.name
)
# For example, check if item is Public
<item>.public = <item>_details.get("Public", False)
except Exception as error:
logger.error(
f"{<item>.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
class <Item>(BaseModel):
"""<Item> holds a <Service> <Item>"""
arn: str
"""<Items>[].Arn"""
name: str
"""<Items>[].Name"""
public: bool
"""<Items>[].Public"""
tags: Optional[list] = []
region: str
```
- A `<service>_client.py`, containing the initialization of the service's class we have just created so the service's checks can use them:
```
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.<service>.<service>_service import <Service>
<service>_client = <Service>(current_audit_info)
```
## Create a new security compliance framework
If you want to create or contribute your own security frameworks, or add public ones to Prowler, you need to make sure the required checks are available; if not, you have to create your own. Then create a compliance file per provider, like in `prowler/compliance/aws/`, name it as `<framework>_<version>_<provider>.json`, and follow the format below to create yours.
Each file version of a framework will have the following high-level structure. Each framework needs to be generally identified; one requirement can also be called one control, and one requirement can be linked to multiple Prowler checks:
- `Framework`: string. Distinguishable name of the framework, like CIS.
- `Provider`: string. Provider where the framework applies, such as AWS, Azure, OCI,...
- `Version`: string. Version of the framework itself, like 1.4 for CIS.
- `Requirements`: array of objects. Include all requirements or controls with the mapping to Prowler.
- `Requirements_Id`: string. Unique identifier for each requirement in the specific framework.
- `Requirements_Description`: string. Description as in the framework.
- `Requirements_Attributes`: array of objects. Includes all needed attributes per each requirement, like levels, sections, etc. Whatever helps to create a dedicated report with the result of the findings. Attributes would be taken as closely as possible from the framework's own terminology directly.
- `Requirements_Checks`: array. Prowler checks that are needed to prove this requirement. It can be one or multiple checks. In case of no automation possible this can be empty.
```
{
"Framework": "<framework>-<provider>",
"Version": "<version>",
"Requirements": [
{
"Id": "<unique-id>",
"Description": "Requiemente full description",
"Checks": [
"Here is the prowler check or checks that is going to be executed"
],
"Attributes": [
{
<Add here your custom attributes.>
}
]
},
...
]
}
```
Finally, to have a proper output file for your reports, your framework data model has to be created in `prowler/lib/outputs/models.py` and also the CLI table output in `prowler/lib/outputs/compliance.py`.
## Create a custom output format
## Create a new integration
## Contribute with documentation
We use `mkdocs` to build this Prowler documentation site so you can easily contribute back with new docs or by improving existing ones.
1. Install `mkdocs` with your favorite package manager.
2. Inside the `prowler` repository folder run `mkdocs serve` and point your browser to `http://localhost:8000` and you will see live changes to your local copy of this documentation site.
3. Make all needed changes to docs or add new documents. To do so just edit existing md files inside `prowler/docs` and if you are adding a new section or file please make sure you add it to `mkdocs.yaml` file in the root folder of the Prowler repo.
4. Once you are done with changes, please send a pull request to us for review and merge. Thank you in advance!
## Want some swag as appreciation for your contribution?
If you are like us and you love swag, we are happy to thank you for your contribution with some laptop stickers or whatever other swag we may have at that time. Please, tell us more details and your pull request link in our [Slack workspace here](https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog). You can also reach out to Toni de la Fuente on Twitter [here](https://twitter.com/ToniBlyx), his DMs are open.

View File

@@ -1,29 +0,0 @@
# GCP authentication
Prowler will use by default your User Account credentials, you can configure it using:
- `gcloud init` to use a new account
- `gcloud config set account <account>` to use an existing account
Then, obtain your access credentials using: `gcloud auth application-default login`
Otherwise, you can generate and download Service Account keys in JSON format (refer to https://cloud.google.com/iam/docs/creating-managing-service-account-keys) and provide the location of the file with the following argument:
```console
prowler gcp --credentials-file path
```
> `prowler` will scan the GCP project associated with the credentials.
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the following roles to the member associated with the credentials:
- Viewer
- Security Reviewer
- Stackdriver Account Viewer

Binary file not shown.

Before

Width:  |  Height:  |  Size: 61 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 67 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 200 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 456 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 69 KiB

View File

@@ -1,36 +0,0 @@
# Integrations
## Slack
Prowler can be integrated with [Slack](https://slack.com/) to send a summary of the execution having configured a Slack APP in your channel with the following command:
```sh
prowler <provider> --slack
```
![Prowler Slack Message](img/slack-prowler-message.png)
> Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables.
### Configuration
To configure the Slack Integration, follow the next steps:
1. Create a Slack Application:
- Go to [Slack API page](https://api.slack.com/tutorials/tracks/getting-a-token), scroll down to the *Create app* button and select your workspace:
![Create Slack App](img/create-slack-app.png)
- Install the application in your selected workspaces:
![Install Slack App in Workspace](img/install-in-slack-workspace.png)
- Get the *Slack App OAuth Token* that Prowler needs to send the message:
![Slack App OAuth Token](img/slack-app-token.png)
2. Optionally, create a Slack Channel (you can use an existing one)
3. Integrate the created Slack App to your Slack channel:
- Click on the channel, go to the Integrations tab, and Add an App.
![Slack App Channel Integration](img/integrate-slack-app.png)
4. Set the following environment variables that Prowler will read:
- `SLACK_API_TOKEN`: the *Slack App OAuth Token* that was previously obtained.
- `SLACK_CHANNEL_ID`: the name of your Slack Channel where Prowler will send the message.

View File

@@ -51,30 +51,15 @@ prowler <provider> -e/--excluded-checks ec2 rds
```console
prowler <provider> -C/--checks-file <checks_list>.json
```
## Custom Checks
Prowler allows you to include your custom checks with the flag:
## Severities
Each check of Prowler has a severity, there are options related with it:
- List the available checks in the provider:
```console
prowler <provider> -x/--checks-folder <custom_checks_folder>
prowler <provider> --list-severities
```
> S3 URIs are also supported as folders for custom checks, e.g. s3://bucket/prefix/checks_folder/. Make sure that the used credentials have s3:GetObject permissions in the S3 path where the custom checks are located.
The custom checks folder must contain one subfolder per check, each subfolder must be named as the check and must contain:
- An empty `__init__.py`: to make Python treat this check folder as a package.
- A `check_name.py` containing the check's logic.
- A `check_name.metadata.json` containing the check's metadata.
>The check name must start with the service name followed by an underscore (e.g., ec2_instance_public_ip).
To see more information about how to write checks see the [Developer Guide](../developer-guide/#create-a-new-check-for-a-provider).
## Severities
Each of Prowler's checks has a severity, which can be:
- informational
- low
- medium
- high
- critical
To execute specific severity(s):
- Execute specific severity(s):
```console
prowler <provider> --severity critical high
```

View File

@@ -33,8 +33,9 @@ Several checks analyse resources that are exposed to the Internet, these are:
- ec2_instance_internet_facing_with_instance_profile
- ec2_instance_public_ip
- ec2_networkacl_allow_ingress_any_port
- ec2_securitygroup_allow_wide_open_public_ipv4
- ec2_securitygroup_allow_ingress_from_internet_to_any_port
- ec2_securitygroup_allow_wide_open_public_ipv4
- ec2_securitygroup_in_use_without_ingress_filtering
- ecr_repositories_not_publicly_accessible
- eks_control_plane_endpoint_access_restricted
- eks_endpoints_not_publicly_accessible

View File

@@ -14,6 +14,4 @@ prowler <provider> -i
- Also, it creates by default a CSV and JSON to see detailed information about the resources extracted.
![Quick Inventory Example](../img/quick-inventory.jpg)
> The inventorying process is done with `resourcegroupstaggingapi` calls (except for the IAM resources which are done with Boto3 API calls.)
![Quick Inventory Example](../img/quick-inventory.png)

View File

@@ -46,11 +46,9 @@ Prowler supports natively the following output formats:
Hereunder is the structure for each of the supported report formats by Prowler:
### HTML
![HTML Output](../img/output-html.png)
### CSV
| ASSESSMENT_START_TIME | FINDING_UNIQUE_ID | PROVIDER | PROFILE | ACCOUNT_ID | ACCOUNT_NAME | ACCOUNT_EMAIL | ACCOUNT_ARN | ACCOUNT_ORG | ACCOUNT_TAGS | REGION | CHECK_ID | CHECK_TITLE | CHECK_TYPE | STATUS | STATUS_EXTENDED | SERVICE_NAME | SUBSERVICE_NAME | SEVERITY | RESOURCE_ID | RESOURCE_ARN | RESOURCE_TYPE | RESOURCE_DETAILS | RESOURCE_TAGS | DESCRIPTION | COMPLIANCE | RISK | RELATED_URL | REMEDIATION_RECOMMENDATION_TEXT | REMEDIATION_RECOMMENDATION_URL | REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC | REMEDIATION_RECOMMENDATION_CODE_TERRAFORM | REMEDIATION_RECOMMENDATION_CODE_CLI | REMEDIATION_RECOMMENDATION_CODE_OTHER | CATEGORIES | DEPENDS_ON | RELATED_TO | NOTES |
| ------- | ----------- | ------ | -------- | ------------ | ----------- | ---------- | ---------- | --------------------- | -------------------------- | -------------- | ----------------- | ------------------------ | --------------- | ---------- | ----------------- | --------- | -------------- | ----------------- | ------------------ | --------------------- | -------------------- | ------------------- | ------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- |
| ASSESSMENT_START_TIME | FINDING_UNIQUE_ID | PROVIDER | PROFILE | ACCOUNT_ID | ACCOUNT_NAME | ACCOUNT_EMAIL | ACCOUNT_ARN | ACCOUNT_ORG | ACCOUNT_TAGS | REGION | CHECK_ID | CHECK_TITLE | CHECK_TYPE | STATUS | STATUS_EXTENDED | SERVICE_NAME | SUBSERVICE_NAME | SEVERITY | RESOURCE_ID | RESOURCE_ARN | RESOURCE_TYPE | RESOURCE_DETAILS | RESOURCE_TAGS | DESCRIPTION | RISK | RELATED_URL | REMEDIATION_RECOMMENDATION_TEXT | REMEDIATION_RECOMMENDATION_URL | REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC | REMEDIATION_RECOMMENDATION_CODE_TERRAFORM | REMEDIATION_RECOMMENDATION_CODE_CLI | REMEDIATION_RECOMMENDATION_CODE_OTHER | CATEGORIES | DEPENDS_ON | RELATED_TO | NOTES |
| ------- | ----------- | ------ | -------- | ------------ | ----------- | ---------- | ---------- | --------------------- | -------------------------- | -------------- | ----------------- | ------------------------ | --------------- | ---------- | ----------------- | --------- | -------------- | ----------------- | ------------------ | --------------------- | -------------------- | ------------------- | ------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- | -------------------- |
### JSON
@@ -73,10 +71,6 @@ Hereunder is the structure for each of the supported report formats by Prowler:
"Severity": "low",
"ResourceId": "rds-instance-id",
"ResourceArn": "",
"ResourceTags": {
"test": "test",
"enironment": "dev"
},
"ResourceType": "AwsRdsDbInstance",
"ResourceDetails": "",
"Description": "Ensure RDS instances have minor version upgrade enabled.",
@@ -95,15 +89,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
}
},
"Categories": [],
"Notes": "",
"Compliance": {
"CIS-1.4": [
"1.20"
],
"CIS-1.5": [
"1.20"
]
}
"Notes": ""
},{
"AssessmentStartTime": "2022-12-01T14:16:57.354413",
"FindingUniqueId": "",
@@ -123,7 +109,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
"ResourceId": "rds-instance-id",
"ResourceArn": "",
"ResourceType": "AwsRdsDbInstance",
"ResourceTags": {},
"ResourceDetails": "",
"Description": "Ensure RDS instances have minor version upgrade enabled.",
"Risk": "Auto Minor Version Upgrade is a feature that you can enable to have your database automatically upgraded when a new minor database engine version is available. Minor version upgrades often patch security vulnerabilities and fix bugs and therefore should be applied.",
"RelatedUrl": "https://aws.amazon.com/blogs/database/best-practices-for-upgrading-amazon-rds-to-major-and-minor-versions-of-postgresql/",
@@ -140,8 +126,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
}
},
"Categories": [],
"Notes": "",
"Compliance: {}
"Notes": ""
}]
```
@@ -181,30 +166,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"CISA your-systems-2 booting-up-thing-to-do-first-3",
"CIS-1.5 2.3.2",
"AWS-Foundational-Security-Best-Practices rds",
"RBI-Cyber-Security-Framework annex_i_6",
"FFIEC d3-cc-pm-b-1 d3-cc-pm-b-3"
],
"AssociatedStandards": [
{
"StandardsId": "CISA"
},
{
"StandardsId": "CIS-1.5"
},
{
"StandardsId": "AWS-Foundational-Security-Best-Practices"
},
{
"StandardsId": "RBI-Cyber-Security-Framework"
},
{
"StandardsId": "FFIEC"
}
]
"RelatedRequirements": []
},
"Remediation": {
"Recommendation": {
@@ -243,30 +205,7 @@ Hereunder is the structure for each of the supported report formats by Prowler:
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"CISA your-systems-2 booting-up-thing-to-do-first-3",
"CIS-1.5 2.3.2",
"AWS-Foundational-Security-Best-Practices rds",
"RBI-Cyber-Security-Framework annex_i_6",
"FFIEC d3-cc-pm-b-1 d3-cc-pm-b-3"
],
"AssociatedStandards": [
{
"StandardsId": "CISA"
},
{
"StandardsId": "CIS-1.5"
},
{
"StandardsId": "AWS-Foundational-Security-Best-Practices"
},
{
"StandardsId": "RBI-Cyber-Security-Framework"
},
{
"StandardsId": "FFIEC"
}
]
"RelatedRequirements": []
},
"Remediation": {
"Recommendation": {

View File

@@ -33,12 +33,10 @@ nav:
- Reporting: tutorials/reporting.md
- Compliance: tutorials/compliance.md
- Quick Inventory: tutorials/quick-inventory.md
- Integrations: tutorials/integrations.md
- Configuration File: tutorials/configuration_file.md
- Logging: tutorials/logging.md
- Allowlist: tutorials/allowlist.md
- Pentesting: tutorials/pentesting.md
- Developer Guide: tutorials/developer-guide.md
- AWS:
- Assume Role: tutorials/aws/role-assumption.md
- AWS Security Hub: tutorials/aws/securityhub.md
@@ -52,9 +50,6 @@ nav:
- Azure:
- Authentication: tutorials/azure/authentication.md
- Subscriptions: tutorials/azure/subscriptions.md
- Google Cloud:
- Authentication: tutorials/gcp/authentication.md
- Developer Guide: tutorials/developer-guide.md
- Security: security.md
- Contact Us: contact.md
- Troubleshooting: troubleshooting.md

View File

@@ -6,31 +6,23 @@
"account:Get*",
"appstream:Describe*",
"appstream:List*",
"backup:List*",
"cloudtrail:GetInsightSelectors",
"codeartifact:List*",
"codebuild:BatchGet*",
"drs:Describe*",
"ds:Get*",
"ds:Describe*",
"ds:Get*",
"ds:List*",
"ec2:GetEbsEncryptionByDefault",
"ecr:Describe*",
"ecr:GetRegistryScanningConfiguration",
"elasticfilesystem:DescribeBackupPolicy",
"glue:GetConnections",
"glue:GetSecurityConfiguration*",
"glue:SearchTables",
"lambda:GetFunction*",
"logs:FilterLogEvents",
"macie2:GetMacieSession",
"s3:GetAccountPublicAccessBlock",
"shield:DescribeProtection",
"shield:GetSubscriptionState",
"securityhub:BatchImportFindings",
"securityhub:GetFindings",
"ssm:GetDocument",
"ssm-incidents:List*",
"support:Describe*",
"tag:GetTagKeys"
],
@@ -44,8 +36,7 @@
"apigateway:GET"
],
"Resource": [
"arn:aws:apigateway:*::/restapis/*",
"arn:aws:apigateway:*::/apis/*"
"arn:aws:apigateway:*::/restapis/*"
]
}
]

3296
poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
from prowler.lib.banner import print_banner
@@ -12,15 +11,12 @@ from prowler.lib.check.check import (
exclude_services_to_run,
execute_checks,
list_categories,
list_checks,
list_services,
parse_checks_from_folder,
print_categories,
print_checks,
print_compliance_frameworks,
print_compliance_requirements,
print_services,
remove_custom_checks_module,
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
@@ -30,7 +26,6 @@ from prowler.lib.outputs.compliance import display_compliance_table
from prowler.lib.outputs.html import add_html_footer, fill_html_overview_statistics
from prowler.lib.outputs.json import close_json
from prowler.lib.outputs.outputs import extract_findings_statistics, send_to_s3_bucket
from prowler.lib.outputs.slack import send_slack_message
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.security_hub.security_hub import (
resolve_security_hub_previous_findings,
@@ -57,13 +52,9 @@ def prowler():
services = args.services
categories = args.categories
checks_file = args.checks_file
checks_folder = args.checks_folder
severities = args.severity
compliance_framework = args.compliance
if not args.no_banner:
print_banner(args)
# We treat the compliance framework as another output format
if compliance_framework:
args.output_modes.extend(compliance_framework)
@@ -71,6 +62,9 @@ def prowler():
# Set Logger configuration
set_logging_config(args.log_level, args.log_file, args.only_logs)
if not args.no_banner:
print_banner(args)
if args.list_services:
print_services(list_services(provider))
sys.exit()
@@ -101,18 +95,6 @@ def prowler():
)
sys.exit()
# If -l/--list-checks passed as argument, print checks to execute and quit
if args.list_checks:
print_checks(provider, list_checks(provider), bulk_checks_metadata)
sys.exit()
# Set the audit info based on the selected provider
audit_info = set_provider_audit_info(provider, args.__dict__)
# Import custom checks from folder
if checks_folder:
parse_checks_from_folder(audit_info, checks_folder, provider)
# Load checks to execute
checks_to_execute = load_checks_to_execute(
bulk_checks_metadata,
@@ -139,6 +121,14 @@ def prowler():
# Sort final check list
checks_to_execute = sorted(checks_to_execute)
# If -l/--list-checks passed as argument, print checks to execute and quit
if args.list_checks:
print_checks(provider, checks_to_execute, bulk_checks_metadata)
sys.exit()
# Set the audit info based on the selected provider
audit_info = set_provider_audit_info(provider, args.__dict__)
# Once the audit_info is set and we have the eventual checks based on the resource identifier,
# it is time to check what Prowler's checks are going to be executed
if audit_info.audit_resources:
@@ -171,21 +161,6 @@ def prowler():
# Extract findings stats
stats = extract_findings_statistics(findings)
if args.slack:
if "SLACK_API_TOKEN" in os.environ and "SLACK_CHANNEL_ID" in os.environ:
_ = send_slack_message(
os.environ["SLACK_API_TOKEN"],
os.environ["SLACK_CHANNEL_ID"],
stats,
provider,
audit_info,
)
else:
logger.critical(
"Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables (see more in https://docs.prowler.cloud/en/latest/tutorials/integrations/#slack)."
)
sys.exit(1)
if args.output_modes:
for mode in args.output_modes:
# Close json file if exists
@@ -242,10 +217,6 @@ def prowler():
audit_output_options.output_directory,
)
# If custom checks were passed, remove the modules
if checks_folder:
remove_custom_checks_module(checks_folder, provider)
# If there are failed findings exit code 3, except if -z is input
if not args.ignore_exit_code_3 and stats["total_fail"] > 0:
sys.exit(3)

View File

@@ -362,14 +362,14 @@
}
],
"Checks": [
"iam_policy_no_administrative_privileges",
"iam_rotate_access_key_90_days",
"iam_no_root_access_key",
"iam_user_mfa_enabled_console_access",
"iam_root_hardware_mfa_enabled",
"iam_password_policy_minimum_length_14",
"iam_disable_90_days_credentials",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

View File

@@ -155,8 +155,7 @@
"Id": "1.16",
"Description": "Ensure IAM policies that allow full \"*:*\" administrative privileges are not attached",
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
],
"Attributes": [
{

View File

@@ -155,8 +155,7 @@
"Id": "1.16",
"Description": "Ensure IAM policies that allow full \"*:*\" administrative privileges are not attached",
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
],
"Attributes": [
{

View File

@@ -88,8 +88,7 @@
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_no_custom_policy_permissive_role_assumption",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -182,8 +181,7 @@
"Checks": [
"elbv2_ssl_listeners",
"iam_no_custom_policy_permissive_role_assumption",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},

View File

@@ -103,8 +103,7 @@
"awslambda_function_url_public",
"awslambda_function_url_cors_policy",
"iam_policy_allows_privilege_escalation",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -130,8 +129,7 @@
"Checks": [
"iam_policy_allows_privilege_escalation",
"iam_no_custom_policy_permissive_role_assumption",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -204,8 +202,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -1068,8 +1065,7 @@
],
"Checks": [
"iam_policy_allows_privilege_escalation",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_custom_policy_permissive_role_assumption",
"iam_policy_attached_only_to_group_or_roles",
"iam_role_cross_service_confused_deputy_prevention"
@@ -1630,7 +1626,7 @@
}
],
"Checks": [
"ec2_securitygroup_allow_ingress_from_internet_to_any_port"
"ec2_securitygroup_in_use_without_ingress_filtering"
]
},
{

View File

@@ -26,9 +26,9 @@
"opensearch_service_domains_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -58,9 +58,9 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",

View File

@@ -20,8 +20,7 @@
"guardduty_is_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -92,9 +91,9 @@
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -145,9 +144,9 @@
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
@@ -189,9 +188,9 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -250,9 +249,9 @@
],
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -270,8 +269,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -291,8 +290,7 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -976,8 +974,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

View File

@@ -418,9 +418,9 @@
],
"Checks": [
"ec2_instance_profile_attached",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -519,8 +519,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
@@ -536,8 +536,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
@@ -553,8 +553,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key"
@@ -579,8 +579,8 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_rotate_access_key_90_days",
@@ -755,9 +755,9 @@
"Checks": [
"cloudtrail_multi_region_enabled",
"cloudtrail_cloudwatch_logging_enabled",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

View File

@@ -35,8 +35,7 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",

View File

@@ -82,9 +82,9 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -166,9 +166,9 @@
"iam_password_policy_number",
"iam_password_policy_symbol",
"iam_password_policy_uppercase",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",

View File

@@ -45,8 +45,7 @@
"elb_ssl_listeners",
"emr_cluster_master_nodes_no_public_ip",
"opensearch_service_domains_encryption_at_rest_enabled",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
@@ -115,8 +114,7 @@
"Checks": [
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
@@ -171,8 +169,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -204,8 +201,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -259,8 +255,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -276,8 +271,7 @@
],
"Checks": [
"iam_password_policy_reuse_24",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_disable_90_days_credentials",
@@ -518,8 +512,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_user_mfa_enabled_console_access",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",

View File

@@ -21,8 +21,7 @@
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -61,8 +60,7 @@
"ec2_instance_public_ip",
"eks_endpoints_not_publicly_accessible",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -127,8 +125,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -145,8 +142,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -163,8 +159,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -180,8 +175,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -455,8 +449,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_managed_by_ssm",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_url_public",
"rds_snapshots_public_access",
@@ -828,8 +821,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

View File

@@ -101,8 +101,7 @@
"cloudtrail_cloudwatch_logging_enabled",
"guardduty_is_enabled",
"iam_password_policy_reuse_24",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
@@ -126,8 +125,7 @@
],
"Checks": [
"ec2_ebs_public_snapshot",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_url_public",
@@ -182,8 +180,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -218,8 +215,7 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_url_public",
@@ -850,8 +846,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

View File

@@ -19,8 +19,7 @@
"Checks": [
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -161,8 +160,7 @@
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -240,8 +238,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -277,9 +274,9 @@
"ec2_instance_public_ip",
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",
@@ -351,8 +348,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -380,8 +376,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -409,8 +404,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -438,8 +432,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -467,8 +460,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -496,8 +488,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -525,8 +516,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -554,8 +544,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -582,8 +571,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -611,8 +599,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -645,8 +632,7 @@
"iam_no_root_access_key",
"iam_root_mfa_enabled",
"iam_root_hardware_mfa_enabled",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_password_policy_minimum_length_14",
"ec2_instance_imdsv2_enabled"
@@ -669,8 +655,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -698,8 +683,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -727,8 +711,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -758,8 +741,7 @@
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -789,8 +771,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -841,8 +822,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -886,8 +866,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -915,8 +894,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -944,8 +922,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -1069,8 +1046,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -1094,8 +1070,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges"
]
},
{
@@ -1115,8 +1091,7 @@
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -1144,8 +1119,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -1163,8 +1138,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{
@@ -1203,8 +1177,8 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},
@@ -1458,8 +1432,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -2633,9 +2606,9 @@
"ec2_instance_profile_attached",
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
@@ -2713,8 +2686,7 @@
"ec2_instance_profile_attached",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -2969,8 +2941,7 @@
"ec2_ebs_default_encryption",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -3941,8 +3912,7 @@
"ec2_instance_imdsv2_enabled",
"emr_cluster_master_nodes_no_public_ip",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials",
"awslambda_function_not_publicly_accessible",
@@ -5413,8 +5383,7 @@
"ec2_instance_imdsv2_enabled",
"iam_password_policy_minimum_length_14",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_no_root_access_key",
@@ -5457,8 +5426,7 @@
"ec2_ebs_public_snapshot",
"ec2_instance_public_ip",
"emr_cluster_master_nodes_no_public_ip",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_not_publicly_accessible",
"awslambda_function_url_public",

View File

@@ -569,8 +569,7 @@
],
"Checks": [
"iam_password_policy_reuse_24",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_rotate_access_key_90_days",
"iam_disable_90_days_credentials",
@@ -625,8 +624,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"iam_disable_90_days_credentials"
]
@@ -1078,8 +1076,7 @@
],
"Checks": [
"ec2_ebs_public_snapshot",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key",
"awslambda_function_url_public",
"rds_snapshots_public_access",

View File

@@ -156,8 +156,7 @@
],
"Checks": [
"iam_no_root_access_key",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_root_hardware_mfa_enabled",
"iam_root_mfa_enabled",
"iam_user_mfa_enabled_console_access",

View File

@@ -113,11 +113,9 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_no_root_access_key"
]
},

View File

@@ -46,8 +46,7 @@
],
"Checks": [
"iam_policy_attached_only_to_group_or_roles",
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_policy_no_administrative_privileges",
"iam_disable_90_days_credentials"
]
},
@@ -312,8 +311,7 @@
}
],
"Checks": [
"iam_aws_attached_policy_no_administrative_privileges",
"iam_customer_attached_policy_no_administrative_privileges"
"iam_policy_no_administrative_privileges"
]
},
{

View File

@@ -1,7 +1,5 @@
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### Account, Check and/or Region can be * to apply for all the cases
### Resources is a list that can have either Regex or Keywords:
########################### ALLOWLIST EXAMPLE ###########################
Allowlist:
Accounts:
@@ -13,19 +11,11 @@ Allowlist:
Resources:
- "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
- "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
"ec2_*":
Regions:
- "*"
Resources:
- "*" # Will ignore every EC2 check in every account and region
"*":
Regions:
- "*"
Resources:
- "test"
Tags:
- "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
- "project=test|project=stage" # either of ('project=test' OR project=stage) in account 123456789012 and every region
- "test" # Will ignore every resource containing the string "test" in every account and region
"*":
Checks:
@@ -37,14 +27,6 @@ Allowlist:
- "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
- "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
- "[[:alnum:]]+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
"*":
Regions:
- "*"
Resources:
- "*"
Tags:
- "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region
# EXAMPLE: CONTROL TOWER (to migrate)
# When using Control Tower, guardrails prevent access to certain protected resources. The allowlist

View File

@@ -3,20 +3,15 @@ import pathlib
from datetime import datetime, timezone
from os import getcwd
import requests
import yaml
from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "3.5.1"
prowler_version = "3.3.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
azure_logo = "https://user-images.githubusercontent.com/38561120/235927375-b23e2e0f-8932-49ec-b59c-d89f61c8041d.png"
gcp_logo = "https://user-images.githubusercontent.com/38561120/235928332-eb4accdc-c226-4391-8e97-6ca86a91cf50.png"
orange_color = "\033[38;5;208m"
banner_color = "\033[1;92m"
@@ -37,8 +32,6 @@ with os.scandir(compliance_aws_dir) as files:
# AWS services-regions matrix json
aws_services_json_file = "aws_regions_by_service.json"
# gcp_zones_json_file = "gcp_zones.json"
default_output_directory = getcwd() + "/output"
output_file_timestamp = timestamp.strftime("%Y%m%d%H%M%S")
@@ -50,22 +43,6 @@ html_file_suffix = ".html"
config_yaml = f"{pathlib.Path(os.path.dirname(os.path.realpath(__file__)))}/config.yaml"
def check_current_version():
try:
prowler_version_string = f"Prowler {prowler_version}"
release_response = requests.get(
"https://api.github.com/repos/prowler-cloud/prowler/tags"
)
latest_version = release_response.json()[0]["name"]
if latest_version != prowler_version:
return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
else:
return f"{prowler_version_string} (it is the latest version, yay!)"
except Exception as error:
logger.error(f"{error.__class__.__name__}: {error}")
return f"{prowler_version_string}"
def change_config_var(variable, value):
try:
with open(config_yaml) as f:

View File

@@ -41,17 +41,3 @@ obsolete_lambda_runtimes:
"dotnetcore2.1",
"ruby2.5",
]
# AWS Organizations
# organizations_scp_check_deny_regions
# organizations_enabled_regions: [
# 'eu-central-1',
# 'eu-west-1',
# "us-east-1"
# ]
organizations_enabled_regions: []
# organizations_delegated_administrators
# organizations_trusted_delegated_administrators: [
# "12345678901"
# ]
organizations_trusted_delegated_administrators: []

View File

@@ -1,8 +1,6 @@
import functools
import importlib
import os
import re
import shutil
import sys
import traceback
from pkgutil import walk_packages
@@ -26,7 +24,6 @@ except KeyError:
except Exception:
sys.exit(1)
import prowler
from prowler.lib.utils.utils import open_file, parse_json_file
from prowler.providers.common.models import Audit_Metadata
from prowler.providers.common.outputs import Provider_Output_Options
@@ -120,66 +117,6 @@ def parse_checks_from_file(input_file: str, provider: str) -> set:
return checks_to_execute
# Load checks from custom folder
def parse_checks_from_folder(audit_info, input_folder: str, provider: str) -> int:
try:
imported_checks = 0
# Check if input folder is a S3 URI
if provider == "aws" and re.search(
"^s3://([^/]+)/(.*?([^/]+))/$", input_folder
):
bucket = input_folder.split("/")[2]
key = ("/").join(input_folder.split("/")[3:])
s3_reource = audit_info.audit_session.resource("s3")
bucket = s3_reource.Bucket(bucket)
for obj in bucket.objects.filter(Prefix=key):
if not os.path.exists(os.path.dirname(obj.key)):
os.makedirs(os.path.dirname(obj.key))
bucket.download_file(obj.key, obj.key)
input_folder = key
# Import custom checks by moving the checks folders to the corresponding services
with os.scandir(input_folder) as checks:
for check in checks:
if check.is_dir():
check_module = input_folder + "/" + check.name
# Copy checks to specific provider/service folder
check_service = check.name.split("_")[0]
prowler_dir = prowler.__path__
prowler_module = f"{prowler_dir[0]}/providers/{provider}/services/{check_service}/{check.name}"
if os.path.exists(prowler_module):
shutil.rmtree(prowler_module)
shutil.copytree(check_module, prowler_module)
imported_checks += 1
return imported_checks
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
# Load checks from custom folder
def remove_custom_checks_module(input_folder: str, provider: str):
# Check if input folder is a S3 URI
s3_uri = False
if provider == "aws" and re.search("^s3://([^/]+)/(.*?([^/]+))/$", input_folder):
input_folder = ("/").join(input_folder.split("/")[3:])
s3_uri = True
with os.scandir(input_folder) as checks:
for check in checks:
if check.is_dir():
# Remove imported checks
check_service = check.name.split("_")[0]
prowler_dir = prowler.__path__
prowler_module = f"{prowler_dir[0]}/providers/{provider}/services/{check_service}/{check.name}"
if os.path.exists(prowler_module):
shutil.rmtree(prowler_module)
# If S3 URI, remove the downloaded folders
if s3_uri and os.path.exists(input_folder):
shutil.rmtree(input_folder)
def list_services(provider: str) -> set():
available_services = set()
checks_tuple = recover_checks_from_provider(provider)
@@ -193,14 +130,6 @@ def list_services(provider: str) -> set():
return sorted(available_services)
def list_checks(provider: str) -> set():
available_checks = set()
checks_tuple = recover_checks_from_provider(provider)
for check_name, _ in checks_tuple:
available_checks.add(check_name)
return sorted(available_checks)
def list_categories(provider: str, bulk_checks_metadata: dict) -> set():
available_categories = set()
for check in bulk_checks_metadata.values():

View File

@@ -128,23 +128,6 @@ class Check_Report_Azure(Check_Report):
self.subscription = ""
@dataclass
class Check_Report_GCP(Check_Report):
"""Contains the GCP Check's finding information."""
resource_name: str
resource_id: str
project_id: str
location: str
def __init__(self, metadata):
super().__init__(metadata)
self.resource_name = ""
self.resource_id = ""
self.project_id = ""
self.location = ""
# Testing Pending
def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
"""load_check_metadata loads and parse a Check's metadata file"""

View File

@@ -4,8 +4,8 @@ from argparse import RawTextHelpFormatter
from prowler.config.config import (
available_compliance_frameworks,
check_current_version,
default_output_directory,
prowler_version,
)
from prowler.providers.aws.aws_provider import get_aws_available_regions
from prowler.providers.aws.lib.arn.arn import is_valid_arn
@@ -35,7 +35,8 @@ Detailed documentation at https://docs.prowler.cloud
self.parser.add_argument(
"-v",
"--version",
action="store_true",
action="version",
version=f"Prowler {prowler_version}",
help="show Prowler version",
)
# Common arguments parser
@@ -56,7 +57,6 @@ Detailed documentation at https://docs.prowler.cloud
# Init Providers Arguments
self.__init_aws_parser__()
self.__init_azure_parser__()
self.__init_gcp_parser__()
def parse(self, args=None) -> argparse.Namespace:
"""
@@ -66,10 +66,6 @@ Detailed documentation at https://docs.prowler.cloud
if args:
sys.argv = args
if len(sys.argv) == 2 and sys.argv[1] in ("-v", "--version"):
print(check_current_version())
sys.exit(0)
# Set AWS as the default provider if no provider is supplied
if len(sys.argv) == 1:
sys.argv = self.__set_default_provider__(sys.argv)
@@ -154,11 +150,6 @@ Detailed documentation at https://docs.prowler.cloud
common_outputs_parser.add_argument(
"-b", "--no-banner", action="store_true", help="Hide Prowler banner"
)
common_outputs_parser.add_argument(
"--slack",
action="store_true",
help="Send a summary of the execution with a Slack APP in your channel. Environment variables SLACK_API_TOKEN and SLACK_CHANNEL_ID are required (see more in https://docs.prowler.cloud/en/latest/tutorials/integrations/#slack).",
)
def __init_logging_parser__(self):
# Logging Options
@@ -233,12 +224,6 @@ Detailed documentation at https://docs.prowler.cloud
default=[],
# Pending validate choices
)
common_checks_parser.add_argument(
"-x",
"--checks-folder",
nargs="?",
help="Specify external directory with custom checks (each check must have a folder with the required files, see more in https://docs.prowler.cloud/en/latest/tutorials/misc/#custom-checks).",
)
def __init_list_checks_parser__(self):
# List checks options
@@ -445,18 +430,3 @@ Detailed documentation at https://docs.prowler.cloud
default=[],
help="Azure subscription ids to be scanned by prowler",
)
def __init_gcp_parser__(self):
"""Init the GCP Provider CLI parser"""
gcp_parser = self.subparsers.add_parser(
"gcp", parents=[self.common_providers_parser], help="GCP Provider"
)
# Authentication Modes
gcp_auth_subparser = gcp_parser.add_argument_group("Authentication Modes")
gcp_auth_modes_group = gcp_auth_subparser.add_mutually_exclusive_group()
gcp_auth_modes_group.add_argument(
"--credentials-file",
nargs="?",
metavar="FILE_PATH",
help="Authenticate using a Google Service Account Application Credentials JSON file",
)

View File

@@ -16,13 +16,11 @@ from prowler.lib.outputs.models import (
Check_Output_CSV_CIS,
Check_Output_CSV_ENS_RD2022,
Check_Output_CSV_Generic_Compliance,
Gcp_Check_Output_CSV,
generate_csv_fields,
)
from prowler.lib.utils.utils import file_exists, open_file
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
def initialize_file_descriptor(
@@ -84,13 +82,6 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
audit_info,
Azure_Check_Output_CSV,
)
if isinstance(audit_info, GCP_Audit_Info):
file_descriptor = initialize_file_descriptor(
filename,
output_mode,
audit_info,
Gcp_Check_Output_CSV,
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "json":
@@ -100,13 +91,6 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "html":
filename = f"{output_directory}/{output_filename}{html_file_suffix}"
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
elif isinstance(audit_info, AWS_Audit_Info):
if output_mode == "json-asff":
filename = f"{output_directory}/{output_filename}{json_asff_file_suffix}"
@@ -115,6 +99,15 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, audit
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "html":
filename = (
f"{output_directory}/{output_filename}{html_file_suffix}"
)
file_descriptor = initialize_file_descriptor(
filename, output_mode, audit_info
)
file_descriptors.update({output_mode: file_descriptor})
elif output_mode == "ens_rd2022_aws":
filename = f"{output_directory}/{output_filename}_ens_rd2022_aws{csv_file_suffix}"
file_descriptor = initialize_file_descriptor(

View File

@@ -1,4 +1,3 @@
import importlib
import sys
from os import path
@@ -9,7 +8,6 @@ from prowler.config.config import (
prowler_version,
timestamp,
)
from prowler.lib.check.models import Check_Report_AWS, Check_Report_GCP
from prowler.lib.logger import logger
from prowler.lib.outputs.models import (
get_check_compliance,
@@ -18,13 +16,18 @@ from prowler.lib.outputs.models import (
unroll_tags,
)
from prowler.lib.utils.utils import open_file
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
from prowler.providers.azure.lib.audit_info.models import Azure_Audit_Info
from prowler.providers.gcp.lib.audit_info.models import GCP_Audit_Info
def add_html_header(file_descriptor, audit_info):
try:
if not audit_info.profile:
audit_info.profile = "ENV"
if isinstance(audit_info.audited_regions, list):
audited_regions = " ".join(audit_info.audited_regions)
elif not audit_info.audited_regions:
audited_regions = "All Regions"
else:
audited_regions = audit_info.audited_regions
file_descriptor.write(
"""
<!DOCTYPE html>
@@ -111,9 +114,51 @@ def add_html_header(file_descriptor, audit_info):
</li>
</ul>
</div>
</div> """
+ get_assessment_summary(audit_info)
</div>
<div class="col-md-2">
<div class="card">
<div class="card-header">
AWS Assessment Summary
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>AWS Account:</b> """
+ audit_info.audited_account
+ """
</li>
<li class="list-group-item">
<b>AWS-CLI Profile:</b> """
+ audit_info.profile
+ """
</li>
<li class="list-group-item">
<b>Audited Regions:</b> """
+ audited_regions
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
AWS Credentials
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>User Id:</b> """
+ audit_info.audited_user_id
+ """
</li>
<li class="list-group-item">
<b>Caller Identity ARN:</b>
"""
+ audit_info.audited_identity_arn
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-2">
<div class="card">
<div class="card-header">
@@ -160,44 +205,37 @@ def add_html_header(file_descriptor, audit_info):
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)
def fill_html(file_descriptor, finding, output_options):
try:
row_class = "p-3 mb-2 bg-success-custom"
if finding.status == "INFO":
row_class = "table-info"
elif finding.status == "FAIL":
row_class = "table-danger"
elif finding.status == "WARNING":
row_class = "table-warning"
file_descriptor.write(
f"""
<tr class="{row_class}">
<td>{finding.status}</td>
<td>{finding.check_metadata.Severity}</td>
<td>{finding.check_metadata.ServiceName}</td>
<td>{finding.location if isinstance(finding, Check_Report_GCP) else finding.region if isinstance(finding, Check_Report_AWS) else ""}</td>
<td>{finding.check_metadata.CheckID.replace("_", "<wbr>_")}</td>
<td>{finding.check_metadata.CheckTitle}</td>
<td>{finding.resource_id.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr>_")}</td>
<td>{parse_html_string(unroll_tags(finding.resource_tags))}</td>
<td>{finding.status_extended.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr>_")}</td>
<td><p class="show-read-more">{finding.check_metadata.Risk}</p></td>
<td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p> <a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
<td><p class="show-read-more">{parse_html_string(unroll_dict(get_check_compliance(finding, finding.check_metadata.Provider, output_options)))}</p></td>
</tr>
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
row_class = "p-3 mb-2 bg-success-custom"
if finding.status == "INFO":
row_class = "table-info"
elif finding.status == "FAIL":
row_class = "table-danger"
elif finding.status == "WARNING":
row_class = "table-warning"
file_descriptor.write(
f"""
<tr class="{row_class}">
<td>{finding.status}</td>
<td>{finding.check_metadata.Severity}</td>
<td>{finding.check_metadata.ServiceName}</td>
<td>{finding.region}</td>
<td>{finding.check_metadata.CheckID.replace("_", "<wbr>_")}</td>
<td>{finding.check_metadata.CheckTitle}</td>
<td>{finding.resource_id.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr>_")}</td>
<td>{parse_html_string(unroll_tags(finding.resource_tags))}</td>
<td>{finding.status_extended.replace("<", "&lt;").replace(">", "&gt;").replace("_", "<wbr>_")}</td>
<td><p class="show-read-more">{finding.check_metadata.Risk}</p></td>
<td><p class="show-read-more">{finding.check_metadata.Remediation.Recommendation.Text}</p> <a class="read-more" href="{finding.check_metadata.Remediation.Recommendation.Url}"><i class="fas fa-external-link-alt"></i></a></td>
<td><p class="show-read-more">{parse_html_string(unroll_dict(get_check_compliance(finding, finding.check_metadata.Provider, output_options)))}</p></td>
</tr>
"""
)
def fill_html_overview_statistics(stats, output_filename, output_directory):
@@ -333,207 +371,3 @@ def add_html_footer(output_filename, output_directory):
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_aws_html_assessment_summary(audit_info):
try:
if isinstance(audit_info, AWS_Audit_Info):
if not audit_info.profile:
audit_info.profile = "ENV"
if isinstance(audit_info.audited_regions, list):
audited_regions = " ".join(audit_info.audited_regions)
elif not audit_info.audited_regions:
audited_regions = "All Regions"
else:
audited_regions = audit_info.audited_regions
return (
"""
<div class="col-md-2">
<div class="card">
<div class="card-header">
AWS Assessment Summary
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>AWS Account:</b> """
+ audit_info.audited_account
+ """
</li>
<li class="list-group-item">
<b>AWS-CLI Profile:</b> """
+ audit_info.profile
+ """
</li>
<li class="list-group-item">
<b>Audited Regions:</b> """
+ audited_regions
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
AWS Credentials
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>User Id:</b> """
+ audit_info.audited_user_id
+ """
</li>
<li class="list-group-item">
<b>Caller Identity ARN:</b> """
+ audit_info.audited_identity_arn
+ """
</li>
</ul>
</div>
</div>
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_azure_html_assessment_summary(audit_info):
try:
if isinstance(audit_info, Azure_Audit_Info):
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
intermediate = key + " : " + value
printed_subscriptions.append(intermediate)
return (
"""
<div class="col-md-2">
<div class="card">
<div class="card-header">
Azure Assessment Summary
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>Azure Tenant IDs:</b> """
+ " ".join(audit_info.identity.tenant_ids)
+ """
</li>
<li class="list-group-item">
<b>Azure Tenant Domain:</b> """
+ audit_info.identity.domain
+ """
</li>
<li class="list-group-item">
<b>Azure Subscriptions:</b> """
+ " ".join(printed_subscriptions)
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
Azure Credentials
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>Azure Identity Type:</b> """
+ audit_info.identity.identity_type
+ """
</li>
<li class="list-group-item">
<b>Azure Identity ID:</b> """
+ audit_info.identity.identity_id
+ """
</li>
</ul>
</div>
</div>
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_gcp_html_assessment_summary(audit_info):
try:
if isinstance(audit_info, GCP_Audit_Info):
try:
getattr(audit_info.credentials, "_service_account_email")
profile = (
audit_info.credentials._service_account_email
if audit_info.credentials._service_account_email is not None
else "default"
)
except AttributeError:
profile = "default"
return (
"""
<div class="col-md-2">
<div class="card">
<div class="card-header">
GCP Assessment Summary
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>GCP Project ID:</b> """
+ audit_info.project_id
+ """
</li>
</ul>
</div>
</div>
<div class="col-md-4">
<div class="card">
<div class="card-header">
GCP Credentials
</div>
<ul class="list-group list-group-flush">
<li class="list-group-item">
<b>GCP Account:</b> """
+ profile
+ """
</li>
</ul>
</div>
</div>
"""
)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_assessment_summary(audit_info):
"""
get_assessment_summary gets the HTML assessment summary for the provider
"""
try:
# This is based in the Provider_Audit_Info class
# It is not pretty but useful
# AWS_Audit_Info --> aws
# GCP_Audit_Info --> gcp
# Azure_Audit_Info --> azure
provider = audit_info.__class__.__name__.split("_")[0].lower()
# Dynamically get the Provider quick inventory handler
provider_html_assessment_summary_function = (
f"get_{provider}_html_assessment_summary"
)
return getattr(
importlib.import_module(__name__), provider_html_assessment_summary_function
)(audit_info)
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
sys.exit(1)

View File

@@ -12,27 +12,20 @@ from prowler.providers.aws.lib.audit_info.models import AWS_Organizations_Info
def get_check_compliance(finding, provider, output_options):
try:
check_compliance = {}
# We have to retrieve all the check's compliance requirements
if finding.check_metadata.CheckID in output_options.bulk_checks_metadata:
for compliance in output_options.bulk_checks_metadata[
finding.check_metadata.CheckID
].Compliance:
compliance_fw = compliance.Framework
if compliance.Version:
compliance_fw = f"{compliance_fw}-{compliance.Version}"
if compliance.Provider == provider.upper():
if compliance_fw not in check_compliance:
check_compliance[compliance_fw] = []
for requirement in compliance.Requirements:
check_compliance[compliance_fw].append(requirement.Id)
return check_compliance
except Exception as error:
logger.critical(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
check_compliance = {}
# We have to retrieve all the check's compliance requirements
for compliance in output_options.bulk_checks_metadata[
finding.check_metadata.CheckID
].Compliance:
compliance_fw = compliance.Framework
if compliance.Version:
compliance_fw = f"{compliance_fw}-{compliance.Version}"
if compliance.Provider == provider.upper():
if compliance_fw not in check_compliance:
check_compliance[compliance_fw] = []
for requirement in compliance.Requirements:
check_compliance[compliance_fw].append(requirement.Id)
return check_compliance
def generate_provider_output_csv(
@@ -42,6 +35,8 @@ def generate_provider_output_csv(
set_provider_output_options configures automatically the outputs based on the selected provider and returns the Provider_Output_Options object.
"""
try:
finding_output_model = f"{provider.capitalize()}_Check_Output_{mode.upper()}"
output_model = getattr(importlib.import_module(__name__), finding_output_model)
# Dynamically load the Provider_Output_Options class
finding_output_model = f"{provider.capitalize()}_Check_Output_{mode.upper()}"
output_model = getattr(importlib.import_module(__name__), finding_output_model)
@@ -61,19 +56,6 @@ def generate_provider_output_csv(
)
finding_output = output_model(**data)
if provider == "gcp":
data["resource_id"] = finding.resource_id
data["resource_name"] = finding.resource_name
data["project_id"] = finding.project_id
data["location"] = finding.location
data[
"finding_unique_id"
] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
data["compliance"] = unroll_dict(
get_check_compliance(finding, provider, output_options)
)
finding_output = output_model(**data)
if provider == "aws":
data["profile"] = audit_info.profile
data["account_id"] = audit_info.audited_account
@@ -180,7 +162,7 @@ def unroll_list(listed_items: list):
def unroll_tags(tags: list):
unrolled_items = ""
separator = "|"
if tags and tags != [{}] and tags != [None]:
if tags:
for item in tags:
# Check if there are tags in list
if type(item) == dict:
@@ -323,17 +305,6 @@ class Azure_Check_Output_CSV(Check_Output_CSV):
resource_name: str = ""
class Gcp_Check_Output_CSV(Check_Output_CSV):
"""
Gcp_Check_Output_CSV generates a finding's output in CSV format for the GCP provider.
"""
project_id: str = ""
location: str = ""
resource_id: str = ""
resource_name: str = ""
def generate_provider_output_json(
provider: str, finding, audit_info, mode: str, output_options
):
@@ -362,16 +333,6 @@ def generate_provider_output_json(
finding, provider, output_options
)
if provider == "gcp":
finding_output.ProjectId = audit_info.project_id
finding_output.Location = finding.location
finding_output.ResourceId = finding.resource_id
finding_output.ResourceName = finding.resource_name
finding_output.FindingUniqueId = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
finding_output.Compliance = get_check_compliance(
finding, provider, output_options
)
if provider == "aws":
finding_output.Profile = audit_info.profile
finding_output.AccountId = audit_info.audited_account
@@ -460,20 +421,6 @@ class Azure_Check_Output_JSON(Check_Output_JSON):
super().__init__(**metadata)
class Gcp_Check_Output_JSON(Check_Output_JSON):
"""
Gcp_Check_Output_JSON generates a finding's output in JSON format for the AWS provider.
"""
ProjectId: str = ""
ResourceId: str = ""
ResourceName: str = ""
Location: str = ""
def __init__(self, **metadata):
super().__init__(**metadata)
class Check_Output_CSV_ENS_RD2022(BaseModel):
"""
Check_Output_CSV_ENS_RD2022 generates a finding's output in CSV ENS RD2022 format.

View File

@@ -20,7 +20,6 @@ from prowler.lib.outputs.models import (
Check_Output_JSON_ASFF,
generate_provider_output_csv,
generate_provider_output_json,
unroll_tags,
)
from prowler.providers.aws.lib.allowlist.allowlist import is_allowlisted
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
@@ -33,8 +32,6 @@ def stdout_report(finding, color, verbose, is_quiet):
details = finding.region
if finding.check_metadata.Provider == "azure":
details = finding.check_metadata.ServiceName
if finding.check_metadata.Provider == "gcp":
details = finding.location
if verbose and not (is_quiet and finding.status != "FAIL"):
print(
@@ -73,7 +70,6 @@ def report(check_findings, output_options, audit_info):
finding.check_metadata.CheckID,
finding.region,
finding.resource_id,
unroll_tags(finding.resource_tags),
):
finding.status = "WARNING"
# Print findings by stdout
@@ -104,6 +100,12 @@ def report(check_findings, output_options, audit_info):
file_descriptors,
)
if "html" in file_descriptors:
fill_html(
file_descriptors["html"], finding, output_options
)
file_descriptors["html"].write("")
if "json-asff" in file_descriptors:
finding_output = Check_Output_JSON_ASFF()
fill_json_asff(
@@ -131,10 +133,6 @@ def report(check_findings, output_options, audit_info):
)
# Common outputs
if "html" in file_descriptors:
fill_html(file_descriptors["html"], finding, output_options)
file_descriptors["html"].write("")
if "csv" in file_descriptors:
csv_writer, finding_output = generate_provider_output_csv(
finding.check_metadata.Provider,
@@ -210,8 +208,6 @@ def send_to_s3_bucket(
filename = f"{output_filename}{json_asff_file_suffix}"
elif output_mode == "html":
filename = f"{output_filename}{html_file_suffix}"
else: # Compliance output mode
filename = f"{output_filename}_{output_mode}{csv_file_suffix}"
logger.info(f"Sending outputs to S3 bucket {output_bucket}")
bucket_remote_dir = output_directory
while "prowler/" in bucket_remote_dir: # Check if it is not a custom directory

View File

@@ -1,135 +0,0 @@
import sys
from slack_sdk import WebClient
from prowler.config.config import aws_logo, azure_logo, gcp_logo, square_logo_img
from prowler.lib.logger import logger
def send_slack_message(token, channel, stats, provider, audit_info):
try:
client = WebClient(token=token)
identity, logo = create_message_identity(provider, audit_info)
response = client.chat_postMessage(
username="Prowler",
icon_url=square_logo_img,
channel="#" + channel,
blocks=create_message_blocks(identity, logo, stats),
)
return response
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def create_message_identity(provider, audit_info):
try:
identity = ""
logo = aws_logo
if provider == "aws":
identity = f"AWS Account *{audit_info.audited_account}*"
elif provider == "gcp":
identity = f"GCP Project *{audit_info.project_id}*"
logo = gcp_logo
elif provider == "azure":
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
intermediate = "- *" + key + ": " + value + "*\n"
printed_subscriptions.append(intermediate)
identity = f"Azure Subscriptions:\n{''.join(printed_subscriptions)}"
logo = azure_logo
return identity, logo
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def create_message_blocks(identity, logo, stats):
try:
blocks = [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"Hey there 👋 \n I'm *Prowler*, _the handy cloud security tool_ :cloud::key:\n\n I have just finished the security assessment on your {identity} with a total of *{stats['findings_count']}* findings.",
},
"accessory": {
"type": "image",
"image_url": logo,
"alt_text": "Provider Logo",
},
},
{"type": "divider"},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass']/stats['findings_count']*100,2)}%)\n",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail']/stats['findings_count']*100,2)}%)\n ",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"\n:bar_chart: *{stats['resources_count']} Scanned Resources*\n",
},
},
{"type": "divider"},
{
"type": "context",
"elements": [
{
"type": "mrkdwn",
"text": f"Used parameters: `prowler {' '.join(sys.argv[1:])} `",
}
],
},
{"type": "divider"},
{
"type": "section",
"text": {"type": "mrkdwn", "text": "Join our Slack Community!"},
"accessory": {
"type": "button",
"text": {"type": "plain_text", "text": "Prowler :slack:"},
"url": "https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Feel free to contact us in our repo",
},
"accessory": {
"type": "button",
"text": {"type": "plain_text", "text": "Prowler :github:"},
"url": "https://github.com/prowler-cloud/prowler",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "See all the things you can do with ProwlerPro",
},
"accessory": {
"type": "button",
"text": {"type": "plain_text", "text": "Prowler Pro"},
"url": "https://prowler.pro",
},
},
]
return blocks
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)

View File

@@ -20,18 +20,12 @@ def display_summary_table(
entity_type = "Account"
audited_entities = audit_info.audited_account
elif provider == "azure":
if (
audit_info.identity.domain
!= "Unknown tenant domain (missing AAD permissions)"
):
if audit_info.identity.domain:
entity_type = "Tenant Domain"
audited_entities = audit_info.identity.domain
else:
entity_type = "Tenant ID/s"
audited_entities = " ".join(audit_info.identity.tenant_ids)
elif provider == "gcp":
entity_type = "Project ID"
audited_entities = audit_info.project_id
if findings:
current = {
@@ -59,6 +53,7 @@ def display_summary_table(
current["Service"] != finding.check_metadata.ServiceName
and current["Service"]
):
add_service_to_table(findings_table, current)
current["Total"] = current["Critical"] = current["High"] = current[

View File

@@ -85,7 +85,7 @@ def assume_role(session: session.Session, assumed_role_info: AWS_Assume_Role) ->
if assumed_role_info.external_id:
assumed_credentials = sts_client.assume_role(
RoleArn=assumed_role_info.role_arn,
RoleSessionName="ProwlerAsessmentSession",
RoleSessionName="ProwlerProAsessmentSession",
DurationSeconds=assumed_role_info.session_duration,
ExternalId=assumed_role_info.external_id,
)

File diff suppressed because it is too large Load Diff

View File

@@ -3,20 +3,12 @@ import sys
import yaml
from boto3.dynamodb.conditions import Attr
from schema import Optional, Schema
from schema import Schema
from prowler.lib.logger import logger
allowlist_schema = Schema(
{
"Accounts": {
str: {
"Checks": {
str: {"Regions": list, "Resources": list, Optional("Tags"): list}
}
}
}
}
{"Accounts": {str: {"Checks": {str: {"Regions": list, "Resources": list}}}}}
)
@@ -69,25 +61,14 @@ def parse_allowlist_file(audit_info, allowlist_file):
dynamodb_items.update(response["Items"])
for item in dynamodb_items:
# Create allowlist for every item
if "Tags" in item:
allowlist["Accounts"][item["Accounts"]] = {
"Checks": {
item["Checks"]: {
"Regions": item["Regions"],
"Resources": item["Resources"],
"Tags": item["Tags"],
}
}
}
else:
allowlist["Accounts"][item["Accounts"]] = {
"Checks": {
item["Checks"]: {
"Regions": item["Regions"],
"Resources": item["Resources"],
}
allowlist["Accounts"][item["Accounts"]] = {
"Checks": {
item["Checks"]: {
"Regions": item["Regions"],
"Resources": item["Resources"],
}
}
}
else:
with open(allowlist_file) as f:
allowlist = yaml.safe_load(f)["Allowlist"]
@@ -106,18 +87,18 @@ def parse_allowlist_file(audit_info, allowlist_file):
sys.exit(1)
def is_allowlisted(allowlist, audited_account, check, region, resource, tags):
def is_allowlisted(allowlist, audited_account, check, region, resource):
try:
if audited_account in allowlist["Accounts"]:
if is_allowlisted_in_check(
allowlist, audited_account, check, region, resource, tags
allowlist, audited_account, check, region, resource
):
return True
# If there is a *, it affects to all accounts
if "*" in allowlist["Accounts"]:
audited_account = "*"
if is_allowlisted_in_check(
allowlist, audited_account, check, region, resource, tags
allowlist, audited_account, check, region, resource
):
return True
return False
@@ -128,35 +109,21 @@ def is_allowlisted(allowlist, audited_account, check, region, resource, tags):
sys.exit(1)
def is_allowlisted_in_check(allowlist, audited_account, check, region, resource, tags):
def is_allowlisted_in_check(allowlist, audited_account, check, region, resource):
try:
for allowlisted_check in allowlist["Accounts"][audited_account][
"Checks"
].keys():
# If there is a *, it affects to all checks
if "*" == allowlisted_check:
check = "*"
if is_allowlisted_in_region(
allowlist, audited_account, check, region, resource, tags
):
return True
# Check if there is the specific check
elif check == allowlisted_check:
if is_allowlisted_in_region(
allowlist, audited_account, check, region, resource, tags
):
return True
# Check if check is a regex
elif re.search(allowlisted_check, check):
if is_allowlisted_in_region(
allowlist,
audited_account,
allowlisted_check,
region,
resource,
tags,
):
return True
# If there is a *, it affects to all checks
if "*" in allowlist["Accounts"][audited_account]["Checks"]:
check = "*"
if is_allowlisted_in_region(
allowlist, audited_account, check, region, resource
):
return True
# Check if there is the specific check
if check in allowlist["Accounts"][audited_account]["Checks"]:
if is_allowlisted_in_region(
allowlist, audited_account, check, region, resource
):
return True
return False
except Exception as error:
logger.critical(
@@ -165,67 +132,30 @@ def is_allowlisted_in_check(allowlist, audited_account, check, region, resource,
sys.exit(1)
def is_allowlisted_in_region(allowlist, audited_account, check, region, resource, tags):
def is_allowlisted_in_region(allowlist, audited_account, check, region, resource):
try:
# If there is a *, it affects to all regions
if "*" in allowlist["Accounts"][audited_account]["Checks"][check]["Regions"]:
for elem in allowlist["Accounts"][audited_account]["Checks"][check][
"Resources"
]:
if is_allowlisted_in_tags(
allowlist["Accounts"][audited_account]["Checks"][check],
elem,
resource,
tags,
):
# Check if it is an *
if elem == "*":
elem = ".*"
if re.search(elem, resource):
return True
# Check if there is the specific region
if region in allowlist["Accounts"][audited_account]["Checks"][check]["Regions"]:
for elem in allowlist["Accounts"][audited_account]["Checks"][check][
"Resources"
]:
if is_allowlisted_in_tags(
allowlist["Accounts"][audited_account]["Checks"][check],
elem,
resource,
tags,
):
# Check if it is an *
if elem == "*":
elem = ".*"
if re.search(elem, resource):
return True
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
)
sys.exit(1)
def is_allowlisted_in_tags(check_allowlist, elem, resource, tags):
try:
# Check if it is an *
if elem == "*":
elem = ".*"
# Check if there are allowlisted tags
if "Tags" in check_allowlist:
# Check if there are resource tags
if not tags or not re.search(elem, resource):
return False
all_allowed_tags_in_resource_tags = True
for allowed_tag in check_allowlist["Tags"]:
found_allowed_tag = False
for resource_tag in tags:
if re.search(allowed_tag, resource_tag):
found_allowed_tag = True
break
if not found_allowed_tag:
all_allowed_tags_in_resource_tags = False
return all_allowed_tags_in_resource_tags
else:
if re.search(elem, resource):
return True
except Exception as error:
logger.critical(
f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
)
sys.exit(1)

View File

@@ -166,7 +166,7 @@ def create_inventory_table(resources: list, resources_in_region: dict) -> dict:
elif "documentation" in split_parts and "parts" in split_parts:
resource_type = "restapis-documentation-parts"
else:
resource_type = resource["arn"].split(":")[5].split("/")[1]
resource_type = resource.split(":")[5].split("/")[1]
else:
resource_type = resource["arn"].split(":")[5].split("/")[0]
if service not in resources_type:

View File

@@ -1,7 +1,6 @@
import threading
from typing import Optional
from botocore.exceptions import ClientError
from pydantic import BaseModel
from prowler.lib.logger import logger
@@ -80,21 +79,10 @@ class AccessAnalyzer:
if analyzer.status == "ACTIVE":
regional_client = self.regional_clients[analyzer.region]
for finding in analyzer.findings:
try:
finding_information = regional_client.get_finding(
analyzerArn=analyzer.arn, id=finding.id
)
finding.status = finding_information["finding"]["status"]
except ClientError as error:
if (
error.response["Error"]["Code"]
== "ResourceNotFoundException"
):
logger.warning(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
finding.status = ""
continue
finding_information = regional_client.get_finding(
analyzerArn=analyzer.arn, id=finding.id
)
finding.status = finding_information["finding"]["status"]
except Exception as error:
logger.error(

View File

@@ -1,20 +1,12 @@
from prowler.providers.aws.aws_provider import generate_regional_clients
################## Account
class Account:
def __init__(self, audit_info):
self.service = "account"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.regional_clients = generate_regional_clients(self.service, audit_info)
# If the region is not set in the audit profile,
# we pick the first region from the regional clients list
self.region = (
audit_info.profile_region
if audit_info.profile_region
else list(self.regional_clients.keys())[0]
)
self.region = audit_info.profile_region
def __get_session__(self):
return self.session

View File

@@ -36,8 +36,8 @@ class ApiGatewayV2:
def __get_apis__(self, regional_client):
logger.info("APIGatewayv2 - Getting APIs...")
try:
get_apis_paginator = regional_client.get_paginator("get_apis")
for page in get_apis_paginator.paginate():
get_rest_apis_paginator = regional_client.get_paginator("get_apis")
for page in get_rest_apis_paginator.paginate():
for apigw in page["Items"]:
if not self.audit_resources or (
is_resource_filtered(apigw["ApiId"], self.audit_resources)

View File

@@ -7,7 +7,7 @@
],
"ServiceName": "autoscaling",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:autoscaling:region:account-id:autoScalingGroupName/resource-name",
"ResourceIdTemplate": "arn:partition:access-analyzer:region:account-id:analyzer/resource-id",
"Severity": "critical",
"ResourceType": "Other",
"Description": "Find secrets in EC2 Auto Scaling Launch Configuration",

View File

@@ -1,30 +0,0 @@
{
"Provider": "aws",
"CheckID": "autoscaling_group_multiple_az",
"CheckTitle": "EC2 Auto Scaling Group should use multiple Availability Zones",
"CheckType": [],
"ServiceName": "autoscaling",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:autoscaling:region:account-id:autoScalingGroupName/resource-name",
"Severity": "medium",
"ResourceType": "Other",
"Description": "EC2 Auto Scaling Group should use multiple Availability Zones",
"Risk": "In case of a failure in a single Availability Zone, the Auto Scaling Group will not be able to launch new instances to replace the failed ones.",
"RelatedUrl": "https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-add-availability-zone.html",
"Remediation": {
"Code": {
"CLI": "aws autoscaling update-auto-scaling-group",
"NativeIaC": "",
"Other": "https://www.trendmicro.com/cloudoneconformity/knowledge-base/aws/AutoScaling/multiple-availability-zones.html",
"Terraform": ""
},
"Recommendation": {
"Text": "Configure multiple Availability Zones for EC2 Auto Scaling Group",
"Url": "https://docs.aws.amazon.com/autoscaling/ec2/userguide/as-add-availability-zone.html"
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -1,28 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.autoscaling.autoscaling_client import (
autoscaling_client,
)
class autoscaling_group_multiple_az(Check):
def execute(self):
findings = []
for group in autoscaling_client.groups:
report = Check_Report_AWS(self.metadata())
report.region = group.region
report.resource_id = group.name
report.resource_arn = group.arn
report.resource_tags = group.tags
report.status = "FAIL"
report.status_extended = (
f"Autoscaling group {group.name} has only one availability zones."
)
if len(group.availability_zones) > 1:
report.status = "PASS"
report.status_extended = (
f"Autoscaling group {group.name} has multiple availability zones."
)
findings.append(report)
return findings

View File

@@ -17,8 +17,6 @@ class AutoScaling:
self.regional_clients = generate_regional_clients(self.service, audit_info)
self.launch_configurations = []
self.__threading_call__(self.__describe_launch_configurations__)
self.groups = []
self.__threading_call__(self.__describe_auto_scaling_groups__)
def __get_session__(self):
return self.session
@@ -61,35 +59,6 @@ class AutoScaling:
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __describe_auto_scaling_groups__(self, regional_client):
logger.info("AutoScaling - Describing AutoScaling Groups...")
try:
describe_auto_scaling_groups_paginator = regional_client.get_paginator(
"describe_auto_scaling_groups"
)
for page in describe_auto_scaling_groups_paginator.paginate():
for group in page["AutoScalingGroups"]:
if not self.audit_resources or (
is_resource_filtered(
group["AutoScalingGroupARN"],
self.audit_resources,
)
):
self.groups.append(
Group(
arn=group.get("AutoScalingGroupARN"),
name=group.get("AutoScalingGroupName"),
region=regional_client.region,
availability_zones=group.get("AvailabilityZones"),
tags=group.get("Tags"),
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
class LaunchConfiguration(BaseModel):
arn: str
@@ -97,11 +66,3 @@ class LaunchConfiguration(BaseModel):
user_data: str
image_id: str
region: str
class Group(BaseModel):
arn: str
name: str
region: str
availability_zones: list
tags: list = []

View File

@@ -1,4 +0,0 @@
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.backup.backup_service import Backup
backup_client = Backup(current_audit_info)

View File

@@ -1,34 +0,0 @@
{
"Provider": "aws",
"CheckID": "backup_plans_exist",
"CheckTitle": "Ensure that there is at least one AWS Backup plan",
"CheckType": [
"Recover",
"Resilience",
"Backup"
],
"ServiceName": "backup",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:backup-plan:backup-plan-id",
"Severity": "low",
"ResourceType": "AwsBackupBackupPlan",
"Description": "This check ensures that there is at least one backup plan in place.",
"Risk": "Without a backup plan, an organization may be at risk of losing important data due to accidental deletion, system failures, or natural disasters. This can result in significant financial and reputational damage for the organization.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "aws backup create-backup-plan --backup-plan <backup_plan_name> --backup-plan-rule <backup_rule_name>",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Use AWS Backup to create backup plans for your critical data and services.",
"Url": ""
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -1,24 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.backup.backup_client import backup_client
class backup_plans_exist(Check):
def execute(self):
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = "No Backup Plan Exist"
report.resource_arn = ""
report.resource_id = "Backups"
report.region = backup_client.region
if backup_client.backup_plans:
report.status = "PASS"
report.status_extended = (
f"At least one backup plan exists: {backup_client.backup_plans[0].name}"
)
report.resource_arn = backup_client.backup_plans[0].arn
report.resource_id = backup_client.backup_plans[0].name
report.region = backup_client.backup_plans[0].region
findings.append(report)
return findings

View File

@@ -1,34 +0,0 @@
{
"Provider": "aws",
"CheckID": "backup_reportplans_exist",
"CheckTitle": "Ensure that there is at least one AWS Backup report plan",
"CheckType": [
"Recover",
"Resilience",
"Backup"
],
"ServiceName": "backup",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:backup-report-plan:backup-report-plan-id",
"Severity": "low",
"ResourceType": "Other",
"Description": "This check ensures that there is at least one backup report plan in place.",
"Risk": "Without a backup report plan, an organization may lack visibility into the success or failure of backup operations.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "aws backup create-report-plan --report-plan-name <report-plan-name> --report-delivery-channel <value> --report-setting <value>",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Use AWS Backup to create backup report plans that provide visibility into the success or failure of backup operations.",
"Url": ""
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -1,24 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.backup.backup_client import backup_client
class backup_reportplans_exist(Check):
def execute(self):
findings = []
# We only check report plans if backup plans exist, reducing noise
if backup_client.backup_plans:
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = "No Backup Report Plan Exist"
report.resource_arn = ""
report.resource_id = "Backups"
report.region = backup_client.region
if backup_client.backup_report_plans:
report.status = "PASS"
report.status_extended = f"At least one backup report plan exists: { backup_client.backup_report_plans[0].name}"
report.resource_arn = backup_client.backup_report_plans[0].arn
report.resource_id = backup_client.backup_report_plans[0].name
report.region = backup_client.backup_report_plans[0].region
findings.append(report)
return findings

View File

@@ -1,175 +0,0 @@
import threading
from datetime import datetime
from pydantic import BaseModel
from prowler.lib.logger import logger
from prowler.lib.scan_filters.scan_filters import is_resource_filtered
from prowler.providers.aws.aws_provider import generate_regional_clients
################## Backup
class Backup:
def __init__(self, audit_info):
self.service = "backup"
self.session = audit_info.audit_session
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.regional_clients = generate_regional_clients(self.service, audit_info)
# If the region is not set in the audit profile,
# we pick the first region from the regional clients list
self.region = (
audit_info.profile_region
if audit_info.profile_region
else list(self.regional_clients.keys())[0]
)
self.backup_vaults = []
self.__threading_call__(self.__list_backup_vaults__)
self.backup_plans = []
self.__threading_call__(self.__list_backup_plans__)
self.backup_report_plans = []
self.__threading_call__(self.__list_backup_report_plans__)
def __threading_call__(self, call):
threads = []
for regional_client in self.regional_clients.values():
threads.append(threading.Thread(target=call, args=(regional_client,)))
for t in threads:
t.start()
for t in threads:
t.join()
def __list_backup_vaults__(self, regional_client):
logger.info("Backup - Listing Backup Vaults...")
try:
list_backup_vaults_paginator = regional_client.get_paginator(
"list_backup_vaults"
)
for page in list_backup_vaults_paginator.paginate():
for configuration in page.get("BackupVaultList"):
if not self.audit_resources or (
is_resource_filtered(
configuration.get("BackupVaultArn"),
self.audit_resources,
)
):
self.backup_vaults.append(
BackupVault(
arn=configuration.get("BackupVaultArn"),
name=configuration.get("BackupVaultName"),
region=regional_client.region,
encryption=configuration.get("EncryptionKeyArn"),
recovery_points=configuration.get(
"NumberOfRecoveryPoints"
),
locked=configuration.get("Locked"),
min_retention_days=configuration.get(
"MinRetentionDays"
),
max_retention_days=configuration.get(
"MaxRetentionDays"
),
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_backup_plans__(self, regional_client):
logger.info("Backup - Listing Backup Plans...")
try:
list_backup_plans_paginator = regional_client.get_paginator(
"list_backup_plans"
)
for page in list_backup_plans_paginator.paginate():
for configuration in page.get("BackupPlansList"):
if not self.audit_resources or (
is_resource_filtered(
configuration.get("BackupPlanArn"),
self.audit_resources,
)
):
self.backup_plans.append(
BackupPlan(
arn=configuration.get("BackupPlanArn"),
id=configuration.get("BackupPlanId"),
region=regional_client.region,
name=configuration.get("BackupPlanName"),
version_id=configuration.get("VersionId"),
last_execution_date=configuration.get(
"LastExecutionDate"
),
advanced_settings=configuration.get(
"AdvancedBackupSettings", []
),
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_backup_report_plans__(self, regional_client):
logger.info("Backup - Listing Backup Report Plans...")
try:
list_backup_report_plans = regional_client.list_report_plans()[
"ReportPlans"
]
for backup_report_plan in list_backup_report_plans:
if not self.audit_resources or (
is_resource_filtered(
backup_report_plan.get("ReportPlanArn"),
self.audit_resources,
)
):
self.backup_report_plans.append(
BackupReportPlan(
arn=backup_report_plan.get("ReportPlanArn"),
region=regional_client.region,
name=backup_report_plan.get("ReportPlanName"),
last_attempted_execution_date=backup_report_plan.get(
"LastAttemptedExecutionTime"
),
last_successful_execution_date=backup_report_plan.get(
"LastSuccessfulExecutionTime"
),
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
class BackupVault(BaseModel):
arn: str
name: str
region: str
encryption: str
recovery_points: int
locked: bool
min_retention_days: int = None
max_retention_days: int = None
class BackupPlan(BaseModel):
arn: str
id: str
region: str
name: str
version_id: str
last_execution_date: datetime
advanced_settings: list
class BackupReportPlan(BaseModel):
arn: str
region: str
name: str
last_attempted_execution_date: datetime
last_successful_execution_date: datetime

View File

@@ -1,35 +0,0 @@
{
"Provider": "aws",
"CheckID": "backup_vaults_encrypted",
"CheckTitle": "Ensure that AWS Backup vaults are encrypted with AWS KMS",
"CheckType": [
"Recover",
"Resilience",
"Backup",
"Data Protection"
],
"ServiceName": "backup",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:backup-vault:backup-vault-id",
"Severity": "medium",
"ResourceType": "AwsBackupBackupVault",
"Description": "This check ensures that AWS Backup vaults are encrypted with AWS KMS.",
"Risk": "Without encryption using AWS KMS, an organization's backup data may be at risk of unauthorized access, which can lead to data breaches and other security incidents.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "aws backup update-backup-vault --backup-vault-name <backup_vault_name> --encryption-key-arn <kms_key_arn>",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Use AWS KMS to encrypt your AWS Backup vaults and backup data.",
"Url": ""
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -1,28 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.backup.backup_client import backup_client
class backup_vaults_encrypted(Check):
def execute(self):
findings = []
for backup_vault in backup_client.backup_vaults:
# By default we assume that the result is fail
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = (
f"Backup Vault {backup_vault.name} is not encrypted"
)
report.resource_arn = backup_vault.arn
report.resource_id = backup_vault.name
report.region = backup_vault.region
# if it is encrypted we only change the status and the status extended
if backup_vault.encryption:
report.status = "PASS"
report.status_extended = (
f"Backup Vault {backup_vault.name} is encrypted"
)
# then we store the finding
findings.append(report)
return findings

View File

@@ -1,34 +0,0 @@
{
"Provider": "aws",
"CheckID": "backup_vaults_exist",
"CheckTitle": "Ensure AWS Backup vaults exist",
"CheckType": [
"Recover",
"Resilience",
"Backup"
],
"ServiceName": "backup",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:backup-vault:backup-vault-id",
"Severity": "low",
"ResourceType": "AwsBackupBackupVault",
"Description": "This check ensures that AWS Backup vaults exist to provide a secure and durable storage location for backup data.",
"Risk": "Without an AWS Backup vault, an organization's critical data may be at risk of being lost in the event of an accidental deletion, system failures, or natural disasters.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "aws backup create-backup-vault --backup-vault-name <backup_vault_name>",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Use AWS Backup to create backup vaults for your critical data and services.",
"Url": ""
}
},
"Categories": [],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -1,22 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.backup.backup_client import backup_client
class backup_vaults_exist(Check):
def execute(self):
findings = []
report = Check_Report_AWS(self.metadata())
report.status = "FAIL"
report.status_extended = "No Backup Vault Exist"
report.resource_arn = ""
report.resource_id = "Backups"
report.region = backup_client.region
if backup_client.backup_vaults:
report.status = "PASS"
report.status_extended = f"At least one backup vault exists: { backup_client.backup_vaults[0].name}"
report.resource_arn = backup_client.backup_vaults[0].arn
report.resource_id = backup_client.backup_vaults[0].name
report.region = backup_client.backup_vaults[0].region
findings.append(report)
return findings

View File

@@ -1,7 +1,6 @@
import threading
from typing import Optional
from botocore.client import ClientError
from pydantic import BaseModel
from prowler.lib.logger import logger
@@ -66,8 +65,8 @@ class CloudFormation:
def __describe_stack__(self):
"""Get Details for a CloudFormation Stack"""
logger.info("CloudFormation - Describing Stack to get specific details...")
for stack in self.stacks:
try:
try:
for stack in self.stacks:
stack_details = self.regional_clients[stack.region].describe_stacks(
StackName=stack.name
)
@@ -80,16 +79,10 @@ class CloudFormation:
stack.root_nested_stack = stack_details["Stacks"][0]["RootId"]
stack.is_nested_stack = True if stack.root_nested_stack != "" else False
except ClientError as error:
if error.response["Error"]["Code"] == "ValidationError":
logger.warning(
f"{stack.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
continue
except Exception as error:
logger.error(
f"{stack.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{stack.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
class Stack(BaseModel):

View File

@@ -16,8 +16,14 @@ class cloudfront_distributions_https_enabled(Check):
report.resource_arn = distribution.arn
report.resource_id = distribution.id
report.resource_tags = distribution.tags
if (
distribution.default_cache_config
and distribution.default_cache_config.viewer_protocol_policy
== ViewerProtocolPolicy.allow_all
):
report.status = "FAIL"
report.status_extended = f"CloudFront Distribution {distribution.id} viewers can use HTTP or HTTPS"
elif (
distribution.default_cache_config
and distribution.default_cache_config.viewer_protocol_policy
== ViewerProtocolPolicy.redirect_to_https
@@ -35,10 +41,6 @@ class cloudfront_distributions_https_enabled(Check):
report.status_extended = (
f"CloudFront Distribution {distribution.id} has HTTPS only"
)
else:
report.status = "FAIL"
report.status_extended = f"CloudFront Distribution {distribution.id} viewers can use HTTP or HTTPS"
findings.append(report)
return findings

View File

@@ -83,7 +83,7 @@ class CloudFront:
]["WebACLId"]
# Default Cache Config
default_cache_config = DefaultCacheConfigBehaviour(
default_chache_config = DefaultCacheConfigBehaviour(
realtime_log_config_arn=distribution_config["DistributionConfig"][
"DefaultCacheBehavior"
].get("RealtimeLogConfigArn"),
@@ -96,7 +96,7 @@ class CloudFront:
)
distributions[
distribution_id
].default_cache_config = default_cache_config
].default_cache_config = default_chache_config
except Exception as error:
logger.error(

View File

@@ -1,36 +0,0 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_bucket_requires_mfa_delete",
"CheckTitle": "Ensure the S3 bucket CloudTrail bucket requires MFA delete",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "medium",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure the S3 bucket CloudTrail bucket requires MFA",
"Risk": "If the S3 bucket CloudTrail bucket does not require MFA, it can be deleted by an attacker.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "aws s3api put-bucket-versioning --bucket DOC-EXAMPLE-BUCKET1 --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa \"SERIAL 123456\"",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Configure MFA Delete for the S3 bucket CloudTrail bucket",
"Url": "https://docs.aws.amazon.com/AmazonS3/latest/userguide/MultiFactorAuthenticationDelete.html"
}
},
"Categories": [
"forensics-ready"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -1,35 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
from prowler.providers.aws.services.s3.s3_client import s3_client
class cloudtrail_bucket_requires_mfa_delete(Check):
def execute(self):
findings = []
for trail in cloudtrail_client.trails:
if trail.is_logging:
trail_bucket_is_in_account = False
trail_bucket = trail.s3_bucket
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn
report.resource_tags = trail.tags
report.status = "FAIL"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) has not MFA delete enabled"
for bucket in s3_client.buckets:
if trail_bucket == bucket.name:
trail_bucket_is_in_account = True
if bucket.mfa_delete:
report.status = "PASS"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) has MFA delete enabled"
# check if trail bucket is a cross account bucket
if not trail_bucket_is_in_account:
report.status = "PASS"
report.status_extended = f"Trail {trail.name} bucket ({trail_bucket}) is a cross-account bucket in another account out of Prowler's permissions scope, please check it manually"
findings.append(report)
return findings

View File

@@ -1,36 +0,0 @@
{
"Provider": "aws",
"CheckID": "cloudtrail_insights_exist",
"CheckTitle": "Ensure CloudTrail Insight is enabled",
"CheckType": [
"Software and Configuration Checks",
"Industry and Regulatory Standards",
"CIS AWS Foundations Benchmark"
],
"ServiceName": "cloudtrail",
"SubServiceName": "",
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "low",
"ResourceType": "AwsCloudTrailTrail",
"Description": "Ensure CloudTrail Insight is enabled",
"Risk": "CloudTrail Insights provides a powerful way to search and analyze CloudTrail log data using pre-built queries and machine learning algorithms. This can help you to identify potential security threats and suspicious activity in near real-time, such as unauthorized access attempts, policy changes, or resource modifications.",
"RelatedUrl": "",
"Remediation": {
"Code": {
"CLI": "",
"NativeIaC": "",
"Other": "",
"Terraform": ""
},
"Recommendation": {
"Text": "Enable CloudTrail Insight",
"Url": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/logging-insights-events-with-cloudtrail.html"
}
},
"Categories": [
"forensics-ready"
],
"DependsOn": [],
"RelatedTo": [],
"Notes": ""
}

View File

@@ -1,27 +0,0 @@
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.cloudtrail.cloudtrail_client import (
cloudtrail_client,
)
class cloudtrail_insights_exist(Check):
def execute(self):
findings = []
for trail in cloudtrail_client.trails:
if trail.is_logging:
report = Check_Report_AWS(self.metadata())
report.region = trail.region
report.resource_id = trail.name
report.resource_arn = trail.arn
report.resource_tags = trail.tags
report.status = "FAIL"
report.status_extended = (
f"Trail {trail.name} has not insight selectors and it is logging"
)
if trail.has_insight_selectors:
report.status = "PASS"
report.status_extended = (
f"Trail {trail.name} has insight selectors and it is logging"
)
findings.append(report)
return findings

View File

@@ -2,7 +2,6 @@ import threading
from datetime import datetime
from typing import Optional
from botocore.client import ClientError
from pydantic import BaseModel
from prowler.lib.logger import logger
@@ -18,18 +17,11 @@ class Cloudtrail:
self.audited_account = audit_info.audited_account
self.audit_resources = audit_info.audit_resources
self.audited_partition = audit_info.audited_partition
self.region = audit_info.profile_region
self.regional_clients = generate_regional_clients(self.service, audit_info)
# If the region is not set in the audit profile,
# we pick the first region from the regional clients list
self.region = (
audit_info.profile_region
if audit_info.profile_region
else list(self.regional_clients.keys())[0]
)
self.trails = []
self.__threading_call__(self.__get_trails__)
self.__get_trail_status__()
self.__get_insight_selectors__()
self.__get_event_selectors__()
self.__list_tags_for_resource__()
@@ -77,7 +69,6 @@ class Cloudtrail:
kms_key=kms_key_id,
log_group_arn=log_group_arn,
data_events=[],
has_insight_selectors=trail["HasInsightSelectors"],
)
)
if trails_count == 0:
@@ -143,47 +134,6 @@ class Cloudtrail:
f"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_insight_selectors__(self):
logger.info("Cloudtrail - Getting trail insihgt selectors...")
try:
for trail in self.trails:
for region, client in self.regional_clients.items():
if trail.region == region and trail.name:
insight_selectors = None
trail.has_insight_selectors = None
try:
client_insight_selectors = client.get_insight_selectors(
TrailName=trail.arn
)
insight_selectors = client_insight_selectors.get(
"InsightSelectors"
)
except ClientError as error:
if (
error.response["Error"]["Code"]
== "InsightNotEnabledException"
):
continue
else:
logger.error(
f"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
continue
if insight_selectors:
trail.has_insight_selectors = insight_selectors[0].get(
"InsightType"
)
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __list_tags_for_resource__(self):
logger.info("CloudTrail - List Tags...")
try:
@@ -223,4 +173,3 @@ class Trail(BaseModel):
log_group_arn: str = None
data_events: list[Event_Selector] = []
tags: Optional[list] = []
has_insight_selectors: str = None

View File

@@ -13,17 +13,11 @@ class cloudwatch_log_group_retention_policy_specific_days_enabled(Check):
report.resource_id = log_group.name
report.resource_arn = log_group.arn
report.resource_tags = log_group.tags
if (
log_group.never_expire is False
and log_group.retention_days < specific_retention_days
):
if log_group.retention_days < specific_retention_days:
report.status = "FAIL"
report.status_extended = f"Log Group {log_group.name} has less than {specific_retention_days} days retention period ({log_group.retention_days} days)."
else:
report.status = "PASS"
if log_group.never_expire is True:
report.status_extended = f"Log Group {log_group.name} comply with {specific_retention_days} days retention period since it never expires."
else:
report.status_extended = f"Log Group {log_group.name} comply with {specific_retention_days} days retention period since it has {log_group.retention_days} days."
report.status_extended = f"Log Group {log_group.name} comply with {specific_retention_days} days retention period since it has {log_group.retention_days} days."
findings.append(report)
return findings

View File

@@ -152,18 +152,17 @@ class Logs:
if not self.audit_resources or (
is_resource_filtered(log_group["arn"], self.audit_resources)
):
never_expire = False
kms = log_group.get("kmsKeyId")
retention_days = log_group.get("retentionInDays")
if not retention_days:
never_expire = True
retention_days = 9999
kms = None
retention_days = 0
if "kmsKeyId" in log_group:
kms = log_group["kmsKeyId"]
if "retentionInDays" in log_group:
retention_days = log_group["retentionInDays"]
self.log_groups.append(
LogGroup(
arn=log_group["arn"],
name=log_group["logGroupName"],
retention_days=retention_days,
never_expire=never_expire,
kms_id=kms,
region=regional_client.region,
)
@@ -210,8 +209,8 @@ class Logs:
try:
for log_group in self.log_groups:
regional_client = self.regional_clients[log_group.region]
response = regional_client.list_tags_log_group(
logGroupName=log_group.name
response = regional_client.list_tags_for_resource(
resourceArn=log_group.arn.replace(":*", "") # Remove the tailing :*
)["tags"]
log_group.tags = [response]
except Exception as error:
@@ -241,7 +240,6 @@ class LogGroup(BaseModel):
arn: str
name: str
retention_days: int
never_expire: bool
kms_id: Optional[str]
region: str
log_streams: dict[

View File

@@ -2,7 +2,6 @@ import threading
from enum import Enum
from typing import Optional
from botocore.exceptions import ClientError
from pydantic import BaseModel
from prowler.lib.logger import logger
@@ -69,8 +68,8 @@ class CodeArtifact:
def __list_packages__(self, regional_client):
logger.info("CodeArtifact - Listing Packages and retrieving information...")
for repository in self.repositories:
try:
try:
for repository in self.repositories:
if self.repositories[repository].region == regional_client.region:
list_packages_paginator = regional_client.get_paginator(
"list_packages"
@@ -110,21 +109,15 @@ class CodeArtifact:
sortBy="PUBLISHED_TIME",
)
)
latest_version = ""
latest_origin_type = "UNKNOWN"
latest_status = "Published"
if latest_version_information.get("versions"):
latest_version = latest_version_information["versions"][
0
].get("version")
latest_origin_type = (
latest_version_information["versions"][0]
.get("origin", {})
.get("originType", "UNKNOWN")
)
latest_status = latest_version_information["versions"][
0
].get("status", "Published")
latest_version = latest_version_information["versions"][0][
"version"
]
latest_origin_type = latest_version_information["versions"][
0
]["origin"]["originType"]
latest_status = latest_version_information["versions"][0][
"status"
]
packages.append(
Package(
@@ -149,21 +142,12 @@ class CodeArtifact:
# Save all the packages information
self.repositories[repository].packages = packages
except ClientError as error:
if error.response["Error"]["Code"] == "ResourceNotFoundException":
logger.warning(
f"{regional_client.region} --"
f" {error.__class__.__name__}[{error.__traceback__.tb_lineno}]:"
f" {error}"
)
continue
except Exception as error:
logger.error(
f"{regional_client.region} --"
f" {error.__class__.__name__}[{error.__traceback__.tb_lineno}]:"
f" {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} --"
f" {error.__class__.__name__}[{error.__traceback__.tb_lineno}]:"
f" {error}"
)
def __list_tags_for_resource__(self):
logger.info("CodeArtifact - List Tags...")
@@ -221,7 +205,7 @@ class OriginInformation(BaseModel):
class LatestPackageVersionStatus(Enum):
"""Possible values for the package status"""
"""Possibel values for the package status"""
Published = "Published"
Unfinished = "Unfinished"

View File

@@ -1,4 +0,0 @@
from prowler.providers.aws.lib.audit_info.audit_info import current_audit_info
from prowler.providers.aws.services.drs.drs_service import DRS
drs_client = DRS(current_audit_info)

Some files were not shown because too many files have changed in this diff Show More