chore(merge): Merge master with Prowler 4.0 (#3467)

Co-authored-by: Sergio Garcia <38561120+sergargar@users.noreply.github.com>
Pepe Fagoaga
2024-02-29 11:19:17 +01:00
committed by GitHub
parent b0f2f34d3b
commit f0c027f54e
932 changed files with 38787 additions and 20014 deletions


@@ -13,3 +13,8 @@ updates:
labels:
- "dependencies"
- "pip"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
target-branch: master

.github/labeler.yml vendored Normal file

@@ -0,0 +1,27 @@
documentation:
- changed-files:
- any-glob-to-any-file: "docs/**"
provider/aws:
- changed-files:
- any-glob-to-any-file: "prowler/providers/aws/**"
- any-glob-to-any-file: "tests/providers/aws/**"
provider/azure:
- changed-files:
- any-glob-to-any-file: "prowler/providers/azure/**"
- any-glob-to-any-file: "tests/providers/azure/**"
provider/gcp:
- changed-files:
- any-glob-to-any-file: "prowler/providers/gcp/**"
- any-glob-to-any-file: "tests/providers/gcp/**"
provider/kubernetes:
- changed-files:
- any-glob-to-any-file: "prowler/providers/kubernetes/**"
- any-glob-to-any-file: "tests/providers/kubernetes/**"
github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"


@@ -0,0 +1,24 @@
name: Pull Request Documentation Link
on:
pull_request:
branches:
- 'master'
- 'prowler-4.0-dev'
paths:
- 'docs/**'
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
jobs:
documentation-link:
name: Documentation Link
runs-on: ubuntu-latest
steps:
- name: Leave PR comment with the SaaS Documentation URI
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.PR_NUMBER }}
body: |
You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)


@@ -32,11 +32,11 @@ jobs:
POETRY_VIRTUALENVS_CREATE: "false"
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Setup python (release)
if: github.event_name == 'release'
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
@@ -52,13 +52,13 @@ jobs:
poetry version ${{ github.event.release.tag_name }}
- name: Login to DockerHub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Public ECR
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
registry: public.ecr.aws
username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }}
@@ -67,11 +67,11 @@ jobs:
AWS_REGION: ${{ env.AWS_REGION }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3
- name: Build and push container image (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
with:
push: true
tags: |
@@ -83,7 +83,7 @@ jobs:
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v2
uses: docker/build-push-action@v5
with:
# Use local context to get changes
# https://github.com/docker/build-push-action#path-context


@@ -37,11 +37,11 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -52,6 +52,6 @@ jobs:
# queries: security-extended,security-and-quality
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"


@@ -7,11 +7,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.4.4
uses: trufflesecurity/trufflehog@v3.68.2
with:
path: ./
base: ${{ github.event.repository.default_branch }}

.github/workflows/labeler.yml vendored Normal file

@@ -0,0 +1,16 @@
name: "Pull Request Labeler"
on:
pull_request_target:
branches:
- "master"
- "prowler-4.0-dev"
jobs:
labeler:
permissions:
contents: read
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v5


@@ -14,13 +14,13 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.9", "3.10", "3.11"]
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v39
uses: tj-actions/changed-files@v42
with:
files: ./**
files_ignore: |
@@ -28,6 +28,7 @@ jobs:
README.md
docs/**
permissions/**
mkdocs.yml
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
@@ -35,7 +36,7 @@ jobs:
pipx install poetry
- name: Set up Python ${{ matrix.python-version }}
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: "poetry"
@@ -87,6 +88,6 @@ jobs:
poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
- name: Upload coverage reports to Codecov
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v4
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}


@@ -16,7 +16,7 @@ jobs:
name: Release Prowler to PyPI
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ env.GITHUB_BRANCH }}
- name: Install dependencies
@@ -24,7 +24,7 @@ jobs:
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: setup python
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: 3.9
cache: 'poetry'
@@ -44,7 +44,7 @@ jobs:
poetry publish
# Create pull request with new version
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}."


@@ -23,12 +23,12 @@ jobs:
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v3
- uses: actions/checkout@v4
with:
ref: ${{ env.GITHUB_BRANCH }}
- name: setup python
uses: actions/setup-python@v2
uses: actions/setup-python@v5
with:
python-version: 3.9 #install the python needed
@@ -38,7 +38,7 @@ jobs:
pip install boto3
- name: Configure AWS Credentials -- DEV
uses: aws-actions/configure-aws-credentials@v1
uses: aws-actions/configure-aws-credentials@v4
with:
aws-region: ${{ env.AWS_REGION_DEV }}
role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }}
@@ -50,12 +50,12 @@ jobs:
# Create pull request
- name: Create Pull Request
uses: peter-evans/create-pull-request@v4
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services."
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low"
labels: "status/waiting-for-revision, severity/low, provider/aws"
title: "chore(regions_update): Changes in regions for AWS services."
body: |
### Description


@@ -1,7 +1,7 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
- id: check-merge-conflict
- id: check-yaml
@@ -15,7 +15,7 @@ repos:
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.10.0
rev: v2.12.0
hooks:
- id: pretty-format-toml
args: [--autofix]
@@ -28,7 +28,7 @@ repos:
- id: shellcheck
## PYTHON
- repo: https://github.com/myint/autoflake
rev: v2.2.0
rev: v2.2.1
hooks:
- id: autoflake
args:
@@ -39,25 +39,25 @@ repos:
]
- repo: https://github.com/timothycrosley/isort
rev: 5.12.0
rev: 5.13.2
hooks:
- id: isort
args: ["--profile", "black"]
- repo: https://github.com/psf/black
rev: 22.12.0
rev: 24.1.1
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 6.1.0
rev: 7.0.0
hooks:
- id: flake8
exclude: contrib
args: ["--ignore=E266,W503,E203,E501,W605"]
- repo: https://github.com/python-poetry/poetry
rev: 1.6.0 # add version here
rev: 1.7.0
hooks:
- id: poetry-check
- id: poetry-lock
@@ -80,18 +80,12 @@ repos:
- id: trufflehog
name: TruffleHog
description: Detect secrets in your data.
# entry: bash -c 'trufflehog git file://. --only-verified --fail'
entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail'
# For running trufflehog in docker, use the following entry instead:
entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["commit", "push"]
- id: pytest-check
name: pytest-check
entry: bash -c 'pytest tests -n auto'
language: system
files: '.*\.py'
- id: bandit
name: bandit
description: "Bandit is a tool for finding common security issues in Python code"


@@ -8,16 +8,18 @@ version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3.9"
python: "3.11"
jobs:
post_create_environment:
# Install poetry
# https://python-poetry.org/docs/#installing-manually
- pip install poetry
# Tell poetry to not use a virtual environment
- poetry config virtualenvs.create false
- python -m pip install poetry
post_install:
- poetry install -E docs
# Install dependencies with 'docs' dependency group
# https://python-poetry.org/docs/managing-dependencies/#dependency-groups
# VIRTUAL_ENV needs to be set manually for now.
# See https://github.com/readthedocs/readthedocs.org/pull/11152/
- VIRTUAL_ENV=${READTHEDOCS_VIRTUALENV_PATH} python -m poetry install --only=docs
mkdocs:
configuration: mkdocs.yml


@@ -55,7 +55,7 @@ further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at community@prowler.cloud. All
reported by contacting the project team at [support.prowler.com](https://customer.support.prowler.com/servicedesk/customer/portals). All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.


@@ -1,24 +1,31 @@
<p align="center">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/62c1ce73bbcdd6b9e5ba03dfcae26dfd165defd9/docs/img/prowler-pro-dark.png?raw=True#gh-dark-mode-only" width="150" height="36">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/62c1ce73bbcdd6b9e5ba03dfcae26dfd165defd9/docs/img/prowler-pro-light.png?raw=True#gh-light-mode-only" width="15%" height="15%">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png?raw=True#gh-light-mode-only" width="350" height="115">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png?raw=True#gh-dark-mode-only" width="350" height="115">
</p>
<p align="center">
<b><i>See all the things you and your team can do with ProwlerPro at <a href="https://prowler.pro">prowler.pro</a></i></b>
<b><i>Prowler SaaS</i></b> and <b><i>Prowler Open Source</i></b> are as dynamic and adaptable as the environment they're meant to protect. Trusted by the leaders in security.
</p>
<p align="center">
<b>Learn more at <a href="https://prowler.com">prowler.com</i></b>
</p>
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/3985464/3617e470-670c-47c9-9794-ce895ebdb627"></a>
<br>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
</p>
<hr>
<p align="center">
<img src="https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png" />
</p>
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
<a href="https://pypi.org/project/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/v/prowler.svg"></a>
<a href="https://pypi.python.org/pypi/prowler/"><img alt="Python Version" src="https://img.shields.io/pypi/pyversions/prowler.svg"></a>
<a href="https://pypistats.org/packages/prowler"><img alt="PyPI Prowler Downloads" src="https://img.shields.io/pypi/dw/prowler.svg?label=prowler%20downloads"></a>
<a href="https://pypistats.org/packages/prowler-cloud"><img alt="PyPI Prowler-Cloud Downloads" src="https://img.shields.io/pypi/dw/prowler-cloud.svg?label=prowler-cloud%20downloads"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker Pulls" src="https://img.shields.io/docker/pulls/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/cloud/build/toniblyx/prowler"></a>
<a href="https://hub.docker.com/r/toniblyx/prowler"><img alt="Docker" src="https://img.shields.io/docker/image-size/toniblyx/prowler"></a>
<a href="https://gallery.ecr.aws/prowler-cloud/prowler"><img width="120" height=19" alt="AWS ECR Gallery" src="https://user-images.githubusercontent.com/3985464/151531396-b6535a68-c907-44eb-95a1-a09508178616.png"></a>
<a href="https://codecov.io/gh/prowler-cloud/prowler"><img src="https://codecov.io/gh/prowler-cloud/prowler/graph/badge.svg?token=OflBGsdpDl"/></a>
</p>
<p align="center">
<a href="https://github.com/prowler-cloud/prowler"><img alt="Repo size" src="https://img.shields.io/github/repo-size/prowler-cloud/prowler"></a>
@@ -30,6 +37,7 @@
<a href="https://twitter.com/ToniBlyx"><img alt="Twitter" src="https://img.shields.io/twitter/follow/toniblyx?style=social"></a>
<a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
</p>
<hr>
# Description
@@ -37,16 +45,16 @@
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.cloud/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.cloud/en/latest/tutorials/misc/#categories) |
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 301 | 61 -> `prowler aws --list-services` | 25 -> `prowler aws --list-compliance` | 5 -> `prowler aws --list-categories` |
| AWS | 302 | 61 -> `prowler aws --list-services` | 27 -> `prowler aws --list-compliance` | 6 -> `prowler aws --list-categories` |
| GCP | 73 | 11 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 23 | 4 -> `prowler azure --list-services` | CIS soon | 1 -> `prowler azure --list-categories` |
| Kubernetes | Planned | - | - | - |
| Azure | 37 | 4 -> `prowler azure --list-services` | CIS soon | 1 -> `prowler azure --list-categories` |
| Kubernetes | Work In Progress | - | CIS soon | - |
# 📖 Documentation
The full documentation can now be found at [https://docs.prowler.cloud](https://docs.prowler.cloud)
The full documentation can now be found at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)
## Looking for Prowler v2 documentation?
For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prowler/tree/2.12.1.
@@ -54,13 +62,13 @@ For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prow
# ⚙️ Install
## Pip package
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9:
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:
```console
pip install prowler
prowler -v
```
More details at https://docs.prowler.cloud
More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)
## Containers
@@ -77,7 +85,7 @@ The container images are available here:
## From Github
Python >= 3.9 is required with pip and poetry:
Python >= 3.9, < 3.13 is required with pip and poetry:
```
git clone https://github.com/prowler-cloud/prowler


@@ -14,7 +14,7 @@ As an **AWS Partner** and we have passed the [AWS Foundation Technical Review (F
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or ProwlerPro service, please submit the information by contacting to help@prowler.pro.
The information you share with Verica as part of this process is kept confidential within Verica and the Prowler team. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
The information you share with ProwlerPro as part of this process is kept confidential within ProwlerPro. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
We will review the submitted report, and assign it a tracking number. We will then respond to you, acknowledging receipt of the report, and outline the next steps in the process.


@@ -196,14 +196,17 @@ aws:
As you can see in the above code, within the service client, in this case the `ec2_client`, there is an object called `audit_config` which is a Python dictionary containing the values read from the configuration file.
In order to use it, you have to check first if the value is present in the configuration file. If the value is not present, you can create it in the `config.yaml` file and then read it from the check.
> It is mandatory to always use the `dictionary.get(value, default)` syntax to set a default value in the case the configuration value is not present.
???+ note
It is mandatory to always use the `dictionary.get(value, default)` syntax to set a default value in the case the configuration value is not present.
## Check Metadata
Each Prowler check has associated metadata, stored at the same level as the check's folder in a file called `check_name.metadata.json`.
> We are going to include comments in this example metadata JSON but they cannot be included because the JSON format does not allow comments.
???+ note
We are going to include comments in this example metadata JSON but they cannot be included because the JSON format does not allow comments.
```json
{


@@ -0,0 +1,45 @@
# Debugging
Debugging in Prowler makes things easier!
If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution. To do that, if you are using VSCode you can run the code using the integrated debugger. Please refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
The following file is an example of the [debugging configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations) file that you can add to [Visual Studio Code](https://code.visualstudio.com/).
This file should be inside the *.vscode* folder and its name has to be *launch.json*:
```json
{
"version": "0.2.0",
"configurations": [
{
"name": "Python: Current File",
"type": "python",
"request": "launch",
"program": "prowler.py",
"args": [
"aws",
"-f",
"eu-west-1",
"--service",
"cloudwatch",
"--log-level",
"ERROR",
"-p",
"dev",
],
"console": "integratedTerminal",
"justMyCode": false
},
{
"name": "Python: Debug Tests",
"type": "python",
"request": "launch",
"program": "${file}",
"purpose": [
"debug-test"
],
"console": "integratedTerminal",
"justMyCode": false
}
]
}
```


@@ -1,6 +1,6 @@
# Developer Guide
You can extend Prowler in many different ways, in most cases you will want to create your own checks and compliance security frameworks, here is where you can learn about how to get started with it. We also include how to create custom outputs, integrations and more.
You can extend Prowler Open Source in many different ways, in most cases you will want to create your own checks and compliance security frameworks, here is where you can learn about how to get started with it. We also include how to create custom outputs, integrations and more.
## Get the code and install all dependencies
@@ -16,7 +16,7 @@ pip install poetry
```
Then install all dependencies including the ones for developers:
```
poetry install
poetry install --with dev
poetry shell
```
@@ -31,7 +31,9 @@ You should get an output like the following:
pre-commit installed at .git/hooks/pre-commit
```
Before we merge any of your pull requests we pass checks to the code, we use the following tools and automation to make sure the code is secure and dependencies up-to-dated (these should have been already installed if you ran `pipenv install -d`):
Before we merge any of your pull requests we run checks on the code; we use the following tools and automation to make sure the code is secure and dependencies are up to date:
???+ note
These should have been already installed if you ran `poetry install --with dev`
- [`bandit`](https://pypi.org/project/bandit/) for code security review.
- [`safety`](https://pypi.org/project/safety/) and [`dependabot`](https://github.com/features/security) for dependencies.


@@ -40,13 +40,15 @@ Other commands to run tests:
- Run tests for a provider service: `pytest -n auto -vvv -s -x tests/providers/<provider>/services/<service>`
- Run tests for a provider check: `pytest -n auto -vvv -s -x tests/providers/<provider>/services/<service>/<check>`
> Refer to the [pytest documentation](https://docs.pytest.org/en/7.1.x/getting-started.html) documentation for more information.
???+ note
Refer to the [pytest documentation](https://docs.pytest.org/en/7.1.x/getting-started.html) for more information.
## AWS
For the AWS provider we have ways to test a Prowler check based on the following criteria:
> Note: We use and contribute to the [Moto](https://github.com/getmoto/moto) library which allows us to easily mock out tests based on AWS infrastructure. **It's awesome!**
???+ note
We use and contribute to the [Moto](https://github.com/getmoto/moto) library which allows us to easily mock out tests based on AWS infrastructure. **It's awesome!**
- AWS API calls covered by [Moto](https://github.com/getmoto/moto):
- Service tests with `@mock_<service>`
@@ -195,7 +197,8 @@ class Test_iam_password_policy_uppercase:
If the IAM service for the checks we want to test is not covered by Moto, we have to inject the objects in the service client using [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock). As we have pointed out above, we cannot instantiate the service since it will make real calls to the AWS APIs.
> The following example uses the IAM GetAccountPasswordPolicy which is covered by Moto but this is only for demonstration purposes.
???+ note
The following example uses the IAM GetAccountPasswordPolicy which is covered by Moto but this is only for demonstration purposes.
The following code shows how to use MagicMock to create the service objects.
@@ -325,7 +328,8 @@ class Test_iam_password_policy_uppercase:
Note that this does not use Moto, to keep it simple, but if you use any `moto`-decorators in addition to the patch, the call to `orig(self, operation_name, kwarg)` will be intercepted by Moto.
> The above code comes from here https://docs.getmoto.org/en/latest/docs/services/patching_other_services.html
???+ note
The above code comes from here https://docs.getmoto.org/en/latest/docs/services/patching_other_services.html
#### Mocking more than one service
@@ -385,7 +389,7 @@ with mock.patch(
"prowler.providers.<provider>.lib.audit_info.audit_info.audit_info",
new=audit_info,
), mock.patch(
"prowler.providers.aws.services.<service>.<check>.<check>.<service>_client",
"prowler.providers.<provider>.services.<service>.<check>.<check>.<service>_client",
new=<SERVICE>(audit_info),
):
```
@@ -407,10 +411,10 @@ with mock.patch(
"prowler.providers.<provider>.lib.audit_info.audit_info.audit_info",
new=audit_info,
), mock.patch(
"prowler.providers.aws.services.<service>.<SERVICE>",
"prowler.providers.<provider>.services.<service>.<SERVICE>",
new=<SERVICE>(audit_info),
) as service_client, mock.patch(
"prowler.providers.aws.services.<service>.<service>_client.<service>_client",
"prowler.providers.<provider>.services.<service>.<service>_client.<service>_client",
new=service_client,
):
```
@@ -523,7 +527,7 @@ from unittest import mock
from uuid import uuid4
# Azure Constants
AZURE_SUSCRIPTION = str(uuid4())
from tests.providers.azure.azure_fixtures import AZURE_SUBSCRIPTION
@@ -542,7 +546,7 @@ class Test_defender_ensure_defender_for_arm_is_on:
# Create the custom Defender object to be tested
defender_client.pricings = {
AZURE_SUSCRIPTION: {
AZURE_SUBSCRIPTION: {
"Arm": Defender_Pricing(
resource_id=resource_id,
pricing_tier="Not Standard",
@@ -580,9 +584,9 @@ class Test_defender_ensure_defender_for_arm_is_on:
assert result[0].status == "FAIL"
assert (
result[0].status_extended
== f"Defender plan Defender for ARM from subscription {AZURE_SUSCRIPTION} is set to OFF (pricing tier not standard)"
== f"Defender plan Defender for ARM from subscription {AZURE_SUBSCRIPTION} is set to OFF (pricing tier not standard)"
)
assert result[0].subscription == AZURE_SUSCRIPTION
assert result[0].subscription == AZURE_SUBSCRIPTION
assert result[0].resource_name == "Defender plan ARM"
assert result[0].resource_id == resource_id
```


@@ -5,7 +5,7 @@ Prowler has been written in Python using the [AWS SDK (Boto3)](https://boto3.ama
Since Prowler uses AWS Credentials under the hood, you can follow any authentication method as described [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-precedence).
### AWS Authentication
### Authentication
Make sure you have properly configured your AWS-CLI with a valid Access Key and Region or declare AWS variables properly (or instance profile/role):
@@ -26,9 +26,8 @@ Those credentials must be associated to a user or role with proper permissions t
- `arn:aws:iam::aws:policy/SecurityAudit`
- `arn:aws:iam::aws:policy/job-function/ViewOnlyAccess`
> Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using.
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
???+ note
Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using. If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
### Multi-Factor Authentication
@@ -71,25 +70,51 @@ To use each one you need to pass the proper flag to the execution. Prowler fro A
#### Azure Active Directory scope
Azure Active Directory (AAD) permissions required by the tool are the following:
Microsoft Entra ID (formerly AAD) permissions required by the tool are the following:
- `Directory.Read.All`
- `Policy.Read.All`
The best way to assign it is through the azure web console:
![AAD Permissions](../img/AAD-permissions.png)
1. Access to Microsoft Entra ID
2. In the left menu bar, go to "App registrations"
3. Once there, in the menu bar click on "+ New registration" to register a new application
4. Fill the "Name, select the "Supported account types" and click on "Register. You will be redirected to the applications page.
![Register an Application page](../img/register-application.png)
4. Select the new application
5. In the left menu bar, select "API permissions"
6. Then click on "+ Add a permission" and select "Microsoft Graph"
7. Once in the "Microsoft Graph" view, select "Application permissions"
8. Finally, search for "Directory" and "Policy" and select the following permissions:
- `Directory.Read.All`
- `Policy.Read.All`
![EntraID Permissions](../img/AAD-permissions.png)
#### Subscriptions scope
Regarding the subscription scope, Prowler by default scans all the subscriptions that it is able to list, so it is required to add the following RBAC built-in roles per subscription to the entity that is going to be assumed by the tool:
- `Security Reader`
- `Reader`
To assign these roles, follow these instructions:
1. Access the subscriptions page, then select your subscription.
2. Select "Access control (IAM)".
3. In the overview, select "Roles"
![IAM Page](../img/page-IAM.png)
4. Click on "+ Add" and select "Add role assignment"
5. In the search bar, type `Security Reader`, select it and click on "Next"
6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
7. Click on "Review + assign" to apply the new role.
*Repeat these steps for the `Reader` role*
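If you prefer the command line, the same role assignments can be made with the Azure CLI. This is a minimal sketch, not part of the documented procedure; `<principal_object_id>` and `<subscription_id>` are placeholders for your own values:

```shell
# Assign both roles to the identity that Prowler will use.
az role assignment create --assignee <principal_object_id> --role "Security Reader" --scope "/subscriptions/<subscription_id>"
az role assignment create --assignee <principal_object_id> --role "Reader" --scope "/subscriptions/<subscription_id>"
```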
## Google Cloud
### GCP Authentication
### Authentication
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
@@ -99,4 +124,5 @@ Prowler will follow the same credentials search as [Google authentication librar
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
???+ note
By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.

docs/img/page-IAM.png Normal file

Binary image files added (not shown): docs/img/page-IAM.png and three other images.


@@ -1,38 +1,13 @@
<p href="https://github.com/prowler-cloud/prowler">
<img align="right" src="./img/prowler-logo.png" height="100">
</p>
<br>
**Prowler** is an Open Source security tool to perform AWS, Azure and Google Cloud security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
# Prowler Documentation
**Welcome to [Prowler Open Source v3](https://github.com/prowler-cloud/prowler/) Documentation!** 📄
For **Prowler v2 Documentation**, please go [here](https://github.com/prowler-cloud/prowler/tree/2.12.0) to the branch and its README.md.
- You are currently in the **Getting Started** section where you can find general information and requirements to help you start with the tool.
- In the [Tutorials](./tutorials/misc.md) section you will see how to take advantage of all the features in Prowler.
- In the [Contact Us](./contact.md) section you can find how to reach us out in case of technical issues.
- In the [About](./about.md) section you will find more information about the Prowler team and license.
## About Prowler
**Prowler** is an Open Source security tool to perform AWS, Azure and Google Cloud security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
It contains hundreds of controls covering CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
[![Twitter URL](https://img.shields.io/twitter/url/https/twitter.com/prowlercloud.svg?style=social&label=Follow%20%40prowlercloud)](https://twitter.com/prowlercloud)
## About ProwlerPro
<a href="https://prowler.pro"><img align="right" src="./img/prowler-pro-light.png" width="350"></a> **ProwlerPro** gives you the benefits of Prowler Open Source plus continuous monitoring, faster execution, personalized support, visualization of your data with dashboards, alerts and much more.
Visit <a href="https://prowler.pro">prowler.pro</a> for more info.
![Prowler Execution](img/short-display.png)
Prowler offers hundreds of controls covering more than 25 standards and compliance frameworks like CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
## Quick Start
### Installation
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with `Python >= 3.9`:
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus can be installed using pip with `Python >= 3.9`:
=== "Generic"
@@ -136,30 +111,21 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
=== "AWS CloudShell"
Prowler can be easely executed in AWS CloudShell but it has some prerequsites to be able to to so. AWS CloudShell is a container running with `Amazon Linux release 2 (Karoo)` that comes with Python 3.7, since Prowler requires Python >= 3.9 we need to first install a newer version of Python. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
_Requirements_:
* First install all dependences and then Python, in this case we need to compile it because there is not a package available at the time this document is written:
```
sudo yum -y install gcc openssl-devel bzip2-devel libffi-devel
wget https://www.python.org/ftp/python/3.9.16/Python-3.9.16.tgz
tar zxf Python-3.9.16.tgz
cd Python-3.9.16/
./configure --enable-optimizations
sudo make altinstall
python3.9 --version
cd
```
* Open AWS CloudShell `bash`.
_Commands_:
* Once Python 3.9 is available we can install Prowler from pip:
```
pip3.9 install prowler
pip install prowler
prowler -v
```
> To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
???+ note
To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
=== "Azure CloudShell"
@@ -194,14 +160,18 @@ You can run Prowler from your workstation, an EC2 instance, Fargate or any other
![Architecture](img/architecture.png)
## Basic Usage
To run Prowler, you will need to specify the provider (e.g aws, gcp or azure):
> If no provider specified, AWS will be used for backward compatibility with most of v2 options.
To run Prowler, you will need to specify the provider (e.g `aws`, `gcp` or `azure`):
???+ note
If no provider is specified, AWS will be used for backward compatibility with most of v2 options.
```console
prowler <provider>
```
![Prowler Execution](img/short-display.png)
> Running the `prowler` command without options will use your environment variable credentials, see [Requirements](./getting-started/requirements.md) section to review the credentials settings.
???+ note
Running the `prowler` command without options will use your environment variable credentials, see [Requirements](./getting-started/requirements.md) section to review the credentials settings.
If you miss the former output you can use `--verbose` but Prowler v3 is smoking fast, so you won't see much ;)
@@ -252,7 +222,9 @@ Use a custom AWS profile with `-p`/`--profile` and/or AWS regions which you want
```console
prowler aws --profile custom-profile -f us-east-1 eu-south-2
```
> By default, `prowler` will scan all AWS regions.
???+ note
By default, `prowler` will scan all AWS regions.
See more details about AWS Authentication in [Requirements](getting-started/requirements.md)
@@ -302,3 +274,6 @@ prowler gcp --project-ids <Project ID 1> <Project ID 2> ... <Project ID N>
```
See more details about GCP Authentication in [Requirements](getting-started/requirements.md)
## Prowler v2 Documentation
For **Prowler v2 Documentation**, please check it out [here](https://github.com/prowler-cloud/prowler/blob/8818f47333a0c1c1a457453c87af0ea5b89a385f/README.md).


@@ -13,9 +13,9 @@ As an **AWS Partner** and we have passed the [AWS Foundation Technical Review (F
## Reporting Vulnerabilities
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or ProwlerPro service, please submit the information by contacting to help@prowler.pro.
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or the Prowler SaaS service, please submit the information by contacting us via [**support.prowler.com**](http://support.prowler.com).
The information you share with Verica as part of this process is kept confidential within Verica and the Prowler team. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
The information you share with the Prowler team as part of this process is kept confidential within Prowler. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.
We will review the submitted report, and assign it a tracking number. We will then respond to you, acknowledging receipt of the report, and outline the next steps in the process.


@@ -19,9 +19,8 @@ Those credentials must be associated to a user or role with proper permissions t
- `arn:aws:iam::aws:policy/SecurityAudit`
- `arn:aws:iam::aws:policy/job-function/ViewOnlyAccess`
> Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using.
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
???+ note
Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using. If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
## Profiles


@@ -32,3 +32,14 @@ Prowler's AWS Provider uses the Boto3 [Standard](https://boto3.amazonaws.com/v1/
- Retry attempts on nondescriptive, transient error codes. Specifically, these HTTP status codes: 500, 502, 503, 504.
- Any retry attempt will include an exponential backoff by a base factor of 2 for a maximum backoff time of 20 seconds.
## Notes for validating retry attempts
If you are making changes to Prowler and want to validate whether requests are being retried or given up on, you can take the following approach:
* Run prowler with `--log-level DEBUG` and `--log-file debuglogs.txt`
* Search for retry attempts using `grep -i 'Retry needed' debuglogs.txt`
This is based on the [AWS documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html#checking-retry-attempts-in-your-client-logs), which states that if a retry is performed, you will see a message starting with "Retry needed".
You can determine the total number of calls made using `grep -i 'Sending http request' debuglogs.txt | wc -l`
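Putting those steps together, a minimal sketch of the whole validation workflow (the log file name is only an example):

```shell
# Run a scan with debug logging written to a file
prowler aws --log-level DEBUG --log-file debuglogs.txt
# Count how many requests were retried
grep -i 'Retry needed' debuglogs.txt | wc -l
# Count the total number of HTTP requests sent
grep -i 'Sending http request' debuglogs.txt | wc -l
```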


@@ -1,26 +1,26 @@
# AWS CloudShell
Prowler can be easily executed in AWS CloudShell but it has some prerequisites to be able to to so. AWS CloudShell is a container running with `Amazon Linux release 2 (Karoo)` that comes with Python 3.7, since Prowler requires Python >= 3.9 we need to first install a newer version of Python. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
- First install all dependences and then Python, in this case we need to compile it because there is not a package available at the time this document is written:
```
sudo yum -y install gcc openssl-devel bzip2-devel libffi-devel
wget https://www.python.org/ftp/python/3.9.16/Python-3.9.16.tgz
tar zxf Python-3.9.16.tgz
cd Python-3.9.16/
./configure --enable-optimizations
sudo make altinstall
python3.9 --version
cd
```
- Once Python 3.9 is available we can install Prowler from pip:
```
pip3.9 install prowler
```
- Now enjoy Prowler:
```
## Installation
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
```shell
pip install prowler
prowler -v
prowler
```
- To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
## Download Files
To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
## Clone Prowler from Github
The limited storage that AWS CloudShell provides for the user's home directory causes issues when installing the poetry dependencies to run Prowler from GitHub. Here is a workaround:
```shell
git clone https://github.com/prowler-cloud/prowler.git
cd prowler
pip install poetry
mkdir /tmp/pypoetry
poetry config cache-dir /tmp/pypoetry
poetry shell
poetry install
python prowler.py -v
```

Eight binary image files added (not shown).


@@ -1,21 +1,28 @@
# AWS Organizations
## Get AWS Account details from your AWS Organization
Prowler allows you to get additional information of the scanned account in CSV and JSON outputs. When scanning a single account you get the Account ID as part of the output.
Prowler allows you to get additional information about the scanned account from AWS Organizations.
If you have AWS Organizations Prowler can get your account details like Account Name, Email, ARN, Organization ID and Tags and you will have them next to every finding in the CSV and JSON outputs.
If you have AWS Organizations enabled, Prowler can get your account details like account name, email, ARN, organization id and tags and you will have them next to every finding's output.
In order to do that you can use the option `-O`/`--organizations-role <organizations_role_arn>`. See the following sample command:
In order to do that you can use the argument `-O`/`--organizations-role <organizations_role_arn>`. If this argument is not present Prowler will try to fetch that information automatically if the AWS account is a delegated administrator for the AWS Organization.
???+ note
Refer [here](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_delegate_policies.html) for more information about AWS Organizations delegated administrator.
See the following sample command:
```shell
prowler aws \
-O arn:aws:iam::<management_organizations_account_id>:role/<role_name>
```
> Make sure the role in your AWS Organizations management account has the permissions `organizations:ListAccounts*` and `organizations:ListTagsForResource`.
???+ note
Make sure the role in your AWS Organizations management account has the permissions `organizations:DescribeAccount` and `organizations:ListTagsForResource`.
In that command Prowler will scan the account and getting the account details from the AWS Organizations management account assuming a role and creating two reports with those details in JSON and CSV.
Prowler will scan the AWS account and get the account details from AWS Organizations.
In the JSON output below (redacted) you can see tags coded in base64 to prevent breaking CSV or JSON due to its format:
In the JSON output below you can see tags coded in base64 to prevent breaking CSV or JSON due to its format:
```json
"Account Email": "my-prod-account@domain.com",
@@ -25,13 +32,15 @@ In the JSON output below (redacted) you can see tags coded in base64 to prevent
"Account tags": "\"eyJUYWdzIjpasf0=\""
```
The additional fields in CSV header output are as follow:
The additional fields in CSV header output are as follows:
```csv
ACCOUNT_DETAILS_EMAIL,ACCOUNT_DETAILS_NAME,ACCOUNT_DETAILS_ARN,ACCOUNT_DETAILS_ORG,ACCOUNT_DETAILS_TAGS
```
- ACCOUNT_DETAILS_EMAIL
- ACCOUNT_DETAILS_NAME
- ACCOUNT_DETAILS_ARN
- ACCOUNT_DETAILS_ORG
- ACCOUNT_DETAILS_TAGS
## Extra: run Prowler across all accounts in AWS Organizations by assuming roles
## Extra: Run Prowler across all accounts in AWS Organizations by assuming roles
If you want to run Prowler across all accounts of AWS Organizations you can do this:
@@ -55,4 +64,6 @@ If you want to run Prowler across all accounts of AWS Organizations you can do t
done
```
> Using the same for loop it can be scanned a list of accounts with a variable like `ACCOUNTS_LIST='11111111111 2222222222 333333333'`
???+ note
Using the same for loop, you can scan a list of accounts with a variable like:
<br>`ACCOUNTS_LIST='11111111111 2222222222 333333333'`
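The body of the loop is truncated in this diff. As an illustration only, a hypothetical version could look like the sketch below; the role name `ProwlerExecRole` and the account-listing call are assumptions, not the documented snippet:

```shell
# Hypothetical sketch: scan every active account in the AWS Organization,
# assuming a role named ProwlerExecRole exists in each member account.
for ACCOUNT_ID in $(aws organizations list-accounts --query 'Accounts[?Status==`ACTIVE`].Id' --output text); do
  prowler aws -R "arn:aws:iam::${ACCOUNT_ID}:role/ProwlerExecRole"
done
```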


@@ -6,10 +6,13 @@ By default Prowler is able to scan the following AWS partitions:
- China: `aws-cn`
- GovCloud (US): `aws-us-gov`
> To check the available regions for each partition and service please refer to the following document [aws_regions_by_service.json](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_regions_by_service.json)
???+ note
To check the available regions for each partition and service please refer to the following document [aws_regions_by_service.json](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_regions_by_service.json)
It is important to take into consideration that to scan the China (`aws-cn`) or GovCloud (`aws-us-gov`) partitions it is either required to have a valid region for that partition in your AWS credentials or to specify the regions you want to audit for that partition using the `-f/--region` flag.
> Please, refer to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials for more information about the AWS credentials configuration.
???+ note
Please, refer to https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials for more information about the AWS credentials configuration.
Prowler can scan specific region(s) with:
```console
@@ -34,7 +37,8 @@ aws_access_key_id = XXXXXXXXXXXXXXXXXXX
aws_secret_access_key = XXXXXXXXXXXXXXXXXXX
region = cn-north-1
```
> With this option all the partition regions will be scanned without the need of use the `-f/--region` flag
???+ note
With this option all the partition regions will be scanned without the need to use the `-f/--region` flag
## AWS GovCloud (US)
@@ -52,7 +56,8 @@ aws_access_key_id = XXXXXXXXXXXXXXXXXXX
aws_secret_access_key = XXXXXXXXXXXXXXXXXXX
region = us-gov-east-1
```
> With this option all the partition regions will be scanned without the need of use the `-f/--region` flag
???+ note
With this option all the partition regions will be scanned without the need to use the `-f/--region` flag
## AWS ISO (US & Europe)


@@ -23,6 +23,16 @@ prowler aws -R arn:aws:iam::<account_id>:role/<role_name>
prowler aws -T/--session-duration <seconds> -I/--external-id <external_id> -R arn:aws:iam::<account_id>:role/<role_name>
```
## Custom Role Session Name
Prowler can use your custom Role Session name with:
```console
prowler aws --role-session-name <role_session_name>
```
???+ note
It defaults to `ProwlerAssessmentSession`.
## Role MFA
If your IAM Role has MFA configured you can use `--mfa` along with `-R`/`--role <role_arn>` and Prowler will ask you to input the following values to get a new temporary session for the IAM Role provided:
@@ -34,6 +44,7 @@ If your IAM Role has MFA configured you can use `--mfa` along with `-R`/`--role
To create a role to be assumed in one or multiple accounts you can use either as CloudFormation Stack or StackSet the following [template](https://github.com/prowler-cloud/prowler/blob/master/permissions/create_role_to_assume_cfn.yaml) and adapt it.
> _NOTE 1 about Session Duration_: Depending on the amount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T <seconds>` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify _"Maximum CLI/API session duration"_ for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session).
???+ note "About Session Duration"
Depending on the amount of checks you run and the size of your infrastructure, Prowler may require more than 1 hour to finish. Use option `-T <seconds>` to allow up to 12h (43200 seconds). To allow more than 1h you need to modify _"Maximum CLI/API session duration"_ for that particular role, read more [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html#id_roles_use_view-role-max-session).
> _NOTE 2 about Session Duration_: Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour so consider not using role chaining if possible, read more about that, in foot note 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html).
Bear in mind that if you are using roles assumed by role chaining there is a hard limit of 1 hour, so consider not using role chaining if possible; read more about that in footnote 1 below the table [here](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use.html).
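For instance, combining both flags documented above to allow a 12-hour session on an assumed role (a sketch using placeholder values):

```console
prowler aws -T 43200 -R arn:aws:iam::<account_id>:role/<role_name>
```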


@@ -21,6 +21,5 @@ By default Prowler sends HTML, JSON and CSV output formats, if you want to send
prowler <provider> -M csv -B my-bucket
```
> In the case you do not want to use the assumed role credentials but the initial credentials to put the reports into the S3 bucket, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket`.
> Make sure that the used credentials have `s3:PutObject` permissions in the S3 path where the reports are going to be uploaded.
???+ note
In the case you do not want to use the assumed role credentials but the initial credentials to put the reports into the S3 bucket, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket`. Make sure that the used credentials have `s3:PutObject` permissions in the S3 path where the reports are going to be uploaded.
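For example, to upload the reports with the initial credentials instead of the assumed role's, the command above becomes (a sketch with a placeholder bucket name):

```console
prowler <provider> -M csv -D my-bucket
```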


@@ -1,61 +1,137 @@
# AWS Security Hub Integration
Prowler supports natively and as **official integration** sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows Prowler to import its findings to AWS Security Hub.
Prowler supports natively and as **official integration** sending findings to [AWS Security Hub](https://aws.amazon.com/security-hub). This integration allows **Prowler** to import its findings to AWS Security Hub.
With Security Hub, you now have a single place that aggregates, organizes, and prioritizes your security alerts, or findings, from multiple AWS services, such as Amazon GuardDuty, Amazon Inspector, Amazon Macie, AWS Identity and Access Management (IAM) Access Analyzer, and AWS Firewall Manager, as well as from AWS Partner solutions and from Prowler for free.
Before sending findings to Prowler, you will need to perform next steps:
Before sending findings, you will need to enable AWS Security Hub and the **Prowler** integration.
1. Since Security Hub is a region based service, enable it in the region or regions you require. Use the AWS Management Console or using the AWS CLI with this command if you have enough permissions:
- `aws securityhub enable-security-hub --region <region>`.
2. Enable Prowler as partner integration integration. Use the AWS Management Console or using the AWS CLI with this command if you have enough permissions:
- `aws securityhub enable-import-findings-for-product --region <region> --product-arn arn:aws:securityhub:<region>::product/prowler/prowler` (change region also inside the ARN).
- Using the AWS Management Console:
![Screenshot 2020-10-29 at 10 26 02 PM](https://user-images.githubusercontent.com/3985464/97634660-5ade3400-1a36-11eb-9a92-4a45cc98c158.png)
3. Allow Prowler to import its findings to AWS Security Hub by adding the policy below to the role or user running Prowler:
- [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
## Enable AWS Security Hub
You have to perform the following steps, in _at least_ one AWS region of a given AWS account, to enable **AWS Security Hub** and **Prowler** as a partner integration.
Since **AWS Security Hub** is a region-based service, you will need to enable it in the region or regions you require. You can configure it using the AWS Management Console or the AWS CLI.
???+ note
Take into account that enabling this integration will incur costs in AWS Security Hub; please refer to its pricing [here](https://aws.amazon.com/security-hub/pricing/) for more information.
### Using the AWS Management Console
#### Enable AWS Security Hub
If you currently have AWS Security Hub enabled you can skip to the [next section](#enable-prowler-integration).
1. Open the **AWS Security Hub** console at https://console.aws.amazon.com/securityhub/.
2. When you open the Security Hub console for the first time, make sure that you are in the region where you want to enable it, then choose **Go to Security Hub**.
![](./img/enable.png)
3. On the next page, the Security standards section lists the security standards that Security Hub supports. Select the check box for a standard to enable it, and clear the check box to disable it.
4. Choose **Enable Security Hub**.
![](./img/enable-2.png)
#### Enable Prowler Integration
If you currently have the Prowler integration enabled in AWS Security Hub you can skip to the [next section](#send-findings) and start sending findings.
Once **AWS Security Hub** is enabled you will need to enable **Prowler** as a partner integration to allow **Prowler** to send findings to your **AWS Security Hub**.
1. Open the **AWS Security Hub** console at https://console.aws.amazon.com/securityhub/.
2. Select the **Integrations** tab in the right-side menu bar.
![](./img/enable-partner-integration.png)
3. Search for _Prowler_ in the text search box and the **Prowler** integration will appear.
4. Once there, click on **Accept Findings** to allow **AWS Security Hub** to receive findings from **Prowler**.
![](./img/enable-partner-integration-2.png)
5. A new modal will appear to confirm that you are enabling the **Prowler** integration.
![](./img/enable-partner-integration-3.png)
6. Right after clicking on **Accept Findings**, you will see that the integration is enabled in **AWS Security Hub**.
![](./img/enable-partner-integration-4.png)
### Using the AWS CLI
To enable **AWS Security Hub** and the **Prowler** integration you have to run the following commands using the AWS CLI:
```shell
aws securityhub enable-security-hub --region <region>
```
???+ note
For this command to work you will need the `securityhub:EnableSecurityHub` permission, and you must set the AWS region where you want to enable AWS Security Hub.
Once **AWS Security Hub** is enabled you will need to enable **Prowler** as a partner integration to allow **Prowler** to send findings to your AWS Security Hub. You have to run the following command using the AWS CLI:
```shell
aws securityhub enable-import-findings-for-product --region <region> --product-arn arn:aws:securityhub:<region>::product/prowler/prowler
```
???+ note
You will need to set the AWS region where you want to enable the integration, and also the AWS region within the product ARN. For this command to work you will need the `securityhub:EnableImportFindingsForProduct` permission.
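You can verify that the integration is active by listing the enabled products (a sketch; the subscription ARN in the output is illustrative):
```shell
aws securityhub list-enabled-products-for-import --region <region>
# The output should include an ARN like:
# arn:aws:securityhub:<region>:<account-id>:product-subscription/prowler/prowler
```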
## Send Findings
Once it is enabled, it is as simple as running the command below (for all regions):
```sh
prowler aws -S
prowler aws --security-hub
```
or for only one filtered region like eu-west-1:
```sh
prowler -S -f eu-west-1
prowler --security-hub --region eu-west-1
```
> **Note 1**: It is recommended to send only fails to Security Hub and that is possible adding `-q` to the command.
???+ note
It is recommended to send only fails to Security Hub, which is possible by adding `-q/--quiet` to the command. Alternatively, you can use the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but send only the FAIL findings to AWS Security Hub.
> **Note 2**: Since Prowler perform checks to all regions by default you may need to filter by region when running Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f <region>` (if no region is used it will try to push findings in all regions hubs). Prowler will send findings to the Security Hub on the region where the scanned resource is located.
Since Prowler performs checks in all regions by default, you may need to filter by region when running the Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f/--region <region>` (if no region is given it will try to push findings to the hubs of all regions). Prowler will send findings to the Security Hub of the region where the scanned resource is located.
> **Note 3**: To have updated findings in Security Hub you have to run Prowler periodically. Once a day or every certain amount of hours.
To have updated findings in Security Hub you have to run Prowler periodically, e.g. once a day or every few hours.
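A minimal scheduling sketch using cron (the schedule, binary path and region are placeholders):
```shell
# Run Prowler every day at 03:00 and send only failed findings to Security Hub
0 3 * * * /usr/local/bin/prowler aws --security-hub -q -f eu-west-1 >> /var/log/prowler.log 2>&1
```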
Once you run findings for first time you will be able to see Prowler findings in Findings section:
### See your Prowler findings in AWS Security Hub
![Screenshot 2020-10-29 at 10 29 05 PM](https://user-images.githubusercontent.com/3985464/97634676-66c9f600-1a36-11eb-9341-70feb06f6331.png)
Once **AWS Security Hub** is configured, in your next scan you will receive the **Prowler** findings in the configured AWS regions. To review those findings in **AWS Security Hub**:
1. Open the **AWS Security Hub** console at https://console.aws.amazon.com/securityhub/.
2. Select the **Findings** tab in the right-side menu bar.
![](./img/findings.png)
3. Use the search box filters and set the **Product Name** filter to _Prowler_ to see the findings sent from **Prowler**.
4. Then, you can click on the check **Title** to see the details and the history of a finding.
![](./img/finding-details.png)
As you can see in the related requirements section of a finding's detailed view, **Prowler** also sends compliance information related to every finding.
## Send findings to Security Hub assuming an IAM Role
When you are auditing a multi-account AWS environment, you can send findings to the Security Hub of another account by assuming an IAM role from that account with the `-R/--role` flag in the Prowler command:
```sh
prowler -S -R arn:aws:iam::123456789012:role/ProwlerExecRole
prowler --security-hub --role arn:aws:iam::123456789012:role/ProwlerExecutionRole
```
> Remember that the used role needs to have permissions to send findings to Security Hub. To get more information about the permissions required, please refer to the following IAM policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
???+ note
Remember that the used role needs to have permissions to send findings to Security Hub. To get more information about the permissions required, please refer to the following IAM policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json)
## Send only failed findings to Security Hub
When using Security Hub it is recommended to send only the failed findings generated. To follow that recommendation you could add the `-q` flag to the Prowler command:
When using the **AWS Security Hub** integration you can send only the `FAIL` findings generated by **Prowler**, which can also lower the **AWS Security Hub** usage costs. To follow that recommendation you can add the `-q/--quiet` flag to the Prowler command:
```sh
prowler -S -q
prowler --security-hub --quiet
```
You can use, instead of the `-q/--quiet` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but send only the FAIL findings to AWS Security Hub:
```sh
prowler --security-hub --send-sh-only-fails
```
## Skip sending updates of findings to Security Hub
@@ -63,5 +139,5 @@ By default, Prowler archives all its findings in Security Hub that have not appe
You can skip this logic by using the option `--skip-sh-update` so Prowler will not archive older findings:
```sh
prowler -S --skip-sh-update
prowler --security-hub --skip-sh-update
```

View File

@@ -1,19 +1,19 @@
# Check Aliases
Prowler allows you to use aliases for the checks. You only have to add the `CheckAliases` key to the check's metadata with a list of the aliases:
"Provider": "<provider>",
"CheckID": "<check_id>",
"CheckTitle": "<check_title>",
"CheckAliases": [
"<check_alias_1>"
"<check_alias_2>",
...
],
...
```json title="check.metadata.json"
"Provider": "<provider>",
"CheckID": "<check_id>",
"CheckTitle": "<check_title>",
"CheckAliases": [
"<check_alias_1>"
"<check_alias_2>",
...
],
...
```
Then, you can execute the check either with its check ID or with one of the previous aliases:
```console
```shell
prowler <provider> -c/--checks <check_alias_1>
Using alias <check_alias_1> for check <check_id>...

View File

@@ -21,35 +21,35 @@ prowler <provider> --list-compliance
```
Currently, the available frameworks are:
- `cis_1.4_aws`
- `cis_1.5_aws`
- `cis_2.0_aws`
- `cisa_aws`
- `ens_rd2022_aws`
- `aws_account_security_onboarding_aws`
- `aws_audit_manager_control_tower_guardrails_aws`
- `aws_foundational_security_best_practices_aws`
- `aws_well_architected_framework_reliability_pillar_aws`
- `aws_well_architected_framework_security_pillar_aws`
- `cis_1.4_aws`
- `cis_1.5_aws`
- `cis_2.0_aws`
- `cis_2.0_gcp`
- `cis_3.0_aws`
- `cisa_aws`
- `ens_rd2022_aws`
- `fedramp_low_revision_4_aws`
- `fedramp_moderate_revision_4_aws`
- `ffiec_aws`
- `gdpr_aws`
- `gxp_eu_annex_11_aws`
- `gxp_21_cfr_part_11_aws`
- `gxp_eu_annex_11_aws`
- `hipaa_aws`
- `iso27001_2013_aws`
- `iso27001_2013_aws`
- `mitre_attack_aws`
- `nist_800_171_revision_2_aws`
- `nist_800_53_revision_4_aws`
- `nist_800_53_revision_5_aws`
- `nist_800_171_revision_2_aws`
- `nist_csf_1.1_aws`
- `pci_3.2.1_aws`
- `rbi_cyber_security_framework_aws`
- `soc2_aws`
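To run a scan scoped to one of these frameworks you can use the compliance execution flag (a sketch assuming the `--compliance` option; pick any framework name from the list above):
```shell
prowler aws --compliance cis_2.0_aws
```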
## List Requirements of Compliance Frameworks
For each compliance framework, you can use option `--list-compliance-requirements` to list its requirements:
```sh

View File

@@ -37,13 +37,24 @@ The following list includes all the AWS checks with configurable variables that
## Azure
### Configurable Checks
The following list includes all the Azure checks with configurable variables that can be changed in the configuration yaml file:
| Check Name | Value | Type |
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
| `network_public_ip_shodan` | `shodan_api_key` | String |
| `app_ensure_php_version_is_latest` | `php_latest_version` | String |
| `app_ensure_python_version_is_latest` | `python_latest_version` | String |
| `app_ensure_java_version_is_latest` | `java_latest_version` | String |
## GCP
### Configurable Checks
## Config YAML File Structure
> This is the new Prowler configuration file format. The old one without provider keys is still compatible just for the AWS provider.
???+ note
This is the new Prowler configuration file format. The old one without provider keys is still compatible, but only for the AWS provider.
```yaml title="config.yaml"
# AWS Configuration
@@ -126,6 +137,17 @@ aws:
# Azure Configuration
azure:
# Azure Network Configuration
# azure.network_public_ip_shodan
shodan_api_key: null
# Azure App Configuration
# azure.app_ensure_php_version_is_latest
php_latest_version: "8.2"
# azure.app_ensure_python_version_is_latest
python_latest_version: "3.12"
# azure.app_ensure_java_version_is_latest
java_latest_version: "17"
# GCP Configuration
gcp:

View File

@@ -13,7 +13,8 @@ Otherwise, you can generate and download Service Account keys in JSON format (re
prowler gcp --credentials-file path
```
> `prowler` will scan the GCP project associated with the credentials.
???+ note
`prowler` will scan the GCP project associated with the credentials.
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):

View File

@@ -1,6 +1,7 @@
# Ignore Unused Services
> Currently only available on the AWS provider.
???+ note
Currently only available on the AWS provider.
Prowler allows you to ignore unused services findings, so you can reduce the number of findings in Prowler's reports.
@@ -47,7 +48,7 @@ It is a best practice to encrypt both metadata and connection passwords in AWS G
#### Inspector
Amazon Inspector is a vulnerability discovery service that automates continuous scanning for security vulnerabilities within your Amazon EC2, Amazon ECR, and AWS Lambda environments. Prowler recommends enabling it and resolving all of Inspector's findings. Ignoring the unused services, Prowler will only notify you if there are any Lambda functions, EC2 instances or ECR repositories in the region where Amazon Inspector should be enabled.
- `inspector2_findings_exist`
- `inspector2_is_enabled`
#### Macie
Amazon Macie is a security service that uses machine learning to automatically discover, classify and protect sensitive data in S3 buckets. Prowler will only create a finding when Macie is not enabled if there are S3 buckets in your account.

View File

@@ -10,7 +10,9 @@ prowler <provider> --slack
![Prowler Slack Message](img/slack-prowler-message.png)
> Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables.
???+ note
Slack integration needs the `SLACK_API_TOKEN` and `SLACK_CHANNEL_ID` environment variables.
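A minimal sketch of setting them before a run (both values are placeholders):
```shell
export SLACK_API_TOKEN="xoxb-your-token"    # placeholder
export SLACK_CHANNEL_ID="C0123456789"       # placeholder
prowler aws --slack
```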
### Configuration
To configure the Slack Integration, follow these steps:

View File

@@ -18,7 +18,8 @@ You can establish the log level of Prowler with `--log-level` option:
prowler <provider> --log-level {DEBUG,INFO,WARNING,ERROR,CRITICAL}
```
> By default, Prowler will run with the `CRITICAL` log level, since critical errors will abort the execution.
???+ note
By default, Prowler will run with the `CRITICAL` log level, since critical errors will abort the execution.
## Export Logs to File
@@ -45,4 +46,5 @@ An example of a log file will be the following:
"message": "eu-west-2 -- ClientError[124]: An error occurred (UnauthorizedOperation) when calling the DescribeNetworkAcls operation: You are not authorized to perform this operation."
}
> NOTE: Each finding is represented as a `json` object.
???+ note
Each finding is represented as a `json` object.

View File

@@ -61,21 +61,26 @@ Prowler allows you to include your custom checks with the flag:
```console
prowler <provider> -x/--checks-folder <custom_checks_folder>
```
> S3 URIs are also supported as folders for custom checks, e.g. s3://bucket/prefix/checks_folder/. Make sure that the used credentials have s3:GetObject permissions in the S3 path where the custom checks are located.
???+ note
S3 URIs are also supported as folders for custom checks, e.g. `s3://bucket/prefix/checks_folder/`. Make sure that the used credentials have `s3:GetObject` permissions in the S3 path where the custom checks are located.
The custom checks folder must contain one subfolder per check, each subfolder must be named as the check and must contain:
- An empty `__init__.py`: to make Python treat this check folder as a package.
- A `check_name.py` containing the check's logic.
- A `check_name.metadata.json` containing the check's metadata.
>The check name must start with the service name followed by an underscore (e.g., ec2_instance_public_ip).
???+ note
The check name must start with the service name followed by an underscore (e.g., ec2_instance_public_ip).
To see more information about how to write checks see the [Developer Guide](../developer-guide/checks.md#create-a-new-check-for-a-provider).
> If you want to run ONLY your custom check(s), import it with -x (--checks-folder) and then run it with -c (--checks), e.g.:
```console
prowler aws -x s3://bucket/prowler/providers/aws/services/s3/s3_bucket_policy/ -c s3_bucket_policy
```
???+ note
If you want to run ONLY your custom check(s), import it with -x (--checks-folder) and then run it with -c (--checks), e.g.:
```console
prowler aws -x s3://bucket/prowler/providers/aws/services/s3/s3_bucket_policy/ -c s3_bucket_policy
```
## Severities
Each of Prowler's checks has a severity, which can be:

View File

@@ -113,7 +113,8 @@ You will need to pass the S3 URI where your Mute List YAML file was uploaded to
```
prowler aws -w s3://<bucket>/<prefix>/mutelist.yaml
```
> Make sure that the used AWS credentials have s3:GetObject permissions in the S3 path where the mutelist file is located.
???+ note
Make sure that the used AWS credentials have `s3:GetObject` permissions in the S3 path where the mutelist file is located.
### AWS DynamoDB Table ARN
@@ -138,7 +139,8 @@ The following example will mute all resources in all accounts for the EC2 checks
<img src="../img/mutelist-row.png"/>
> Make sure that the used AWS credentials have `dynamodb:PartiQLSelect` permissions in the table.
???+ note
Make sure that the used AWS credentials have `dynamodb:PartiQLSelect` permissions in the table.
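For example, assuming `-w` accepts the DynamoDB table ARN described in this section, the scan could be launched as follows (account ID, region and table name are illustrative):
```shell
prowler aws -w arn:aws:dynamodb:eu-west-1:123456789012:table/mutelist
```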
### AWS Lambda ARN

View File

@@ -0,0 +1,188 @@
# Parallel Execution
The strategy used here will be to execute Prowler once per service. You can modify this approach as per your requirements.
This can help with really large accounts, but please be aware of AWS API rate limits:
1. **Service-Specific Limits**: Each AWS service has its own rate limits. For instance, Amazon EC2 might have different rate limits for launching instances versus making API calls to describe instances.
2. **API Rate Limits**: Most of the rate limits in AWS are applied at the API level. Each API call to an AWS service counts towards the rate limit for that service.
3. **Throttling Responses**: When you exceed the rate limit for a service, AWS responds with a throttling error. In AWS SDKs, these are typically represented as `ThrottlingException` or `RateLimitExceeded` errors.
For information on Prowler's retrier configuration please refer to this [page](https://docs.prowler.cloud/en/latest/tutorials/aws/boto3-configuration/).
???+ note
You might need to increase the `--aws-retries-max-attempts` parameter from the default value of 3. The retrier follows an exponential backoff strategy.
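For example, raising the retrier's attempts for a parallel run (the value is illustrative):
```shell
prowler aws --aws-retries-max-attempts 10 -s s3 --only-logs
```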
## Linux
Generate a list of services that Prowler supports, and populate this info into a file:
```bash
prowler aws --list-services | awk -F"- " '{print $2}' | sed '/^$/d' > services
```
If you would like to skip scanning certain services, remove them from this file.
Then create a new Bash script file `parallel-prowler.sh` and add the following contents. Update the `profile` variable to the AWS CLI profile you want to run Prowler with.
```bash
#!/bin/bash
# Change these variables as needed
profile="your_profile"
account_id=$(aws sts get-caller-identity --profile "${profile}" --query 'Account' --output text)
echo "Executing in account: ${account_id}"
# Maximum number of concurrent processes
MAX_PROCESSES=5
# Loop through the services
while read service; do
echo "$(date '+%Y-%m-%d %H:%M:%S'): Starting job for service: ${service}"
# Run the command in the background
(prowler -p "$profile" -s "$service" -F "${account_id}-${service}" --ignore-unused-services --only-logs; echo "$(date '+%Y-%m-%d %H:%M:%S') - ${service} has completed") &
# Check if we have reached the maximum number of processes
while [ $(jobs -r | wc -l) -ge ${MAX_PROCESSES} ]; do
# Wait for a second before checking again
sleep 1
done
done < ./services
# Wait for all background processes to finish
wait
echo "All jobs completed"
```
Output will be stored in the `output/` folder that is in the same directory from which you executed the script.
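Assuming the `services` file sits next to the script, it can be run as:
```shell
chmod +x parallel-prowler.sh
./parallel-prowler.sh
```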
## Windows
Generate a list of services that Prowler supports, and populate this info into a file:
```powershell
prowler aws --list-services | ForEach-Object {
# Capture lines that are likely service names
if ($_ -match '^\- \w+$') {
$_.Trim().Substring(2)
}
} | Where-Object {
# Filter out empty or null lines
$_ -ne $null -and $_ -ne ''
} | Set-Content -Path "services"
```
If you would like to skip scanning certain services, remove them from this file.
Then create a new PowerShell script file `parallel-prowler.ps1` and add the following contents. Update the `$profile` variable to the AWS CLI profile you want to run Prowler with.
Change any parameters you would like when calling Prowler in the `Start-Job -ScriptBlock` section. Note that you need to keep the `--only-logs` parameter, otherwise an encoding issue occurs when trying to render the progress bar and Prowler won't execute successfully.
```powershell
$profile = "your_profile"
$account_id = Invoke-Expression -Command "aws sts get-caller-identity --profile $profile --query 'Account' --output text"
Write-Host "Executing Prowler in $account_id"
# Maximum number of concurrent jobs
$MAX_PROCESSES = 5
# Read services from a file
$services = Get-Content -Path "services"
# Array to keep track of started jobs
$jobs = @()
foreach ($service in $services) {
# Start the command as a job
$job = Start-Job -ScriptBlock {
prowler -p ${using:profile} -s ${using:service} -F "${using:account_id}-${using:service}" --ignore-unused-services --only-logs
$endTimestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
Write-Output "${endTimestamp} - $using:service has completed"
}
$jobs += $job
Write-Host "$(Get-Date -Format 'yyyy-MM-dd HH:mm:ss') - Starting job for service: $service"
# Check if we have reached the maximum number of jobs
while (($jobs | Where-Object { $_.State -eq 'Running' }).Count -ge $MAX_PROCESSES) {
Start-Sleep -Seconds 1
# Check for any completed jobs and receive their output
$completedJobs = $jobs | Where-Object { $_.State -eq 'Completed' }
foreach ($completedJob in $completedJobs) {
Receive-Job -Job $completedJob -Keep | ForEach-Object { Write-Host $_ }
$jobs = $jobs | Where-Object { $_.Id -ne $completedJob.Id }
Remove-Job -Job $completedJob
}
}
}
# Check for any remaining completed jobs
$remainingCompletedJobs = $jobs | Where-Object { $_.State -eq 'Completed' }
foreach ($remainingJob in $remainingCompletedJobs) {
Receive-Job -Job $remainingJob -Keep | ForEach-Object { Write-Host $_ }
Remove-Job -Job $remainingJob
}
Write-Host "$(Get-Date -Format 'yyyy-MM-dd HH:mm:ss') - All jobs completed"
```
Output will be stored in `C:\Users\YOUR-USER\Documents\output\`
## Combining the output files
Guidance is provided for the CSV file format. From the output directory, execute either the following Bash or PowerShell script. The script will collect the output from the CSV files, only include the header from the first file, and then output the result as CombinedCSV.csv in the current working directory.
There is no logic implemented in terms of which CSV files it will combine. If you have additional CSV files from other actions, such as running a quick inventory, you will need to move that out of the current (or any nested) directory, or move the output you want to combine into its own folder and run the script from there.
```bash
#!/bin/bash
# Initialize a variable to indicate the first file
firstFile=true
# Find all CSV files and loop through them
find . -name "*.csv" -print0 | while IFS= read -r -d '' file; do
if [ "$firstFile" = true ]; then
# For the first file, keep the header
cat "$file" > CombinedCSV.csv
firstFile=false
else
# For subsequent files, skip the header
tail -n +2 "$file" >> CombinedCSV.csv
fi
done
```
```powershell
# Get all CSV files from current directory and its subdirectories
$csvFiles = Get-ChildItem -Recurse -Filter "*.csv"
# Initialize a variable to track if it's the first file
$firstFile = $true
# Loop through each CSV file
foreach ($file in $csvFiles) {
if ($firstFile) {
# For the first file, keep the header and change the flag
$combinedCsv = Import-Csv -Path $file.FullName
$firstFile = $false
} else {
# For subsequent files, append the data rows; Import-Csv already strips the header row
$tempCsv = Import-Csv -Path $file.FullName
$combinedCsv += $tempCsv
}
}
# Export the combined data to a new CSV file
$combinedCsv | Export-Csv -Path "CombinedCSV.csv" -NoTypeInformation
```
## TODO: Additional Improvements
Some services need to instantiate another service to perform a check. For instance, `cloudwatch` will instantiate Prowler's `iam` service to perform the `cloudwatch_cross_account_sharing_disabled` check. When the `iam` service is instantiated, it will run its `__init__` function and pull all the information required for that service. This provides an opportunity to improve the above script by grouping related services together, so that the `iam` service (or any other cross-service reference) isn't repeatedly instantiated. A complete mapping between these services still needs to be investigated further, but these are the cross-references that have been noted:
* inspector2 needs lambda and ec2
* cloudwatch needs iam
* dlm needs ec2

View File

@@ -50,6 +50,7 @@ Several checks analyse resources that are exposed to the Internet, these are:
- sagemaker_notebook_instance_without_direct_internet_access_configured
- sns_topics_not_publicly_accessible
- sqs_queues_not_publicly_accessible
- network_public_ip_shodan
...
@@ -64,5 +65,9 @@ prowler <provider> --categories internet-exposed
Prowler allows you to check if any Elastic IP in your AWS account is exposed in Shodan with the `-N`/`--shodan <shodan_api_key>` option:
```console
prowler aws --shodan <shodan_api_key> -c ec2_elastic_ip_shodan
prowler aws -N/--shodan <shodan_api_key> -c ec2_elastic_ip_shodan
```
Also, you can check if any of your Azure subscriptions has a public IP exposed in Shodan:
```console
prowler azure -N/--shodan <shodan_api_key> -c network_public_ip_shodan
```

View File

@@ -1,14 +1,18 @@
# Quick Inventory
Prowler allows you to execute a quick inventory to extract the number of resources in your provider.
> Currently, it is only available for AWS provider.
???+ note
Currently, it is only available for the AWS provider.
- You can use option `-i`/`--quick-inventory` to execute it:
```sh
prowler <provider> -i
```
> By default, it extracts resources from all the regions, you could use `-f`/`--filter-region` to specify the regions to execute the analysis.
???+ note
By default, it extracts resources from all the regions; you can use `-f`/`--filter-region` to specify the regions where to run the analysis.
- This feature reports both the number of resources for each service and for each resource type.
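For instance, limiting the inventory to a couple of regions (region names are illustrative):
```shell
prowler aws -i -f eu-west-1 us-east-1
```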

View File

@@ -19,11 +19,12 @@ prowler <provider> -M csv json json-asff html -F <custom_report_name>
```console
prowler <provider> -M csv json json-asff html -o <custom_report_directory>
```
> Both flags can be used simultaneously to provide a custom directory and filename.
```console
prowler <provider> -M csv json json-asff html \
-F <custom_report_name> -o <custom_report_directory>
```
???+ note
Both flags can be used simultaneously to provide a custom directory and filename.
```console
prowler <provider> -M csv json json-asff html \
-F <custom_report_name> -o <custom_report_directory>
```
## Output timestamp format
By default, the timestamp format of the output files is ISO 8601. This can be changed with the flag `--unix-timestamp`, which generates the timestamp fields in pure Unix timestamp format.
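For example, combining it with a CSV output:
```shell
prowler aws --unix-timestamp -M csv
```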
@@ -41,50 +42,74 @@ Hereunder is the structure for each of the supported report formats by Prowler:
### HTML
![HTML Output](../img/output-html.png)
### CSV
The following are the columns present in the CSV format:
The CSV format has a set of columns common to all the providers, followed by provider-specific columns.
The common columns are the following:
- ASSESSMENT_START_TIME
- FINDING_UNIQUE_ID
- PROVIDER
- CHECK_ID
- CHECK_TITLE
- CHECK_TYPE
- STATUS
- STATUS_EXTENDED
- SERVICE_NAME
- SUBSERVICE_NAME
- SEVERITY
- RESOURCE_TYPE
- RESOURCE_DETAILS
- RESOURCE_TAGS
- DESCRIPTION
- RISK
- RELATED_URL
- REMEDIATION_RECOMMENDATION_TEXT
- REMEDIATION_RECOMMENDATION_URL
- REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC
- REMEDIATION_RECOMMENDATION_CODE_TERRAFORM
- REMEDIATION_RECOMMENDATION_CODE_CLI
- REMEDIATION_RECOMMENDATION_CODE_OTHER
- COMPLIANCE
- CATEGORIES
- DEPENDS_ON
- RELATED_TO
- NOTES
And then the provider-specific columns:
#### AWS
- PROFILE
- ACCOUNT_ID
- ACCOUNT_NAME
- ACCOUNT_EMAIL
- ACCOUNT_ARN
- ACCOUNT_ORG
- ACCOUNT_TAGS
- REGION
- CHECK_ID
- CHECK_TITLE
- CHECK_TYPE
- STATUS
- STATUS_EXTENDED
- SERVICE_NAME
- SUBSERVICE_NAME
- SEVERITY
- RESOURCE_ID
- RESOURCE_ARN
- RESOURCE_TYPE
- RESOURCE_DETAILS
- RESOURCE_TAGS
- DESCRIPTION
- COMPLIANCE
- RISK
- RELATED_URL
- REMEDIATION_RECOMMENDATION_TEXT
- REMEDIATION_RECOMMENDATION_URL
- REMEDIATION_RECOMMENDATION_CODE_NATIVEIAC
- REMEDIATION_RECOMMENDATION_CODE_TERRAFORM
- REMEDIATION_RECOMMENDATION_CODE_CLI
- REMEDIATION_RECOMMENDATION_CODE_OTHER
- CATEGORIES
- DEPENDS_ON
- RELATED_TO
- NOTES
- ACCOUNT_NAME
- ACCOUNT_EMAIL
- ACCOUNT_ARN
- ACCOUNT_ORG
- ACCOUNT_TAGS
- REGION
- RESOURCE_ID
- RESOURCE_ARN
#### AZURE
- TENANT_DOMAIN
- SUBSCRIPTION
- RESOURCE_ID
- RESOURCE_NAME
#### GCP
- PROJECT_ID
- LOCATION
- RESOURCE_ID
- RESOURCE_NAME
???+ note
Since Prowler v3 the CSV column delimiter is the semicolon (`;`).
> Since Prowler v3 the CSV column delimiter is the semicolon (`;`)
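For quick ad-hoc analysis the CSV can be parsed with standard tools; a sketch (the STATUS column position is an assumption based on the common-column order above and may vary):
```shell
# Count findings per status; semicolon is the delimiter, column 7 is assumed to be STATUS
awk -F';' 'NR > 1 {print $7}' output.csv | sort | uniq -c
```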
### JSON
The following code is an example output of the JSON format:
@@ -181,7 +206,8 @@ The following code is an example output of the JSON format:
}]
```
> NOTE: Each finding is a `json` object within a list. This has changed in v3 since in v2 the format used was [ndjson](http://ndjson.org/).
???+ note
Each finding is a `json` object within a list. This has changed in v3 since in v2 the format used was [ndjson](http://ndjson.org/).
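Since the output is a single JSON list, it can be filtered with `jq`; a sketch (the `Status` and `CheckID` field names are assumed from the example above, and the file name is illustrative):
```shell
# List the CheckID of every failed finding
jq -r '.[] | select(.Status == "FAIL") | .CheckID' output.json
```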
### JSON-OCSF
@@ -442,7 +468,9 @@ Based on [Open Cybersecurity Schema Framework Security Finding v1.0.0-rc.3](http
}]
```
> NOTE: Each finding is a `json` object.
???+ note
Each finding is a `json` object.
### JSON-ASFF
The following code is an example output of the [JSON-ASFF](https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html) format:
@@ -575,4 +603,5 @@ The following code is an example output of the [JSON-ASFF](https://docs.aws.amaz
}]
```
> NOTE: Each finding is a `json` object within a list.
???+ note
Each finding is a `json` object within a list.

View File

@@ -1,15 +1,15 @@
# Project information
site_name: Prowler Documentation
site_url: https://docs.prowler.pro/
site_name: Prowler Open Source Documentation
site_url: https://docs.prowler.com/
site_description: >-
Prowler Documentation Site
Prowler Open Source Documentation
# Theme Configuration
theme:
language: en
logo: img/prowler-logo.png
logo: img/prowler-logo-white.png
name: material
favicon: img/prowler-icon.svg
favicon: favicon.ico
features:
- navigation.tabs
- navigation.tabs.sticky
@@ -19,6 +19,11 @@ theme:
primary: black
accent: green
plugins:
- search
- git-revision-date-localized:
enable_creation_date: true
edit_uri: "https://github.com/prowler-cloud/prowler/tree/master/docs"
# Prowler OSS Repository
repo_url: https://github.com/prowler-cloud/prowler/
@@ -41,6 +46,7 @@ nav:
- Custom Metadata: tutorials/custom-checks-metadata.md
- Ignore Unused Services: tutorials/ignore-unused-services.md
- Pentesting: tutorials/pentesting.md
- Parallel Execution: tutorials/parallel-execution.md
- Developer Guide: developer-guide/introduction.md
- AWS:
- Authentication: tutorials/aws/authentication.md
@@ -73,11 +79,13 @@ nav:
- Testing:
- Unit Tests: developer-guide/unit-testing.md
- Integration Tests: developer-guide/integration-testing.md
- Debugging: developer-guide/debugging.md
- Security: security.md
- Contact Us: contact.md
- Troubleshooting: troubleshooting.md
- About: about.md
- ProwlerPro: https://prowler.pro
- Prowler SaaS: https://prowler.com
# Customization
extra:
consent:
@@ -101,11 +109,15 @@ extra:
link: https://twitter.com/prowlercloud
# Copyright
copyright: Copyright &copy; 2022 Toni de la Fuente, Maintained by the Prowler Team at Verica, Inc.</a>
copyright: >
Copyright &copy; <script>document.write(new Date().getFullYear())</script> Toni de la Fuente, Maintained by the Prowler Team at ProwlerPro, Inc.</a>
</br><a href="#__consent">Change cookie settings</a>
markdown_extensions:
- abbr
- admonition
- pymdownx.details
- pymdownx.superfences
- attr_list
- def_list
- footnotes
@@ -119,8 +131,8 @@ markdown_extensions:
- pymdownx.caret
- pymdownx.details
- pymdownx.emoji:
emoji_generator: !!python/name:materialx.emoji.to_svg
emoji_index: !!python/name:materialx.emoji.twemoji
emoji_index: !!python/name:material.extensions.emoji.twemoji
emoji_generator: !!python/name:material.extensions.emoji.to_svg
- pymdownx.highlight:
anchor_linenums: true
- pymdownx.inlinehilite

3424
poetry.lock generated

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -468,27 +468,6 @@
},
{
"Id": "2.1.1",
"Description": "Ensure all S3 buckets employ encryption-at-rest",
"Checks": [
"s3_bucket_default_encryption"
],
"Attributes": [
{
"Section": "2.1. Simple Storage Service (S3)",
"Profile": "Level 2",
"AssessmentStatus": "Automated",
"Description": "Amazon S3 provides a variety of no, or low, cost encryption options to protect data at rest.",
"RationaleStatement": "Encrypting data at rest reduces the likelihood that it is unintentionally exposed and can nullify the impact of disclosure if the encryption remains unbroken.",
"ImpactStatement": "Amazon S3 buckets with default bucket encryption using SSE-KMS cannot be used as destination buckets for Amazon S3 server access logging. Only SSE-S3 default encryption is supported for server access log destination buckets.",
"RemediationProcedure": "**From Console:** 1. Login to AWS Management Console and open the Amazon S3 console using https://console.aws.amazon.com/s3/ 2. Select a Bucket. 3. Click on 'Properties'. 4. Click edit on `Default Encryption`. 5. Select either `AES-256`, `AWS-KMS`, `SSE-KMS` or `SSE-S3`. 6. Click `Save` 7. Repeat for all the buckets in your AWS account lacking encryption. **From Command Line:** Run either ``` aws s3api put-bucket-encryption --bucket <bucket name> --server-side-encryption-configuration '{\"Rules\": [{\"ApplyServerSideEncryptionByDefault\": {\"SSEAlgorithm\": \"AES256\"}}]}' ``` or ``` aws s3api put-bucket-encryption --bucket <bucket name> --server-side-encryption-configuration '{\"Rules\": [{\"ApplyServerSideEncryptionByDefault\": {\"SSEAlgorithm\": \"aws:kms\",\"KMSMasterKeyID\": \"aws/s3\"}}]}' ``` **Note:** the KMSMasterKeyID can be set to the master key of your choosing; aws/s3 is an AWS preconfigured default.",
"AuditProcedure": "**From Console:** 1. Login to AWS Management Console and open the Amazon S3 console using https://console.aws.amazon.com/s3/ 2. Select a Bucket. 3. Click on 'Properties'. 4. Verify that `Default Encryption` is enabled, and displays either `AES-256`, `AWS-KMS`, `SSE-KMS` or `SSE-S3`. 5. Repeat for all the buckets in your AWS account. **From Command Line:** 1. Run command to list buckets ``` aws s3 ls ``` 2. For each bucket, run ``` aws s3api get-bucket-encryption --bucket <bucket name> ``` 3. Verify that either ``` \"SSEAlgorithm\": \"AES256\" ``` or ``` \"SSEAlgorithm\": \"aws:kms\"``` is displayed.",
"AdditionalInformation": "S3 bucket encryption only applies to objects as they are placed in the bucket. Enabling S3 bucket encryption does **not** encrypt objects previously stored within the bucket.",
"References": "https://docs.aws.amazon.com/AmazonS3/latest/user-guide/default-bucket-encryption.html:https://docs.aws.amazon.com/AmazonS3/latest/dev/bucket-encryption.html#bucket-encryption-related-resources"
}
]
},
{
"Id": "2.1.2",
"Description": "Ensure S3 Bucket Policy is set to deny HTTP requests",
"Checks": [
"s3_bucket_secure_transport_policy"
@@ -509,7 +488,7 @@
]
},
{
"Id": "2.1.3",
"Id": "2.1.2",
"Description": "Ensure MFA Delete is enabled on S3 buckets",
"Checks": [
"s3_bucket_no_mfa_delete"
@@ -530,7 +509,7 @@
]
},
{
"Id": "2.1.4",
"Id": "2.1.3",
"Description": "Ensure all data in Amazon S3 has been discovered, classified and secured when required.",
"Checks": [
"macie_is_enabled"
@@ -551,7 +530,7 @@
]
},
{
"Id": "2.1.5",
"Id": "2.1.4",
"Description": "Ensure that S3 Buckets are configured with 'Block public access (bucket settings)'",
"Checks": [
"s3_bucket_level_public_access_block",

File diff suppressed because it is too large Load Diff

View File

@@ -211,6 +211,31 @@
"iam_avoid_root_usage"
]
},
{
"Id": "op.acc.4.aws.iam.8",
"Description": "Proceso de gestión de derechos de acceso",
"Attributes": [
{
"IdGrupoControl": "op.acc.4",
"Marco": "operacional",
"Categoria": "control de acceso",
"DescripcionControl": "Se restringirá todo acceso a las acciones especificadas para el usuario root de una cuenta.",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"organizations_account_part_of_organizations",
"organizations_scp_check_deny_regions"
]
},
{
"Id": "op.acc.4.aws.iam.9",
"Description": "Proceso de gestión de derechos de acceso",
@@ -789,7 +814,8 @@
}
],
"Checks": [
"inspector2_findings_exist"
"inspector2_is_enabled",
"inspector2_active_findings_exist"
]
},
{
@@ -1121,6 +1147,30 @@
"cloudtrail_insights_exist"
]
},
{
"Id": "op.exp.8.r1.aws.ct.3",
"Description": "Revisión de los registros",
"Attributes": [
{
"IdGrupoControl": "op.exp.8.r1",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Registrar los eventos de lectura y escritura de datos.",
"Nivel": "alto",
"Tipo": "refuerzo",
"Dimensiones": [
"trazabilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"cloudwatch_log_metric_filter_and_alarm_for_cloudtrail_configuration_changes_enabled",
"cloudtrail_s3_dataevents_write_enabled",
"cloudtrail_s3_dataevents_read_enabled",
"cloudtrail_insights_exist"
]
},
{
"Id": "op.exp.8.r1.aws.ct.4",
"Description": "Revisión de los registros",
@@ -1233,6 +1283,33 @@
"iam_role_cross_service_confused_deputy_prevention"
]
},
{
"Id": "op.exp.8.r4.aws.ct.1",
"Description": "Control de acceso",
"Attributes": [
{
"IdGrupoControl": "op.exp.8.r4",
"Marco": "operacional",
"Categoria": "explotación",
"DescripcionControl": "Asignar correctamente las políticas AWS IAM para el acceso y borrado de los registros y sus copias de seguridad haciendo uso del principio de mínimo privilegio.",
"Nivel": "alto",
"Tipo": "refuerzo",
"Dimensiones": [
"trazabilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"iam_policy_allows_privilege_escalation",
"iam_customer_attached_policy_no_administrative_privileges",
"iam_customer_unattached_policy_no_administrative_privilege",
"iam_no_custom_policy_permissive_role_assumption",
"iam_policy_attached_only_to_group_or_roles",
"iam_role_cross_service_confused_deputy_prevention",
"iam_policy_no_full_access_to_cloudtrail"
]
},
{
"Id": "op.exp.8.r4.aws.ct.2",
"Description": "Control de acceso",
@@ -1859,7 +1936,8 @@
}
],
"Checks": [
"inspector2_findings_exist"
"inspector2_is_enabled",
"inspector2_active_findings_exist"
]
},
{
@@ -1934,7 +2012,8 @@
}
],
"Checks": [
"inspector2_findings_exist"
"inspector2_is_enabled",
"inspector2_active_findings_exist"
]
},
{
@@ -2110,7 +2189,7 @@
}
],
"Checks": [
"networkfirewall_in_all_vpc"
"fms_policy_compliant"
]
},
{
@@ -2251,6 +2330,31 @@
"cloudfront_distributions_https_enabled"
]
},
{
"Id": "mp.com.4.aws.ws.1",
"Description": "Separación de flujos de información en la red",
"Attributes": [
{
"IdGrupoControl": "mp.com.4",
"Marco": "medidas de protección",
"Categoria": "segregación de redes",
"DescripcionControl": "Se deberán abrir solo los puertos necesarios para el uso del servicio AWS WorkSpaces.",
"Nivel": "alto",
"Tipo": "requisito",
"Dimensiones": [
"confidencialidad",
"integridad",
"trazabilidad",
"autenticidad",
"disponibilidad"
],
"ModoEjecucion": "automático"
}
],
"Checks": [
"workspaces_vpc_2private_1public_subnets_nat"
]
},
{
"Id": "mp.com.4.aws.vpc.1",
"Description": "Separación de flujos de información en la red",
@@ -2323,7 +2427,8 @@
}
],
"Checks": [
"vpc_subnet_separate_private_public"
"vpc_subnet_separate_private_public",
"vpc_different_regions"
]
},
{
@@ -2370,7 +2475,8 @@
}
],
"Checks": [
"vpc_subnet_different_az"
"vpc_subnet_different_az",
"vpc_different_regions"
]
},
{

View File

@@ -29,7 +29,8 @@
"securityhub_enabled",
"elbv2_waf_acl_attached",
"guardduty_is_enabled",
"inspector2_findings_exist",
"inspector2_is_enabled",
"inspector2_active_findings_exist",
"awslambda_function_not_publicly_accessible",
"ec2_instance_public_ip"
],
@@ -576,7 +577,8 @@
"config_recorder_all_regions_enabled",
"securityhub_enabled",
"guardduty_is_enabled",
"inspector2_findings_exist"
"inspector2_is_enabled",
"inspector2_active_findings_exist"
],
"Attributes": [
{
@@ -737,7 +739,8 @@
"iam_user_hardware_mfa_enabled",
"iam_user_mfa_enabled_console_access",
"securityhub_enabled",
"inspector2_findings_exist"
"inspector2_is_enabled",
"inspector2_active_findings_exist"
],
"Attributes": [
{
@@ -1892,7 +1895,8 @@
"networkfirewall_in_all_vpc",
"elbv2_waf_acl_attached",
"guardduty_is_enabled",
"inspector2_findings_exist",
"inspector2_is_enabled",
"inspector2_active_findings_exist",
"ec2_networkacl_allow_ingress_any_port",
"ec2_networkacl_allow_ingress_tcp_port_22",
"ec2_networkacl_allow_ingress_tcp_port_3389",

View File

@@ -13,7 +13,7 @@
"ItemId": "cc_1_1",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -27,7 +27,7 @@
"ItemId": "cc_1_2",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -41,7 +41,7 @@
"ItemId": "cc_1_3",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -62,7 +62,7 @@
"ItemId": "cc_1_4",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -76,7 +76,7 @@
"ItemId": "cc_1_5",
"Section": "CC1.0 - Common Criteria Related to Control Environment",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -90,7 +90,7 @@
"ItemId": "cc_2_1",
"Section": "CC2.0 - Common Criteria Related to Communication and Information",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -109,7 +109,7 @@
"ItemId": "cc_2_2",
"Section": "CC2.0 - Common Criteria Related to Communication and Information",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -123,7 +123,7 @@
"ItemId": "cc_2_3",
"Section": "CC2.0 - Common Criteria Related to Communication and Information",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -137,7 +137,7 @@
"ItemId": "cc_3_1",
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -155,7 +155,7 @@
"ItemId": "cc_3_2",
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -175,7 +175,7 @@
"ItemId": "cc_3_3",
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -189,7 +189,7 @@
"ItemId": "cc_3_4",
"Section": "CC3.0 - Common Criteria Related to Risk Assessment",
"Service": "config",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -205,7 +205,7 @@
"ItemId": "cc_4_1",
"Section": "CC4.0 - Monitoring Activities",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -219,7 +219,7 @@
"ItemId": "cc_4_2",
"Section": "CC4.0 - Monitoring Activities",
"Service": "guardduty",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -236,7 +236,7 @@
"ItemId": "cc_5_1",
"Section": "CC5.0 - Control Activities",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -250,7 +250,7 @@
"ItemId": "cc_5_2",
"Section": "CC5.0 - Control Activities",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -264,7 +264,7 @@
"ItemId": "cc_5_3",
"Section": "CC5.0 - Control Activities",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -278,7 +278,7 @@
"ItemId": "cc_6_1",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "s3",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -294,7 +294,7 @@
"ItemId": "cc_6_2",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "rds",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -310,7 +310,7 @@
"ItemId": "cc_6_3",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "iam",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -328,7 +328,7 @@
"ItemId": "cc_6_4",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -342,7 +342,7 @@
"ItemId": "cc_6_5",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -356,7 +356,7 @@
"ItemId": "cc_6_6",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "ec2",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -372,7 +372,7 @@
"ItemId": "cc_6_7",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "acm",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -388,7 +388,7 @@
"ItemId": "cc_6_8",
"Section": "CC6.0 - Logical and Physical Access",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -405,7 +405,7 @@
"ItemId": "cc_7_1",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -424,7 +424,7 @@
"ItemId": "cc_7_2",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -460,7 +460,7 @@
"ItemId": "cc_7_3",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -492,7 +492,7 @@
"ItemId": "cc_7_4",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -523,7 +523,7 @@
"ItemId": "cc_7_5",
"Section": "CC7.0 - System Operations",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -537,7 +537,7 @@
"ItemId": "cc_8_1",
"Section": "CC8.0 - Change Management",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -553,7 +553,7 @@
"ItemId": "cc_9_1",
"Section": "CC9.0 - Risk Mitigation",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -567,7 +567,7 @@
"ItemId": "cc_9_2",
"Section": "CC9.0 - Risk Mitigation",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -581,7 +581,7 @@
"ItemId": "cc_a_1_1",
"Section": "CCA1.0 - Additional Criterial for Availability",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -595,7 +595,7 @@
"ItemId": "cc_a_1_2",
"Section": "CCA1.0 - Additional Criterial for Availability",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -626,7 +626,7 @@
"ItemId": "cc_a_1_3",
"Section": "CCA1.0 - Additional Criterial for Availability",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -640,7 +640,7 @@
"ItemId": "cc_c_1_1",
"Section": "CCC1.0 - Additional Criterial for Confidentiality",
"Service": "aws",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -656,7 +656,7 @@
"ItemId": "cc_c_1_2",
"Section": "CCC1.0 - Additional Criterial for Confidentiality",
"Service": "s3",
"Soc_Type": "automated"
"Type": "automated"
}
],
"Checks": [
@@ -672,7 +672,7 @@
"ItemId": "p_1_1",
"Section": "P1.0 - Privacy Criteria Related to Notice and Communication of Objectives Related to Privacy",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -686,7 +686,7 @@
"ItemId": "p_2_1",
"Section": "P2.0 - Privacy Criteria Related to Choice and Consent",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -700,7 +700,7 @@
"ItemId": "p_3_1",
"Section": "P3.0 - Privacy Criteria Related to Collection",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -714,7 +714,7 @@
"ItemId": "p_3_2",
"Section": "P3.0 - Privacy Criteria Related to Collection",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -728,7 +728,7 @@
"ItemId": "p_4_1",
"Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -742,7 +742,7 @@
"ItemId": "p_4_2",
"Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -756,7 +756,7 @@
"ItemId": "p_4_3",
"Section": "P4.0 - Privacy Criteria Related to Use, Retention, and Disposal",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -770,7 +770,7 @@
"ItemId": "p_5_1",
"Section": "P5.0 - Privacy Criteria Related to Access",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -784,7 +784,7 @@
"ItemId": "p_5_2",
"Section": "P5.0 - Privacy Criteria Related to Access",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -798,7 +798,7 @@
"ItemId": "p_6_1",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -812,7 +812,7 @@
"ItemId": "p_6_2",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -826,7 +826,7 @@
"ItemId": "p_6_3",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -840,7 +840,7 @@
"ItemId": "p_6_4",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -854,7 +854,7 @@
"ItemId": "p_6_5",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -868,7 +868,7 @@
"ItemId": "p_6_6",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -882,7 +882,7 @@
"ItemId": "p_6_7",
"Section": "P6.0 - Privacy Criteria Related to Disclosure and Notification",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -896,7 +896,7 @@
"ItemId": "p_7_1",
"Section": "P7.0 - Privacy Criteria Related to Quality",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []
@@ -910,7 +910,7 @@
"ItemId": "p_8_1",
"Section": "P8.0 - Privacy Criteria Related to Monitoring and Enforcement",
"Service": "aws",
"Soc_Type": "manual"
"Type": "manual"
}
],
"Checks": []

View File

@@ -38,6 +38,9 @@ Mute List:
- "aws-controltower-ReadOnlyExecutionRole"
- "AWSControlTower_VPCFlowLogsRole"
- "AWSControlTowerExecution"
- "AWSAFTAdmin"
- "AWSAFTExecution"
- "AWSAFTService"
"iam_policy_*":
Regions:
- "*"

View File

@@ -11,7 +11,7 @@ from prowler.lib.logger import logger
timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "3.11.3"
prowler_version = "3.14.0"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
html_logo_img = "https://user-images.githubusercontent.com/3985464/113734260-7ba06900-96fb-11eb-82bc-d4f68a1e2710.png"
square_logo_img = "https://user-images.githubusercontent.com/38561120/235905862-9ece5bd7-9aa3-4e48-807a-3a9035eb8bfb.png"
@@ -23,6 +23,7 @@ orange_color = "\033[38;5;208m"
banner_color = "\033[1;92m"
finding_statuses = ["PASS", "FAIL", "MANUAL"]
valid_severities = ["critical", "high", "medium", "low", "informational"]
# Compliance
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))

View File

@@ -1,11 +1,10 @@
# AWS Configuration
aws:
# AWS Global Configuration
# aws.mute_non_default_regions --> Set to True to mute failed findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
mute_non_default_regions: False
# If you want to mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w mutelist.yaml`:
# Mute List:
# aws.allowlist_non_default_regions --> Set to True to allowlist failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
allowlist_non_default_regions: False
# If you want to allowlist/mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w allowlist.yaml`:
# Allowlist:
# Accounts:
# "*":
# Checks:
@@ -69,8 +68,8 @@ aws:
# AWS Organizations
# organizations_scp_check_deny_regions
# organizations_enabled_regions: [
# 'eu-central-1',
# 'eu-west-1',
# "eu-central-1",
# "eu-west-1",
# "us-east-1"
# ]
organizations_enabled_regions: []
@@ -89,13 +88,23 @@ aws:
# Azure Configuration
azure:
# Azure Network Configuration
# azure.network_public_ip_shodan
shodan_api_key: null
# Azure App Service
# azure.app_ensure_php_version_is_latest
php_latest_version: "8.2"
# azure.app_ensure_python_version_is_latest
python_latest_version: "3.12"
# azure.app_ensure_java_version_is_latest
java_latest_version: "17"
# GCP Configuration
gcp:
# Kubernetes Configuration
kubernetes:
# Kubernetes API Server
# apiserver_audit_log_maxbackup_set
audit_log_maxbackup: 10

View File

@@ -4,7 +4,7 @@ from prowler.config.config import banner_color, orange_color, prowler_version, t
def print_banner(args):
banner = f"""{banner_color} _
banner = rf"""{banner_color} _
_ __ _ __ _____ _| | ___ _ __
| '_ \| '__/ _ \ \ /\ / / |/ _ \ '__|
| |_) | | | (_) \ V V /| | __/ |

View File

@@ -68,9 +68,9 @@ def bulk_load_compliance_frameworks(provider: str) -> dict:
# cis_v1.4_aws.json --> cis_v1.4_aws
compliance_framework_name = filename.split(".json")[0]
# Store the compliance info
bulk_compliance_frameworks[
compliance_framework_name
] = load_compliance_framework(file_path)
bulk_compliance_frameworks[compliance_framework_name] = (
load_compliance_framework(file_path)
)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
@@ -108,14 +108,20 @@ def exclude_services_to_run(
# Load checks from checklist.json
def parse_checks_from_file(input_file: str, provider: str) -> set:
checks_to_execute = set()
with open_file(input_file) as f:
json_file = parse_json_file(f)
"""parse_checks_from_file returns a set of checks read from the given file"""
try:
checks_to_execute = set()
with open_file(input_file) as f:
json_file = parse_json_file(f)
for check_name in json_file[provider]:
checks_to_execute.add(check_name)
for check_name in json_file[provider]:
checks_to_execute.add(check_name)
return checks_to_execute
return checks_to_execute
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
# Load checks from custom folder
@@ -212,7 +218,7 @@ def print_categories(categories: set):
singular_string = f"\nThere is {Fore.YELLOW}{categories_num}{Style.RESET_ALL} available category.\n"
message = plural_string if categories_num > 1 else singular_string
for category in categories:
for category in sorted(categories):
print(f"- {category}")
print(message)
@@ -241,7 +247,7 @@ def print_compliance_frameworks(
singular_string = f"\nThere is {Fore.YELLOW}{frameworks_num}{Style.RESET_ALL} available Compliance Framework.\n"
message = plural_string if frameworks_num > 1 else singular_string
for framework in bulk_compliance_frameworks.keys():
for framework in sorted(bulk_compliance_frameworks.keys()):
print(f"- {framework}")
print(message)
@@ -311,7 +317,7 @@ def print_checks(
def parse_checks_from_compliance_framework(
compliance_frameworks: list, bulk_compliance_frameworks: dict
) -> list:
"""Parse checks from compliance frameworks specification"""
"""parse_checks_from_compliance_framework returns a set of checks from the given compliance_frameworks"""
checks_to_execute = set()
try:
for framework in compliance_frameworks:
@@ -611,22 +617,32 @@ def update_audit_metadata(
)
def recover_checks_from_service(service_list: list, provider: str) -> list:
checks = set()
service_list = [
"awslambda" if service == "lambda" else service for service in service_list
]
for service in service_list:
modules = recover_checks_from_provider(provider, service)
if not modules:
logger.error(f"Service '{service}' does not have checks.")
def recover_checks_from_service(service_list: list, provider: str) -> set:
"""
Recover all checks from the selected provider and service
else:
for check_module in modules:
# Recover check name and module name from import path
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check_module[0].split(".")[-1]
# If the service is present in the group list passed as parameters
# if service_name in group_list: checks_from_arn.add(check_name)
checks.add(check_name)
return checks
Returns a set of checks from the given services
"""
try:
checks = set()
service_list = [
"awslambda" if service == "lambda" else service for service in service_list
]
for service in service_list:
service_checks = recover_checks_from_provider(provider, service)
if not service_checks:
logger.error(f"Service '{service}' does not have checks.")
else:
for check in service_checks:
# Recover check name and module name from import path
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check[0].split(".")[-1]
# If the service is present in the group list passed as parameters
# if service_name in group_list: checks_from_arn.add(check_name)
checks.add(check_name)
return checks
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
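A quick illustration of the service-name normalization above: "lambda" clashes with the Python keyword, so its checks live under the "awslambda" module.

service_list = ["lambda", "s3"]
service_list = [
    "awslambda" if service == "lambda" else service for service in service_list
]
assert service_list == ["awslambda", "s3"]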

View File

@@ -1,5 +1,6 @@
from colorama import Fore, Style
from prowler.config.config import valid_severities
from prowler.lib.check.check import (
parse_checks_from_compliance_framework,
parse_checks_from_file,
@@ -10,7 +11,6 @@ from prowler.lib.logger import logger
# Generate the list of checks to execute
# PENDING Test for this function
def load_checks_to_execute(
bulk_checks_metadata: dict,
bulk_compliance_frameworks: dict,
@@ -22,84 +22,110 @@ def load_checks_to_execute(
categories: set,
provider: str,
) -> set:
"""Generate the list of checks to execute based on the cloud provider and input arguments specified"""
checks_to_execute = set()
"""Generate the list of checks to execute based on the cloud provider and the input arguments given"""
try:
# Local subsets
checks_to_execute = set()
check_aliases = {}
check_severities = {key: [] for key in valid_severities}
check_categories = {}
# Handle if there are checks passed using -c/--checks
if check_list:
for check_name in check_list:
checks_to_execute.add(check_name)
# First, loop over the bulk_checks_metadata to extract the needed subsets
for check, metadata in bulk_checks_metadata.items():
# Aliases
for alias in metadata.CheckAliases:
if alias not in check_aliases:
check_aliases[alias] = []
check_aliases[alias].append(check)
# Handle if there are some severities passed using --severity
elif severities:
for check in bulk_checks_metadata:
# Check check's severity
if bulk_checks_metadata[check].Severity in severities:
checks_to_execute.add(check)
if service_list:
checks_to_execute = (
recover_checks_from_service(service_list, provider) & checks_to_execute
)
# Severities
if metadata.Severity:
check_severities[metadata.Severity].append(check)
# Handle if there are checks passed using -C/--checks-file
elif checks_file:
try:
# Categories
for category in metadata.Categories:
if category not in check_categories:
check_categories[category] = []
check_categories[category].append(check)
# Handle if there are checks passed using -c/--checks
if check_list:
for check_name in check_list:
checks_to_execute.add(check_name)
# Handle if there are some severities passed using --severity
elif severities:
for severity in severities:
checks_to_execute.update(check_severities[severity])
if service_list:
checks_to_execute = (
recover_checks_from_service(service_list, provider)
& checks_to_execute
)
# Handle if there are checks passed using -C/--checks-file
elif checks_file:
checks_to_execute = parse_checks_from_file(checks_file, provider)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
# Handle if there are services passed using -s/--services
elif service_list:
checks_to_execute = recover_checks_from_service(service_list, provider)
# Handle if there are services passed using -s/--services
elif service_list:
checks_to_execute = recover_checks_from_service(service_list, provider)
# Handle if there are compliance frameworks passed using --compliance
elif compliance_frameworks:
try:
# Handle if there are compliance frameworks passed using --compliance
elif compliance_frameworks:
checks_to_execute = parse_checks_from_compliance_framework(
compliance_frameworks, bulk_compliance_frameworks
)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
# Handle if there are categories passed using --categories
elif categories:
for cat in categories:
for check in bulk_checks_metadata:
# Check check's categories
if cat in bulk_checks_metadata[check].Categories:
checks_to_execute.add(check)
# Handle if there are categories passed using --categories
elif categories:
for category in categories:
checks_to_execute.update(check_categories[category])
# If there are no checks passed as argument
else:
try:
# If there are no checks passed as argument
else:
# Get all check modules to run with the specific provider
checks = recover_checks_from_provider(provider)
except Exception as e:
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
else:
for check_info in checks:
# Recover check name from import path (last part)
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
check_name = check_info[0]
checks_to_execute.add(check_name)
# Get Check Aliases mapping
check_aliases = {}
for check, metadata in bulk_checks_metadata.items():
for alias in metadata.CheckAliases:
check_aliases[alias] = check
# Check Aliases
checks_to_execute = update_checks_to_execute_with_aliases(
checks_to_execute, check_aliases
)
return checks_to_execute
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
def update_checks_to_execute_with_aliases(
checks_to_execute: set, check_aliases: dict
) -> set:
"""update_checks_to_execute_with_aliases returns the checks_to_execute updated using the check aliases."""
# Verify if any input check is an alias of another check
for input_check in checks_to_execute:
if (
input_check in check_aliases
and check_aliases[input_check] not in checks_to_execute
):
# Remove input check name and add the real one
checks_to_execute.remove(input_check)
checks_to_execute.add(check_aliases[input_check])
print(
f"\nUsing alias {Fore.YELLOW}{input_check}{Style.RESET_ALL} for check {Fore.YELLOW}{check_aliases[input_check]}{Style.RESET_ALL}...\n"
)
return checks_to_execute
try:
new_checks_to_execute = checks_to_execute.copy()
for input_check in checks_to_execute:
if input_check in check_aliases:
# Remove input check name and add the real one
new_checks_to_execute.remove(input_check)
for alias in check_aliases[input_check]:
if alias not in new_checks_to_execute:
new_checks_to_execute.add(alias)
print(
f"\nUsing alias {Fore.YELLOW}{input_check}{Style.RESET_ALL} for check {Fore.YELLOW}{alias}{Style.RESET_ALL}..."
)
return new_checks_to_execute
except Exception as error:
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
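A minimal standalone sketch of the alias resolution above, assuming the alias map now carries a list of real check names per alias (this apigateway alias appears later in this commit).

check_aliases = {
    "apigateway_authorizers_enabled": ["apigateway_restapi_authorizers_enabled"]
}
checks_to_execute = {"apigateway_authorizers_enabled"}

new_checks_to_execute = checks_to_execute.copy()
for input_check in checks_to_execute:
    if input_check in check_aliases:
        new_checks_to_execute.remove(input_check)
        for alias in check_aliases[input_check]:
            new_checks_to_execute.add(alias)
print(new_checks_to_execute)  # {'apigateway_restapi_authorizers_enabled'}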

View File

@@ -52,12 +52,12 @@ class ENS_Requirement_Attribute(BaseModel):
class Generic_Compliance_Requirement_Attribute(BaseModel):
"""Generic Compliance Requirement Attribute"""
ItemId: str
ItemId: Optional[str]
Section: Optional[str]
SubSection: Optional[str]
SubGroup: Optional[str]
Service: str
Soc_Type: Optional[str]
Service: Optional[str]
Type: Optional[str]
class CIS_Requirement_Attribute_Profile(str):
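A small sketch of why the switch to Optional matters here, assuming pydantic v1 (where Optional[str] fields default to None): compliance frameworks that omit ItemId, Service or Type still validate.

from typing import Optional
from pydantic import BaseModel

class Generic_Attribute_Sketch(BaseModel):
    ItemId: Optional[str]
    Service: Optional[str]
    Type: Optional[str]

# All three fields fall back to None instead of raising a validation error.
print(Generic_Attribute_Sketch())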

View File

@@ -3,9 +3,9 @@ import sys
import yaml
from jsonschema import validate
from prowler.config.config import valid_severities
from prowler.lib.logger import logger
valid_severities = ["critical", "high", "medium", "low", "informational"]
custom_checks_metadata_schema = {
"type": "object",
"properties": {

View File

@@ -8,6 +8,7 @@ from prowler.config.config import (
default_config_file_path,
default_output_directory,
finding_statuses,
valid_severities,
)
from prowler.providers.common.arguments import (
init_providers_parser,
@@ -225,8 +226,8 @@ Detailed documentation at https://docs.prowler.cloud
common_checks_parser.add_argument(
"--severity",
nargs="+",
help="List of severities to be executed [informational, low, medium, high, critical]",
choices=["informational", "low", "medium", "high", "critical"],
help=f"List of severities to be executed {valid_severities}",
choices=valid_severities,
)
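A self-contained sketch of the pattern above: sourcing both the help text and the choices from the single valid_severities list keeps the CLI flag and the config in sync.

import argparse

valid_severities = ["critical", "high", "medium", "low", "informational"]
parser = argparse.ArgumentParser()
parser.add_argument(
    "--severity",
    nargs="+",
    help=f"List of severities to be executed {valid_severities}",
    choices=valid_severities,
)
print(parser.parse_args(["--severity", "high", "critical"]).severity)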
group.add_argument(
"--compliance",

View File

@@ -187,7 +187,8 @@ def display_compliance_table(
"Bajo": 0,
}
if finding.status == "FAIL":
fail_count += 1
if attribute.Tipo != "recomendacion":
fail_count += 1
marcos[marco_categoria][
"Estado"
] = f"{Fore.RED}NO CUMPLE{Style.RESET_ALL}"

View File

@@ -42,7 +42,7 @@ def write_compliance_row_generic(
Requirements_Attributes_SubSection=attribute.SubSection,
Requirements_Attributes_SubGroup=attribute.SubGroup,
Requirements_Attributes_Service=attribute.Service,
Requirements_Attributes_Soc_Type=attribute.Soc_Type,
Requirements_Attributes_Type=attribute.Type,
Status=finding.status,
StatusExtended=finding.status_extended,
ResourceId=finding.resource_id,

View File

@@ -408,7 +408,7 @@ def get_azure_html_assessment_summary(audit_info):
if isinstance(audit_info, Azure_Audit_Info):
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
intermediate = key + " : " + value
intermediate = f"{key} : {value}"
printed_subscriptions.append(intermediate)
# check if identity is str(coming from SP) or dict(coming from browser or)

View File

@@ -51,9 +51,9 @@ def fill_json_asff(finding_output, audit_info, finding, output_options):
finding_output.GeneratorId = "prowler-" + finding.check_metadata.CheckID
finding_output.AwsAccountId = audit_info.audited_account
finding_output.Types = finding.check_metadata.CheckType
finding_output.FirstObservedAt = (
finding_output.UpdatedAt
) = finding_output.CreatedAt = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
finding_output.FirstObservedAt = finding_output.UpdatedAt = (
finding_output.CreatedAt
) = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
finding_output.Severity = Severity(
Label=finding.check_metadata.Severity.upper()
)

View File

@@ -64,9 +64,9 @@ def generate_provider_output_csv(
data["resource_name"] = finding.resource_name
data["subscription"] = finding.subscription
data["tenant_domain"] = audit_info.identity.domain
data[
"finding_unique_id"
] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
data["finding_unique_id"] = (
f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.subscription}-{finding.resource_id}"
)
data["compliance"] = unroll_dict(
get_check_compliance(finding, provider, output_options)
)
@@ -77,9 +77,9 @@ def generate_provider_output_csv(
data["resource_name"] = finding.resource_name
data["project_id"] = finding.project_id
data["location"] = finding.location.lower()
data[
"finding_unique_id"
] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
data["finding_unique_id"] = (
f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.project_id}-{finding.resource_id}"
)
data["compliance"] = unroll_dict(
get_check_compliance(finding, provider, output_options)
)
@@ -89,9 +89,9 @@ def generate_provider_output_csv(
data["resource_id"] = finding.resource_id
data["resource_name"] = finding.resource_name
data["namespace"] = finding.namespace
data[
"finding_unique_id"
] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.namespace}-{finding.resource_id}"
data["finding_unique_id"] = (
f"prowler-{provider}-{finding.check_metadata.CheckID}-{finding.namespace}-{finding.resource_id}"
)
data["compliance"] = unroll_dict(
get_check_compliance(finding, provider, output_options)
)
@@ -103,9 +103,9 @@ def generate_provider_output_csv(
data["region"] = finding.region
data["resource_id"] = finding.resource_id
data["resource_arn"] = finding.resource_arn
data[
"finding_unique_id"
] = f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
data["finding_unique_id"] = (
f"prowler-{provider}-{finding.check_metadata.CheckID}-{audit_info.audited_account}-{finding.region}-{finding.resource_id}"
)
data["compliance"] = unroll_dict(
get_check_compliance(finding, provider, output_options)
)
@@ -658,8 +658,8 @@ class Check_Output_CSV_Generic_Compliance(BaseModel):
Requirements_Attributes_Section: Optional[str]
Requirements_Attributes_SubSection: Optional[str]
Requirements_Attributes_SubGroup: Optional[str]
Requirements_Attributes_Service: str
Requirements_Attributes_Soc_Type: Optional[str]
Requirements_Attributes_Service: Optional[str]
Requirements_Attributes_Type: Optional[str]
Status: str
StatusExtended: str
ResourceId: str

View File

@@ -13,7 +13,7 @@ def send_slack_message(token, channel, stats, provider, audit_info):
response = client.chat_postMessage(
username="Prowler",
icon_url=square_logo_img,
channel="#" + channel,
channel=f"#{channel}",
blocks=create_message_blocks(identity, logo, stats),
)
return response
@@ -35,7 +35,7 @@ def create_message_identity(provider, audit_info):
elif provider == "azure":
printed_subscriptions = []
for key, value in audit_info.identity.subscriptions.items():
intermediate = "- *" + key + ": " + value + "*\n"
intermediate = f"- *{key}: {value}*\n"
printed_subscriptions.append(intermediate)
identity = f"Azure Subscriptions:\n{''.join(printed_subscriptions)}"
logo = azure_logo
@@ -66,14 +66,14 @@ def create_message_blocks(identity, logo, stats):
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass']/stats['findings_count']*100,2)}%)\n",
"text": f"\n:white_check_mark: *{stats['total_pass']} Passed findings* ({round(stats['total_pass'] / stats['findings_count'] * 100 , 2)}%)\n",
},
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail']/stats['findings_count']*100,2)}%)\n ",
"text": f"\n:x: *{stats['total_fail']} Failed findings* ({round(stats['total_fail'] / stats['findings_count'] * 100 , 2)}%)\n ",
},
},
{

View File

@@ -99,8 +99,8 @@ def display_summary_table(
print("\nOverview Results:")
overview_table = [
[
f"{Fore.RED}{round(fail_count/len(findings)*100, 2)}% ({fail_count}) Failed{Style.RESET_ALL}",
f"{Fore.GREEN}{round(pass_count/len(findings)*100, 2)}% ({pass_count}) Passed{Style.RESET_ALL}",
f"{Fore.RED}{round(fail_count / len(findings) * 100, 2)}% ({fail_count}) Failed{Style.RESET_ALL}",
f"{Fore.GREEN}{round(pass_count / len(findings) * 100, 2)}% ({pass_count}) Passed{Style.RESET_ALL}",
]
]
print(tabulate(overview_table, tablefmt="rounded_grid"))

View File

@@ -10,7 +10,10 @@ from prowler.config.config import aws_services_json_file
from prowler.lib.check.check import list_modules, recover_checks_from_service
from prowler.lib.logger import logger
from prowler.lib.utils.utils import open_file, parse_json_file
from prowler.providers.aws.config import AWS_STS_GLOBAL_ENDPOINT_REGION
from prowler.providers.aws.config import (
AWS_STS_GLOBAL_ENDPOINT_REGION,
ROLE_SESSION_NAME,
)
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info, AWSAssumeRole
from prowler.providers.aws.lib.credentials.credentials import create_sts_session
@@ -113,9 +116,15 @@ def assume_role(
sts_endpoint_region: str = None,
) -> dict:
try:
role_session_name = (
assumed_role_info.role_session_name
if assumed_role_info.role_session_name
else ROLE_SESSION_NAME
)
assume_role_arguments = {
"RoleArn": assumed_role_info.role_arn,
"RoleSessionName": "ProwlerAsessmentSession",
"RoleSessionName": role_session_name,
"DurationSeconds": assumed_role_info.session_duration,
}
@@ -152,23 +161,31 @@ def input_role_mfa_token_and_code() -> tuple[str]:
def generate_regional_clients(
service: str, audit_info: AWS_Audit_Info, global_service: bool = False
service: str,
audit_info: AWS_Audit_Info,
) -> dict:
"""generate_regional_clients returns a dict with the following format for the given service:
Example:
{"eu-west-1": boto3_service_client}
"""
try:
regional_clients = {}
service_regions = get_available_aws_service_regions(service, audit_info)
# Check if it is global service to gather only one region
if global_service:
if service_regions:
if audit_info.profile_region in service_regions:
service_regions = [audit_info.profile_region]
service_regions = service_regions[:1]
for region in service_regions:
# Get the regions enabled for the account and get the intersection with the service available regions
if audit_info.enabled_regions:
enabled_regions = service_regions.intersection(audit_info.enabled_regions)
else:
enabled_regions = service_regions
for region in enabled_regions:
regional_client = audit_info.audit_session.client(
service, region_name=region, config=audit_info.session_config
)
regional_client.region = region
regional_clients[region] = regional_client
return regional_clients
except Exception as error:
logger.error(
@@ -176,6 +193,26 @@ def generate_regional_clients(
)
def get_aws_enabled_regions(audit_info: AWS_Audit_Info) -> set:
"""get_aws_enabled_regions returns a set of enabled AWS regions"""
# EC2 Client to check enabled regions
service = "ec2"
default_region = get_default_region(service, audit_info)
ec2_client = audit_info.audit_session.client(service, region_name=default_region)
enabled_regions = set()
try:
# With AllRegions=False we only get the enabled regions for the account
for region in ec2_client.describe_regions(AllRegions=False).get("Regions", []):
enabled_regions.add(region.get("RegionName"))
except Exception as error:
logger.warning(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return enabled_regions
def get_aws_available_regions():
try:
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
@@ -216,6 +253,8 @@ def get_checks_from_input_arn(audit_resources: list, provider: str) -> set:
service = "efs"
elif service == "logs":
service = "cloudwatch"
elif service == "cognito":
service = "cognito-idp"
# Check if Prowler has checks in service
try:
list_modules(provider, service)
@@ -267,17 +306,18 @@ def get_regions_from_audit_resources(audit_resources: list) -> set:
return audited_regions
def get_available_aws_service_regions(service: str, audit_info: AWS_Audit_Info) -> list:
def get_available_aws_service_regions(service: str, audit_info: AWS_Audit_Info) -> set:
# Get json locally
actual_directory = pathlib.Path(os.path.dirname(os.path.realpath(__file__)))
with open_file(f"{actual_directory}/{aws_services_json_file}") as f:
data = parse_json_file(f)
# Check if it is a subservice
json_regions = data["services"][service]["regions"][audit_info.audited_partition]
if audit_info.audited_regions: # Check for input aws audit_info.audited_regions
regions = list(
set(json_regions).intersection(audit_info.audited_regions)
) # Get common regions between input and json
json_regions = set(
data["services"][service]["regions"][audit_info.audited_partition]
)
# Check for input aws audit_info.audited_regions
if audit_info.audited_regions:
# Get common regions between input and json
regions = json_regions.intersection(audit_info.audited_regions)
else: # Get all regions from json of the service and partition
regions = json_regions
return regions
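Worked with made-up data, the set-based region filtering above reduces to a single intersection.

json_regions = {"eu-west-1", "eu-central-1", "us-east-1"}
audited_regions = ["eu-west-1", "us-east-2"]  # hypothetical -f/--region input
regions = json_regions.intersection(audited_regions)
print(regions)  # {'eu-west-1'}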

View File

@@ -360,7 +360,7 @@ This report is being generated using credentials below:
AWS-CLI Profile: {Fore.YELLOW}[{profile}]{Style.RESET_ALL} AWS Filter Region: {Fore.YELLOW}[{regions}]{Style.RESET_ALL}
AWS Account: {Fore.YELLOW}[{self.identity.account}]{Style.RESET_ALL} UserId: {Fore.YELLOW}[{self.identity.user_id}]{Style.RESET_ALL}
Caller Identity ARN: {Fore.YELLOW}[{ self.identity.identity_arn}]{Style.RESET_ALL}
Caller Identity ARN: {Fore.YELLOW}[{self.identity.identity_arn}]{Style.RESET_ALL}
"""
# If -A is set, print Assumed Role ARN
if self.assumed_role.assumed_role_info.role_arn is not None:
@@ -596,9 +596,9 @@ Caller Identity ARN: {Fore.YELLOW}[{ self.identity.identity_arn}]{Style.RESET_AL
# Set the info to assume the role from the partition, account and role name
if self.assumed_role.assumed_role_info.external_id:
assume_role_arguments[
"ExternalId"
] = self.assumed_role.assumed_role_info.external_id
assume_role_arguments["ExternalId"] = (
self.assumed_role.assumed_role_info.external_id
)
if self.assumed_role.assumed_role_info.mfa_enabled:
mfa_ARN, mfa_TOTP = self.__input_role_mfa_token_and_code__()

File diff suppressed because it is too large

View File

@@ -1,2 +1,3 @@
AWS_STS_GLOBAL_ENDPOINT_REGION = "us-east-1"
BOTO3_USER_AGENT_EXTRA = "APN_1826889"
ROLE_SESSION_NAME = "ProwlerAssessmentSession"

View File

@@ -1,6 +1,8 @@
from argparse import ArgumentTypeError, Namespace
from re import fullmatch, search
from prowler.providers.aws.aws_provider import get_aws_available_regions
from prowler.providers.aws.config import ROLE_SESSION_NAME
from prowler.providers.aws.lib.arn.arn import arn_type
@@ -26,6 +28,13 @@ def init_parser(self):
help="ARN of the role to be assumed",
# Pending ARN validation
)
aws_auth_subparser.add_argument(
"--role-session-name",
nargs="?",
default=ROLE_SESSION_NAME,
help="An identifier for the assumed role session. Defaults to ProwlerAssessmentSession",
type=validate_role_session_name,
)
aws_auth_subparser.add_argument(
"--mfa",
action="store_true",
@@ -78,6 +87,11 @@ def init_parser(self):
action="store_true",
help="Skip updating previous findings of Prowler in Security Hub",
)
aws_security_hub_subparser.add_argument(
"--send-sh-only-fails",
action="store_true",
help="Send only Prowler failed findings to SecurityHub",
)
# AWS Quick Inventory
aws_quick_inventory_subparser = aws_parser.add_argument_group("Quick Inventory")
aws_quick_inventory_subparser.add_argument(
@@ -93,6 +107,7 @@ def init_parser(self):
"-B",
"--output-bucket",
nargs="?",
type=validate_bucket,
default=None,
help="Custom output bucket, requires -M <mode> and it can work also with -o flag.",
)
@@ -100,6 +115,7 @@ def init_parser(self):
"-D",
"--output-bucket-no-assume",
nargs="?",
type=validate_bucket,
default=None,
help="Same as -B but do not use the assumed role credentials to put objects to the bucket, instead uses the initial credentials.",
)
@@ -173,9 +189,37 @@ def validate_arguments(arguments: Namespace) -> tuple[bool, str]:
# Handle if session_duration is not the default value or external_id is set
if (
arguments.session_duration and arguments.session_duration != 3600
) or arguments.external_id:
(arguments.session_duration and arguments.session_duration != 3600)
or arguments.external_id
or arguments.role_session_name != ROLE_SESSION_NAME
):
if not arguments.role:
return (False, "To use -I/-T options -R option is needed")
return (
False,
"To use -I/--external-id, -T/--session-duration or --role-session-name options -R/--role option is needed",
)
return (True, "")
def validate_bucket(bucket_name):
"""validate_bucket validates that the input bucket_name is valid"""
if search("(?!(^xn--|.+-s3alias$))^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$", bucket_name):
return bucket_name
else:
raise ArgumentTypeError(
"Bucket name must be valid (https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html)"
)
def validate_role_session_name(session_name):
"""
validates that the role session name is valid
Documentation: https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
"""
if fullmatch("[\w+=,.@-]{2,64}", session_name):
return session_name
else:
raise ArgumentTypeError(
"Role Session Name must be 2-64 characters long and consist only of upper- and lower-case alphanumeric characters with no spaces. You can also include underscores or any of the following characters: =,.@-"
)
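A quick sketch of the session-name rule enforced above: 2-64 characters drawn from [\w+=,.@-].

from re import fullmatch

for name in ("ProwlerAssessmentSession", "bad name", "x"):
    # True / False (contains a space) / False (too short)
    print(name, bool(fullmatch(r"[\w+=,.@-]{2,64}", name)))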

View File

@@ -30,6 +30,7 @@ current_audit_info = AWS_Audit_Info(
session_duration=None,
external_id=None,
mfa_enabled=None,
role_session_name=None,
),
mfa_enabled=None,
audit_resources=None,
@@ -38,4 +39,5 @@ current_audit_info = AWS_Audit_Info(
audit_metadata=None,
audit_config=None,
ignore_unused_services=False,
enabled_regions=set(),
)

View File

@@ -1,4 +1,4 @@
from dataclasses import dataclass
from dataclasses import dataclass, field
from datetime import datetime
from typing import Any, Optional
@@ -20,6 +20,7 @@ class AWSAssumeRole:
session_duration: int
external_id: str
mfa_enabled: bool
role_session_name: str
@dataclass
@@ -53,3 +54,4 @@ class AWS_Audit_Info:
audit_metadata: Optional[Any]
audit_config: Optional[dict] = None
ignore_unused_services: bool = False
enabled_regions: set = field(default_factory=set)

View File

@@ -118,7 +118,7 @@ def parse_mutelist_file(audit_info, mutelist_file):
def mutelist_findings(
mutelist: dict,
audited_account: str,
check_findings: [Any],
check_findings: list[Any],
):
# Check if finding is muted
for finding in check_findings:
@@ -143,28 +143,23 @@ def is_muted(
finding_tags,
):
try:
muted_checks = {}
# By default is not muted
is_finding_muted = False
# First set account key from mutelist dict
if audited_account in mutelist["Accounts"]:
muted_checks = mutelist["Accounts"][audited_account]["Checks"]
# If there is a *, it affects to all accounts
# This cannot be elif since in the case of * and single accounts we
# want to merge muted checks from * to the other accounts check list
if "*" in mutelist["Accounts"]:
checks_multi_account = mutelist["Accounts"]["*"]["Checks"]
muted_checks.update(checks_multi_account)
# Test if it is muted
if is_muted_in_check(
muted_checks,
audited_account,
check,
finding_region,
finding_resource,
finding_tags,
):
is_finding_muted = True
# We always check all the accounts present in the mutelist
# if one mutes the finding we set the finding as muted
for account in mutelist["Accounts"]:
if account == audited_account or account == "*":
if is_muted_in_check(
mutelist["Accounts"][account]["Checks"],
audited_account,
check,
finding_region,
finding_resource,
finding_tags,
):
is_finding_muted = True
break
return is_finding_muted
except Exception as error:
@@ -204,7 +199,10 @@ def is_muted_in_check(
muted_regions = muted_check_info.get("Regions")
muted_resources = muted_check_info.get("Resources")
muted_tags = muted_check_info.get("Tags")
muted_tags = muted_check_info.get("Tags", "*")
# We need to reset muted_tags to "*" if it is None, "" or [], so the falsy check helps
if not muted_tags:
muted_tags = "*"
# If there is a *, it affects to all checks
if (
"*" == muted_check
@@ -221,13 +219,15 @@ def is_muted_in_check(
# For a finding to be muted requires the following set to True:
# - muted_in_check -> True
# - muted_in_region -> True
# - muted_in_tags -> True or muted_in_resource -> True
# - muted_in_tags -> True
# - muted_in_resource -> True
# - excepted -> False
if (
muted_in_check
and muted_in_region
and (muted_in_tags or muted_in_resource)
and muted_in_tags
and muted_in_resource
):
is_check_muted = True
@@ -305,10 +305,17 @@ def is_excepted(
is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags)
if (
is_account_excepted
and is_region_excepted
and is_resource_excepted
and is_tag_excepted
not is_account_excepted
and not is_region_excepted
and not is_resource_excepted
and not is_tag_excepted
):
excepted = False
elif (
(is_account_excepted or not excepted_accounts)
and (is_region_excepted or not excepted_regions)
and (is_resource_excepted or not excepted_resources)
and (is_tag_excepted or not excepted_tags)
):
excepted = True
return excepted
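To make the semantic change above concrete, a tiny sketch comparing the old OR condition with the new rule where every dimension must match.

muted_in_check = muted_in_region = True
muted_in_tags, muted_in_resource = True, False

old_rule = muted_in_check and muted_in_region and (muted_in_tags or muted_in_resource)
new_rule = muted_in_check and muted_in_region and muted_in_tags and muted_in_resource
print(old_rule, new_rule)  # True False -> this finding is no longer muted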

View File

@@ -1,40 +1,61 @@
import sys
from boto3 import client
from boto3 import client, session
from prowler.lib.logger import logger
from prowler.providers.aws.lib.audit_info.models import AWSOrganizationsInfo
def get_organizations_metadata(
metadata_account: str, assumed_credentials: dict
) -> AWSOrganizationsInfo:
aws_account_id: str,
assumed_credentials: dict = None,
session: session = None,
) -> tuple[dict, dict]:
try:
organizations_client = client(
"organizations",
aws_access_key_id=assumed_credentials["Credentials"]["AccessKeyId"],
aws_secret_access_key=assumed_credentials["Credentials"]["SecretAccessKey"],
aws_session_token=assumed_credentials["Credentials"]["SessionToken"],
)
if assumed_credentials:
organizations_client = client(
"organizations",
aws_access_key_id=assumed_credentials["Credentials"]["AccessKeyId"],
aws_secret_access_key=assumed_credentials["Credentials"][
"SecretAccessKey"
],
aws_session_token=assumed_credentials["Credentials"]["SessionToken"],
)
if session:
organizations_client = session.client("organizations")
else:
organizations_client = client("organizations")
organizations_metadata = organizations_client.describe_account(
AccountId=metadata_account
AccountId=aws_account_id
)
list_tags_for_resource = organizations_client.list_tags_for_resource(
ResourceId=metadata_account
ResourceId=aws_account_id
)
return organizations_metadata, list_tags_for_resource
except Exception as error:
logger.critical(f"{error.__class__.__name__} -- {error}")
sys.exit(1)
else:
logger.warning(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
return {}, {}
def parse_organizations_metadata(metadata: dict, tags: dict) -> AWSOrganizationsInfo:
try:
# Convert Tags dictionary to String
account_details_tags = ""
for tag in list_tags_for_resource["Tags"]:
for tag in tags.get("Tags", {}):
account_details_tags += tag["Key"] + ":" + tag["Value"] + ","
account_details = metadata.get("Account", {})
organizations_info = AWSOrganizationsInfo(
account_details_email=organizations_metadata["Account"]["Email"],
account_details_name=organizations_metadata["Account"]["Name"],
account_details_arn=organizations_metadata["Account"]["Arn"],
account_details_org=organizations_metadata["Account"]["Arn"].split("/")[1],
account_details_tags=account_details_tags,
account_details_email=account_details.get("Email", ""),
account_details_name=account_details.get("Name", ""),
account_details_arn=account_details.get("Arn", ""),
account_details_org=account_details.get("Arn", "").split("/")[1],
account_details_tags=account_details_tags.rstrip(","),
)
return organizations_info
except Exception as error:
logger.warning(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
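A standalone sketch of the parsing above, fed with a made-up describe_account / list_tags_for_resource response.

metadata = {
    "Account": {
        "Email": "prod@example.com",
        "Name": "prod",
        "Arn": "arn:aws:organizations::111111111111:account/o-abcd1234/222222222222",
    }
}
tags = {"Tags": [{"Key": "env", "Value": "prod"}, {"Key": "team", "Value": "sec"}]}

account_details_tags = ""
for tag in tags.get("Tags", {}):
    account_details_tags += tag["Key"] + ":" + tag["Value"] + ","
account_details = metadata.get("Account", {})
print(account_details_tags.rstrip(","))  # env:prod,team:sec
print(account_details.get("Arn", "").split("/")[1])  # o-abcd1234 (the org id)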

View File

@@ -1,8 +1,11 @@
def is_account_only_allowed_in_condition(
condition_statement: dict, source_account: str
def is_condition_block_restrictive(
condition_statement: dict, source_account: str, is_cross_account_allowed=False
):
"""
is_account_only_allowed_in_condition parses the IAM Condition policy block and returns True if the source_account passed as argument is within, False if not.
is_condition_block_restrictive parses the IAM Condition policy block and, by default, returns True if the source_account passed as argument is within it, False if not.
If the argument is_cross_account_allowed is True it tests if the Condition block includes any of the allowlisted operators, returning True if it does, False if not.
@param condition_statement: dict with an IAM Condition block, e.g.:
{
@@ -54,13 +57,19 @@ def is_account_only_allowed_in_condition(
condition_statement[condition_operator][value],
list,
):
# if there is an arn/account without the source account -> we do not consider it safe
# here by default we assume it is true and look for false entries
is_condition_key_restrictive = True
for item in condition_statement[condition_operator][value]:
if source_account not in item:
is_condition_key_restrictive = False
break
# if cross-account is not allowed, check each condition block looking for accounts
# different from the default one
if not is_cross_account_allowed:
# if there is an arn/account without the source account -> we do not consider it safe
# here by default we assume it is true and look for false entries
for item in condition_statement[condition_operator][value]:
if source_account not in item:
is_condition_key_restrictive = False
break
if is_condition_key_restrictive:
is_condition_valid = True
if is_condition_key_restrictive:
is_condition_valid = True
@@ -70,10 +79,13 @@ def is_account_only_allowed_in_condition(
condition_statement[condition_operator][value],
str,
):
if (
source_account
in condition_statement[condition_operator][value]
):
if is_cross_account_allowed:
is_condition_valid = True
else:
if (
source_account
in condition_statement[condition_operator][value]
):
is_condition_valid = True
return is_condition_valid
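For reference, a hedged example of a Condition block this function evaluates; with is_cross_account_allowed left at False, every listed value must contain the source account for the block to count as restrictive.

source_account = "111122223333"
condition_statement = {"StringEquals": {"aws:SourceAccount": ["111122223333"]}}
# Restrictive: the only allowed account is the audited one. Adding any other
# account id to the list makes the block non-restrictive unless
# is_cross_account_allowed=True.
print(source_account in condition_statement["StringEquals"]["aws:SourceAccount"][0])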

View File

@@ -211,9 +211,13 @@ def create_inventory_table(resources: list, resources_in_region: dict) -> dict:
def create_output(resources: list, audit_info: AWS_Audit_Info, args):
json_output = []
output_file = (
f"prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"
)
# Check if a custom output filename was given; if not, set the default
if not hasattr(args, "output_filename") or args.output_filename is None:
output_file = (
f"prowler-inventory-{audit_info.audited_account}-{output_file_timestamp}"
)
else:
output_file = args.output_filename
for item in sorted(resources, key=lambda d: d["arn"]):
resource = {}
@@ -275,8 +279,8 @@ def create_output(resources: list, audit_info: AWS_Audit_Info, args):
f"\n{Fore.YELLOW}WARNING: Only resources that have or have had tags will appear (except for IAM and S3).\nSee more in https://docs.prowler.cloud/en/latest/tutorials/quick-inventory/#objections{Style.RESET_ALL}"
)
print("\nMore details in files:")
print(f" - CSV: {args.output_directory}/{output_file+csv_file_suffix}")
print(f" - JSON: {args.output_directory}/{output_file+json_file_suffix}")
print(f" - CSV: {args.output_directory}/{output_file + csv_file_suffix}")
print(f" - JSON: {args.output_directory}/{output_file + json_file_suffix}")
# Send output to S3 if needed (-B / -D)
for mode in ["json", "csv"]:

View File

@@ -1,5 +1,3 @@
import sys
from prowler.config.config import (
csv_file_suffix,
html_file_suffix,
@@ -29,7 +27,7 @@ def send_to_s3_bucket(
else: # Compliance output mode
filename = f"{output_filename}_{output_mode}{csv_file_suffix}"
logger.info(f"Sending outputs to S3 bucket {output_bucket_name}")
logger.info(f"Sending output file {filename} to S3 bucket {output_bucket_name}")
# File location
file_name = output_directory + "/" + filename
@@ -41,10 +39,9 @@ def send_to_s3_bucket(
s3_client.upload_file(file_name, output_bucket_name, object_name)
except Exception as error:
logger.critical(
logger.error(
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
)
sys.exit(1)
def get_s3_object_path(output_directory: str) -> str:

View File

@@ -1,4 +1,5 @@
from boto3 import session
from botocore.client import ClientError
from prowler.config.config import timestamp_utc
from prowler.lib.logger import logger
@@ -11,7 +12,7 @@ SECURITY_HUB_MAX_BATCH = 100
def prepare_security_hub_findings(
findings: [], audit_info: AWS_Audit_Info, output_options, enabled_regions: []
findings: list, audit_info: AWS_Audit_Info, output_options, enabled_regions: list
) -> dict:
security_hub_findings_per_region = {}
@@ -29,7 +30,11 @@ def prepare_security_hub_findings(
continue
# Handle status filters, if any
if not output_options.status or finding.status in output_options.status:
if (
not output_options.status
or finding.status in output_options.status
or output_options.send_sh_only_fails
):
continue
# Get the finding region
@@ -70,15 +75,32 @@ def verify_security_hub_integration_enabled_per_region(
if security_hub_prowler_integration_arn not in str(
security_hub_client.list_enabled_products_for_import()
):
logger.error(
logger.warning(
f"Security Hub is enabled in {region} but Prowler integration does not accept findings. More info: https://docs.prowler.cloud/en/latest/tutorials/aws/securityhub/"
)
else:
prowler_integration_enabled = True
# Handle all the permissions / configuration errors
except ClientError as client_error:
# Check if Account is subscribed to Security Hub
error_code = client_error.response["Error"]["Code"]
error_message = client_error.response["Error"]["Message"]
if (
error_code == "InvalidAccessException"
and f"Account {aws_account_number} is not subscribed to AWS Security Hub in region {region}"
in error_message
):
logger.warning(
f"{client_error.__class__.__name__} -- [{client_error.__traceback__.tb_lineno}]: {client_error}"
)
else:
logger.error(
f"{client_error.__class__.__name__} -- [{client_error.__traceback__.tb_lineno}]: {client_error}"
)
except Exception as error:
logger.error(
f"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]:{error} in region {region}"
f"{error.__class__.__name__} -- [{error.__traceback__.tb_lineno}]: {error}"
)
finally:
@@ -167,7 +189,7 @@ def resolve_security_hub_previous_findings(
def __send_findings_to_security_hub__(
findings: [dict], region: str, security_hub_client
findings: list[dict], region: str, security_hub_client
):
"""Private function send_findings_to_security_hub chunks the findings in groups of 100 findings and send them to AWS Security Hub. It returns the number of sent findings."""
success_count = 0
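A standalone sketch of the 100-finding batching the docstring describes; batch_import_findings is the real Security Hub API call, commented out here so the snippet runs without credentials.

SECURITY_HUB_MAX_BATCH = 100
findings = [{"Id": str(i)} for i in range(250)]  # made-up findings

for i in range(0, len(findings), SECURITY_HUB_MAX_BATCH):
    batch = findings[i : i + SECURITY_HUB_MAX_BATCH]
    # security_hub_client.batch_import_findings(Findings=batch)
    print(len(batch))  # 100, 100, 50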

View File

@@ -1,13 +1,23 @@
import threading
from concurrent.futures import ThreadPoolExecutor, as_completed
from prowler.lib.logger import logger
from prowler.providers.aws.aws_provider_new import AwsProvider
# TODO: review the following code
# from prowler.providers.aws.aws_provider import (
# generate_regional_clients,
# get_default_region,
# )
MAX_WORKERS = 10
class AWSService:
"""The AWSService class offers a parent class for each AWS Service to generate:
- AWS Regional Clients
- Shared information like the account ID and ARN, the AWS partition and the checks audited
- AWS Session
- Thread pool for the __threading_call__
- Also handles if the AWS Service is Global
"""
@@ -33,6 +43,8 @@ class AWSService:
self.regional_clients = provider.generate_regional_clients(
self.service, global_service
)
# TODO: review the following code
# self.regional_clients = generate_regional_clients(self.service, audit_info)
# Get a single region and client if the service needs it (e.g. AWS Global Service)
# We cannot include this within an else because some services needs both the regional_clients
@@ -40,14 +52,40 @@ class AWSService:
self.region = provider.get_default_region(self.service)
self.client = self.session.client(self.service, self.region)
# Thread pool for __threading_call__
self.thread_pool = ThreadPoolExecutor(max_workers=MAX_WORKERS)
def __get_session__(self):
return self.session
def __threading_call__(self, call):
threads = []
for regional_client in self.regional_clients.values():
threads.append(threading.Thread(target=call, args=(regional_client,)))
for t in threads:
t.start()
for t in threads:
t.join()
def __threading_call__(self, call, iterator=None):
# Use the provided iterator, or default to self.regional_clients
items = iterator if iterator is not None else self.regional_clients.values()
# Determine the total count for logging
item_count = len(items)
# Trim leading and trailing underscores from the call's name
call_name = call.__name__.strip("_")
# Add Capitalization
call_name = " ".join([x.capitalize() for x in call_name.split("_")])
# Print a message based on the call's name, and whether it's regional or processing a list of items
if iterator is None:
logger.info(
f"{self.service.upper()} - Starting threads for '{call_name}' function across {item_count} regions..."
)
else:
logger.info(
f"{self.service.upper()} - Starting threads for '{call_name}' function to process {item_count} items..."
)
# Submit tasks to the thread pool
futures = [self.thread_pool.submit(call, item) for item in items]
# Wait for all tasks to complete
for future in as_completed(futures):
try:
future.result() # Raises exceptions from the thread, if any
except Exception:
# Handle exceptions if necessary
pass # Replace 'pass' with any additional exception handling logic. Currently handled within the called function
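Stripped of the prowler-specific pieces, the fan-out above reduces to the standard submit/as_completed pattern; the clients below are stand-in strings.

from concurrent.futures import ThreadPoolExecutor, as_completed

regional_clients = {"eu-west-1": "client-a", "us-east-1": "client-b"}

def describe_resources(regional_client):
    return f"described with {regional_client}"

with ThreadPoolExecutor(max_workers=10) as thread_pool:
    futures = [
        thread_pool.submit(describe_resources, client)
        for client in regional_clients.values()
    ]
    for future in as_completed(futures):
        print(future.result())  # re-raises here if the thread raised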

View File

@@ -85,21 +85,36 @@ class AccessAnalyzer(AWSService):
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
# TODO: We need to include ListFindingsV2
# https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/accessanalyzer/client/list_findings_v2.html
def __list_findings__(self):
logger.info("AccessAnalyzer - Listing Findings per Analyzer...")
try:
for analyzer in self.analyzers:
if analyzer.status == "ACTIVE":
regional_client = self.regional_clients[analyzer.region]
list_findings_paginator = regional_client.get_paginator(
"list_findings"
try:
if analyzer.status == "ACTIVE":
regional_client = self.regional_clients[analyzer.region]
list_findings_paginator = regional_client.get_paginator(
"list_findings"
)
for page in list_findings_paginator.paginate(
analyzerArn=analyzer.arn
):
for finding in page["findings"]:
analyzer.findings.append(Finding(id=finding["id"]))
except ClientError as error:
if error.response["Error"]["Code"] == "ValidationException":
logger.warning(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
else:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
for page in list_findings_paginator.paginate(
analyzerArn=analyzer.arn
):
for finding in page["findings"]:
analyzer.findings.append(Finding(id=finding["id"]))
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

View File

@@ -19,7 +19,11 @@ class acm_certificates_expiration_check(Check):
report.resource_tags = certificate.tags
else:
report.status = "FAIL"
report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} is about to expire in {DAYS_TO_EXPIRE_THRESHOLD} days."
if certificate.expiration_days < 0:
report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} has expired ({abs(certificate.expiration_days)} days ago)."
else:
report.status_extended = f"ACM Certificate {certificate.id} for {certificate.name} is about to expire in {certificate.expiration_days} days."
report.resource_id = certificate.id
report.resource_details = certificate.name
report.resource_arn = certificate.arn

View File

@@ -1,7 +1,7 @@
{
"Provider": "aws",
"CheckID": "apigateway_restapi_authorizers_enabled",
"CheckTitle": "Check if API Gateway has configured authorizers.",
"CheckTitle": "Check if API Gateway has configured authorizers at api or method level.",
"CheckAliases": [
"apigateway_authorizers_enabled"
],
@@ -13,7 +13,7 @@
"ResourceIdTemplate": "arn:partition:service:region:account-id:resource-id",
"Severity": "medium",
"ResourceType": "AwsApiGatewayRestApi",
"Description": "Check if API Gateway has configured authorizers.",
"Description": "Check if API Gateway has configured authorizers at api or method level.",
"Risk": "If no authorizer is enabled anyone can use the service.",
"RelatedUrl": "",
"Remediation": {

View File

@@ -13,12 +13,41 @@ class apigateway_restapi_authorizers_enabled(Check):
report.resource_id = rest_api.name
report.resource_arn = rest_api.arn
report.resource_tags = rest_api.tags
# if there are no authorizers at api level and resources without methods (default case) ->
report.status = "FAIL"
report.status_extended = f"API Gateway {rest_api.name} ID {rest_api.id} does not have an authorizer configured at api level."
if rest_api.authorizer:
report.status = "PASS"
report.status_extended = f"API Gateway {rest_api.name} ID {rest_api.id} has an authorizer configured."
report.status_extended = f"API Gateway {rest_api.name} ID {rest_api.id} has an authorizer configured at api level"
else:
report.status = "FAIL"
report.status_extended = f"API Gateway {rest_api.name} ID {rest_api.id} does not have an authorizer configured."
# we want to know if the api has no authorizers and all the resources don't have methods configured
resources_have_methods = False
all_methods_authorized = True
resource_paths_with_unauthorized_methods = []
for resource in rest_api.resources:
# if the resource has methods, test if they all have an authorizer configured
if resource.resource_methods:
resources_have_methods = True
for (
http_method,
authorization_method,
) in resource.resource_methods.items():
if authorization_method == "NONE":
all_methods_authorized = False
unauthorized_method = (
f"{resource.path} -> {http_method}"
)
resource_paths_with_unauthorized_methods.append(
unauthorized_method
)
# if there are methods in at least one resource and are all authorized
if all_methods_authorized and resources_have_methods:
report.status = "PASS"
report.status_extended = f"API Gateway {rest_api.name} ID {rest_api.id} has all methods authorized"
# if there are methods in at least one resource but some of them are not authorized -> list it
elif not all_methods_authorized:
report.status_extended = f"API Gateway {rest_api.name} ID {rest_api.id} does not have authorizers at api level and the following paths and methods are unauthorized: {'; '.join(resource_paths_with_unauthorized_methods)}."
findings.append(report)
return findings
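A tiny walk-through of the decision above with a hypothetical resource map; NONE and AWS_IAM are real API Gateway authorization types.

resources = [{"path": "/items", "resource_methods": {"GET": "NONE", "POST": "AWS_IAM"}}]
unauthorized = [
    f"{resource['path']} -> {method}"
    for resource in resources
    for method, auth in resource["resource_methods"].items()
    if auth == "NONE"
]
print(unauthorized)  # ['/items -> GET'] -> the check FAILs and lists this path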

View File

@@ -17,6 +17,7 @@ class APIGateway(AWSService):
self.__get_authorizers__()
self.__get_rest_api__()
self.__get_stages__()
self.__get_resources__()
def __get_rest_apis__(self, regional_client):
logger.info("APIGateway - Getting Rest APIs...")
@@ -53,7 +54,9 @@ class APIGateway(AWSService):
if authorizers:
rest_api.authorizer = True
except Exception as error:
logger.error(f"{error.__class__.__name__}: {error}")
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_rest_api__(self):
logger.info("APIGateway - Describing Rest API...")
@@ -64,7 +67,9 @@ class APIGateway(AWSService):
if rest_api_info["endpointConfiguration"]["types"] == ["PRIVATE"]:
rest_api.public_endpoint = False
except Exception as error:
logger.error(f"{error.__class__.__name__}: {error}")
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_stages__(self):
logger.info("APIGateway - Getting stages for Rest APIs...")
@@ -95,7 +100,46 @@ class APIGateway(AWSService):
)
)
except Exception as error:
logger.error(f"{error.__class__.__name__}: {error}")
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
def __get_resources__(self):
logger.info("APIGateway - Getting API resources...")
try:
for rest_api in self.rest_apis:
regional_client = self.regional_clients[rest_api.region]
get_resources_paginator = regional_client.get_paginator("get_resources")
for page in get_resources_paginator.paginate(restApiId=rest_api.id):
for resource in page["items"]:
id = resource["id"]
resource_methods = []
methods_auth = {}
for resource_method in resource.get(
"resourceMethods", {}
).keys():
resource_methods.append(resource_method)
for resource_method in resource_methods:
if resource_method != "OPTIONS":
method_config = regional_client.get_method(
restApiId=rest_api.id,
resourceId=id,
httpMethod=resource_method,
)
auth_type = method_config["authorizationType"]
methods_auth.update({resource_method: auth_type})
rest_api.resources.append(
PathResourceMethods(
path=resource["path"], resource_methods=methods_auth
)
)
except Exception as error:
logger.error(
f"{regional_client.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
)
class Stage(BaseModel):
@@ -107,6 +151,11 @@ class Stage(BaseModel):
tags: Optional[list] = []
class PathResourceMethods(BaseModel):
path: str
resource_methods: dict
class RestAPI(BaseModel):
id: str
arn: str
@@ -116,3 +165,4 @@ class RestAPI(BaseModel):
public_endpoint: bool = True
stages: list[Stage] = []
tags: Optional[list] = []
resources: list[PathResourceMethods] = []

View File

@@ -14,13 +14,13 @@ class apigatewayv2_api_access_logging_enabled(Check):
if stage.logging:
report.status = "PASS"
report.status_extended = f"API Gateway V2 {api.name} ID {api.id} in stage {stage.name} has access logging enabled."
report.resource_id = api.name
report.resource_id = f"{api.name}-{stage.name}"
report.resource_arn = api.arn
report.resource_tags = api.tags
else:
report.status = "FAIL"
report.status_extended = f"API Gateway V2 {api.name} ID {api.id} in stage {stage.name} has access logging disabled."
report.resource_id = api.name
report.resource_id = f"{api.name}-{stage.name}"
report.resource_arn = api.arn
report.resource_tags = api.tags
findings.append(report)

View File

@@ -54,10 +54,8 @@ class Athena(AWSService):
)
wg_configuration = wg.get("WorkGroup").get("Configuration")
self.workgroups[
workgroup.arn
].enforce_workgroup_configuration = wg_configuration.get(
"EnforceWorkGroupConfiguration", False
self.workgroups[workgroup.arn].enforce_workgroup_configuration = (
wg_configuration.get("EnforceWorkGroupConfiguration", False)
)
# We include an empty EncryptionConfiguration to handle if the workgroup does not have encryption configured
@@ -77,9 +75,9 @@ class Athena(AWSService):
encryption_configuration = EncryptionConfiguration(
encryption_option=encryption, encrypted=True
)
self.workgroups[
workgroup.arn
].encryption_configuration = encryption_configuration
self.workgroups[workgroup.arn].encryption_configuration = (
encryption_configuration
)
except Exception as error:
logger.error(
f"{self.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"

Some files were not shown because too many files have changed in this diff