Compare commits
230 Commits
3.16.2
...
elasticach
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6ceb2c1e56 | ||
|
|
c67c23dd42 | ||
|
|
8b0bae1c57 | ||
|
|
c873f95743 | ||
|
|
ddd94e6f64 | ||
|
|
722554ad3f | ||
|
|
484cf6f49d | ||
|
|
e4154ed4a2 | ||
|
|
86cb9f5838 | ||
|
|
1622d0aa35 | ||
|
|
b54ecb50bf | ||
|
|
f16857fdf1 | ||
|
|
ab109c935c | ||
|
|
8e7e456431 | ||
|
|
46114cd5f4 | ||
|
|
275e509c8d | ||
|
|
12f135669f | ||
|
|
f004df673d | ||
|
|
3ed24b5d7a | ||
|
|
77eade01a2 | ||
|
|
a2158983f7 | ||
|
|
c0d57c9498 | ||
|
|
35c8ea5e3f | ||
|
|
b36152484d | ||
|
|
768ca3f0ce | ||
|
|
bedd05c075 | ||
|
|
721f73fdbe | ||
|
|
34c2128d88 | ||
|
|
14de3acdaa | ||
|
|
899b2f8eb6 | ||
|
|
27bb05fedc | ||
|
|
e1909b8ad9 | ||
|
|
0ed7a247b6 | ||
|
|
ee46bf3809 | ||
|
|
469254094b | ||
|
|
acac3fc693 | ||
|
|
022b7ef756 | ||
|
|
69d4f55734 | ||
|
|
a0bff4b859 | ||
|
|
23df599a03 | ||
|
|
c8d74ca350 | ||
|
|
8d6ba43ad0 | ||
|
|
44ca2f7a66 | ||
|
|
ec0be1c7fe | ||
|
|
fd732db91b | ||
|
|
67f45b7767 | ||
|
|
396e6a1c36 | ||
|
|
326c46defd | ||
|
|
7a1762be51 | ||
|
|
b466b476a3 | ||
|
|
e4652d4339 | ||
|
|
f1e4cd3938 | ||
|
|
e192a98079 | ||
|
|
833dc83922 | ||
|
|
ab1751c595 | ||
|
|
fff06f971e | ||
|
|
a138d2964e | ||
|
|
e6d7965453 | ||
|
|
ab714f0fc7 | ||
|
|
465b0f6a16 | ||
|
|
bd87351ea7 | ||
|
|
d79ec44e4c | ||
|
|
a2f84a12ea | ||
|
|
6fd71356ee | ||
|
|
a0a305d9b1 | ||
|
|
6396d90fa6 | ||
|
|
e324750ec2 | ||
|
|
5d99f020fa | ||
|
|
b82e928f58 | ||
|
|
da871897e6 | ||
|
|
81778f73e4 | ||
|
|
2623728518 | ||
|
|
97f1d1b476 | ||
|
|
2f6a837bc0 | ||
|
|
5e22c2d9a5 | ||
|
|
99bd637de4 | ||
|
|
b9177e5580 | ||
|
|
fc7ec184d9 | ||
|
|
7a6ca342af | ||
|
|
30b6e5e5c6 | ||
|
|
f8476decf7 | ||
|
|
49e238577c | ||
|
|
026fff79c6 | ||
|
|
36c3870c2f | ||
|
|
54c309dbda | ||
|
|
f00dd35f93 | ||
|
|
e040efb3c8 | ||
|
|
805d50586b | ||
|
|
a289a807c5 | ||
|
|
e9117f95ee | ||
|
|
82bd4e940f | ||
|
|
ad3b0b33f2 | ||
|
|
b2b664a5b0 | ||
|
|
571f3ebe1d | ||
|
|
c7f09df4e7 | ||
|
|
8758ecae97 | ||
|
|
f13c843ba6 | ||
|
|
e95f7dd540 | ||
|
|
693329b87e | ||
|
|
f1ad521f64 | ||
|
|
82fbba6513 | ||
|
|
66fba8e4cd | ||
|
|
417131fa36 | ||
|
|
9c9d270053 | ||
|
|
f7fab165ba | ||
|
|
93bdf43c95 | ||
|
|
b3866b5b71 | ||
|
|
2308084dee | ||
|
|
6eb5496c27 | ||
|
|
c5514fdb63 | ||
|
|
c78c3058fd | ||
|
|
10d9ef9906 | ||
|
|
43426041ef | ||
|
|
125eb9ac53 | ||
|
|
681407e0a2 | ||
|
|
082f3a8fe8 | ||
|
|
397cc26b2a | ||
|
|
331ae92843 | ||
|
|
06843cd41a | ||
|
|
28b5ef9ee9 | ||
|
|
63dcc057d3 | ||
|
|
0bc16ee5ff | ||
|
|
abcc9c2c80 | ||
|
|
daf2ad38bd | ||
|
|
3dc418df39 | ||
|
|
00aaafbc12 | ||
|
|
bd49a55f3d | ||
|
|
013975b7a6 | ||
|
|
392026286a | ||
|
|
29ef974565 | ||
|
|
06c8216092 | ||
|
|
03f04d24a5 | ||
|
|
7b45ed63cc | ||
|
|
6e4dd1d69c | ||
|
|
185b4cba0c | ||
|
|
8198ea4a2c | ||
|
|
aaf3e8a5cf | ||
|
|
ecef56fa8f | ||
|
|
349ce3f2d0 | ||
|
|
e3d4741213 | ||
|
|
9d6d5f1d76 | ||
|
|
3152d67f58 | ||
|
|
cb41c8d15b | ||
|
|
06590842d6 | ||
|
|
d4c22a0ca5 | ||
|
|
c6f9936292 | ||
|
|
eaa8900758 | ||
|
|
e1e95d8879 | ||
|
|
ef3a0f4878 | ||
|
|
64cc36e7e2 | ||
|
|
1e001bb0fd | ||
|
|
6ba123a003 | ||
|
|
36d0f2c23f | ||
|
|
63412e3645 | ||
|
|
191cf276c3 | ||
|
|
45978bd0bb | ||
|
|
9666652d18 | ||
|
|
ad2716d7c9 | ||
|
|
0a7939bea3 | ||
|
|
b8c50a7b45 | ||
|
|
175e8d2b05 | ||
|
|
046069a656 | ||
|
|
f9522da48f | ||
|
|
c03f959005 | ||
|
|
522aeebe5e | ||
|
|
5312f487f9 | ||
|
|
d9b6624d65 | ||
|
|
1506da54fc | ||
|
|
245512d320 | ||
|
|
487190b379 | ||
|
|
74aaeaa95c | ||
|
|
28e8f0de2b | ||
|
|
f60b5017e2 | ||
|
|
fe80821596 | ||
|
|
628a3c4e7b | ||
|
|
3d59c34ec9 | ||
|
|
35043c2dd6 | ||
|
|
ab815123c9 | ||
|
|
69ab84efe1 | ||
|
|
77823afa54 | ||
|
|
63cd6c1290 | ||
|
|
cab32d2f94 | ||
|
|
1f4316e9dd | ||
|
|
ade762a85e | ||
|
|
bda5d62c72 | ||
|
|
2176fff8c3 | ||
|
|
87893bd54b | ||
|
|
b539a888b1 | ||
|
|
d6b2b0ca13 | ||
|
|
58ee45b702 | ||
|
|
c62d97f23a | ||
|
|
d618c5ea12 | ||
|
|
d8e27f0d33 | ||
|
|
38496ff646 | ||
|
|
da1084907e | ||
|
|
3385b630e7 | ||
|
|
fc59183045 | ||
|
|
33242079f7 | ||
|
|
086148819c | ||
|
|
5df9fd881c | ||
|
|
bd17d36e7f | ||
|
|
be55fa22fd | ||
|
|
b48b3a5e2e | ||
|
|
fc03dd37f1 | ||
|
|
d8bb384689 | ||
|
|
0b32a10bb8 | ||
|
|
f0c027f54e | ||
|
|
b0f2f34d3b | ||
|
|
3e6b76df76 | ||
|
|
6197cf792d | ||
|
|
3c4e5a14f7 | ||
|
|
effc743b6e | ||
|
|
364a945d28 | ||
|
|
07b9354d18 | ||
|
|
8b1e537ca5 | ||
|
|
6a20e850bc | ||
|
|
636892bc9a | ||
|
|
b40f32ab57 | ||
|
|
14bab496b5 | ||
|
|
3cc367e0a3 | ||
|
|
36fc575e40 | ||
|
|
24efb34d91 | ||
|
|
c08e244c95 | ||
|
|
c2f8980f1f | ||
|
|
0ef85b3dee | ||
|
|
93a2431211 | ||
|
|
1fe74937c1 | ||
|
|
6ee016e577 | ||
|
|
f7248dfb1c | ||
|
|
856afb3966 |
2
.github/CODEOWNERS
vendored
@@ -1 +1 @@
|
||||
* @prowler-cloud/prowler-oss
|
||||
* @prowler-cloud/prowler-oss @prowler-cloud/prowler-dev
|
||||
|
||||
26
.github/dependabot.yml
vendored
@@ -5,10 +5,11 @@
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "pip" # See documentation for possible values
|
||||
directory: "/" # Location of package manifests
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: master
|
||||
labels:
|
||||
- "dependencies"
|
||||
@@ -17,4 +18,25 @@ updates:
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: master
|
||||
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: v3
|
||||
labels:
|
||||
- "dependencies"
|
||||
- "pip"
|
||||
- "v3"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: v3
|
||||
labels:
|
||||
- "github_actions"
|
||||
- "v3"
|
||||
|
||||
3
.github/workflows/find-secrets.yml
vendored
@@ -11,8 +11,9 @@ jobs:
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: TruffleHog OSS
|
||||
uses: trufflesecurity/trufflehog@v3.72.0
|
||||
uses: trufflesecurity/trufflehog@v3.74.0
|
||||
with:
|
||||
path: ./
|
||||
base: ${{ github.event.repository.default_branch }}
|
||||
head: HEAD
|
||||
extra_args: --only-verified
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
|
||||
commit-message: "feat(regions_update): Update regions for AWS services."
|
||||
branch: "aws-services-regions-updated-${{ github.sha }}"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws"
|
||||
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
|
||||
title: "chore(regions_update): Changes in regions for AWS services."
|
||||
body: |
|
||||
### Description
|
||||
|
||||
6
.gitignore
vendored
@@ -9,8 +9,9 @@
|
||||
__pycache__
|
||||
venv/
|
||||
build/
|
||||
dist/
|
||||
/dist/
|
||||
*.egg-info/
|
||||
*/__pycache__/*.pyc
|
||||
|
||||
# Session
|
||||
Session.vim
|
||||
@@ -51,3 +52,6 @@ junit-reports/
|
||||
.coverage*
|
||||
.coverage
|
||||
coverage*
|
||||
|
||||
# Node
|
||||
node_modules
|
||||
|
||||
@@ -26,6 +26,7 @@ repos:
|
||||
rev: v0.9.0
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
exclude: contrib
|
||||
## PYTHON
|
||||
- repo: https://github.com/myint/autoflake
|
||||
rev: v2.2.1
|
||||
|
||||
@@ -10,4 +10,4 @@
|
||||
Want some swag as appreciation for your contribution?
|
||||
|
||||
# Prowler Developer Guide
|
||||
https://docs.prowler.cloud/en/latest/tutorials/developer-guide/
|
||||
https://docs.prowler.com/projects/prowler-open-source/en/latest/developer-guide/introduction/
|
||||
|
||||
@@ -15,7 +15,8 @@ USER prowler
|
||||
|
||||
# Copy necessary files
|
||||
WORKDIR /home/prowler
|
||||
COPY prowler/ /home/prowler/prowler/
|
||||
COPY prowler/ /home/prowler/prowler/
|
||||
COPY dashboard/ /home/prowler/dashboard/
|
||||
COPY pyproject.toml /home/prowler
|
||||
COPY README.md /home/prowler
|
||||
|
||||
@@ -26,6 +27,10 @@ ENV PATH="$HOME/.local/bin:$PATH"
|
||||
RUN pip install --no-cache-dir --upgrade pip && \
|
||||
pip install --no-cache-dir .
|
||||
|
||||
# Remove deprecated dash dependencies
|
||||
RUN pip uninstall dash-html-components -y && \
|
||||
pip uninstall dash-core-components -y
|
||||
|
||||
# Remove Prowler directory and build files
|
||||
USER 0
|
||||
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
|
||||
|
||||
2
Makefile
@@ -27,7 +27,7 @@ lint: ## Lint Code
|
||||
@echo "Running black... "
|
||||
black --check .
|
||||
@echo "Running pylint..."
|
||||
pylint --disable=W,C,R,E -j 0 providers lib util config
|
||||
pylint --disable=W,C,R,E -j 0 prowler util
|
||||
|
||||
##@ PyPI
|
||||
pypi-clean: ## Delete the distribution files
|
||||
|
||||
212
README.md
@@ -41,7 +41,21 @@
|
||||
|
||||
# Description
|
||||
|
||||
`Prowler` is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
|
||||
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
|
||||
|
||||
## Prowler CLI
|
||||
|
||||
```console
|
||||
prowler <provider>
|
||||
```
|
||||

|
||||
|
||||
## Prowler Dashboard
|
||||
|
||||
```console
|
||||
prowler dashboard
|
||||
```
|
||||

|
||||
|
||||
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.
|
||||
|
||||
@@ -50,16 +64,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
|
||||
| AWS | 304 | 61 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 6 -> `prowler aws --list-categories` |
|
||||
| GCP | 75 | 11 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
|
||||
| Azure | 127 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
|
||||
| Kubernetes | Work In Progress | - | CIS soon | - |
|
||||
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
|
||||
|
||||
# 📖 Documentation
|
||||
|
||||
The full documentation can now be found at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)
|
||||
|
||||
## Looking for Prowler v2 documentation?
|
||||
For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prowler/tree/2.12.1.
|
||||
|
||||
# ⚙️ Install
|
||||
# 💻 Installation
|
||||
|
||||
## Pip package
|
||||
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:
|
||||
@@ -74,9 +81,11 @@ More details at [https://docs.prowler.com](https://docs.prowler.com/projects/pro
|
||||
|
||||
The available versions of Prowler are the following:
|
||||
|
||||
- `latest`: in sync with master branch (bear in mind that it is not a stable version)
|
||||
- `latest`: in sync with `master` branch (bear in mind that it is not a stable version)
|
||||
- `v3-latest`: in sync with `v3` branch (bear in mind that it is not a stable version)
|
||||
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases), those are stable releases.
|
||||
- `stable`: this tag always point to the latest release.
|
||||
- `v3-stable`: this tag always point to the latest release for v3.
|
||||
|
||||
The container images are available here:
|
||||
|
||||
@@ -97,181 +106,30 @@ python prowler.py -v
|
||||
|
||||
# 📐✏️ High level architecture
|
||||
|
||||
You can run Prowler from your workstation, an EC2 instance, Fargate or any other container, Codebuild, CloudShell and Cloud9.
|
||||
You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.
|
||||
|
||||

|
||||

|
||||
|
||||
# 📝 Requirements
|
||||
# Deprecations from v3
|
||||
|
||||
Prowler has been written in Python using the [AWS SDK (Boto3)](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html#), [Azure SDK](https://azure.github.io/azure-sdk-for-python/) and [GCP API Python Client](https://github.com/googleapis/google-api-python-client/).
|
||||
## AWS
|
||||
## General
|
||||
- `Allowlist` now is called `Mutelist`.
|
||||
- The `--quiet` option has been deprecated, now use the `--status` flag to select the finding's status you want to get from PASS, FAIL or MANUAL.
|
||||
- All `INFO` finding's status has changed to `MANUAL`.
|
||||
- The CSV output format is common for all the providers.
|
||||
|
||||
Since Prowler uses AWS Credentials under the hood, you can follow any authentication method as described [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-precedence).
|
||||
Make sure you have properly configured your AWS-CLI with a valid Access Key and Region or declare AWS variables properly (or instance profile/role):
|
||||
|
||||
```console
|
||||
aws configure
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```console
|
||||
export AWS_ACCESS_KEY_ID="ASXXXXXXX"
|
||||
export AWS_SECRET_ACCESS_KEY="XXXXXXXXX"
|
||||
export AWS_SESSION_TOKEN="XXXXXXXXX"
|
||||
```
|
||||
|
||||
Those credentials must be associated to a user or role with proper permissions to do all checks. To make sure, add the following AWS managed policies to the user or role being used:
|
||||
|
||||
- `arn:aws:iam::aws:policy/SecurityAudit`
|
||||
- `arn:aws:iam::aws:policy/job-function/ViewOnlyAccess`
|
||||
|
||||
> Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using.
|
||||
|
||||
> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).
|
||||
|
||||
## Azure
|
||||
|
||||
Prowler for Azure supports the following authentication types:
|
||||
|
||||
- Service principal authentication by environment variables (Enterprise Application)
|
||||
- Current az cli credentials stored
|
||||
- Interactive browser authentication
|
||||
- Managed identity authentication
|
||||
|
||||
### Service Principal authentication
|
||||
|
||||
To allow Prowler assume the service principal identity to start the scan, it is needed to configure the following environment variables:
|
||||
|
||||
```console
|
||||
export AZURE_CLIENT_ID="XXXXXXXXX"
|
||||
export AZURE_TENANT_ID="XXXXXXXXX"
|
||||
export AZURE_CLIENT_SECRET="XXXXXXX"
|
||||
```
|
||||
|
||||
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
|
||||
### AZ CLI / Browser / Managed Identity authentication
|
||||
|
||||
The other three cases do not need additional configuration, `--az-cli-auth` and `--managed-identity-auth` are automated options, `--browser-auth` needs the user to authenticate using the default browser to start the scan. Also `--browser-auth` needs the tenant id to be specified with `--tenant-id`.
|
||||
|
||||
### Permissions
|
||||
|
||||
To use each one, you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:
|
||||
|
||||
- **Azure Active Directory permissions**: Used to retrieve metadata from the identity assumed by Prowler and future AAD checks (not mandatory to have access to execute the tool)
|
||||
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.
|
||||
|
||||
|
||||
#### Azure Active Directory scope
|
||||
|
||||
Azure Active Directory (AAD) permissions required by the tool are the following:
|
||||
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
|
||||
|
||||
#### Subscriptions scope
|
||||
|
||||
Regarding the subscription scope, Prowler by default scans all the subscriptions that is able to list, so it is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:
|
||||
|
||||
- `Security Reader`
|
||||
- `Reader`
|
||||
|
||||
|
||||
## Google Cloud Platform
|
||||
|
||||
Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):
|
||||
|
||||
1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
|
||||
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
|
||||
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)
|
||||
|
||||
Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.
|
||||
|
||||
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
|
||||
|
||||
# 💻 Basic Usage
|
||||
|
||||
To run prowler, you will need to specify the provider (e.g aws or azure):
|
||||
|
||||
```console
|
||||
prowler <provider>
|
||||
```
|
||||
|
||||

|
||||
|
||||
> Running the `prowler` command without options will use your environment variable credentials.
|
||||
|
||||
By default, prowler will generate a CSV, a JSON and a HTML report, however you can generate JSON-ASFF (only for AWS Security Hub) report with `-M` or `--output-modes`:
|
||||
|
||||
```console
|
||||
prowler <provider> -M csv json json-asff html
|
||||
```
|
||||
|
||||
The html report will be located in the `output` directory as the other files and it will look like:
|
||||
|
||||

|
||||
|
||||
You can use `-l`/`--list-checks` or `--list-services` to list all available checks or services within the provider.
|
||||
|
||||
```console
|
||||
prowler <provider> --list-checks
|
||||
prowler <provider> --list-services
|
||||
```
|
||||
|
||||
For executing specific checks or services you can use options `-c`/`--checks` or `-s`/`--services`:
|
||||
|
||||
```console
|
||||
prowler aws --checks s3_bucket_public_access
|
||||
prowler aws --services s3 ec2
|
||||
```
|
||||
|
||||
Also, checks and services can be excluded with options `-e`/`--excluded-checks` or `--excluded-services`:
|
||||
|
||||
```console
|
||||
prowler aws --excluded-checks s3_bucket_public_access
|
||||
prowler aws --excluded-services s3 ec2
|
||||
```
|
||||
|
||||
You can always use `-h`/`--help` to access to the usage information and all the possible options:
|
||||
|
||||
```console
|
||||
prowler -h
|
||||
```
|
||||
|
||||
## Checks Configurations
|
||||
Several Prowler's checks have user configurable variables that can be modified in a common **configuration file**.
|
||||
This file can be found in the following path:
|
||||
```
|
||||
prowler/config/config.yaml
|
||||
```
|
||||
We have deprecated some of our outputs formats:
|
||||
- The HTML is replaced for the new Prowler Dashboard, run `prowler dashboard`.
|
||||
- The native JSON is replaced for the JSON [OCSF](https://schema.ocsf.io/) v1.1.0, common for all the providers.
|
||||
|
||||
## AWS
|
||||
- Deprecate the AWS flag --sts-endpoint-region since we use AWS STS regional tokens.
|
||||
- To send only FAILS to AWS Security Hub, now use either `--send-sh-only-fails` or `--security-hub --status FAIL`.
|
||||
|
||||
Use a custom AWS profile with `-p`/`--profile` and/or AWS regions which you want to audit with `-f`/`--filter-region`:
|
||||
|
||||
```console
|
||||
prowler aws --profile custom-profile -f us-east-1 eu-south-2
|
||||
```
|
||||
> By default, `prowler` will scan all AWS regions.
|
||||
# 📖 Documentation
|
||||
|
||||
## Azure
|
||||
|
||||
With Azure you need to specify which auth method is going to be used:
|
||||
|
||||
```console
|
||||
prowler azure [--sp-env-auth, --az-cli-auth, --browser-auth, --managed-identity-auth]
|
||||
```
|
||||
> By default, `prowler` will scan all Azure subscriptions.
|
||||
|
||||
## Google Cloud Platform
|
||||
|
||||
Optionally, you can provide the location of an application credential JSON file with the following argument:
|
||||
|
||||
```console
|
||||
prowler gcp --credentials-file path
|
||||
```
|
||||
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.
|
||||
Install, Usage, Tutorials and Developer Guide is at https://docs.prowler.com/
|
||||
|
||||
# 📃 License
|
||||
|
||||
|
||||
@@ -1,17 +1,8 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Install system dependencies
|
||||
sudo yum -y install openssl-devel bzip2-devel libffi-devel gcc
|
||||
# Upgrade to Python 3.9
|
||||
cd /tmp && wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz
|
||||
tar zxf Python-3.9.13.tgz
|
||||
cd Python-3.9.13/ || exit
|
||||
./configure --enable-optimizations
|
||||
sudo make altinstall
|
||||
python3.9 --version
|
||||
# Install Prowler
|
||||
cd ~ || exit
|
||||
python3.9 -m pip install prowler-cloud
|
||||
prowler -v
|
||||
# Run Prowler
|
||||
prowler
|
||||
sudo bash
|
||||
adduser prowler
|
||||
su prowler
|
||||
pip install prowler
|
||||
cd /tmp
|
||||
prowler aws
|
||||
|
||||
2
dashboard/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
DASHBOARD_PORT = 11666
|
||||
DASHBOARD_ARGS = {"debug": True, "port": DASHBOARD_PORT, "use_reloader": False}
|
||||
176
dashboard/__main__.py
Normal file
@@ -0,0 +1,176 @@
|
||||
# Importing Packages
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
import click
|
||||
import dash
|
||||
import dash_bootstrap_components as dbc
|
||||
from colorama import Fore, Style
|
||||
from dash import dcc, html
|
||||
from dash.dependencies import Input, Output
|
||||
|
||||
from dashboard.config import folder_path_overview
|
||||
from prowler.config.config import orange_color
|
||||
from prowler.lib.banner import print_banner
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
cli = sys.modules["flask.cli"]
|
||||
print_banner(verbose=False)
|
||||
print(
|
||||
f"{Fore.GREEN}Loading all CSV files from the folder {folder_path_overview} ...\n{Style.RESET_ALL}"
|
||||
)
|
||||
cli.show_server_banner = lambda *x: click.echo(
|
||||
f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are a {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} customer and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
|
||||
)
|
||||
|
||||
# Initialize the app - incorporate css
|
||||
dashboard = dash.Dash(
|
||||
__name__,
|
||||
external_stylesheets=[dbc.themes.DARKLY],
|
||||
use_pages=True,
|
||||
suppress_callback_exceptions=True,
|
||||
title="Prowler Dashboard",
|
||||
)
|
||||
|
||||
# Logo
|
||||
prowler_logo = html.Img(
|
||||
src="https://prowler.com/wp-content/uploads/logo-dashboard.png", alt="Prowler Logo"
|
||||
)
|
||||
|
||||
menu_icons = {
|
||||
"overview": "/assets/images/icons/overview.svg",
|
||||
"compliance": "/assets/images/icons/compliance.svg",
|
||||
}
|
||||
|
||||
|
||||
# Function to generate navigation links
|
||||
def generate_nav_links(current_path):
|
||||
nav_links = []
|
||||
for page in dash.page_registry.values():
|
||||
# Gets the icon URL based on the page name
|
||||
icon_url = menu_icons.get(page["name"].lower())
|
||||
is_active = (
|
||||
" bg-prowler-stone-950 border-r-4 border-solid border-prowler-lime"
|
||||
if current_path == page["relative_path"]
|
||||
else ""
|
||||
)
|
||||
link_class = f"block hover:bg-prowler-stone-950 hover:border-r-4 hover:border-solid hover:border-prowler-lime{is_active}"
|
||||
|
||||
link_content = html.Span(
|
||||
[
|
||||
html.Img(src=icon_url, className="w-5"),
|
||||
html.Span(page["name"], className="font-medium text-base leading-6"),
|
||||
],
|
||||
className="flex justify-center lg:justify-normal items-center gap-x-3 py-2 px-3",
|
||||
)
|
||||
|
||||
nav_link = html.Li(
|
||||
dcc.Link(link_content, href=page["relative_path"], className=link_class)
|
||||
)
|
||||
nav_links.append(nav_link)
|
||||
return nav_links
|
||||
|
||||
|
||||
def generate_help_menu():
|
||||
help_links = [
|
||||
{
|
||||
"title": "Help",
|
||||
"url": "https://github.com/prowler-cloud/prowler/issues",
|
||||
"icon": "/assets/images/icons/help.png",
|
||||
},
|
||||
{
|
||||
"title": "Docs",
|
||||
"url": "https://docs.prowler.com",
|
||||
"icon": "/assets/images/icons/docs.png",
|
||||
},
|
||||
]
|
||||
|
||||
link_class = "block hover:bg-prowler-stone-950 hover:border-r-4 hover:border-solid hover:border-prowler-lime"
|
||||
|
||||
menu_items = []
|
||||
for link in help_links:
|
||||
menu_item = html.Li(
|
||||
html.A(
|
||||
html.Span(
|
||||
[
|
||||
html.Img(src=link["icon"], className="w-5"),
|
||||
html.Span(
|
||||
link["title"], className="font-medium text-base leading-6"
|
||||
),
|
||||
],
|
||||
className="flex items-center gap-x-3 py-2 px-3",
|
||||
),
|
||||
href=link["url"],
|
||||
target="_blank",
|
||||
className=link_class,
|
||||
)
|
||||
)
|
||||
menu_items.append(menu_item)
|
||||
|
||||
return menu_items
|
||||
|
||||
|
||||
# Layout
|
||||
dashboard.layout = html.Div(
|
||||
[
|
||||
dcc.Location(id="url", refresh=False),
|
||||
html.Link(rel="icon", href="assets/favicon.ico"),
|
||||
# Placeholder for dynamic navigation bar
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
id="navigation-bar", className="bg-prowler-stone-900 min-w-36 z-10"
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
dash.page_container,
|
||||
],
|
||||
id="content_select",
|
||||
className="bg-prowler-white w-full col-span-11 h-screen mx-auto overflow-y-scroll no-scrollbar px-10 py-7",
|
||||
),
|
||||
],
|
||||
className="grid custom-grid 2xl:custom-grid-large h-screen",
|
||||
),
|
||||
],
|
||||
className="h-screen mx-auto",
|
||||
)
|
||||
|
||||
|
||||
# Callback to update navigation bar
|
||||
@dashboard.callback(Output("navigation-bar", "children"), [Input("url", "pathname")])
|
||||
def update_nav_bar(pathname):
|
||||
return html.Div(
|
||||
[
|
||||
html.Div([prowler_logo], className="mb-8 px-3"),
|
||||
html.H6(
|
||||
"Dashboards",
|
||||
className="px-3 text-prowler-stone-500 text-sm opacity-90 font-regular mb-2",
|
||||
),
|
||||
html.Nav(
|
||||
[html.Ul(generate_nav_links(pathname), className="")],
|
||||
className="flex flex-col gap-y-6",
|
||||
),
|
||||
html.Nav(
|
||||
[
|
||||
html.A(
|
||||
[
|
||||
html.Span(
|
||||
[
|
||||
html.Img(src="assets/favicon.ico", className="w-5"),
|
||||
"Subscribe to prowler SaaS",
|
||||
],
|
||||
className="flex items-center gap-x-3",
|
||||
),
|
||||
],
|
||||
href="https://prowler.com/",
|
||||
target="_blank",
|
||||
className="block p-3 uppercase text-xs hover:bg-prowler-stone-950 hover:border-r-4 hover:border-solid hover:border-prowler-lime",
|
||||
),
|
||||
html.Ul(generate_help_menu(), className=""),
|
||||
],
|
||||
className="flex flex-col gap-y-6 mt-auto",
|
||||
),
|
||||
],
|
||||
className="flex flex-col bg-prowler-stone-900 py-7 h-full",
|
||||
)
|
||||
BIN
dashboard/assets/favicon.ico
Normal file
|
After Width: | Height: | Size: 15 KiB |
4
dashboard/assets/images/icons/compliance.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="#FFF" aria-hidden="true" class="h-5 w-5" viewBox="0 0 24 24">
|
||||
<path fill-rule="evenodd" d="M9 1.5H5.625c-1.036 0-1.875.84-1.875 1.875v17.25c0 1.035.84 1.875 1.875 1.875h12.75c1.035 0 1.875-.84 1.875-1.875V12.75A3.75 3.75 0 0 0 16.5 9h-1.875a1.875 1.875 0 0 1-1.875-1.875V5.25A3.75 3.75 0 0 0 9 1.5zm6.61 10.936a.75.75 0 1 0-1.22-.872l-3.236 4.53L9.53 14.47a.75.75 0 0 0-1.06 1.06l2.25 2.25a.75.75 0 0 0 1.14-.094l3.75-5.25z" clip-rule="evenodd"/>
|
||||
<path d="M12.971 1.816A5.23 5.23 0 0 1 14.25 5.25v1.875c0 .207.168.375.375.375H16.5a5.23 5.23 0 0 1 3.434 1.279 9.768 9.768 0 0 0-6.963-6.963z"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 650 B |
BIN
dashboard/assets/images/icons/docs.png
Normal file
|
After Width: | Height: | Size: 734 B |
BIN
dashboard/assets/images/icons/help-black.png
Normal file
|
After Width: | Height: | Size: 441 B |
BIN
dashboard/assets/images/icons/help.png
Normal file
|
After Width: | Height: | Size: 934 B |
4
dashboard/assets/images/icons/overview.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="#FFF" aria-hidden="true" class="h-5 w-5" viewBox="0 0 24 24">
|
||||
<path fill-rule="evenodd" d="M2.25 13.5a8.25 8.25 0 0 1 8.25-8.25.75.75 0 0 1 .75.75v6.75H18a.75.75 0 0 1 .75.75 8.25 8.25 0 0 1-16.5 0z" clip-rule="evenodd"/>
|
||||
<path fill-rule="evenodd" d="M12.75 3a.75.75 0 0 1 .75-.75 8.25 8.25 0 0 1 8.25 8.25.75.75 0 0 1-.75.75h-7.5a.75.75 0 0 1-.75-.75V3z" clip-rule="evenodd"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 435 B |
BIN
dashboard/assets/images/providers/aws_provider.png
Normal file
|
After Width: | Height: | Size: 10 KiB |
BIN
dashboard/assets/images/providers/azure_provider.png
Normal file
|
After Width: | Height: | Size: 6.0 KiB |
BIN
dashboard/assets/images/providers/gcp_provider.png
Normal file
|
After Width: | Height: | Size: 245 KiB |
BIN
dashboard/assets/images/providers/k8s_provider.png
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
dashboard/assets/logo.png
Normal file
|
After Width: | Height: | Size: 11 KiB |
1301
dashboard/assets/styles/dist/output.css
vendored
Normal file
2221
dashboard/common_methods.py
Normal file
23
dashboard/compliance/aws_account_security_onboarding_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
@@ -0,0 +1,22 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_NAME",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_NAME", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_NAME",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ATTRIBUTES_NAME"
|
||||
)
|
||||
24
dashboard/compliance/cis_1_4_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_1_5_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_1_8_kubernetes.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_2_0_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_2_0_azure.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_2_0_gcp.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_2_1_azure.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
24
dashboard/compliance/cis_3_0_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
23
dashboard/compliance/cisa_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
29
dashboard/compliance/ens_rd2022_aws.py
Normal file
@@ -0,0 +1,29 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_ens
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_MARCO",
|
||||
"REQUIREMENTS_ATTRIBUTES_CATEGORIA",
|
||||
"REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL",
|
||||
"REQUIREMENTS_ATTRIBUTES_TIPO",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_ens(
|
||||
aux,
|
||||
"REQUIREMENTS_ATTRIBUTES_MARCO",
|
||||
"REQUIREMENTS_ATTRIBUTES_CATEGORIA",
|
||||
"REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL",
|
||||
"REQUIREMENTS_ATTRIBUTES_TIPO",
|
||||
)
|
||||
24
dashboard/compliance/fedramp_low_revision_4_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/fedramp_moderate_revision_4_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/ffiec_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
23
dashboard/compliance/gdpr_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/gxp_21_cfr_part_11_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
23
dashboard/compliance/gxp_eu_annex_11_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/hipaa_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
23
dashboard/compliance/iso27001_2013_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_container_iso
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_CATEGORY",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_OBJETIVE_NAME",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
return get_section_container_iso(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_CATEGORY", "REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID"
|
||||
)
|
||||
23
dashboard/compliance/mitre_attack_aws.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_SUBTECHNIQUES",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_SUBTECHNIQUES"
|
||||
)
|
||||
23
dashboard/compliance/mitre_attack_azure.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_SUBTECHNIQUES",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_SUBTECHNIQUES"
|
||||
)
|
||||
24
dashboard/compliance/nist_800_171_revision_2_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/nist_800_53_revision_4_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/nist_800_53_revision_5_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
24
dashboard/compliance/nist_csf_1_1_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
20
dashboard/compliance/pci_3_2_1_aws.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_pci
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_pci(aux, "REQUIREMENTS_ID")
|
||||
20
dashboard/compliance/rbi_cyber_security_framework_aws.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_rbi
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
return get_section_containers_rbi(aux, "REQUIREMENTS_ID")
|
||||
24
dashboard/compliance/soc2_aws.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format3
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format3(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
32
dashboard/config.py
Normal file
@@ -0,0 +1,32 @@
|
||||
import os
|
||||
|
||||
# Emojis to be used in the compliance table
|
||||
pass_emoji = "✅"
|
||||
fail_emoji = "❌"
|
||||
info_emoji = "ℹ️"
|
||||
manual_emoji = "✋🏽"
|
||||
|
||||
# Main colors
|
||||
fail_color = "#e67272"
|
||||
pass_color = "#54d283"
|
||||
info_color = "#2684FF"
|
||||
manual_color = "#636c78"
|
||||
|
||||
# Muted colors
|
||||
muted_fail_color = "#fca903"
|
||||
muted_pass_color = "#03fccf"
|
||||
muted_manual_color = "#b33696"
|
||||
|
||||
# Severity colors
|
||||
critical_color = "#951649"
|
||||
high_color = "#e11d48"
|
||||
medium_color = "#ee6f15"
|
||||
low_color = "#f9f5e6"
|
||||
informational_color = "#3274d9"
|
||||
|
||||
# Folder output path
|
||||
folder_path_overview = os.getcwd() + "/output"
|
||||
folder_path_compliance = os.getcwd() + "/output/compliance"
|
||||
|
||||
# Encoding
|
||||
encoding_format = "utf-8"
|
||||
5
dashboard/lib/arguments/arguments.py
Normal file
@@ -0,0 +1,5 @@
|
||||
def init_dashboard_parser(self):
|
||||
"""Init the Dashboard CLI parser"""
|
||||
# If we don't set `help="Dashboard"` this won't be rendered
|
||||
# We don't want the dashboard to inherit from the common providers parser since it's a different component
|
||||
self.subparsers.add_parser("dashboard")
|
||||
157
dashboard/lib/cards.py
Normal file
@@ -0,0 +1,157 @@
|
||||
from typing import List
|
||||
|
||||
from dash import html
|
||||
|
||||
|
||||
def create_provider_card(
|
||||
provider: str, provider_logo: str, account_type: str, filtered_data
|
||||
) -> List[html.Div]:
|
||||
"""
|
||||
Card to display the provider's name and icon.
|
||||
Args:
|
||||
provider (str): Name of the provider.
|
||||
provider_icon (str): Icon of the provider.
|
||||
Returns:
|
||||
html.Div: Card to display the provider's name and icon.
|
||||
"""
|
||||
accounts = len(
|
||||
filtered_data[filtered_data["PROVIDER"] == provider]["ACCOUNT_UID"].unique()
|
||||
)
|
||||
checks_executed = len(
|
||||
filtered_data[filtered_data["PROVIDER"] == provider]["CHECK_ID"].unique()
|
||||
)
|
||||
fails = len(
|
||||
filtered_data[
|
||||
(filtered_data["PROVIDER"] == provider)
|
||||
& (filtered_data["STATUS"] == "FAIL")
|
||||
]
|
||||
)
|
||||
passes = len(
|
||||
filtered_data[
|
||||
(filtered_data["PROVIDER"] == provider)
|
||||
& (filtered_data["STATUS"] == "PASS")
|
||||
]
|
||||
)
|
||||
# Take the values in the MUTED colum that are true for the provider
|
||||
if "MUTED" in filtered_data.columns:
|
||||
muted = len(
|
||||
filtered_data[
|
||||
(filtered_data["PROVIDER"] == provider)
|
||||
& (filtered_data["MUTED"] == "True")
|
||||
]
|
||||
)
|
||||
else:
|
||||
muted = 0
|
||||
|
||||
return [
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
html.Div([provider_logo], className="w-8"),
|
||||
],
|
||||
className="p-2 shadow-box-up rounded-full",
|
||||
),
|
||||
html.H5(
|
||||
f"{provider.upper()} {account_type}",
|
||||
className="text-base font-semibold leading-snug tracking-normal text-gray-900",
|
||||
),
|
||||
],
|
||||
className="flex justify-between items-center mb-3",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
account_type,
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
accounts,
|
||||
className="inline-block text-xs text-prowler-stone-900 font-bold shadow-box-down px-4 py-1 rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
"Checks",
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
checks_executed,
|
||||
className="inline-block text-xs text-prowler-stone-900 font-bold shadow-box-down px-4 py-1 rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
"FAILED",
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
fails,
|
||||
className="m-[2px] px-4 py-1 rounded-lg bg-gradient-failed",
|
||||
),
|
||||
],
|
||||
className="inline-block text-xs font-bold shadow-box-down rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
"PASSED",
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
passes,
|
||||
className="m-[2px] px-4 py-1 rounded-lg bg-gradient-passed",
|
||||
),
|
||||
],
|
||||
className="inline-block text-xs font-bold shadow-box-down rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Span(
|
||||
"MUTED",
|
||||
className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
muted,
|
||||
className="m-[2px] px-4 py-1 rounded-lg bg-gradient-muted",
|
||||
),
|
||||
],
|
||||
className="inline-block text-xs font-bold shadow-box-down rounded-lg text-center col-span-5 col-end-13",
|
||||
),
|
||||
],
|
||||
className="grid grid-cols-12",
|
||||
),
|
||||
],
|
||||
className="grid gap-x-8 gap-y-4",
|
||||
),
|
||||
],
|
||||
className="px-4 py-3",
|
||||
),
|
||||
],
|
||||
className="relative flex flex-col bg-white shadow-provider rounded-xl w-full transition ease-in-out delay-100 hover:-translate-y-1 hover:scale-110 hover:z-50 hover:cursor-pointer",
|
||||
)
|
||||
]
|
||||
289
dashboard/lib/dropdowns.py
Normal file
@@ -0,0 +1,289 @@
|
||||
from dash import dcc, html
|
||||
|
||||
|
||||
def create_date_dropdown(assesment_times: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the date of the last available scan for each account.
|
||||
Args:
|
||||
assesment_times (list): List of dates of the last available scan for each account.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the date of the last available scan for each account.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Assessment date (last available scan) ",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
),
|
||||
html.Img(
|
||||
id="info-file-over",
|
||||
src="/assets/images/icons/help-black.png",
|
||||
className="w-5",
|
||||
title="The date of the last available scan for each account is displayed here. If you have not run prowler yet, the date will be empty.",
|
||||
),
|
||||
],
|
||||
style={"display": "inline-flex"},
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="report-date-filter",
|
||||
options=[
|
||||
{"label": account, "value": account} for account in assesment_times
|
||||
],
|
||||
value=assesment_times[0],
|
||||
clearable=False,
|
||||
multi=False,
|
||||
style={"color": "#000000", "width": "100%"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_date_dropdown_compliance(assesment_times: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the date of the last available scan for each account.
|
||||
Args:
|
||||
assesment_times (list): List of dates of the last available scan for each account.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the date of the last available scan for each account.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Assesment Date:", className="text-prowler-stone-900 font-bold text-sm"
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="date-filter-analytics",
|
||||
options=[
|
||||
{"label": account, "value": account} for account in assesment_times
|
||||
],
|
||||
value=assesment_times[0],
|
||||
clearable=False,
|
||||
multi=False,
|
||||
style={"color": "#000000", "width": "100%"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_region_dropdown(regions: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the region of the account.
|
||||
Args:
|
||||
regions (list): List of regions of the account.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the region of the account.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Region / Location / Namespace :",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="region-filter",
|
||||
options=[{"label": region, "value": region} for region in regions],
|
||||
value=["All"], # Initial selection is ALL
|
||||
clearable=False,
|
||||
multi=True,
|
||||
style={"color": "#000000", "width": "100%"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_region_dropdown_compliance(regions: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the region of the account.
|
||||
Args:
|
||||
regions (list): List of regions of the account.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the region of the account.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Region / Location / Namespace :",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="region-filter-compliance",
|
||||
options=[{"label": region, "value": region} for region in regions],
|
||||
value=["All"], # Initial selection is ALL
|
||||
clearable=False,
|
||||
multi=True,
|
||||
style={"color": "#000000", "width": "100%"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_account_dropdown(accounts: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the account.
|
||||
Args:
|
||||
accounts (list): List of accounts.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the account.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Account / Subscription / Project / Cluster :",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="cloud-account-filter",
|
||||
options=[{"label": account, "value": account} for account in accounts],
|
||||
value=["All"], # Initial selection is ALL
|
||||
clearable=False,
|
||||
multi=True,
|
||||
style={"color": "#000000", "width": "100%"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_account_dropdown_compliance(accounts: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the account.
|
||||
Args:
|
||||
accounts (list): List of accounts.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the account.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Account / Subscription / Project / Cluster :",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="cloud-account-filter-compliance",
|
||||
options=[{"label": account, "value": account} for account in accounts],
|
||||
value=["All"], # Initial selection is ALL
|
||||
clearable=False,
|
||||
multi=True,
|
||||
style={"color": "#000000", "width": "100%"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_compliance_dropdown(compliance: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the compliance.
|
||||
Args:
|
||||
compliance (list): List of compliance.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the compliance.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Compliance:", className="text-prowler-stone-900 font-bold text-sm"
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="report-compliance-filter",
|
||||
options=[{"label": i, "value": i} for i in compliance],
|
||||
value=compliance[0],
|
||||
clearable=False,
|
||||
style={"color": "#000000"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_severity_dropdown(severity: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the severity.
|
||||
Args:
|
||||
severity (list): List of severity.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the severity.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Severity:", className="text-prowler-stone-900 font-bold text-sm"
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="severity-filter",
|
||||
options=[{"label": i, "value": i} for i in severity],
|
||||
value=["All"],
|
||||
clearable=False,
|
||||
multi=True,
|
||||
style={"color": "#000000"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_service_dropdown(services: list) -> html.Div:
    """
    Build the service filter.

    Args:
        services (list): Service names offered as options.

    Returns:
        html.Div: Label plus multi-select dropdown, initially set to "All".
    """
    service_options = [{"label": svc, "value": svc} for svc in services]
    return html.Div(
        [
            html.Label(
                "Service:", className="text-prowler-stone-900 font-bold text-sm"
            ),
            dcc.Dropdown(
                id="service-filter",
                options=service_options,
                value=["All"],  # every service selected by default
                clearable=False,
                multi=True,
                style={"color": "#000000"},
            ),
        ]
    )
|
||||
|
||||
|
||||
def create_status_dropdown(status: list) -> html.Div:
    """
    Build the finding-status filter.

    Args:
        status (list): Status values offered as options.

    Returns:
        html.Div: Label plus multi-select dropdown, initially set to "All".
    """
    label = html.Label(
        "Status:", className="text-prowler-stone-900 font-bold text-sm"
    )
    selector = dcc.Dropdown(
        id="status-filter",
        options=[{"label": state, "value": state} for state in status],
        value=["All"],  # every status selected by default
        clearable=False,
        multi=True,
        style={"color": "#000000"},
    )
    return html.Div([label, selector])
|
||||
|
||||
|
||||
def create_table_row_dropdown(table_rows: list) -> html.Div:
    """
    Dropdown to select the number of rows shown in the findings table.

    Args:
        table_rows (list): Available row-count choices; the first entry is
            preselected.

    Returns:
        html.Div: Dropdown to select the number of rows in the table.
    """
    return html.Div(
        [
            dcc.Dropdown(
                id="table-rows",
                options=[{"label": i, "value": i} for i in table_rows],
                # Guard the empty list: `table_rows[0]` would raise IndexError.
                value=table_rows[0] if table_rows else None,
                clearable=False,
                style={"color": "#000000", "margin-right": "10px"},
            ),
        ],
    )
|
||||
174
dashboard/lib/layouts.py
Normal file
@@ -0,0 +1,174 @@
|
||||
from dash import dcc, html
|
||||
|
||||
|
||||
def create_layout_overview(
    account_dropdown: html.Div,
    date_dropdown: html.Div,
    region_dropdown: html.Div,
    download_button_csv: html.Button,
    download_button_xlsx: html.Button,
    severity_dropdown: html.Div,
    service_dropdown: html.Div,
    table_row_dropdown: html.Div,
    status_dropdown: html.Div,
) -> html.Div:
    """
    Create the layout of the overview dashboard page.

    Args:
        account_dropdown (html.Div): Dropdown to select the account.
        date_dropdown (html.Div): Dropdown to select the date of the last available scan for each account.
        region_dropdown (html.Div): Dropdown to select the region of the account.
        download_button_csv (html.Button): Button that triggers the CSV export.
        download_button_xlsx (html.Button): Button that triggers the XLSX export.
        severity_dropdown (html.Div): Dropdown to filter findings by severity.
        service_dropdown (html.Div): Dropdown to filter findings by service.
        table_row_dropdown (html.Div): Dropdown to choose how many table rows are shown.
        status_dropdown (html.Div): Dropdown to filter findings by status.

    Returns:
        html.Div: Layout of the dashboard.
    """
    return html.Div(
        [
            dcc.Location(id="url", refresh=False),
            # Page header: title and subscription-card placeholder
            html.Div(
                [
                    html.H1(
                        "Scan Overview",
                        className="text-prowler-stone-900 text-2xxl font-bold",
                    ),
                    html.Div(className="d-flex flex-wrap", id="subscribe_card"),
                ],
                className="flex justify-between border-b border-prowler-500 pb-3",
            ),
            # First filter row: date / account / region
            html.Div(
                [
                    html.Div([date_dropdown], className=""),
                    html.Div([account_dropdown], className=""),
                    html.Div([region_dropdown], className=""),
                ],
                className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-3 lg:gap-y-0",
            ),
            # Second filter row: severity / service / status
            html.Div(
                [
                    html.Div([severity_dropdown], className=""),
                    html.Div([service_dropdown], className=""),
                    html.Div([status_dropdown], className=""),
                ],
                className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-3 lg:gap-y-0",
            ),
            # Per-provider summary cards; content is filled in by callbacks
            html.Div(
                [
                    html.Div(className="flex", id="aws_card", n_clicks=0),
                    html.Div(className="flex", id="azure_card", n_clicks=0),
                    html.Div(className="flex", id="gcp_card", n_clicks=0),
                    html.Div(className="flex", id="k8s_card", n_clicks=0),
                ],
                className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-4 lg:gap-y-0",
            ),
            html.H4(
                "Count of Findings by severity",
                className="text-prowler-stone-900 text-lg font-bold",
            ),
            # Graph placeholders populated by callbacks
            html.Div(
                [
                    html.Div(
                        className="flex flex-col col-span-12 sm:col-span-6 lg:col-span-3 gap-y-4",
                        id="status_graph",
                    ),
                    html.Div(
                        className="flex flex-col col-span-12 sm:col-span-6 lg:col-span-3 gap-y-4",
                        id="two_pie_chart",
                    ),
                    html.Div(
                        className="flex flex-col col-span-12 sm:col-span-6 lg:col-span-6 col-end-13 gap-y-4",
                        id="line_plot",
                    ),
                ],
                className="grid gap-x-4 gap-y-4 grid-cols-12 lg:gap-y-0",
            ),
            # Findings-table header: row-count selector plus export buttons
            html.Div(
                [
                    html.H4(
                        "Top Findings by Severity",
                        className="text-prowler-stone-900 text-lg font-bold",
                    ),
                    html.Div(
                        [
                            (
                                html.Label(
                                    "Table Rows:",
                                    className="text-prowler-stone-900 font-bold text-sm",
                                    style={"margin-right": "10px"},
                                )
                            ),
                            table_row_dropdown,
                            download_button_csv,
                            download_button_xlsx,
                        ],
                        className="flex justify-between items-center",
                    ),
                    dcc.Download(id="download-data"),
                ],
                className="flex justify-between items-center",
            ),
            html.Div(id="table", className="grid"),
        ],
        className="grid gap-x-8 gap-y-8 2xl:container mx-auto",
    )
|
||||
|
||||
|
||||
def create_layout_compliance(
    account_dropdown: html.Div,
    date_dropdown: html.Div,
    region_dropdown: html.Div,
    compliance_dropdown: html.Div,
) -> html.Div:
    """
    Create the layout of the compliance page.

    Args:
        account_dropdown (html.Div): Dropdown to select the account.
        date_dropdown (html.Div): Dropdown to select the scan date.
        region_dropdown (html.Div): Dropdown to select the region.
        compliance_dropdown (html.Div): Dropdown to select the compliance framework.

    Returns:
        html.Div: Layout of the compliance page.
    """
    return html.Div(
        [
            dcc.Location(id="url", refresh=False),
            # Page header: title plus external "Subscribe" link
            html.Div(
                [
                    html.H1(
                        "Compliance",
                        className="text-prowler-stone-900 text-2xxl font-bold",
                    ),
                    html.A(
                        [
                            html.Img(src="assets/favicon.ico", className="w-5 mr-3"),
                            html.Span("Subscribe to prowler SaaS"),
                        ],
                        href="https://prowler.pro/",
                        target="_blank",
                        className="text-prowler-stone-900 inline-flex px-4 py-2 text-xs font-bold uppercase transition-all rounded-lg text-gray-900 hover:bg-prowler-stone-900/10 border-solid border-1 hover:border-prowler-stone-900/10 hover:border-solid hover:border-1 border-prowler-stone-900/10",
                    ),
                ],
                className="flex justify-between border-b border-prowler-500 pb-3",
            ),
            # Filter row: date / account / region / compliance
            html.Div(
                [
                    html.Div([date_dropdown], className=""),
                    html.Div([account_dropdown], className=""),
                    html.Div([region_dropdown], className=""),
                    html.Div([compliance_dropdown], className=""),
                ],
                className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-4 lg:gap-y-0",
            ),
            # Graph placeholders populated by the display_data callback
            html.Div(
                [
                    html.Div(
                        className="flex flex-col col-span-12 md:col-span-4 gap-y-4",
                        id="overall_status_result_graph",
                    ),
                    html.Div(
                        className="flex flex-col col-span-12 md:col-span-7 md:col-end-13 gap-y-4",
                        id="security_level_graph",
                    ),
                    # NOTE(review): empty id — looks like a leftover spacer; confirm intent
                    html.Div(
                        className="flex flex-col col-span-12 md:col-span-2 gap-y-4",
                        id="",
                    ),
                ],
                className="grid gap-x-4 gap-y-4 grid-cols-12 lg:gap-y-0",
            ),
            html.H4(
                "Details compliance:",
                className="text-prowler-stone-900 text-lg font-bold",
            ),
            html.Div(className="flex flex-wrap", id="output"),
        ],
        className="grid gap-x-8 gap-y-8 2xl:container mx-auto",
    )
|
||||
600
dashboard/pages/compliance.py
Normal file
@@ -0,0 +1,600 @@
|
||||
# Standard library imports
|
||||
import csv
|
||||
import glob
|
||||
import importlib
|
||||
import os
|
||||
import re
|
||||
import warnings
|
||||
|
||||
# Third-party imports
|
||||
import dash
|
||||
import pandas as pd
|
||||
import plotly.express as px
|
||||
from dash import callback, dcc, html
|
||||
from dash.dependencies import Input, Output
|
||||
|
||||
# Config import
|
||||
from dashboard.config import (
|
||||
encoding_format,
|
||||
fail_color,
|
||||
folder_path_compliance,
|
||||
info_color,
|
||||
manual_color,
|
||||
pass_color,
|
||||
)
|
||||
from dashboard.lib.dropdowns import (
|
||||
create_account_dropdown_compliance,
|
||||
create_compliance_dropdown,
|
||||
create_date_dropdown_compliance,
|
||||
create_region_dropdown_compliance,
|
||||
)
|
||||
from dashboard.lib.layouts import create_layout_compliance
|
||||
|
||||
# Suppress warnings
warnings.filterwarnings("ignore")

# Global variables
# TODO: Create a flag to let the user put a custom path

# Collect compliance CSVs that contain at least one data row; header-only
# (empty) reports are skipped so they never reach the dashboard.
csv_files = []
for file in glob.glob(os.path.join(folder_path_compliance, "*.csv")):
    with open(file, "r", newline="", encoding=encoding_format) as csvfile:
        reader = csv.reader(csvfile)
        # Total number of rows including the header line
        num_rows = sum(1 for row in reader)
        if num_rows > 1:
            csv_files.append(file)
|
||||
|
||||
|
||||
def load_csv_files(csv_files):
    """
    Load the compliance CSV files and derive the list of selectable
    compliance names.

    Args:
        csv_files (list): Paths of semicolon-separated compliance CSVs.

    Returns:
        tuple: (data, results) where `data` is a single concatenated
        DataFrame (or None when no file had a CHECKID column) and `results`
        is the sorted list of display names, with every CIS framework
        expanded into "… - Level_1" / "… - Level_2" entries.
    """
    dfs = []
    results = []
    for file in csv_files:
        df = pd.read_csv(file, sep=";", on_bad_lines="skip")
        if "CHECKID" in df.columns:
            dfs.append(df)
            # Derive the display name from the file name:
            # "prefix_cis_1.4_aws.csv" -> "CIS_1.4 - AWS"
            result = os.path.basename(file)  # portable, unlike split("/")
            result = re.sub(r"^.*?_", "", result)  # drop prefix up to first "_"
            result = result.replace(".csv", "")
            result = result.upper()
            if "AWS" in result:
                if "AWS_" in result:
                    # Name already starts with AWS_: just drop the suffix
                    result = result.replace("_AWS", "")
                else:
                    result = result.replace("_AWS", " - AWS")
            if "GCP" in result:
                result = result.replace("_GCP", " - GCP")
            if "AZURE" in result:
                result = result.replace("_AZURE", " - AZURE")
            if "KUBERNETES" in result:
                result = result.replace("_KUBERNETES", " - KUBERNETES")
            # Strip anything before "CIS_" for CIS reports. Previously this
            # sliced unconditionally, so a missing "CIS_" (find() == -1)
            # truncated every non-CIS name to its final character.
            cis_index = result.find("CIS_")
            if cis_index != -1:
                result = result[cis_index:]
            results.append(result)

    # De-duplicate names (several files may map to the same compliance)
    unique_results = set(results)
    results = list(unique_results)
    # Check if there is any CIS report in the list and divide it in level 1 and level 2
    new_results = []
    old_results = results.copy()
    for compliance_name in results:
        if "CIS_" in compliance_name:
            old_results.remove(compliance_name)
            new_results.append(compliance_name + " - Level_1")
            new_results.append(compliance_name + " - Level_2")

    results = old_results + new_results
    results.sort()
    # Handle the case where there are no CSV files
    try:
        data = pd.concat(dfs, ignore_index=True)
    except ValueError:
        data = None
    return data, results
|
||||
|
||||
|
||||
data, results = load_csv_files(csv_files)

if data is None:
    # No usable CSVs: register the page with a placeholder message instead
    # of the full compliance layout.
    dash.register_page(__name__)
    layout = html.Div(
        [
            html.Div(
                [
                    html.H5(
                        "No data found, check if the CSV files are in the correct folder.",
                        className="card-title",
                        style={"text-align": "left"},
                    )
                ],
                style={
                    "width": "99%",
                    "margin-right": "0.8%",
                    "margin-bottom": "10px",
                },
            )
        ]
    )
else:
    # Normalize the assessment timestamp and keep only the most recent
    # scan per calendar day.
    data["ASSESSMENTDATE"] = pd.to_datetime(data["ASSESSMENTDATE"])
    data["ASSESSMENT_TIME"] = data["ASSESSMENTDATE"].dt.strftime("%Y-%m-%d %H:%M:%S")

    data_values = data["ASSESSMENT_TIME"].unique()
    data_values.sort()
    data_values = data_values[::-1]  # newest first
    aux = []
    # Keep the first (latest) timestamp seen for each date prefix
    for value in data_values:
        if value.split(" ")[0] not in [aux[i].split(" ")[0] for i in range(len(aux))]:
            aux.append(value)
    data_values = aux

    data = data[data["ASSESSMENT_TIME"].isin(data_values)]
    # Reduce the timestamp to its date part for the dropdown
    data["ASSESSMENT_TIME"] = data["ASSESSMENT_TIME"].apply(lambda x: x.split(" ")[0])

    # Select Compliance - Dropdown

    compliance_dropdown = create_compliance_dropdown(results)

    # Select Account - Dropdown

    select_account_dropdown_list = ["All"]
    # Append to the list the unique values of the columns ACCOUNTID, PROJECTID and SUBSCRIPTIONID if they exist
    if "ACCOUNTID" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["ACCOUNTID"].unique()
        )
    if "PROJECTID" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["PROJECTID"].unique()
        )
    if "SUBSCRIPTIONID" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["SUBSCRIPTIONID"].unique()
        )
    if "SUBSCRIPTION" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["SUBSCRIPTION"].unique()
        )

    # Drop non-string entries and "nan" strings before building the dropdown
    list_items = []
    for item in select_account_dropdown_list:
        if item.__class__.__name__ == "str" and "nan" not in item:
            list_items.append(item)

    account_dropdown = create_account_dropdown_compliance(list_items)

    # Select Region - Dropdown

    select_region_dropdown_list = ["All"]
    # Append to the list the unique values of the column REGION or LOCATION if it exists
    if "REGION" in data.columns:
        # Handle the case where the column REGION is empty
        data["REGION"] = data["REGION"].fillna("-")
        select_region_dropdown_list = select_region_dropdown_list + list(
            data["REGION"].unique()
        )
    if "LOCATION" in data.columns:
        # Handle the case where the column LOCATION is empty
        data["LOCATION"] = data["LOCATION"].fillna("-")
        select_region_dropdown_list = select_region_dropdown_list + list(
            data["LOCATION"].unique()
        )

    # Clear the list from None and NaN values
    list_items = []
    for item in select_region_dropdown_list:
        if item.__class__.__name__ == "str":
            list_items.append(item)

    region_dropdown = create_region_dropdown_compliance(list_items)

    # Select Date - Dropdown

    date_dropdown = create_date_dropdown_compliance(
        list(data["ASSESSMENT_TIME"].unique())
    )

    dash.register_page(__name__)

    layout = create_layout_compliance(
        account_dropdown, date_dropdown, region_dropdown, compliance_dropdown
    )
|
||||
|
||||
|
||||
@callback(
    [
        Output("output", "children"),
        Output("overall_status_result_graph", "children"),
        Output("security_level_graph", "children"),
        Output("cloud-account-filter-compliance", "value"),
        Output("cloud-account-filter-compliance", "options"),
        Output("region-filter-compliance", "value"),
        Output("region-filter-compliance", "options"),
        Output("date-filter-analytics", "value"),
        Output("date-filter-analytics", "options"),
    ],
    Input("report-compliance-filter", "value"),
    Input("cloud-account-filter-compliance", "value"),
    Input("region-filter-compliance", "value"),
    Input("date-filter-analytics", "value"),
)
def display_data(
    analytics_input, account_filter, region_filter_analytics, date_filter_analytics
):
    """
    Main compliance-page callback.

    Re-reads the CSVs matching the selected compliance framework, applies
    the account / region / date filters and returns the detail table, the
    two summary graphs and the refreshed value/options of every filter
    dropdown.

    Args:
        analytics_input (str): Selected compliance display name,
            e.g. "CIS_1.4 - AWS - Level_1".
        account_filter (list): Currently selected account values.
        region_filter_analytics (list): Currently selected region values.
        date_filter_analytics: Currently selected assessment date.
    """
    current_compliance = analytics_input
    # Map the display name back to its file-name form: "cis_1.4_aws"
    analytics_input = analytics_input.replace(" - ", "_")
    analytics_input = analytics_input.lower()

    # Check if the compliance selected is the level 1 or level 2 of the CIS
    is_level_1 = "level_1" in analytics_input
    analytics_input = analytics_input.replace("_level_1", "").replace("_level_2", "")

    # Filter the data based on the compliance selected
    files = [file for file in csv_files if analytics_input in file]

    def load_csv_files(files):
        """Load CSV files into a single pandas DataFrame."""
        dfs = []
        for file in files:
            df = pd.read_csv(file, sep=";", on_bad_lines="skip")
            dfs.append(df.astype(str))
        return pd.concat(dfs, ignore_index=True)

    data = load_csv_files(files)

    # Rename the column LOCATION to REGION for GCP or Azure
    if "gcp" in analytics_input or "azure" in analytics_input:
        data = data.rename(columns={"LOCATION": "REGION"})

    # Add the column ACCOUNTID to the data if the provider is kubernetes
    if "kubernetes" in analytics_input:
        data.rename(columns={"CONTEXT": "ACCOUNTID"}, inplace=True)
        data.rename(columns={"NAMESPACE": "REGION"}, inplace=True)
    if "REQUIREMENTS_ATTRIBUTES_PROFILE" in data.columns:
        # Keep only the profile name ("Level 1 - ..." -> "Level 1")
        data["REQUIREMENTS_ATTRIBUTES_PROFILE"] = data[
            "REQUIREMENTS_ATTRIBUTES_PROFILE"
        ].apply(lambda x: x.split(" - ")[0])
        # Filter the chosen level of the CIS
        if is_level_1:
            data = data[data["REQUIREMENTS_ATTRIBUTES_PROFILE"] == "Level 1"]

    # Rename the column PROJECTID to ACCOUNTID for GCP
    if data.columns.str.contains("PROJECTID").any():
        data.rename(columns={"PROJECTID": "ACCOUNTID"}, inplace=True)

    # Rename the column SUBSCRIPTIONID to ACCOUNTID for Azure
    if data.columns.str.contains("SUBSCRIPTIONID").any():
        data.rename(columns={"SUBSCRIPTIONID": "ACCOUNTID"}, inplace=True)
        data["REGION"] = "-"
    # Handle v3 azure cis compliance
    if data.columns.str.contains("SUBSCRIPTION").any():
        data.rename(columns={"SUBSCRIPTION": "ACCOUNTID"}, inplace=True)
        data["REGION"] = "-"

    # Filter ACCOUNT
    if account_filter == ["All"]:
        updated_cloud_account_values = data["ACCOUNTID"].unique()
    elif "All" in account_filter and len(account_filter) > 1:
        # Remove 'All' from the list
        account_filter.remove("All")
        updated_cloud_account_values = account_filter
    elif len(account_filter) == 0:
        updated_cloud_account_values = data["ACCOUNTID"].unique()
        account_filter = ["All"]
    else:
        updated_cloud_account_values = account_filter

    data = data[data["ACCOUNTID"].isin(updated_cloud_account_values)]

    account_filter_options = list(data["ACCOUNTID"].unique())
    account_filter_options = account_filter_options + ["All"]
    # NOTE(review): mutating the list while iterating over it can skip
    # adjacent entries, and `"nan" in item` raises before the type check
    # when item is not a str — TODO confirm and rework.
    for item in account_filter_options:
        if "nan" in item or item.__class__.__name__ != "str" or item is None:
            account_filter_options.remove(item)

    # Filter REGION
    if region_filter_analytics == ["All"]:
        updated_region_account_values = data["REGION"].unique()
    elif "All" in region_filter_analytics and len(region_filter_analytics) > 1:
        # Remove 'All' from the list
        region_filter_analytics.remove("All")
        updated_region_account_values = region_filter_analytics
    elif len(region_filter_analytics) == 0:
        updated_region_account_values = data["REGION"].unique()
        region_filter_analytics = ["All"]
    else:
        updated_region_account_values = region_filter_analytics

    data = data[data["REGION"].isin(updated_region_account_values)]

    region_filter_options = list(data["REGION"].unique())
    region_filter_options = region_filter_options + ["All"]
    # NOTE(review): same remove-while-iterating pattern as above
    for item in region_filter_options:
        if item == "nan" or item.__class__.__name__ != "str":
            region_filter_options.remove(item)

    data["ASSESSMENTDATE"] = pd.to_datetime(data["ASSESSMENTDATE"], errors="coerce")
    data["ASSESSMENTDATE"] = data["ASSESSMENTDATE"].dt.strftime("%Y-%m-%d %H:%M:%S")

    # Choosing the date that is the most recent
    data_values = data["ASSESSMENTDATE"].unique()
    data_values.sort()
    data_values = data_values[::-1]
    aux = []

    # Keep only the latest timestamp per calendar day
    data_values = [str(i) for i in data_values]
    for value in data_values:
        if value.split(" ")[0] not in [aux[i].split(" ")[0] for i in range(len(aux))]:
            aux.append(value)
    data_values = [str(i) for i in aux]

    data = data[data["ASSESSMENTDATE"].isin(data_values)]
    data["ASSESSMENTDATE"] = data["ASSESSMENTDATE"].apply(lambda x: x.split(" ")[0])

    options_date = data["ASSESSMENTDATE"].unique()
    options_date.sort()
    options_date = options_date[::-1]  # newest first

    # Filter DATE — fall back to the most recent date when the selected
    # one is not available for this compliance
    if date_filter_analytics in options_date:
        data = data[data["ASSESSMENTDATE"] == date_filter_analytics]
    else:
        date_filter_analytics = options_date[0]
        data = data[data["ASSESSMENTDATE"] == date_filter_analytics]

    if data.empty:
        # NOTE(review): `pie_1` built here is never used, and this branch
        # returns a single-element list although the callback declares nine
        # Outputs — looks like a Dash output-count mismatch; TODO confirm.
        fig = px.pie()
        pie_1 = dcc.Graph(
            figure=fig,
            config={"displayModeBar": False},
            style={"height": "250px", "width": "250px", "right": "0px"},
        )

        return [
            html.Div(
                [
                    html.H5(
                        "No data found for this compliance",
                        className="card-title",
                        style={"text-align": "left"},
                    )
                ],
                style={
                    "width": "99%",
                    "margin-right": "0.8%",
                    "margin-bottom": "10px",
                },
            )
        ]
    else:
        # Check cases where the compliance start with AWS_
        if "aws_" in analytics_input:
            analytics_input = analytics_input + "_aws"
        try:
            # Each compliance has a dedicated table module under
            # dashboard.compliance; dots in versions become underscores.
            current = analytics_input.replace(".", "_")
            compliance_module = importlib.import_module(
                f"dashboard.compliance.{current}"
            )
            data.drop_duplicates(keep="first", inplace=True)
            table = compliance_module.get_table(data)
        except ModuleNotFoundError:
            table = html.Div(
                [
                    html.H5(
                        "No data found for this compliance",
                        className="card-title",
                        style={"text-align": "left", "color": "black"},
                    )
                ],
                style={
                    "width": "99%",
                    "margin-right": "0.8%",
                    "margin-bottom": "10px",
                },
            )

        # Status counts for the overall pie chart
        df = data.copy()
        df = df.groupby(["STATUS"]).size().reset_index(name="counts")
        df = df.sort_values(by=["counts"], ascending=False)

        # Pie 1
        pie_1 = get_pie(df)

        # Get the pie2 depending on the compliance
        df = data.copy()

        current_filter = ""

        if "pci" in analytics_input:
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ID")
            current_filter = "req_id"
        elif (
            "REQUIREMENTS_ATTRIBUTES_SECTION" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_SECTION"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_SECTION")
            current_filter = "sections"
        elif (
            "REQUIREMENTS_ATTRIBUTES_CATEGORIA" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_CATEGORIA"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_CATEGORIA")
            current_filter = "categorias"
        elif (
            "REQUIREMENTS_ATTRIBUTES_CATEGORY" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_CATEGORY"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_CATEGORY")
            current_filter = "categories"
        elif (
            "REQUIREMENTS_ATTRIBUTES_SERVICE" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_SERVICE"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_SERVICE")
            current_filter = "services"
        elif (
            "REQUIREMENTS_ID" in df.columns
            and not df["REQUIREMENTS_ID"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ID")
            current_filter = "techniques"
        else:
            # No usable grouping column: render an empty placeholder chart
            fig = px.pie()
            fig.update_layout(
                margin=dict(l=0, r=0, t=0, b=0),
                autosize=True,
                showlegend=False,
                paper_bgcolor="#303030",
            )
            pie_2 = dcc.Graph(
                figure=fig,
                config={"displayModeBar": False},
                style={"height": "250px", "width": "250px", "right": "0px"},
            )
            current_filter = "none"

        # Analytics table

        # NOTE(review): `analytics_input` is not used after this point —
        # this normalization looks dead; TODO confirm.
        if not analytics_input:
            analytics_input = ""

        table_output = get_table(current_compliance, table)

        overall_status_result_graph = get_graph(pie_1, "Overall Status Result")

        security_level_graph = get_graph(
            pie_2, f"Top 5 failed {current_filter} by findings"
        )

        return (
            table_output,
            overall_status_result_graph,
            security_level_graph,
            account_filter,
            account_filter_options,
            region_filter_analytics,
            region_filter_options,
            date_filter_analytics,
            options_date,
        )
|
||||
|
||||
|
||||
def get_graph(pie, title):
    """
    Wrap a chart component with a centered uppercase title.

    Args:
        pie: Dash graph component to display.
        title: Heading text shown above the chart.

    Returns:
        list: [title span, centered container holding the chart].
    """
    heading = html.Span(
        title,
        className="text-center text-prowler-stone-900 uppercase text-xs font-bold",
    )
    container = html.Div(
        [pie],
        className="",
        style={
            "display": "flex",
            "justify-content": "center",
            "align-items": "center",
            "margin-top": "7%",
        },
    )
    return [heading, container]
|
||||
|
||||
|
||||
def get_bar_graph(df, column_name):
    """
    Horizontal bar chart of the five values of `column_name` with the most
    FAIL findings.

    Args:
        df: Findings DataFrame with a "STATUS" column.
        column_name: Column to group the failed findings by.

    Returns:
        dcc.Graph: Bar chart (counts on x, truncated labels on y).
    """
    failed = df[df["STATUS"] == "FAIL"]
    failed = failed.groupby([column_name, "STATUS"]).size().reset_index(name="counts")
    failed = failed.sort_values(by=["counts"], ascending=True)
    # Ascending order, so the five largest groups are at the tail
    failed = failed.tail(5)

    labels = failed[column_name].unique()

    # Shorten long labels so they fit beside the bars
    for idx in range(len(labels)):
        if len(labels[idx]) > 15:
            labels[idx] = labels[idx][:15] + "..."

    figure = px.bar(
        failed,
        x="counts",
        y=labels,
        color="STATUS",
        color_discrete_map={"FAIL": fail_color},
        orientation="h",
    )

    figure.update_layout(
        margin=dict(l=0, r=0, t=0, b=0),
        autosize=True,
        showlegend=False,
        xaxis_title=None,
        yaxis_title=None,
        font=dict(size=14, color="#292524"),
        hoverlabel=dict(font_size=12),
        paper_bgcolor="#FFF",
    )

    return dcc.Graph(
        figure=figure,
        config={"displayModeBar": False},
        style={"height": "20rem", "width": "40rem"},
    )
|
||||
|
||||
|
||||
def get_pie(df):
    """
    Render the status distribution as a donut chart.

    Args:
        df: DataFrame with "STATUS" and "counts" columns.

    Returns:
        dcc.Graph: Donut chart with a fixed color per status.
    """
    # Fixed palette so every status always renders in the same color
    status_colors = {
        "FAIL": fail_color,
        "PASS": pass_color,
        "INFO": info_color,
        "WARN": "#260000",
        "MANUAL": manual_color,
    }

    # Use the color_discrete_map parameter to map categories to custom colors
    figure = px.pie(
        df,
        names="STATUS",
        values="counts",
        hole=0.7,
        color="STATUS",
        color_discrete_map=status_colors,
    )
    figure.update_traces(
        hovertemplate=None,
        textposition="outside",
        textinfo="percent+label",
        rotation=50,
    )
    figure.update_layout(
        margin=dict(l=0, r=0, t=0, b=0),
        autosize=True,
        showlegend=False,
        font=dict(size=14, color="#292524"),
        hoverlabel=dict(font_size=12),
        paper_bgcolor="#FFF",
    )

    return dcc.Graph(
        figure=figure,
        config={"displayModeBar": False},
        style={"height": "20rem", "width": "20rem"},
    )
|
||||
|
||||
|
||||
def get_table(current_compliance, table):
    """
    Wrap the compliance detail table in a titled card.

    Args:
        current_compliance: Display name of the selected framework.
        table: Dash component holding the per-requirement results.

    Returns:
        list: Single-element list with the card html.Div.
    """
    card = html.Div(
        [
            html.H5(
                f"{current_compliance}",
                className="text-prowler-stone-900 text-md font-bold uppercase mb-4",
            ),
            table,
        ],
        className="relative flex flex-col bg-white shadow-provider rounded-xl px-4 py-3 flex-wrap w-full",
    )
    return [card]
|
||||
1222
dashboard/pages/overview.py
Normal file
112
dashboard/src/input.css
Normal file
@@ -0,0 +1,112 @@
|
||||
/* Custom stylesheet for the Prowler dashboard. */
/* Use this file to add custom styles using Tailwind's utility classes. */
|
||||
|
||||
@tailwind base;
@tailwind components;
@tailwind utilities;

#_dash-app-content {
  @apply bg-prowler-stone-500;
}

@layer components {
  .custom-grid {
    grid-template-columns: minmax(0, 16fr) repeat(11, minmax(0, 11fr));
  }

  .custom-grid-large {
    grid-template-columns: minmax(0, 10fr) repeat(11, minmax(0, 11fr));
  }
  /* Styles for the accordion in the compliance page */
  #_dash-app-content .accordion .accordion-header .accordion-button {
    @apply text-prowler-stone-900 inline-block px-4 text-xs font-bold uppercase transition-all rounded-lg bg-prowler-stone-300 hover:bg-prowler-stone-900/10;
  }

  #_dash-app-content .accordion .accordion-item {
    @apply text-prowler-stone-900 bg-prowler-white rounded-lg;
  }

  #_dash-app-content .accordion .accordion-button:not(.collapsed) {
    @apply text-prowler-stone-900 bg-prowler-stone-500;
  }

  #_dash-app-content .accordion .dash-table-container {
    @apply grid;
  }

  #_dash-app-content .accordion table {
    @apply rounded-lg;
  }
  /* Styles for thead */
  #_dash-app-content .accordion th {
    @apply text-prowler-white text-left bg-prowler-stone-900 text-xs py-1 font-bold;
  }

  /* Styles for td */
  #_dash-app-content .accordion td {
    @apply text-prowler-stone-900 text-left bg-prowler-white text-xs py-1 font-light;
  }

  /* Styles for table cells */
  #_dash-app-content .accordion table tbody thead,
  #_dash-app-content .accordion table tbody tr {
    @apply w-full;
  }

  /* Check ID */
  #_dash-app-content .accordion table th:nth-child(1) {
    @apply w-[60%];
  }
  /* Status */
  #_dash-app-content .accordion table th:nth-child(2) {
    @apply w-[10%] text-center;
  }
  #_dash-app-content .accordion table td:nth-child(2) {
    @apply text-center;
  }
  /* Region */
  #_dash-app-content .accordion table th:nth-child(3) {
    @apply w-[10%];
  }
  /* Account ID */
  #_dash-app-content .accordion table th:nth-child(4) {
    @apply w-[10%];
  }
  /* Resource ID */
  #_dash-app-content .accordion table th:nth-child(5) {
    @apply w-[10%];
  }

  #_dash-app-content .compliance-data-layout,
  #_dash-app-content .accordion-body,
  #_dash-app-content .compliance-data-layout .accordion.accordion-flush {
    @apply grid gap-y-4;
  }

  #_dash-app-content .accordion-inner--child,
  #_dash-app-content .accordion-inner {
    @apply relative;
  }

  #_dash-app-content .info-bar {
    @apply absolute left-1/2 transform -translate-x-1/2 top-2 h-8 z-50;
  }

  #_dash-app-content .info-bar-child {
    @apply absolute right-6 top-2 w-auto h-8 z-50;
  }
}

@layer utilities {
  /* Hide scrollbar for Chrome, Safari and Opera */
  .no-scrollbar::-webkit-scrollbar {
    display: none;
  }
  /* Hide scrollbar for IE, Edge and Firefox */
  .no-scrollbar {
    -ms-overflow-style: none; /* IE and Edge */
    scrollbar-width: none; /* Firefox */
  }
}
|
||||
90
dashboard/tailwind.config.js
Normal file
@@ -0,0 +1,90 @@
|
||||
/** @type {import('tailwindcss').Config} */
|
||||
module.exports = {
|
||||
content: [
|
||||
"./assets/**/*.{py,html,js}",
|
||||
"./components/**/*.{py,html,js}",
|
||||
"./pages/**/*.{py,html,js}",
|
||||
"./utils/**/*.{py,html,js}",
|
||||
"./app.py",
|
||||
],
|
||||
theme: {
|
||||
extend: {
|
||||
colors: {
|
||||
prowler: {
|
||||
stone: {
|
||||
950: "#1C1917",
|
||||
900: "#292524",
|
||||
500: "#E7E5E4",
|
||||
300: "#F5F5F4",
|
||||
},
|
||||
gray: {
|
||||
900: "#9bAACF",
|
||||
700: "#BEC8E4",
|
||||
500: "#C8D0E7",
|
||||
300: "#E4EBF5",
|
||||
},
|
||||
status: {
|
||||
passed: "#1FB53F",
|
||||
failed: "#A3231F",
|
||||
},
|
||||
lime: "#84CC16",
|
||||
white: "#FFFFFF",
|
||||
error: "#B91C1C",
|
||||
},
|
||||
},
|
||||
fontSize: {
|
||||
'3xs': '0.625rem', // 10px
|
||||
'2xs': '0.6875rem', // 11px
|
||||
xs: '0.75rem', // 12px
|
||||
sm: '0.875rem', // 14px
|
||||
base: '1rem', // 16px
|
||||
lg: '1.125rem', // 18px
|
||||
xl: '1.25rem', // 20px
|
||||
'2xl': '1.375rem', // 22px
|
||||
'2xxl': '1.5rem', // 24px
|
||||
'3xl': '1.75rem', // 28px
|
||||
'4xl': '2rem', // 32px
|
||||
'5xl': '2.25rem', // 36px
|
||||
'6xl': '2.75rem', // 44px
|
||||
'7xl': '3.5rem' // 56px
|
||||
},
|
||||
fontWeight: {
|
||||
light: 300,
|
||||
regular: 400,
|
||||
medium: 500,
|
||||
bold: 700,
|
||||
heavy: 800
|
||||
},
|
||||
lineHeight: {
|
||||
14: "0.875rem", // 14px
|
||||
22: "1.375rem", // 22px
|
||||
26: "1.625rem", // 26px
|
||||
28: "1.75rem", // 28px
|
||||
30: "1.875rem", // 30px
|
||||
32: "2rem", // 32px
|
||||
34: "2.125rem", // 34px
|
||||
36: "2.25rem", // 36px
|
||||
40: "2.5rem", // 40px
|
||||
44: "2.75rem", // 44px
|
||||
48: "3rem", // 48px
|
||||
56: "3.5rem", // 56px
|
||||
68: "4.25rem", // 68px
|
||||
},
|
||||
boxShadow: {
|
||||
"provider":
|
||||
".3rem .3rem .6rem #c8d0e7, -.2rem -.2rem .5rem #FFF",
|
||||
"box-up":
|
||||
"0.3rem 0.3rem 0.6rem #c8d0e7, -0.2rem -0.2rem 0.5rem #FFF",
|
||||
"box-down":
|
||||
"inset .2rem .2rem .5rem #c8d0e7, inset -.2rem -.2rem .5rem #FFF",
|
||||
},
|
||||
backgroundImage: {
|
||||
"gradient-passed":
|
||||
"linear-gradient(127.43deg, #F1F5F8 -177.68%, #4ADE80 87.35%)",
|
||||
"gradient-failed":
|
||||
"linear-gradient(127.43deg, #F1F5F8 -177.68%, #EF4444 87.35%)",
|
||||
},
|
||||
},
|
||||
},
|
||||
plugins: [],
|
||||
};
|
||||
@@ -1,9 +0,0 @@
|
||||
# Audit Info
|
||||
|
||||
In each Prowler provider we have a Python object called `audit_info` which is in charge of keeping the credentials, the configuration and the state of each audit, and it's passed to each service during the `__init__`.
|
||||
|
||||
- AWS: https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/audit_info/models.py#L34-L54
|
||||
- GCP: https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/audit_info/models.py#L7-L30
|
||||
- Azure: https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/audit_info/models.py#L17-L31
|
||||
|
||||
This `audit_info` object is shared during the Prowler execution and for that reason is important to mock it in each test to isolate them. See the [testing guide](./unit-testing.md) for more information.
|
||||
@@ -5,9 +5,15 @@ Here you can find how to create new checks for Prowler.
|
||||
**To create a check is required to have a Prowler provider service already created, so if the service is not present or the attribute you want to audit is not retrieved by the service, please refer to the [Service](./services.md) documentation.**
|
||||
|
||||
## Introduction
|
||||
|
||||
The checks are the fundamental piece of Prowler. A check is a simply piece of code that ensures if something is configured against cybersecurity best practices. Then the check generates a finding with the result and includes the check's metadata to give the user more contextual information about the result, the risk and how to remediate it.
|
||||
|
||||
To create a new check for a supported Prowler provider, you will need to create a folder with the check name inside the specific service for the selected provider.
|
||||
|
||||
We are going to use the `ec2_ami_public` check form the `AWS` provider as an example. So the folder name will `prowler/providers/aws/services/ec2/ec2_ami_public` (following the format `prowler/providers/<provider>/services/<service>/<check_name>`), with the name of check following the pattern: `service_subservice/resource_action`.
|
||||
We are going to use the `ec2_ami_public` check from the `AWS` provider as an example. So the folder name will be `prowler/providers/aws/services/ec2/ec2_ami_public` (following the format `prowler/providers/<provider>/services/<service>/<check_name>`), with the name of check following the pattern: `service_subservice_resource_action`.
|
||||
|
||||
???+ note
|
||||
A subservice is an specific component of a service that is gonna be audited. Sometimes it could be the shortened name of the class attribute that is gonna be accessed in the check.
|
||||
|
||||
Inside that folder, we need to create three files:
|
||||
|
||||
@@ -102,7 +108,7 @@ All the checks MUST fill the `report.status` and `report.status_extended` with t
|
||||
- Status -- `report.status`
|
||||
- `PASS` --> If the check is passing against the configured value.
|
||||
- `FAIL` --> If the check is failing against the configured value.
|
||||
- `INFO` --> This value cannot be used unless a manual operation is required in order to determine if the `report.status` is whether `PASS` or `FAIL`.
|
||||
- `MANUAL` --> This value cannot be used unless a manual operation is required in order to determine if the `report.status` is whether `PASS` or `FAIL`.
|
||||
- Status Extended -- `report.status_extended`
|
||||
- MUST end in a dot `.`
|
||||
- MUST include the service audited with the resource and a brief explanation of the result generated, e.g.: `EC2 AMI ami-0123456789 is not public.`
|
||||
@@ -111,7 +117,7 @@ All the checks MUST fill the `report.status` and `report.status_extended` with t
|
||||
|
||||
All the checks MUST fill the `report.region` with the following criteria:
|
||||
|
||||
- If the audited resource is regional use the `region` attribute within the resource object.
|
||||
- If the audited resource is regional use the `region` (the name changes depending on the provider: `location` in Azure and GCP and `namespace` in K8s) attribute within the resource object.
|
||||
- If the audited resource is global use the `service_client.region` within the service client object.
|
||||
|
||||
### Resource ID, Name and ARN
|
||||
@@ -140,7 +146,7 @@ All the checks MUST fill the `report.resource_id` and `report.resource_arn` with
|
||||
### Python Model
|
||||
The following is the Python model for the check's class.
|
||||
|
||||
As per August 5th 2023 the `Check_Metadata_Model` can be found [here](https://github.com/prowler-cloud/prowler/blob/master/prowler/lib/check/models.py#L59-L80).
|
||||
As per April 11th 2024 the `Check_Metadata_Model` can be found [here](https://github.com/prowler-cloud/prowler/blob/master/prowler/lib/check/models.py#L36-L82).
|
||||
|
||||
```python
|
||||
class Check(ABC, Check_Metadata_Model):
|
||||
@@ -243,11 +249,11 @@ Each Prowler check has metadata associated which is stored at the same level of
|
||||
# Code holds different methods to remediate the FAIL finding
|
||||
"Code": {
|
||||
# CLI holds the command in the provider native CLI to remediate it
|
||||
"CLI": "https://docs.bridgecrew.io/docs/public_8#cli-command",
|
||||
"CLI": "https://docs.prowler.com/checks/public_8#cli-command",
|
||||
# NativeIaC holds the native IaC code to remediate it, use "https://docs.bridgecrew.io/docs"
|
||||
"NativeIaC": "",
|
||||
# Other holds the other commands, scripts or code to remediate it, use "https://www.trendmicro.com/cloudoneconformity"
|
||||
"Other": "https://docs.bridgecrew.io/docs/public_8#aws-console",
|
||||
"Other": "https://docs.prowler.com/checks/public_8#aws-console",
|
||||
# Terraform holds the Terraform code to remediate it, use "https://docs.bridgecrew.io/docs"
|
||||
"Terraform": ""
|
||||
},
|
||||
|
||||
@@ -4,5 +4,5 @@ We use `mkdocs` to build this Prowler documentation site so you can easily contr
|
||||
|
||||
1. Install `mkdocs` with your favorite package manager.
|
||||
2. Inside the `prowler` repository folder run `mkdocs serve` and point your browser to `http://localhost:8000` and you will see live changes to your local copy of this documentation site.
|
||||
3. Make all needed changes to docs or add new documents. To do so just edit existing md files inside `prowler/docs` and if you are adding a new section or file please make sure you add it to `mkdocs.yml` file in the root folder of the Prowler repo.
|
||||
3. Make all needed changes to docs or add new documents. To do so just edit existing md files inside `prowler/docs` and if you are adding a new section or file please make sure you add it to `mkdocs.yaml` file in the root folder of the Prowler repo.
|
||||
4. Once you are done with changes, please send a pull request to us for review and merge. Thank you in advance!
|
||||
|
||||
281
docs/developer-guide/provider.md
Normal file
@@ -0,0 +1,281 @@
|
||||
|
||||
# Create a new Provider for Prowler
|
||||
|
||||
Here you can find how to create a new Provider in Prowler to give support for making all security checks needed and make your cloud safer!
|
||||
|
||||
## Introduction
|
||||
|
||||
Providers are the foundation on which Prowler is built, a simple definition for a cloud provider could be "third-party company that offers a platform where any IT resource you need is available at any time upon request". The most well-known cloud providers are Amazon Web Services, Azure from Microsoft and Google Cloud which are already supported by Prowler.
|
||||
|
||||
To create a new provider that is not supported now by Prowler and add your security checks you must create a new folder to store all the related files within it (services, checks, etc.). It must be store in route `prowler/providers/<new_provider_name>/`.
|
||||
|
||||
Inside that folder, you MUST create the following files and folders:
|
||||
|
||||
- A `lib` folder: to store all extra functions.
|
||||
- A `services` folder: to store all [services](./services.md) to audit.
|
||||
- An empty `__init__.py`: to make Python treat this service folder as a package.
|
||||
- A `<new_provider_name>_provider.py`, containing all the provider's logic necessary to get authenticated in the provider, configurations and extra data useful for final report.
|
||||
- A `models.py`, containing all the models necessary for the new provider.
|
||||
|
||||
## Provider
|
||||
|
||||
The structure for Prowler's providers is set up in such a way that they can be utilized through a generic service specific to each provider. This is achieved by passing the required parameters to the constructor, which in turn initializes all the necessary session values.
|
||||
|
||||
### Base Class
|
||||
|
||||
All the providers in Prowler inherits from the same [base class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/common/provider.py). It is an [abstract base class](https://docs.python.org/3/library/abc.html) that defines the interface for all provider classes. The code of the class is the next:
|
||||
|
||||
```python title="Provider Base Class"
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any
|
||||
|
||||
class Provider(ABC):
|
||||
"""
|
||||
The Provider class is an abstract base class that defines the interface for all provider classes in the auditing system.
|
||||
|
||||
Attributes:
|
||||
type (property): The type of the provider.
|
||||
identity (property): The identity of the provider for auditing.
|
||||
session (property): The session of the provider for auditing.
|
||||
audit_config (property): The audit configuration of the provider.
|
||||
output_options (property): The output configuration of the provider for auditing.
|
||||
|
||||
Methods:
|
||||
print_credentials(): Displays the provider's credentials used for auditing in the command-line interface.
|
||||
setup_session(): Sets up the session for the provider.
|
||||
get_output_mapping(): Returns the output mapping between the provider and the generic model.
|
||||
validate_arguments(): Validates the arguments for the provider.
|
||||
get_checks_to_execute_by_audit_resources(): Returns a set of checks based on the input resources to scan.
|
||||
|
||||
Note:
|
||||
This is an abstract base class and should not be instantiated directly. Each provider should implement its own
|
||||
version of the Provider class by inheriting from this base class and implementing the required methods and properties.
|
||||
"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def type(self) -> str:
|
||||
"""
|
||||
type method stores the provider's type.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def identity(self) -> str:
|
||||
"""
|
||||
identity method stores the provider's identity to audit.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
def setup_session(self) -> Any:
|
||||
"""
|
||||
setup_session sets up the session for the provider.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def session(self) -> str:
|
||||
"""
|
||||
session method stores the provider's session to audit.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def audit_config(self) -> str:
|
||||
"""
|
||||
audit_config method stores the provider's audit configuration.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
def print_credentials(self) -> None:
|
||||
"""
|
||||
print_credentials is used to display in the CLI the provider's credentials used to audit.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def output_options(self) -> str:
|
||||
"""
|
||||
output_options method returns the provider's audit output configuration.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@output_options.setter
|
||||
@abstractmethod
|
||||
def output_options(self, value: str) -> Any:
|
||||
"""
|
||||
output_options.setter sets the provider's audit output configuration.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
def get_output_mapping(self) -> dict:
|
||||
"""
|
||||
get_output_mapping returns the output mapping between the provider and the generic model.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def validate_arguments(self) -> None:
|
||||
"""
|
||||
validate_arguments validates the arguments for the provider.
|
||||
|
||||
This method can be overridden in each provider if needed.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_checks_to_execute_by_audit_resources(self) -> set:
|
||||
"""
|
||||
get_checks_to_execute_by_audit_resources returns a set of checks based on the input resources to scan.
|
||||
|
||||
This is a fallback that returns None if the service has not implemented this function.
|
||||
"""
|
||||
return set()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def mutelist(self):
|
||||
"""
|
||||
mutelist method returns the provider's mutelist.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@mutelist.setter
|
||||
@abstractmethod
|
||||
def mutelist(self, path: str):
|
||||
"""
|
||||
mutelist.setter sets the provider's mutelist.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
```
|
||||
|
||||
### Provider Class
|
||||
|
||||
Due to the complexity and differences of each provider use the rest of the providers as a template for the implementation.
|
||||
|
||||
- [AWS](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_provider.py)
|
||||
- [GCP](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/gcp_provider.py)
|
||||
- [Azure](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/azure_provider.py)
|
||||
- [Kubernetes](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/kubernetes/kubernetes_provider.py)
|
||||
|
||||
To facilitate understanding here is a pseudocode of how the most basic provider could be with examples.
|
||||
|
||||
```python title="Provider Example Class"
|
||||
|
||||
# Library imports to authenticate in the Provider
|
||||
|
||||
from prowler.config.config import load_and_validate_config_file
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.mutelist.mutelist import parse_mutelist_file
|
||||
from prowler.lib.utils.utils import print_boxes
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.<new_provider_name>.models import (
|
||||
# All providers models needed
|
||||
ProvierSessionModel,
|
||||
ProvierIdentityModel,
|
||||
ProvierOutputOptionsModel
|
||||
)
|
||||
|
||||
class NewProvider(Provider):
|
||||
# All properties from the class, some of this are properties in the base class
|
||||
_type: str = "<provider_name>"
|
||||
_session: <ProvierSessionModel>
|
||||
_identity: <ProvierIdentityModel>
|
||||
_audit_config: dict
|
||||
_output_options: ProvierOutputOptionsModel
|
||||
_mutelist: dict
|
||||
audit_metadata: Audit_Metadata
|
||||
|
||||
def __init__(self, arguments):
|
||||
"""
|
||||
Initializes the NewProvider instance.
|
||||
Args:
|
||||
arguments (dict): A dictionary containing configuration arguments.
|
||||
"""
|
||||
logger.info("Setting <NewProviderName> provider ...")
|
||||
# First get from arguments the necesary from the cloud acount (subscriptions or projects or whatever the provider use for storing services)
|
||||
|
||||
# Set the session with the method enforced by parent class
|
||||
self._session = self.setup_session(credentials_file)
|
||||
|
||||
# Set the Identity class normaly the provider class give by Python provider library
|
||||
self._identity = <ProvierIdentityModel>()
|
||||
|
||||
# Set the provider configuration
|
||||
self._audit_config = load_and_validate_config_file(
|
||||
self._type, arguments.config_file
|
||||
)
|
||||
|
||||
# All enforced properties by the parent class
|
||||
@property
|
||||
def identity(self):
|
||||
return self._identity
|
||||
|
||||
@property
|
||||
def session(self):
|
||||
return self._session
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
return self._type
|
||||
|
||||
@property
|
||||
def audit_config(self):
|
||||
return self._audit_config
|
||||
|
||||
@property
|
||||
def output_options(self):
|
||||
return self._output_options
|
||||
|
||||
def setup_session(self, <all_needed_for_auth>):
|
||||
"""
|
||||
Sets up the Provider session.
|
||||
|
||||
Args:
|
||||
<all_needed_for_auth> Can include all necessary arguments to setup the session
|
||||
|
||||
Returns:
|
||||
Credentials necesary to communicate with the provider.
|
||||
"""
|
||||
pass
|
||||
|
||||
"""
|
||||
This method is enforced by parent class and is used to print all relevant
|
||||
information during the prowler execution as a header of execution.
|
||||
Normally the Account ID, User name or stuff like this is displayed in colors using the colorama module (Fore).
|
||||
"""
|
||||
def print_credentials(self):
|
||||
pass
|
||||
|
||||
|
||||
|
||||
```
|
||||
@@ -4,33 +4,36 @@ Here you can find how to create a new service, or to complement an existing one,
|
||||
|
||||
## Introduction
|
||||
|
||||
To create a new service, you will need to create a folder inside the specific provider, i.e. `prowler/providers/<provider>/services/<service>/`.
|
||||
In Prowler, a service is basically a solution that is offered by a cloud provider i.e. [ec2](https://aws.amazon.com/ec2/). Essentially it is a class that stores all the necessary stuff that we will need later in the checks to audit some aspects of our Cloud account.
|
||||
|
||||
To create a new service, you will need to create a folder inside the specific provider, i.e. `prowler/providers/<provider>/services/<new_service_name>/`.
|
||||
|
||||
Inside that folder, you MUST create the following files:
|
||||
|
||||
- An empty `__init__.py`: to make Python treat this service folder as a package.
|
||||
- A `<service>_service.py`, containing all the service's logic and API calls.
|
||||
- A `<service>_client_.py`, containing the initialization of the service's class we have just created so the checks's checks can use it.
|
||||
- A `<new_service_name>_service.py`, containing all the service's logic and API calls.
|
||||
- A `<new_service_name>_client_.py`, containing the initialization of the service's class we have just created so the checks's checks can use it.
|
||||
|
||||
## Service
|
||||
|
||||
The Prowler's service structure is the following and the way to initialise it is just by importing the service client in a check.
|
||||
|
||||
## Service Base Class
|
||||
### Service Base Class
|
||||
|
||||
All the Prowler provider's services inherits from a base class depending on the provider used.
|
||||
|
||||
- [AWS Service Base Class](https://github.com/prowler-cloud/prowler/blob/22f8855ad7dad2e976dabff78611b643e234beaf/prowler/providers/aws/lib/service/service.py)
|
||||
- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/22f8855ad7dad2e976dabff78611b643e234beaf/prowler/providers/gcp/lib/service/service.py)
|
||||
- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/22f8855ad7dad2e976dabff78611b643e234beaf/prowler/providers/azure/lib/service/service.py)
|
||||
- [AWS Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/service/service.py)
|
||||
- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
|
||||
- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
|
||||
- [Kubernetes Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/kubernetes/lib/service/service.py)
|
||||
|
||||
Each class is used to initialize the credentials and the API's clients to be used in the service. If some threading is used it must be coded there.
|
||||
|
||||
## Service Class
|
||||
### Service Class
|
||||
|
||||
Due to the complexity and differencies of each provider API we are going to use an example service to guide you in how can it be created.
|
||||
Due to the complexity and differences of each provider API we are going to use an example service to guide you in how can it be created.
|
||||
|
||||
The following is the `<service>_service.py` file:
|
||||
The following is the `<new_service_name>_service.py` file:
|
||||
|
||||
```python title="Service Class"
|
||||
from datetime import datetime
|
||||
@@ -55,12 +58,12 @@ from prowler.providers.<provider>.lib.service.service import ServiceParentClass
|
||||
# Create a class for the Service
|
||||
################## <Service>
|
||||
class <Service>(ServiceParentClass):
|
||||
def __init__(self, audit_info):
|
||||
def __init__(self, provider):
|
||||
# Call Service Parent Class __init__
|
||||
# We use the __class__.__name__ to get it automatically
|
||||
# from the Service Class name but you can pass a custom
|
||||
# string if the provider's API service name is different
|
||||
super().__init__(__class__.__name__, audit_info)
|
||||
super().__init__(__class__.__name__, provider)
|
||||
|
||||
# Create an empty dictionary of items to be gathered,
|
||||
# using the unique ID as the dictionary key
|
||||
@@ -175,10 +178,12 @@ class <Service>(ServiceParentClass):
|
||||
f"{<item>.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
```
|
||||
???+note
|
||||
To avoid fake findings, when Prowler can't retrieve the items, because an Access Denied or similar error, we set that items value as `None`.
|
||||
|
||||
### Service Models
|
||||
#### Service Models
|
||||
|
||||
For each class object we need to model we use the Pydantic's [BaseModel](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel) to take advantage of the data validation.
|
||||
Service models are classes that are used in the service to design all that we need to store in each class object extrated from API calls. We use the Pydantic's [BaseModel](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel) to take advantage of the data validation.
|
||||
|
||||
```python title="Service Model"
|
||||
# In each service class we have to create some classes using
|
||||
@@ -202,7 +207,7 @@ class <Item>(BaseModel):
|
||||
tags: Optional[list]
|
||||
"""<Items>[].tags"""
|
||||
```
|
||||
### Service Objects
|
||||
#### Service Objects
|
||||
In the service each group of resources should be created as a Python [dictionary](https://docs.python.org/3/tutorial/datastructures.html#dictionaries). This is because we are performing lookups all the time and the Python dictionary lookup has [O(1) complexity](https://en.wikipedia.org/wiki/Big_O_notation#Orders_of_common_functions).
|
||||
|
||||
We MUST set as the dictionary key a unique ID, like the resource Unique ID or ARN.
|
||||
@@ -213,17 +218,17 @@ self.vpcs = {}
|
||||
self.vpcs["vpc-01234567890abcdef"] = VPC_Object_Class()
|
||||
```
|
||||
|
||||
## Service Client
|
||||
### Service Client
|
||||
|
||||
Each Prowler service requires a service client to use the service in the checks.
|
||||
|
||||
The following is the `<service>_client.py` containing the initialization of the service's class we have just created so the service's checks can use them:
|
||||
The following is the `<new_service_name>_client.py` containing the initialization of the service's class we have just created so the service's checks can use them:
|
||||
|
||||
```python
|
||||
from prowler.providers.<provider>.lib.audit_info.audit_info import audit_info
|
||||
from prowler.providers.<provider>.services.<service>.<service>_service import <Service>
|
||||
from prowler.providers.common.common import get_global_provider
|
||||
from prowler.providers.<provider>.services.<new_service_name>.<new_service_name>_service import <Service>
|
||||
|
||||
<service>_client = <Service>(audit_info)
|
||||
<new_service_name>_client = <Service>(get_global_provider())
|
||||
```
|
||||
|
||||
## Permissions
|
||||
|
||||
@@ -62,50 +62,6 @@ For the AWS provider we have ways to test a Prowler check based on the following
|
||||
|
||||
In the following section we are going to explain all of the above scenarios with examples. The main difference between those scenarios comes from if the [Moto](https://github.com/getmoto/moto) library covers the AWS API calls made by the service. You can check the covered API calls [here](https://github.com/getmoto/moto/blob/master/IMPLEMENTATION_COVERAGE.md).
|
||||
|
||||
An important point for the AWS testing is that in each check we MUST have a unique `audit_info` which is the key object during the AWS execution to isolate the test execution.
|
||||
|
||||
Check the [Audit Info](./audit-info.md) section to get more details.
|
||||
|
||||
```python
|
||||
# We need to import the AWS_Audit_Info and the Audit_Metadata
|
||||
# to set the audit_info to call AWS APIs
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audit_config=None,
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
```
|
||||
### Checks
|
||||
|
||||
For the AWS tests examples we are going to use the tests for the `iam_password_policy_uppercase` check.
|
||||
@@ -148,29 +104,29 @@ class Test_iam_password_policy_uppercase:
|
||||
# policy we want to set to False the RequireUppercaseCharacters
|
||||
iam_client.update_account_password_policy(RequireUppercaseCharacters=False)
|
||||
|
||||
# We set a mocked audit_info for AWS not to share the same audit state
|
||||
# between checks
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
# The aws_provider is mocked using set_mocked_aws_provider to use it as the return of the get_global_provider method.
|
||||
# this mocked provider is defined in fixtures
|
||||
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
|
||||
|
||||
# The Prowler service import MUST be made within the decorated
|
||||
# code not to make real API calls to the AWS service.
|
||||
from prowler.providers.aws.services.iam.iam_service import IAM
|
||||
|
||||
# Prowler for AWS uses a shared object called `current_audit_info` where it stores
|
||||
# the audit's state, credentials and configuration.
|
||||
# Prowler for AWS uses a shared object called aws_provider where it stores
|
||||
# the info related with the provider
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
new=current_audit_info,
|
||||
"prowler.providers.common.common.get_global_provider",
|
||||
return_value=aws_provider,
|
||||
),
|
||||
# We have to mock also the iam_client from the check to enforce that the iam_client used is the one
|
||||
# created within this check because patch != import, and if you execute tests in parallel some objects
|
||||
# can be already initialised hence the check won't be isolated
|
||||
mock.patch(
|
||||
"prowler.providers.aws.services.iam.iam_password_policy_uppercase.iam_password_policy_uppercase.iam_client",
|
||||
new=IAM(current_audit_info),
|
||||
new=IAM(aws_provider),
|
||||
):
|
||||
# We import the check within the two mocks not to initialise the iam_client with some shared information from
|
||||
# the current_audit_info or the IAM service.
|
||||
# the aws_provider or the IAM service.
|
||||
from prowler.providers.aws.services.iam.iam_password_policy_uppercase.iam_password_policy_uppercase import (
|
||||
iam_password_policy_uppercase,
|
||||
)
|
||||
@@ -235,9 +191,8 @@ class Test_iam_password_policy_uppercase:
|
||||
expiration=True,
|
||||
)
|
||||
|
||||
# We set a mocked audit_info for AWS not to share the same audit state
|
||||
# between checks
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
# We set a mocked aws_provider to unify providers, this way will isolate each test not to step on other tests configuration
|
||||
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
|
||||
|
||||
# In this scenario we have to mock also the IAM service and the iam_client from the check to enforce # that the iam_client used is the one created within this check because patch != import, and if you # execute tests in parallel some objects can be already initialised hence the check won't be isolated.
|
||||
# In this case we don't use the Moto decorator, we use the mocked IAM client for both objects
|
||||
@@ -249,7 +204,7 @@ class Test_iam_password_policy_uppercase:
|
||||
new=mocked_iam_client,
|
||||
):
|
||||
# We import the check within the two mocks not to initialise the iam_client with some shared information from
|
||||
# the current_audit_info or the IAM service.
|
||||
# the aws_provider or the IAM service.
|
||||
from prowler.providers.aws.services.iam.iam_password_policy_uppercase.iam_password_policy_uppercase import (
|
||||
iam_password_policy_uppercase,
|
||||
)
|
||||
@@ -333,19 +288,48 @@ Note that this does not use Moto, to keep it simple, but if you use any `moto`-d
|
||||
|
||||
#### Mocking more than one service
|
||||
|
||||
Since we are mocking the provider, it can be customized setting multiple attributes to the provider:
|
||||
```python
|
||||
def set_mocked_aws_provider(
|
||||
audited_regions: list[str] = [],
|
||||
audited_account: str = AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn: str = AWS_ACCOUNT_ARN,
|
||||
audited_partition: str = AWS_COMMERCIAL_PARTITION,
|
||||
expected_checks: list[str] = [],
|
||||
profile_region: str = None,
|
||||
audit_config: dict = {},
|
||||
fixer_config: dict = {},
|
||||
scan_unused_services: bool = True,
|
||||
audit_session: session.Session = session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
original_session: session.Session = None,
|
||||
enabled_regions: set = None,
|
||||
arguments: Namespace = Namespace(),
|
||||
create_default_organization: bool = True,
|
||||
) -> AwsProvider:
|
||||
```
|
||||
|
||||
If the test you are creating belongs to a check that uses more than one provider service, you should mock each of the services used. For example, the check `cloudtrail_logs_s3_bucket_access_logging_enabled` requires the CloudTrail and the S3 client, hence the service's mock part of the test will be as follows:
|
||||
|
||||
|
||||
```python
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
new=mock_audit_info,
|
||||
"prowler.providers.common.common.get_global_provider",
|
||||
return_value=set_mocked_aws_provider(
|
||||
[AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1]
|
||||
),
|
||||
), mock.patch(
|
||||
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_client",
|
||||
new=Cloudtrail(mock_audit_info),
|
||||
new=Cloudtrail(
|
||||
set_mocked_aws_provider([AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1])
|
||||
),
|
||||
), mock.patch(
|
||||
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.s3_client",
|
||||
new=S3(mock_audit_info),
|
||||
new=S3(
|
||||
set_mocked_aws_provider([AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1])
|
||||
),
|
||||
):
|
||||
```
|
||||
|
||||
@@ -363,10 +347,10 @@ from prowler.providers.<provider>.services.<service>.<service>_client import <se
|
||||
```
|
||||
2. `<service>_client.py`:
|
||||
```python
|
||||
from prowler.providers.<provider>.lib.audit_info.audit_info import audit_info
|
||||
from prowler.providers.common.common import get_global_provider
|
||||
from prowler.providers.<provider>.services.<service>.<service>_service import <SERVICE>
|
||||
|
||||
<service>_client = <SERVICE>(audit_info)
|
||||
<service>_client = <SERVICE>(mocked_provider)
|
||||
```
|
||||
|
||||
Due to the above import path it's not the same to patch the following objects because if you run a bunch of tests, either in parallel or not, some clients can be already instantiated by another check, hence your test execution will be using another test's service instance:
|
||||
@@ -384,19 +368,20 @@ A useful read about this topic can be found in the following article: https://st
|
||||
|
||||
Mocking a service client using the following code ...
|
||||
|
||||
Once the needed attributes are set for the mocked provider, you can use the mocked provider:
|
||||
```python title="Mocking the service_client"
|
||||
with mock.patch(
|
||||
"prowler.providers.<provider>.lib.audit_info.audit_info.audit_info",
|
||||
new=audit_info,
|
||||
"prowler.providers.common.common.get_global_provider",
|
||||
new=set_mocked_aws_provider([<region>]),
|
||||
), mock.patch(
|
||||
"prowler.providers.<provider>.services.<service>.<check>.<check>.<service>_client",
|
||||
new=<SERVICE>(audit_info),
|
||||
new=<SERVICE>(set_mocked_aws_provider([<region>])),
|
||||
):
|
||||
```
|
||||
will cause that the service will be initialised twice:
|
||||
|
||||
1. When the `<SERVICE>(audit_info)` is mocked out using `mock.patch` to have the object ready for the patching.
|
||||
2. At the `<service>_client.py` when we are patching it since the `mock.patch` needs to go to that object and initialise it, hence the `<SERVICE>(audit_info)` will be called again.
|
||||
1. When the `<SERVICE>(set_mocked_aws_provider([<region>]))` is mocked out using `mock.patch` to have the object ready for the patching.
|
||||
2. At the `<service>_client.py` when we are patching it since the `mock.patch` needs to go to that object and initialise it, hence the `<SERVICE>(set_mocked_aws_provider([<region>]))` will be called again.
|
||||
|
||||
Then, when we import the `<service>_client.py` at `<check>.py`, since we are mocking where the object is used, Python will use the mocked one.
|
||||
|
||||
@@ -408,24 +393,24 @@ Mocking a service client using the following code ...
|
||||
|
||||
```python title="Mocking the service and the service_client"
|
||||
with mock.patch(
|
||||
"prowler.providers.<provider>.lib.audit_info.audit_info.audit_info",
|
||||
new=audit_info,
|
||||
"prowler.providers.common.common.get_global_provider",
|
||||
new=set_mocked_aws_provider([<region>]),
|
||||
), mock.patch(
|
||||
"prowler.providers.<provider>.services.<service>.<SERVICE>",
|
||||
new=<SERVICE>(audit_info),
|
||||
new=<SERVICE>(set_mocked_aws_provider([<region>])),
|
||||
) as service_client, mock.patch(
|
||||
"prowler.providers.<provider>.services.<service>.<service>_client.<service>_client",
|
||||
new=service_client,
|
||||
):
|
||||
```
|
||||
will cause that the service will be initialised once, just when the `<SERVICE>(audit_info)` is mocked out using `mock.patch`.
|
||||
will cause that the service will be initialised once, just when the `set_mocked_aws_provider([<region>])` is mocked out using `mock.patch`.
|
||||
|
||||
Then, at the check_level when Python tries to import the client with `from prowler.providers.<provider>.services.<service>.<service>_client`, since it is already mocked out, the execution will continue using the `service_client` without getting into the `<service>_client.py`.
|
||||
|
||||
|
||||
### Services
|
||||
|
||||
For testing the AWS services we have to follow the same logic as with the AWS checks, we have to check if the AWS API calls made by the service are covered by Moto and we have to test the service `__init__` to verifiy that the information is being correctly retrieved.
|
||||
For testing the AWS services we have to follow the same logic as with the AWS checks, we have to check if the AWS API calls made by the service are covered by Moto and we have to test the service `__init__` to verify that the information is being correctly retrieved.
|
||||
|
||||
The service tests could act as *Integration Tests* since we test how the service retrieves the information from the provider, but since Moto or the custom mock objects mock those calls, these tests fall into *Unit Tests*.
|
||||
|
||||
@@ -437,79 +422,208 @@ Please refer to the [AWS checks tests](./unit-testing.md#checks) for more inform
|
||||
|
||||
For the GCP Provider we don't have any library to mock out the API calls we use. So in this scenario we inject the objects in the service client using [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock).
|
||||
|
||||
The following code shows how to use MagicMock to create the service objects for a GCP check test.
|
||||
The following code shows how to use MagicMock to create the service objects for a GCP check test. It is a real example adapted for informative purposes.
|
||||
|
||||
```python
|
||||
# We need to import the unittest.mock to allow us to patch some objects
|
||||
# not to use shared ones between test, hence to isolate the test
|
||||
from re import search
|
||||
from unittest import mock
|
||||
|
||||
# GCP Constants
|
||||
GCP_PROJECT_ID = "123456789012"
|
||||
# Import some constant values needed in every check
|
||||
from tests.providers.gcp.gcp_fixtures import GCP_PROJECT_ID, set_mocked_gcp_provider
|
||||
|
||||
# We are going to create a test for the compute_firewall_rdp_access_from_the_internet_allowed check
|
||||
class Test_compute_firewall_rdp_access_from_the_internet_allowed:
|
||||
# We are going to create a test for the compute_project_os_login_enabled check
|
||||
class Test_compute_project_os_login_enabled:
|
||||
|
||||
# We name the tests with test_<service>_<check_name>_<test_action>
|
||||
def test_compute_compute_firewall_rdp_access_from_the_internet_allowed_one_compliant_rule_with_valid_port(self):
|
||||
def test_one_compliant_project(self):
|
||||
# Import the service resource model to create the mocked object
|
||||
from prowler.providers.gcp.services.compute.compute_service import Project
|
||||
# Create the custom Project object to be tested
|
||||
project = Project(
|
||||
id=GCP_PROJECT_ID,
|
||||
enable_oslogin=True,
|
||||
)
|
||||
# Mocked client with MagicMock
|
||||
compute_client = mock.MagicMock
|
||||
|
||||
# Assign GCP client configuration
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.region = "global"
|
||||
compute_client.projects = [project]
|
||||
|
||||
# Import the service resource model to create the mocked object
|
||||
from prowler.providers.gcp.services.compute.compute_service import Firewall
|
||||
|
||||
# Create the custom Firewall object to be tested
|
||||
firewall = Firewall(
|
||||
name="test",
|
||||
id="1234567890",
|
||||
source_ranges=["0.0.0.0/0"],
|
||||
direction="INGRESS",
|
||||
allowed_rules=[{"IPProtocol": "tcp", "ports": ["443"]}],
|
||||
project_id=GCP_PROJECT_ID,
|
||||
)
|
||||
compute_client.firewalls = [firewall]
|
||||
|
||||
# In this scenario we have to mock also the Compute service and the compute_client from the check to enforce that the compute_client used is the one created within this check because patch != import, and if you execute tests in parallel some objects can be already initialised hence the check won't be isolated.
|
||||
# In this case we don't use the Moto decorator, we use the mocked Compute client for both objects
|
||||
# In this scenario we have to mock the compute_client from the check to enforce that the compute_client used is the one created above
|
||||
# We also mock the return value of the get_global_provider function to return our mocked GCP provider defined in the fixtures
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_service.Compute",
|
||||
new=defender_client,
|
||||
"prowler.providers.common.common.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
), mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_client.compute_client",
|
||||
new=defender_client,
|
||||
"prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
|
||||
# We import the check within the two mocks not to initialise the iam_client with some shared information from
|
||||
# the current_audit_info or the Compute service.
|
||||
from prowler.providers.gcp.services.compute.compute_firewall_rdp_access_from_the_internet_allowed.compute_firewall_rdp_access_from_the_internet_allowed import (
|
||||
compute_firewall_rdp_access_from_the_internet_allowed,
|
||||
# We import the check within the two mocks
|
||||
from prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled import (
|
||||
compute_project_os_login_enabled,
|
||||
)
|
||||
|
||||
# Once imported, we only need to instantiate the check's class
|
||||
check = compute_firewall_rdp_access_from_the_internet_allowed()
|
||||
|
||||
check = compute_project_os_login_enabled()
|
||||
# And then, call the execute() function to run the check
|
||||
# against the IAM client we've set up.
|
||||
# against the Compute client we've set up.
|
||||
result = check.execute()
|
||||
|
||||
# Last but not least, we need to assert all the fields
|
||||
# from the check's results
|
||||
# Assert the expected results
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert result[0].status_extended == f"Firewall {firewall.name} does not expose port 3389 (RDP) to the internet."
|
||||
assert result[0].resource_name = firewall.name
|
||||
assert result[0].resource_id == firewall.id
|
||||
assert result[0].project_id = GCP_PROJECT_ID
|
||||
assert result[0].location = compute_client.region
|
||||
assert search(
|
||||
f"Project {project.id} has OS Login enabled",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == project.id
|
||||
assert result[0].location == "global"
|
||||
assert result[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
# Complementary test to make more coverage for different scenarios
|
||||
def test_one_non_compliant_project(self):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Project
|
||||
|
||||
project = Project(
|
||||
id=GCP_PROJECT_ID,
|
||||
enable_oslogin=False,
|
||||
)
|
||||
|
||||
compute_client = mock.MagicMock
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.projects = [project]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.common.common.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
), mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled import (
|
||||
compute_project_os_login_enabled,
|
||||
)
|
||||
|
||||
check = compute_project_os_login_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert search(
|
||||
f"Project {project.id} does not have OS Login enabled",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == project.id
|
||||
assert result[0].location == "global"
|
||||
assert result[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
```
|
||||
|
||||
### Services
|
||||
|
||||
Coming soon ...
|
||||
For testing Google Cloud Services, we have to follow the same logic as with the Google Cloud checks. We are still mocking all API calls, but in this case, every API call used to set up an attribute is defined in the [fixtures file](https://github.com/prowler-cloud/prowler/blob/master/tests/providers/gcp/gcp_fixtures.py) in the `mock_api_client` function. Remember that EVERY method of a service must be tested.
|
||||
|
||||
The following code shows a real example of a testing class, but it has more comments than usual for educational purposes.
|
||||
|
||||
```python title="BigQuery Service Test"
|
||||
# We need to import the unittest.mock.patch to allow us to patch some objects
|
||||
# not to use shared ones between test, hence to isolate the test
|
||||
from unittest.mock import patch
|
||||
# Import the class needed from the service file
|
||||
from prowler.providers.gcp.services.bigquery.bigquery_service import BigQuery
|
||||
# Necessary constants and functions from the fixtures file
|
||||
from tests.providers.gcp.gcp_fixtures import (
|
||||
GCP_PROJECT_ID,
|
||||
mock_api_client,
|
||||
mock_is_api_active,
|
||||
set_mocked_gcp_provider,
|
||||
)
|
||||
|
||||
|
||||
class TestBigQueryService:
|
||||
# Only method needed to test full service
|
||||
def test_service(self):
|
||||
# In this case we are mocking the __is_api_active__ to ensure our mocked project is used
|
||||
# And all the client to use our mocked API calls
|
||||
with patch(
|
||||
"prowler.providers.gcp.lib.service.service.GCPService.__is_api_active__",
|
||||
new=mock_is_api_active,
|
||||
), patch(
|
||||
"prowler.providers.gcp.lib.service.service.GCPService.__generate_client__",
|
||||
new=mock_api_client,
|
||||
):
|
||||
# Instantiate an object of class with the mocked provider
|
||||
bigquery_client = BigQuery(
|
||||
set_mocked_gcp_provider(project_ids=[GCP_PROJECT_ID])
|
||||
)
|
||||
# Check all attributes of the tested class is well set up according API calls mocked from GCP fixture file
|
||||
assert bigquery_client.service == "bigquery"
|
||||
assert bigquery_client.project_ids == [GCP_PROJECT_ID]
|
||||
|
||||
assert len(bigquery_client.datasets) == 2
|
||||
|
||||
assert bigquery_client.datasets[0].name == "unique_dataset1_name"
|
||||
assert bigquery_client.datasets[0].id.__class__.__name__ == "str"
|
||||
assert bigquery_client.datasets[0].region == "US"
|
||||
assert bigquery_client.datasets[0].cmk_encryption
|
||||
assert bigquery_client.datasets[0].public
|
||||
assert bigquery_client.datasets[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
assert bigquery_client.datasets[1].name == "unique_dataset2_name"
|
||||
assert bigquery_client.datasets[1].id.__class__.__name__ == "str"
|
||||
assert bigquery_client.datasets[1].region == "EU"
|
||||
assert not bigquery_client.datasets[1].cmk_encryption
|
||||
assert not bigquery_client.datasets[1].public
|
||||
assert bigquery_client.datasets[1].project_id == GCP_PROJECT_ID
|
||||
|
||||
assert len(bigquery_client.tables) == 2
|
||||
|
||||
assert bigquery_client.tables[0].name == "unique_table1_name"
|
||||
assert bigquery_client.tables[0].id.__class__.__name__ == "str"
|
||||
assert bigquery_client.tables[0].region == "US"
|
||||
assert bigquery_client.tables[0].cmk_encryption
|
||||
assert bigquery_client.tables[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
assert bigquery_client.tables[1].name == "unique_table2_name"
|
||||
assert bigquery_client.tables[1].id.__class__.__name__ == "str"
|
||||
assert bigquery_client.tables[1].region == "US"
|
||||
assert not bigquery_client.tables[1].cmk_encryption
|
||||
assert bigquery_client.tables[1].project_id == GCP_PROJECT_ID
|
||||
```
|
||||
As it can be confusing where all these values come from, I'll give an example to make this clearer. First we need to check
|
||||
what is the API call used to obtain the datasets. In this case if we check the service the call is
|
||||
`self.client.datasets().list(projectId=project_id)`.
|
||||
|
||||
Now in the fixture file we have to mock this call in our `MagicMock` client in the function `mock_api_client`. The best way to mock
|
||||
is to follow the existing format: add one function where the client is passed to be modified; the format of this function name must be
|
||||
`mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute accessed after *client*).
|
||||
|
||||
In the example of BigQuery the function is called `mock_api_dataset_calls`. Inside this function we find an assignment to
|
||||
be used in the `__get_datasets__` method in BigQuery class:
|
||||
|
||||
```python
|
||||
# Mocking datasets
|
||||
dataset1_id = str(uuid4())
|
||||
dataset2_id = str(uuid4())
|
||||
|
||||
client.datasets().list().execute.return_value = {
|
||||
"datasets": [
|
||||
{
|
||||
"datasetReference": {
|
||||
"datasetId": "unique_dataset1_name",
|
||||
"projectId": GCP_PROJECT_ID,
|
||||
},
|
||||
"id": dataset1_id,
|
||||
"location": "US",
|
||||
},
|
||||
{
|
||||
"datasetReference": {
|
||||
"datasetId": "unique_dataset2_name",
|
||||
"projectId": GCP_PROJECT_ID,
|
||||
},
|
||||
"id": dataset2_id,
|
||||
"location": "EU",
|
||||
},
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Azure
|
||||
|
||||
@@ -517,246 +631,186 @@ Coming soon ...
|
||||
|
||||
For the Azure Provider we don't have any library to mock out the API calls we use. So in this scenario we inject the objects in the service client using [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock).
|
||||
|
||||
In essence, we create object instances and we run the check that we are testing with that instance. In the test we ensure the check executed correctly and results with the expected values.
|
||||
The following code shows how to use MagicMock to create the service objects for an Azure check test. It is a real example adapted for informative purposes.
|
||||
|
||||
The following code shows how to use MagicMock to create the service objects for an Azure check test.
|
||||
|
||||
```python
|
||||
```python title="app_ensure_http_is_redirected_to_https_test.py"
|
||||
# We need to import the unittest.mock to allow us to patch some objects
|
||||
# not to use shared ones between test, hence to isolate the test
|
||||
from unittest import mock
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
# Azure Constants
|
||||
from tests.providers.azure.azure_fixtures import AZURE_SUBSCRIPTION
|
||||
# Import some constant values needed in almost every check
|
||||
from tests.providers.azure.azure_fixtures import (
|
||||
AZURE_SUBSCRIPTION_ID,
|
||||
set_mocked_azure_provider,
|
||||
)
|
||||
|
||||
|
||||
|
||||
# We are going to create a test for the Test_defender_ensure_defender_for_arm_is_on check
|
||||
class Test_defender_ensure_defender_for_arm_is_on:
|
||||
# We are going to create a test for the app_ensure_http_is_redirected_to_https check
|
||||
class Test_app_ensure_http_is_redirected_to_https:
|
||||
|
||||
# We name the tests with test_<service>_<check_name>_<test_action>
|
||||
def test_defender_defender_ensure_defender_for_arm_is_on_arm_pricing_tier_not_standard(self):
|
||||
resource_id = str(uuid4())
|
||||
|
||||
def test_app_http_to_https_disabled(self):
|
||||
resource_id = f"/subscriptions/{uuid4()}"
|
||||
# Mocked client with MagicMock
|
||||
defender_client = mock.MagicMock
|
||||
app_client = mock.MagicMock
|
||||
|
||||
# Import the service resource model to create the mocked object
|
||||
from prowler.providers.azure.services.defender.defender_service import Defender_Pricing
|
||||
|
||||
# Create the custom Defender object to be tested
|
||||
defender_client.pricings = {
|
||||
AZURE_SUBSCRIPTION: {
|
||||
"Arm": Defender_Pricing(
|
||||
resource_id=resource_id,
|
||||
pricing_tier="Not Standard",
|
||||
free_trial_remaining_time=0,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
# In this scenario we have to mock also the Defender service and the defender_client from the check to enforce that the defender_client used is the one created within this check because patch != import, and if you execute tests in parallel some objects can be already initialised hence the check won't be isolated.
|
||||
# In this case we don't use the Moto decorator, we use the mocked Defender client for both objects
|
||||
with mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_ensure_defender_for_arm_is_on.defender_ensure_defender_for_arm_is_on.defender_client",
|
||||
new=defender_client,
|
||||
# In this scenario we have to mock the app_client from the check to enforce that the app_client used is the one created above
|
||||
# And also is mocked the return value of get_global_provider function to return our Azure mocked provider defined in fixtures
|
||||
with mock.patch(
|
||||
"prowler.providers.common.common.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
), mock.patch(
|
||||
"prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https.app_client",
|
||||
new=app_client,
|
||||
):
|
||||
|
||||
# We import the check within the two mocks not to initialise the iam_client with some shared information from
|
||||
# the current_audit_info or the Defender service.
|
||||
from prowler.providers.azure.services.defender.defender_ensure_defender_for_arm_is_on.defender_ensure_defender_for_arm_is_on import (
|
||||
defender_ensure_defender_for_arm_is_on,
|
||||
# We import the check within the two mocks
|
||||
from prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https import (
|
||||
app_ensure_http_is_redirected_to_https,
|
||||
)
|
||||
# Import the service resource model to create the mocked object
|
||||
from prowler.providers.azure.services.app.app_service import WebApp
|
||||
|
||||
# Create the custom App object to be tested
|
||||
app_client.apps = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
"app_id-1": WebApp(
|
||||
resource_id=resource_id,
|
||||
auth_enabled=True,
|
||||
configurations=mock.MagicMock(),
|
||||
client_cert_mode="Ignore",
|
||||
https_only=False,
|
||||
identity=None,
|
||||
location="West Europe",
|
||||
)
|
||||
}
|
||||
}
|
||||
# Once imported, we only need to instantiate the check's class
|
||||
check = defender_ensure_defender_for_arm_is_on()
|
||||
|
||||
check = app_ensure_http_is_redirected_to_https()
|
||||
# And then, call the execute() function to run the check
|
||||
# against the Defender client we've set up.
|
||||
# against the App client we've set up.
|
||||
result = check.execute()
|
||||
|
||||
# Last but not least, we need to assert all the fields
|
||||
# from the check's results
|
||||
# Assert the expected results
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"Defender plan Defender for ARM from subscription {AZURE_SUBSCRIPTION} is set to OFF (pricing tier not standard)"
|
||||
== f"HTTP is not redirected to HTTPS for app 'app_id-1' in subscription '{AZURE_SUBSCRIPTION_ID}'."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION
|
||||
assert result[0].resource_name == "Defender plan ARM"
|
||||
assert result[0].resource_name == "app_id-1"
|
||||
assert result[0].resource_id == resource_id
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].location == "West Europe"
|
||||
|
||||
# Complementary test to make more coverage for different scenarios
|
||||
def test_app_http_to_https_enabled(self):
|
||||
resource_id = f"/subscriptions/{uuid4()}"
|
||||
app_client = mock.MagicMock
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.common.common.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
), mock.patch(
|
||||
"prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https.app_client",
|
||||
new=app_client,
|
||||
):
|
||||
from prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https import (
|
||||
app_ensure_http_is_redirected_to_https,
|
||||
)
|
||||
from prowler.providers.azure.services.app.app_service import WebApp
|
||||
|
||||
app_client.apps = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
"app_id-1": WebApp(
|
||||
resource_id=resource_id,
|
||||
auth_enabled=True,
|
||||
configurations=mock.MagicMock(),
|
||||
client_cert_mode="Ignore",
|
||||
https_only=True,
|
||||
identity=None,
|
||||
location="West Europe",
|
||||
)
|
||||
}
|
||||
}
|
||||
check = app_ensure_http_is_redirected_to_https()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"HTTP is redirected to HTTPS for app 'app_id-1' in subscription '{AZURE_SUBSCRIPTION_ID}'."
|
||||
)
|
||||
assert result[0].resource_name == "app_id-1"
|
||||
assert result[0].resource_id == resource_id
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].location == "West Europe"
|
||||
|
||||
```
|
||||
|
||||
### Services
|
||||
|
||||
For the Azure Services tests, the idea is similar, we test that the functions we've done for capturing the values of the different objects using the Azure API work correctly. Again, we create an object instance and verify that the values captured for that instance are correct.
|
||||
For testing Azure services, we have to follow the same logic as with the Azure checks. We still mock all the API calls, but in this case, every method that uses an API call to set up an attribute is mocked with the [patch](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.patch) decorator at the beginning of the class. Remember that every method of a service MUST be tested.
|
||||
|
||||
The following code shows how a service test looks like.
|
||||
The following code shows a real example of a testing class, but it has more comments than usual for educational purposes.
|
||||
|
||||
```python
|
||||
#We import patch from unittest.mock for simulating objects, the ones that we'll test with.
|
||||
```python title="AppInsights Service Test"
|
||||
# We need to import the unittest.mock.patch to allow us to patch some objects
|
||||
# not to use shared ones between test, hence to isolate the test
|
||||
from unittest.mock import patch
|
||||
|
||||
#Importing FlowLogs from azure.mgmt.network.models allows us to create objects corresponding
|
||||
#to flow log settings for Azure networking resources.
|
||||
from azure.mgmt.network.models import FlowLog
|
||||
|
||||
#We import the different classes of the Network Service so we can use them.
|
||||
from prowler.providers.azure.services.network.network_service import (
|
||||
BastionHost,
|
||||
Network,
|
||||
NetworkWatcher,
|
||||
PublicIp,
|
||||
SecurityGroup,
|
||||
# Import the models needed from the service file
|
||||
from prowler.providers.azure.services.appinsights.appinsights_service import (
|
||||
AppInsights,
|
||||
Component,
|
||||
)
|
||||
|
||||
#Azure constants
|
||||
# Import some constant values needed in almost every check
|
||||
from tests.providers.azure.azure_fixtures import (
|
||||
AZURE_SUBSCRIPTION,
|
||||
set_mocked_azure_audit_info,
|
||||
AZURE_SUBSCRIPTION_ID,
|
||||
set_mocked_azure_provider,
|
||||
)
|
||||
|
||||
#Mocks the behavior of a function responsible for retrieving security groups from a network service so
|
||||
#basically this is the instance for SecurityGroup that we are going to use
|
||||
def mock_network_get_security_groups(_):
|
||||
# Function to mock the service function __get_components__, this function task is to return a possible value that real function could returns
|
||||
def mock_appinsights_get_components(_):
|
||||
return {
|
||||
AZURE_SUBSCRIPTION: [
|
||||
SecurityGroup(
|
||||
id="id",
|
||||
name="name",
|
||||
location="location",
|
||||
security_rules=[],
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
"app_id-1": Component(
|
||||
resource_id="/subscriptions/resource_id",
|
||||
resource_name="AppInsightsTest",
|
||||
location="westeurope",
|
||||
)
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
#We do the same for all the components we need, BastionHost, NetworkWatcher and PublicIp in this case
|
||||
|
||||
def mock_network_get_bastion_hosts(_):
|
||||
return {
|
||||
AZURE_SUBSCRIPTION: [
|
||||
BastionHost(
|
||||
id="id",
|
||||
name="name",
|
||||
location="location",
|
||||
)
|
||||
]
|
||||
}
|
||||
|
||||
def mock_network_get_network_watchers(_):
|
||||
return {
|
||||
AZURE_SUBSCRIPTION: [
|
||||
NetworkWatcher(
|
||||
id="id",
|
||||
name="name",
|
||||
location="location",
|
||||
flow_logs=[FlowLog(enabled=True, retention_policy=90)],
|
||||
)
|
||||
]
|
||||
}
|
||||
|
||||
def mock_network_get_public_ip_addresses(_):
|
||||
return {
|
||||
AZURE_SUBSCRIPTION: [
|
||||
PublicIp(
|
||||
id="id",
|
||||
name="name",
|
||||
location="location",
|
||||
ip_address="ip_address",
|
||||
)
|
||||
]
|
||||
}
|
||||
|
||||
#We use the 'path' decorator to replace during the test, the original get functions with the mock functions.
|
||||
|
||||
#In this case we are replacing the '__get_security_groups__' with the 'mock_network_get_security_groups'.
|
||||
#We do the same for the rest of the functions.
|
||||
# Patch decorator to use the mocked function instead the function with the real API call
|
||||
@patch(
|
||||
"prowler.providers.azure.services.network.network_service.Network.__get_security_groups__",
|
||||
new=mock_network_get_security_groups,
|
||||
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights.__get_components__",
|
||||
new=mock_appinsights_get_components,
|
||||
)
|
||||
@patch(
|
||||
"prowler.providers.azure.services.network.network_service.Network.__get_bastion_hosts__",
|
||||
new=mock_network_get_bastion_hosts,
|
||||
)
|
||||
@patch(
|
||||
"prowler.providers.azure.services.network.network_service.Network.__get_network_watchers__",
|
||||
new=mock_network_get_network_watchers,
|
||||
)
|
||||
@patch(
|
||||
"prowler.providers.azure.services.network.network_service.Network.__get_public_ip_addresses__",
|
||||
new=mock_network_get_public_ip_addresses,
|
||||
)
|
||||
|
||||
#We create the class for finally testing the methods
|
||||
class Test_Network_Service:
|
||||
|
||||
#Verifies that Network class initializes correctly a client object
|
||||
class Test_AppInsights_Service:
|
||||
# Mandatory test for every service, this method test the instance of the client is correct
|
||||
def test__get_client__(self):
|
||||
#Creates instance of the Network class with the audit information provided
|
||||
network = Network(set_mocked_azure_audit_info())
|
||||
#Checks if the client is not being initialize correctly
|
||||
app_insights = AppInsights(set_mocked_azure_provider())
|
||||
assert (
|
||||
network.clients[AZURE_SUBSCRIPTION].__class__.__name__
|
||||
== "NetworkManagementClient"
|
||||
app_insights.clients[AZURE_SUBSCRIPTION_ID].__class__.__name__
|
||||
== "ApplicationInsightsManagementClient"
|
||||
)
|
||||
|
||||
#Verifies Securiy Group are set correctly
|
||||
def test__get_security_groups__(self):
|
||||
network = Network(set_mocked_azure_audit_info())
|
||||
# Second typical method that test if subscriptions is defined inside the client object
|
||||
def test__get_subscriptions__(self):
|
||||
app_insights = AppInsights(set_mocked_azure_provider())
|
||||
assert app_insights.subscriptions.__class__.__name__ == "dict"
|
||||
# Test for the function __get_components__, inside this client is used the mocked function
|
||||
def test__get_components__(self):
|
||||
appinsights = AppInsights(set_mocked_azure_provider())
|
||||
assert len(appinsights.components) == 1
|
||||
assert (
|
||||
network.security_groups[AZURE_SUBSCRIPTION][0].__class__.__name__
|
||||
== "SecurityGroup"
|
||||
)
|
||||
#As you can see, every field must be right according to the mocking method
|
||||
assert network.security_groups[AZURE_SUBSCRIPTION][0].id == "id"
|
||||
assert network.security_groups[AZURE_SUBSCRIPTION][0].name == "name"
|
||||
assert network.security_groups[AZURE_SUBSCRIPTION][0].location == "location"
|
||||
assert network.security_groups[AZURE_SUBSCRIPTION][0].security_rules == []
|
||||
|
||||
#Verifies Network Watchers are set correctly
|
||||
def test__get_network_watchers__(self):
|
||||
network = Network(set_mocked_azure_audit_info())
|
||||
assert (
|
||||
network.network_watchers[AZURE_SUBSCRIPTION][0].__class__.__name__
|
||||
== "NetworkWatcher"
|
||||
)
|
||||
assert network.network_watchers[AZURE_SUBSCRIPTION][0].id == "id"
|
||||
assert network.network_watchers[AZURE_SUBSCRIPTION][0].name == "name"
|
||||
assert network.network_watchers[AZURE_SUBSCRIPTION][0].location == "location"
|
||||
assert network.network_watchers[AZURE_SUBSCRIPTION][0].flow_logs == [
|
||||
FlowLog(enabled=True, retention_policy=90)
|
||||
]
|
||||
#Verifies Flow Logs are set correctly
|
||||
def __get_flow_logs__(self):
|
||||
network = Network(set_mocked_azure_audit_info())
|
||||
nw_name = "name"
|
||||
assert (
|
||||
network.network_watchers[AZURE_SUBSCRIPTION][0]
|
||||
.flow_logs[nw_name][0]
|
||||
.__class__.__name__
|
||||
== "FlowLog"
|
||||
)
|
||||
assert network.network_watchers[AZURE_SUBSCRIPTION][0].flow_logs == [
|
||||
FlowLog(enabled=True, retention_policy=90)
|
||||
]
|
||||
assert (
|
||||
network.network_watchers[AZURE_SUBSCRIPTION][0].flow_logs[0].enabled is True
|
||||
appinsights.components[AZURE_SUBSCRIPTION_ID]["app_id-1"].resource_id
|
||||
== "/subscriptions/resource_id"
|
||||
)
|
||||
assert (
|
||||
network.network_watchers[AZURE_SUBSCRIPTION][0]
|
||||
.flow_logs[0]
|
||||
.retention_policy
|
||||
== 90
|
||||
appinsights.components[AZURE_SUBSCRIPTION_ID]["app_id-1"].resource_name
|
||||
== "AppInsightsTest"
|
||||
)
|
||||
assert (
|
||||
appinsights.components[AZURE_SUBSCRIPTION_ID]["app_id-1"].location
|
||||
== "westeurope"
|
||||
)
|
||||
|
||||
...
|
||||
```
|
||||
The code continues with some more verifications the same way.
|
||||
|
||||
Hopefully this will result useful for understanding and creating new Azure Services checks.
|
||||
|
||||
Please refer to the [Azure checks tests](./unit-testing.md#azure) for more information on how to create tests and check the existing services tests [here](https://github.com/prowler-cloud/prowler/tree/master/tests/providers/azure/services).
|
||||
|
||||
|
Before Width: | Height: | Size: 283 KiB After Width: | Height: | Size: 338 KiB |
BIN
docs/img/dashboard.png
Normal file
|
After Width: | Height: | Size: 214 KiB |
|
Before Width: | Height: | Size: 631 KiB |
|
Before Width: | Height: | Size: 87 KiB After Width: | Height: | Size: 240 KiB |
@@ -1,8 +1,20 @@
|
||||
**Prowler** is an Open Source security tool to perform AWS, Azure and Google Cloud security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness. We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
|
||||
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.
|
||||
|
||||

|
||||
## Prowler CLI
|
||||
|
||||
Prowler offers hundreds of controls covering more than 25 standards and compliance frameworks like CIS, PCI-DSS, ISO27001, GDPR, HIPAA, FFIEC, SOC2, AWS FTR, ENS and custom security frameworks.
|
||||
```console
|
||||
prowler <provider>
|
||||
```
|
||||

|
||||
|
||||
## Prowler Dashboard
|
||||
|
||||
```console
|
||||
prowler dashboard
|
||||
```
|
||||

|
||||
|
||||
It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.
|
||||
|
||||
## Quick Start
|
||||
### Installation
|
||||
@@ -15,7 +27,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
* `Python >= 3.9`
|
||||
* `Python pip >= 3.9`
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
|
||||
_Commands_:
|
||||
|
||||
@@ -29,7 +41,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
_Requirements_:
|
||||
|
||||
* Have `docker` installed: https://docs.docker.com/get-docker/.
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
* In the command below, change `-v` to your local directory path in order to access the reports.
|
||||
|
||||
_Commands_:
|
||||
@@ -46,7 +58,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
_Requirements for Ubuntu 20.04.3 LTS_:
|
||||
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
* Install python 3.9 with: `sudo apt-get install python3.9`
|
||||
* Remove python 3.8 to avoid conflicts if you can: `sudo apt-get remove python3.8`
|
||||
* Make sure you have the python3 distutils package installed: `sudo apt-get install python3-distutils`
|
||||
@@ -66,7 +78,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
_Requirements for Developers_:
|
||||
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
* `git`, `Python >= 3.9`, `pip` and `poetry` installed (`pip install poetry`)
|
||||
|
||||
_Commands_:
|
||||
@@ -83,7 +95,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
_Requirements_:
|
||||
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
* Latest Amazon Linux 2 should come with Python 3.9 already installed however it may need pip. Install Python pip 3.9 with: `sudo yum install -y python3-pip`.
|
||||
* Make sure setuptools for python is already installed with: `pip3 install setuptools`
|
||||
|
||||
@@ -100,7 +112,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
_Requirements_:
|
||||
|
||||
* `Brew` installed in your Mac or Linux
|
||||
* AWS, GCP and/or Azure credentials
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
|
||||
_Commands_:
|
||||
|
||||
@@ -111,7 +123,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
=== "AWS CloudShell"
|
||||
|
||||
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [2](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
|
||||
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [2](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
|
||||
|
||||
_Requirements_:
|
||||
|
||||
@@ -120,12 +132,16 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
sudo bash
|
||||
adduser prowler
|
||||
su prowler
|
||||
pip install prowler
|
||||
prowler -v
|
||||
cd /tmp
|
||||
prowler aws
|
||||
```
|
||||
|
||||
???+ note
|
||||
To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/home/cloudshell-user/output/prowler-output-123456789012-20221220191331.csv`
|
||||
To download the results from AWS CloudShell, select Actions -> Download File and add the full path of each file. For the CSV file it will be something like `/tmp/output/prowler-output-123456789012-20221220191331.csv`
|
||||
|
||||
=== "Azure CloudShell"
|
||||
|
||||
@@ -144,9 +160,11 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
The available versions of Prowler are the following:
|
||||
|
||||
- `latest`: in sync with master branch (bear in mind that it is not a stable version)
|
||||
- `latest`: in sync with `master` branch (bear in mind that it is not a stable version)
|
||||
- `v3-latest`: in sync with `v3` branch (bear in mind that it is not a stable version)
|
||||
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases), those are stable releases.
|
||||
- `stable`: this tag always point to the latest release.
|
||||
- `v3-stable`: this tag always point to the latest release for v3.
|
||||
|
||||
The container images are available here:
|
||||
|
||||
@@ -155,12 +173,30 @@ The container images are available here:
|
||||
|
||||
## High level architecture
|
||||
|
||||
You can run Prowler from your workstation, an EC2 instance, Fargate or any other container, Codebuild, CloudShell, Cloud9 and many more.
|
||||
You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.
|
||||
|
||||

|
||||
|
||||
## Deprecations from v3
|
||||
|
||||
### General
|
||||
- `Allowlist` now is called `Mutelist`.
|
||||
- The `--quiet` option has been deprecated, now use the `--status` flag to select the finding's status you want to get from PASS, FAIL or MANUAL.
|
||||
- All `INFO` finding's status has changed to `MANUAL`.
|
||||
- The CSV output format is common for all the providers.
|
||||
|
||||
We have deprecated some of our outputs formats:
|
||||
|
||||
- The HTML is replaced for the new Prowler Dashboard, run `prowler dashboard`.
|
||||
- The native JSON is replaced for the JSON [OCSF](https://schema.ocsf.io/) v1.1.0, common for all the providers.
|
||||
|
||||
### AWS
|
||||
- Deprecate the AWS flag --sts-endpoint-region since we use AWS STS regional tokens.
|
||||
- To send only FAILS to AWS Security Hub, now use either `--send-sh-only-fails` or `--security-hub --status FAIL`.
|
||||
|
||||
## Basic Usage
|
||||
|
||||
To run Prowler, you will need to specify the provider (e.g `aws`, `gcp` or `azure`):
|
||||
To run Prowler, you will need to specify the provider (e.g `aws`, `gcp`, `azure` or `kubernetes`):
|
||||
|
||||
???+ note
|
||||
If no provider specified, AWS will be used for backward compatibility with most of v2 options.
|
||||
@@ -173,7 +209,7 @@ prowler <provider>
|
||||
???+ note
|
||||
Running the `prowler` command without options will use your environment variable credentials, see [Requirements](./getting-started/requirements.md) section to review the credentials settings.
|
||||
|
||||
If you miss the former output you can use `--verbose` but Prowler v3 is smoking fast, so you won't see much ;)
|
||||
If you miss the former output you can use `--verbose` but Prowler v4 is smoking fast, so you won't see much ;
|
||||
|
||||
By default, Prowler will generate a CSV, JSON and HTML reports, however you can generate a JSON-ASFF (used by AWS Security Hub) report with `-M` or `--output-modes`:
|
||||
|
||||
@@ -197,6 +233,7 @@ For executing specific checks or services you can use options `-c`/`checks` or `
|
||||
prowler azure --checks storage_blob_public_access_level_is_disabled
|
||||
prowler aws --services s3 ec2
|
||||
prowler gcp --services iam compute
|
||||
prowler kubernetes --services etcd apiserver
|
||||
```
|
||||
|
||||
Also, checks and services can be excluded with options `-e`/`--excluded-checks` or `--excluded-services`:
|
||||
@@ -205,6 +242,7 @@ Also, checks and services can be excluded with options `-e`/`--excluded-checks`
|
||||
prowler aws --excluded-checks s3_bucket_public_access
|
||||
prowler azure --excluded-services defender iam
|
||||
prowler gcp --excluded-services kms
|
||||
prowler kubernetes --excluded-services controllermanager
|
||||
```
|
||||
|
||||
More options and executions methods that will save your time in [Miscellaneous](tutorials/misc.md).
|
||||
@@ -226,7 +264,7 @@ prowler aws --profile custom-profile -f us-east-1 eu-south-2
|
||||
???+ note
|
||||
By default, `prowler` will scan all AWS regions.
|
||||
|
||||
See more details about AWS Authentication in [Requirements](getting-started/requirements.md)
|
||||
See more details about AWS Authentication in [Requirements](getting-started/requirements.md#aws)
|
||||
|
||||
### Azure
|
||||
|
||||
@@ -246,7 +284,7 @@ prowler azure --browser-auth --tenant-id "XXXXXXXX"
|
||||
prowler azure --managed-identity-auth
|
||||
```
|
||||
|
||||
See more details about Azure Authentication in [Requirements](getting-started/requirements.md)
|
||||
See more details about Azure Authentication in [Requirements](getting-started/requirements.md#azure)
|
||||
|
||||
Prowler by default scans all the subscriptions that is allowed to scan, if you want to scan a single subscription or various specific subscriptions you can use the following flag (using az cli auth as example):
|
||||
```console
|
||||
@@ -273,7 +311,28 @@ Prowler by default scans all the GCP Projects that is allowed to scan, if you wa
|
||||
prowler gcp --project-ids <Project ID 1> <Project ID 2> ... <Project ID N>
|
||||
```
|
||||
|
||||
See more details about GCP Authentication in [Requirements](getting-started/requirements.md)
|
||||
See more details about GCP Authentication in [Requirements](getting-started/requirements.md#google-cloud)
|
||||
|
||||
### Kubernetes
|
||||
|
||||
Prowler allows you to scan your Kubernetes Cluster either from within the cluster or from outside the cluster.
|
||||
|
||||
For non in-cluster execution, you can provide the location of the KubeConfig file with the following argument:
|
||||
|
||||
```console
|
||||
prowler kubernetes --kubeconfig-file path
|
||||
```
|
||||
|
||||
For in-cluster execution, you can use the supplied yaml to run Prowler as a job:
|
||||
```console
|
||||
kubectl apply -f kubernetes/job.yaml
|
||||
kubectl apply -f kubernetes/prowler-role.yaml
|
||||
kubectl apply -f kubernetes/prowler-rolebinding.yaml
|
||||
kubectl get pods --> prowler-XXXXX
|
||||
kubectl logs prowler-XXXXX
|
||||
```
|
||||
|
||||
> By default, `prowler` will scan all namespaces in your active Kubernetes context, use flag `--context` to specify the context to be scanned and `--namespaces` to specify the namespaces to be scanned.
|
||||
|
||||
## Prowler v2 Documentation
|
||||
For **Prowler v2 Documentation**, please check it out [here](https://github.com/prowler-cloud/prowler/blob/8818f47333a0c1c1a457453c87af0ea5b89a385f/README.md).
|
||||
|
||||
@@ -27,7 +27,7 @@ Those credentials must be associated to a user or role with proper permissions t
|
||||
|
||||
Prowler can use your custom AWS Profile with:
|
||||
```console
|
||||
prowler <provider> -p/--profile <profile_name>
|
||||
prowler aws -p/--profile <profile_name>
|
||||
```
|
||||
|
||||
## Multi-Factor Authentication
|
||||
@@ -36,7 +36,3 @@ If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to
|
||||
|
||||
- ARN of your MFA device
|
||||
- TOTP (Time-Based One-Time Password)
|
||||
|
||||
## STS Endpoint Region
|
||||
|
||||
If you are using Prowler in AWS regions that are not enabled by default you need to use the argument `--sts-endpoint-region` to point the AWS STS API calls `assume-role` and `get-caller-identity` to the non-default region, e.g.: `prowler aws --sts-endpoint-region eu-south-2`.
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
# AWS CloudShell
|
||||
|
||||
## Installation
|
||||
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
|
||||
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
|
||||
```shell
|
||||
sudo bash
|
||||
adduser prowler
|
||||
su prowler
|
||||
pip install prowler
|
||||
prowler -v
|
||||
cd /tmp
|
||||
prowler aws
|
||||
```
|
||||
|
||||
## Download Files
|
||||
@@ -15,11 +19,14 @@ To download the results from AWS CloudShell, select Actions -> Download File and
|
||||
|
||||
The limited storage that AWS CloudShell provides for the user's home directory causes issues when installing the poetry dependencies to run Prowler from GitHub. Here is a workaround:
|
||||
```shell
|
||||
sudo bash
|
||||
adduser prowler
|
||||
su prowler
|
||||
git clone https://github.com/prowler-cloud/prowler.git
|
||||
cd prowler
|
||||
pip install poetry
|
||||
mkdir /tmp/pypoetry
|
||||
poetry config cache-dir /tmp/pypoetry
|
||||
mkdir /tmp/poetry
|
||||
poetry config cache-dir /tmp/poetry
|
||||
poetry shell
|
||||
poetry install
|
||||
python prowler.py -v
|
||||
|
||||
@@ -33,13 +33,6 @@ prowler aws --role-session-name <role_session_name>
|
||||
???+ note
|
||||
It defaults to `ProwlerAssessmentSession`.
|
||||
|
||||
## STS Endpoint Region
|
||||
|
||||
If you are using Prowler in AWS regions that are not enabled by default you need to use the argument `--sts-endpoint-region` to point the AWS STS API calls `assume-role` and `get-caller-identity` to the non-default region, e.g.: `prowler aws --sts-endpoint-region eu-south-2`.
|
||||
|
||||
???+ note
|
||||
Since v3.11.0, Prowler uses a regional token in STS sessions so it can scan all AWS regions without needing the `--sts-endpoint-region` argument. Make sure that you have enabled the AWS Region you want to scan in **BOTH** AWS Accounts (assumed role account and account from which you assume the role).
|
||||
|
||||
## Role MFA
|
||||
|
||||
If your IAM Role has MFA configured you can use `--mfa` along with `-R`/`--role <role_arn>` and Prowler will ask you to input the following values to get a new temporary session for the IAM Role provided:
|
||||
|
||||
@@ -3,13 +3,13 @@
|
||||
To save your report in an S3 bucket, use `-B`/`--output-bucket`.
|
||||
|
||||
```sh
|
||||
prowler <provider> -B my-bucket
|
||||
prowler aws -B my-bucket
|
||||
```
|
||||
|
||||
If you can use a custom folder and/or filename, use `-o`/`--output-directory` and/or `-F`/`--output-filename`.
|
||||
|
||||
```sh
|
||||
prowler <provider> \
|
||||
prowler aws \
|
||||
-B my-bucket \
|
||||
--output-directory test-folder \
|
||||
--output-filename output-filename
|
||||
@@ -18,8 +18,11 @@ prowler <provider> \
|
||||
By default Prowler sends HTML, JSON and CSV output formats, if you want to send a custom output format or a single one of the defaults you can specify it with the `-M`/`--output-modes` flag.
|
||||
|
||||
```sh
|
||||
prowler <provider> -M csv -B my-bucket
|
||||
prowler aws -M csv -B my-bucket
|
||||
```
|
||||
|
||||
|
||||
???+ note
|
||||
In the case you do not want to use the assumed role credentials but the initial credentials to put the reports into the S3 bucket, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket`. Make sure that the used credentials have `s3:PutObject` permissions in the S3 path where the reports are going to be uploaded.
|
||||
In the case you do not want to use the assumed role credentials but the initial credentials to put the reports into the S3 bucket, use `-D`/`--output-bucket-no-assume` instead of `-B`/`--output-bucket`.
|
||||
???+ warning
|
||||
Make sure that the used credentials have `s3:PutObject` permissions in the S3 path where the reports are going to be uploaded.
|
||||
|
||||
24
docs/tutorials/aws/threat-detection.md
Normal file
@@ -0,0 +1,24 @@
|
||||
# Threat Detection
|
||||
|
||||
Prowler allows you to do threat detection in AWS based on the CloudTrail log records. To run checks related with threat detection use:
|
||||
```
|
||||
prowler aws --category threat-detection
|
||||
```
|
||||
This comand will run these checks:
|
||||
|
||||
* `cloudtrail_threat_detection_privilege_escalation`
|
||||
* `cloudtrail_threat_detection_enumeration`
|
||||
|
||||
???+ note
|
||||
Threat Detection checks will be only executed using `--category threat-detection` flag due to preformance.
|
||||
|
||||
## Config File
|
||||
|
||||
If you want to manage the behavior of the Threat Detection checks you can edit `config.yaml` file from `/prowler/config`. In this file you can edit the following attributes related with Threat Detection:
|
||||
|
||||
* `threat_detection_privilege_escalation_threshold`: determines the percentage of actions found to decide if it is an privilege_scalation attack event, by default is 0.1 (10%)
|
||||
* `threat_detection_privilege_escalation_minutes`: it is the past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
|
||||
* `threat_detection_privilege_escalation_actions`: these are the default actions related with priviledge scalation.
|
||||
* `threat_detection_enumeration_threshold`: determines the percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
|
||||
* `threat_detection_enumeration_minutes`: it is the past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
|
||||
* `threat_detection_enumeration_actions`: these are the default actions related with enumeration attacks.
|
||||
@@ -1,11 +1,11 @@
|
||||
# Check mapping between Prowler v3 and v2
|
||||
# Check mapping between Prowler v4/v3 and v2
|
||||
|
||||
Prowler v3 comes with different identifiers but we maintained the same checks that were implemented in v2. The reason for this change is because in previous versions of Prowler, check names were mostly based on CIS Benchmark for AWS. In v3 all checks are independent from any security framework and they have its own name and ID.
|
||||
Prowler v3 comes with different identifiers but we maintained the same checks that were implemented in v2. The reason for this change is because in previous versions of Prowler, check names were mostly based on CIS Benchmark for AWS. In v4 and v3 all checks are independent from any security framework and they have its own name and ID.
|
||||
|
||||
If you need more information about how new compliance implementation works in Prowler v3 see [Compliance](../compliance.md) section.
|
||||
If you need more information about how new compliance implementation works in Prowler v4 and v3 see [Compliance](../compliance.md) section.
|
||||
|
||||
```
|
||||
checks_v3_to_v2_mapping = {
|
||||
checks_v4_v3_to_v2_mapping = {
|
||||
"accessanalyzer_enabled_without_findings": "extra769",
|
||||
"account_maintain_current_contact_details": "check117",
|
||||
"account_security_contact_information_is_registered": "check118",
|
||||
|
||||
@@ -1,5 +1,21 @@
|
||||
# Compliance
|
||||
Prowler allows you to execute checks based on requirements defined in compliance frameworks.
|
||||
Prowler allows you to execute checks based on requirements defined in compliance frameworks. By default, it will execute and give you an overview of the status of each compliance framework:
|
||||
|
||||
<img src="../img/compliance/compliance.png"/>
|
||||
|
||||
> You can find CSVs containing detailed compliance results inside the compliance folder within Prowler's output folder.
|
||||
|
||||
## Execute Prowler based on Compliance Frameworks
|
||||
Prowler can analyze your environment based on a specific compliance framework and get more details, to do it, you can use option `--compliance`:
|
||||
```sh
|
||||
prowler <provider> --compliance <compliance_framework>
|
||||
```
|
||||
Standard results will be shown and additionally the framework information as the sample below for CIS AWS 2.0. For details a CSV file has been generated as well.
|
||||
|
||||
<img src="../img/compliance/compliance-cis-sample1.png"/>
|
||||
|
||||
???+ note
|
||||
**If Prowler can't find a resource related with a check from a compliance requirement, this requirement won't appear on the output**
|
||||
|
||||
## List Available Compliance Frameworks
|
||||
In order to see which compliance frameworks are cover by Prowler, you can use option `--list-compliance`:
|
||||
@@ -20,6 +36,7 @@ Currently, the available frameworks are:
|
||||
- `cis_2.0_azure`
|
||||
- `cis_2.1_azure`
|
||||
- `cis_3.0_aws`
|
||||
- `cis_1.8_kubernetes`
|
||||
- `cisa_aws`
|
||||
- `ens_rd2022_aws`
|
||||
- `fedramp_low_revision_4_aws`
|
||||
@@ -47,7 +64,6 @@ prowler <provider> --list-compliance-requirements <compliance_framework(s)>
|
||||
```
|
||||
|
||||
Example for the first requirements of CIS 1.5 for AWS:
|
||||
|
||||
```
|
||||
Listing CIS 1.5 AWS Compliance Requirements:
|
||||
|
||||
@@ -80,15 +96,6 @@ Requirement Id: 1.5
|
||||
|
||||
```
|
||||
|
||||
## Execute Prowler based on Compliance Frameworks
|
||||
As we mentioned, Prowler can be execute to analyse you environment based on a specific compliance framework, to do it, you can use option `--compliance`:
|
||||
```sh
|
||||
prowler <provider> --compliance <compliance_framework>
|
||||
```
|
||||
Standard results will be shown and additionally the framework information as the sample below for CIS AWS 1.5. For details a CSV file has been generated as well.
|
||||
|
||||
<img src="../img/compliance-cis-sample1.png"/>
|
||||
|
||||
## Create and contribute adding other Security Frameworks
|
||||
|
||||
This information is part of the Developer Guide and can be found here: https://docs.prowler.cloud/en/latest/tutorials/developer-guide/.
|
||||
|
||||
@@ -29,11 +29,16 @@ The following list includes all the AWS checks with configurable variables that
|
||||
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
|
||||
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
|
||||
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
|
||||
| `config_recorder_all_regions_enabled` | `allowlist_non_default_regions` | Boolean |
|
||||
| `drs_job_exist` | `allowlist_non_default_regions` | Boolean |
|
||||
| `guardduty_is_enabled` | `allowlist_non_default_regions` | Boolean |
|
||||
| `securityhub_enabled` | `allowlist_non_default_regions` | Boolean |
|
||||
|
||||
| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `drs_job_exist` | `mute_non_default_regions` | Boolean |
|
||||
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `securityhub_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
|
||||
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
|
||||
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
|
||||
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
|
||||
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
|
||||
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
|
||||
## Azure
|
||||
|
||||
### Configurable Checks
|
||||
@@ -41,7 +46,7 @@ The following list includes all the Azure checks with configurable variables tha
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
|
||||
| `network_public_ip_shodan` | `shodan_api_key` | String |
|
||||
| `network_public_ip_shodan` | `shodan_api_key` | String |
|
||||
| `app_ensure_php_version_is_latest` | `php_latest_version` | String |
|
||||
| `app_ensure_python_version_is_latest` | `python_latest_version` | String |
|
||||
| `app_ensure_java_version_is_latest` | `java_latest_version` | String |
|
||||
@@ -51,6 +56,19 @@ The following list includes all the Azure checks with configurable variables tha
|
||||
|
||||
### Configurable Checks
|
||||
|
||||
## Kubernetes
|
||||
|
||||
### Configurable Checks
|
||||
The following list includes all the Kubernetes checks with configurable variables that can be changed in the configuration yaml file:
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
|
||||
| `audit_log_maxbackup` | `audit_log_maxbackup` | String |
|
||||
| `audit_log_maxsize` | `audit_log_maxsize` | String |
|
||||
| `audit_log_maxage` | `audit_log_maxage` | String |
|
||||
| `apiserver_strong_ciphers` | `apiserver_strong_ciphers` | String |
|
||||
| `kubelet_strong_ciphers_only` | `kubelet_strong_ciphers` | String |
|
||||
|
||||
## Config YAML File Structure
|
||||
|
||||
???+ note
|
||||
@@ -61,8 +79,8 @@ The following list includes all the Azure checks with configurable variables tha
|
||||
aws:
|
||||
|
||||
# AWS Global Configuration
|
||||
# aws.allowlist_non_default_regions --> Allowlist Failed Findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
|
||||
allowlist_non_default_regions: False
|
||||
# aws.mute_non_default_regions --> Mute Failed Findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
|
||||
mute_non_default_regions: False
|
||||
|
||||
# AWS IAM Configuration
|
||||
# aws.iam_user_accesskey_unused --> CIS recommends 45 days
|
||||
@@ -100,27 +118,18 @@ aws:
|
||||
# aws.awslambda_function_using_supported_runtimes
|
||||
obsolete_lambda_runtimes:
|
||||
[
|
||||
"java8",
|
||||
"go1.x",
|
||||
"provided",
|
||||
"python3.6",
|
||||
"python2.7",
|
||||
"python3.7",
|
||||
"nodejs4.3",
|
||||
"nodejs4.3-edge",
|
||||
"nodejs6.10",
|
||||
"nodejs",
|
||||
"nodejs8.10",
|
||||
"nodejs10.x",
|
||||
"nodejs12.x",
|
||||
"nodejs14.x",
|
||||
"dotnet5.0",
|
||||
"dotnetcore1.0",
|
||||
"dotnetcore2.0",
|
||||
"dotnetcore2.1",
|
||||
"dotnetcore3.1",
|
||||
"ruby2.5",
|
||||
"ruby2.7",
|
||||
]
|
||||
|
||||
# AWS Organizations
|
||||
@@ -144,6 +153,159 @@ aws:
|
||||
# trustedadvisor_premium_support_plan_subscribed
|
||||
verify_premium_support_plans: True
|
||||
|
||||
# AWS CloudTrail Configuration
|
||||
# aws.cloudtrail_threat_detection_privilege_escalation
|
||||
threat_detection_privilege_escalation_entropy: 0.7 # Percentage of actions found to decide if it is an privilege_escalation attack event, by default is 0.7 (70%)
|
||||
threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
|
||||
threat_detection_privilege_escalation_actions: [
|
||||
"AddPermission",
|
||||
"AddRoleToInstanceProfile",
|
||||
"AddUserToGroup",
|
||||
"AssociateAccessPolicy",
|
||||
"AssumeRole",
|
||||
"AttachGroupPolicy",
|
||||
"AttachRolePolicy",
|
||||
"AttachUserPolicy",
|
||||
"ChangePassword",
|
||||
"CreateAccessEntry",
|
||||
"CreateAccessKey",
|
||||
"CreateDevEndpoint",
|
||||
"CreateEventSourceMapping",
|
||||
"CreateFunction",
|
||||
"CreateGroup",
|
||||
"CreateJob",
|
||||
"CreateKeyPair",
|
||||
"CreateLoginProfile",
|
||||
"CreatePipeline",
|
||||
"CreatePolicyVersion",
|
||||
"CreateRole",
|
||||
"CreateStack",
|
||||
"DeleteRolePermissionsBoundary",
|
||||
"DeleteRolePolicy",
|
||||
"DeleteUserPermissionsBoundary",
|
||||
"DeleteUserPolicy",
|
||||
"DetachRolePolicy",
|
||||
"DetachUserPolicy",
|
||||
"GetCredentialsForIdentity",
|
||||
"GetId",
|
||||
"GetPolicyVersion",
|
||||
"GetUserPolicy",
|
||||
"Invoke",
|
||||
"ModifyInstanceAttribute",
|
||||
"PassRole",
|
||||
"PutGroupPolicy",
|
||||
"PutPipelineDefinition",
|
||||
"PutRolePermissionsBoundary",
|
||||
"PutRolePolicy",
|
||||
"PutUserPermissionsBoundary",
|
||||
"PutUserPolicy",
|
||||
"ReplaceIamInstanceProfileAssociation",
|
||||
"RunInstances",
|
||||
"SetDefaultPolicyVersion",
|
||||
"UpdateAccessKey",
|
||||
"UpdateAssumeRolePolicy",
|
||||
"UpdateDevEndpoint",
|
||||
"UpdateEventSourceMapping",
|
||||
"UpdateFunctionCode",
|
||||
"UpdateJob",
|
||||
"UpdateLoginProfile",
|
||||
]
|
||||
# aws.cloudtrail_threat_detection_enumeration
|
||||
threat_detection_enumeration_entropy: 0.7 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.7 (70%)
|
||||
threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
|
||||
threat_detection_enumeration_actions: [
|
||||
"DescribeAccessEntry",
|
||||
"DescribeAccountAttributes",
|
||||
"DescribeAvailabilityZones",
|
||||
"DescribeBundleTasks",
|
||||
"DescribeCarrierGateways",
|
||||
"DescribeClientVpnRoutes",
|
||||
"DescribeCluster",
|
||||
"DescribeDhcpOptions",
|
||||
"DescribeFlowLogs",
|
||||
"DescribeImages",
|
||||
"DescribeInstanceAttribute",
|
||||
"DescribeInstanceInformation",
|
||||
"DescribeInstanceTypes",
|
||||
"DescribeInstances",
|
||||
"DescribeInstances",
|
||||
"DescribeKeyPairs",
|
||||
"DescribeLogGroups",
|
||||
"DescribeLogStreams",
|
||||
"DescribeOrganization",
|
||||
"DescribeRegions",
|
||||
"DescribeSecurityGroups",
|
||||
"DescribeSnapshotAttribute",
|
||||
"DescribeSnapshotTierStatus",
|
||||
"DescribeSubscriptionFilters",
|
||||
"DescribeTransitGatewayMulticastDomains",
|
||||
"DescribeVolumes",
|
||||
"DescribeVolumesModifications",
|
||||
"DescribeVpcEndpointConnectionNotifications",
|
||||
"DescribeVpcs",
|
||||
"GetAccount",
|
||||
"GetAccountAuthorizationDetails",
|
||||
"GetAccountSendingEnabled",
|
||||
"GetBucketAcl",
|
||||
"GetBucketLogging",
|
||||
"GetBucketPolicy",
|
||||
"GetBucketReplication",
|
||||
"GetBucketVersioning",
|
||||
"GetCallerIdentity",
|
||||
"GetCertificate",
|
||||
"GetConsoleScreenshot",
|
||||
"GetCostAndUsage",
|
||||
"GetDetector",
|
||||
"GetEbsDefaultKmsKeyId",
|
||||
"GetEbsEncryptionByDefault",
|
||||
"GetFindings",
|
||||
"GetFlowLogsIntegrationTemplate",
|
||||
"GetIdentityVerificationAttributes",
|
||||
"GetInstances",
|
||||
"GetIntrospectionSchema",
|
||||
"GetLaunchTemplateData",
|
||||
"GetLaunchTemplateData",
|
||||
"GetLogRecord",
|
||||
"GetParameters",
|
||||
"GetPolicyVersion",
|
||||
"GetPublicAccessBlock",
|
||||
"GetQueryResults",
|
||||
"GetRegions",
|
||||
"GetSMSAttributes",
|
||||
"GetSMSSandboxAccountStatus",
|
||||
"GetSendQuota",
|
||||
"GetTransitGatewayRouteTableAssociations",
|
||||
"GetUserPolicy",
|
||||
"HeadObject",
|
||||
"ListAccessKeys",
|
||||
"ListAccounts",
|
||||
"ListAllMyBuckets",
|
||||
"ListAssociatedAccessPolicies",
|
||||
"ListAttachedUserPolicies",
|
||||
"ListClusters",
|
||||
"ListDetectors",
|
||||
"ListDomains",
|
||||
"ListFindings",
|
||||
"ListHostedZones",
|
||||
"ListIPSets",
|
||||
"ListIdentities",
|
||||
"ListInstanceProfiles",
|
||||
"ListObjects",
|
||||
"ListOrganizationalUnitsForParent",
|
||||
"ListOriginationNumbers",
|
||||
"ListPolicyVersions",
|
||||
"ListRoles",
|
||||
"ListRoles",
|
||||
"ListRules",
|
||||
"ListServiceQuotas",
|
||||
"ListSubscriptions",
|
||||
"ListTargetsByRule",
|
||||
"ListTopics",
|
||||
"ListUsers",
|
||||
"LookupEvents",
|
||||
"Search",
|
||||
]
|
||||
|
||||
# Azure Configuration
|
||||
azure:
|
||||
# Azure Network Configuration
|
||||
|
||||
@@ -31,6 +31,10 @@ CustomChecksMetadata:
|
||||
Checks:
|
||||
compute_instance_public_ip:
|
||||
Severity: critical
|
||||
kubernetes:
|
||||
Checks:
|
||||
apiserver_anonymous_requests:
|
||||
Severity: low
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
96
docs/tutorials/dashboard.md
Normal file
@@ -0,0 +1,96 @@
|
||||
# Dashboard
|
||||
Prowler allows you to run your own local dashboards using the csv outputs provided by Prowler
|
||||
|
||||
```sh
|
||||
prowler dashboard
|
||||
```
|
||||
???+ note
|
||||
You can expose the `dashboard` server in another address using the `HOST` environment variable.
|
||||
|
||||
To run Prowler local dashboard with Docker, use:
|
||||
|
||||
```sh
|
||||
docker run --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
|
||||
```
|
||||
|
||||
???+ note
|
||||
**Remember that the `dashboard` server is not authenticated, if you expose it to the internet, you are running it at your own risk.**
|
||||
|
||||
The banner and additional info about the dashboard will be shown on your console:
|
||||
<img src="../img/dashboard/dashboard-banner.png">
|
||||
|
||||
## Overview Page
|
||||
|
||||
The overview page provides a full impression of your findings obtained from Prowler:
|
||||
|
||||
<img src="../img/dashboard/dashboard-overview.png">
|
||||
|
||||
In this page you can do multiple functions:
|
||||
|
||||
* Apply filters (Assessment Date / Account / Region)
|
||||
* See which files have been scanned to generate the dashboard by placing your mouse over the `?` icon:
|
||||
<img src="../img/dashboard/dashboard-files-scanned.png">
|
||||
* Download the `Top 25 Failed Findings by Severity` table using the button `DOWNLOAD THIS TABLE AS CSV`
|
||||
|
||||
## Compliance Page
|
||||
|
||||
This page shows all the info related to the compliance selected, you can apply multiple filters depending on your preferences.
|
||||
|
||||
<img src="../img/dashboard/dashboard-compliance.png">
|
||||
|
||||
To add your own compliance to compliance page, add a file with the compliance name (using `_` instead of `.`) to the path `/dashboard/compliance`.
|
||||
|
||||
In this file, use the format present in the other compliance files to create the table. Example for CIS 2.0:
|
||||
```python
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_cis
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_DESCRIPTION",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_cis(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
|
||||
)
|
||||
|
||||
```
|
||||
|
||||
## S3 Integration
|
||||
|
||||
If you are a Prowler SaaS customer and you want to use your data from your S3 bucket, you can run:
|
||||
|
||||
```sh
|
||||
aws s3 cp s3://<your-bucket>/output/csv ./output --recursive
|
||||
```
|
||||
to load the dashboard with the new files.
|
||||
|
||||
## Output Path
|
||||
|
||||
Prowler will use the outputs from the folder `/output` (for common prowler outputs) and `/output/compliance` (for prowler compliance outputs) to generate the dashboard.
|
||||
|
||||
To change the path modify the values `folder_path_overview` or `folder_path_compliance` from `/dashboard/config.py`
|
||||
|
||||
## Output Support
|
||||
|
||||
Prowler dashboard supports the following outputs:
|
||||
|
||||
| Provider | V3 | V4 | COMPLIANCE-V3 | COMPLIANCE-V4|
|
||||
|---|---|---|---|---|
|
||||
| AWS | ✅ | ✅ | ✅ | ✅ |
|
||||
| Azure | ❌ | ✅ | ❌ | ✅ |
|
||||
| Kubernetes | ❌ | ✅ | ❌ | ✅ |
|
||||
| GCP | ❌ | ✅ | ❌ | ✅ |
|
||||
152
docs/tutorials/fixer.md
Normal file
@@ -0,0 +1,152 @@
|
||||
# Prowler Fixer
|
||||
Prowler allows you to fix some of the failed findings it identifies. You can use the `--fixer` flag to run the fixes that are available for the checks that failed.
|
||||
|
||||
```sh
|
||||
prowler <provider> -c <check_to_fix_1> <check_to_fix_2> ... --fixer
|
||||
```
|
||||
|
||||
<img src="../img/fixer.png">
|
||||
|
||||
???+ note
|
||||
You can see all the available fixes for each provider with the `--list-fixers` flag.
|
||||
|
||||
```sh
|
||||
prowler <provider> --list-fixer
|
||||
```
|
||||
|
||||
## Writing a Fixer
|
||||
To write a fixer, you need to create a file called `<check_id>_fixer.py` inside the check folder, with a function called `fixer` that receives either the region or the resource to be fixed as a parameter, and returns a boolean value indicating if the fix was successful or not.
|
||||
|
||||
For example, the regional fixer for the `ec2_ebs_default_encryption` check, which enables EBS encryption by default in a region, would look like this:
|
||||
```python
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.aws.services.ec2.ec2_client import ec2_client
|
||||
|
||||
|
||||
def fixer(region):
|
||||
"""
|
||||
Enable EBS encryption by default in a region. NOTE: Custom KMS keys for EBS Default Encryption may be overwritten.
|
||||
Requires the ec2:EnableEbsEncryptionByDefault permission:
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": "ec2:EnableEbsEncryptionByDefault",
|
||||
"Resource": "*"
|
||||
}
|
||||
]
|
||||
}
|
||||
Args:
|
||||
region (str): AWS region
|
||||
Returns:
|
||||
bool: True if EBS encryption by default is enabled, False otherwise
|
||||
"""
|
||||
try:
|
||||
regional_client = ec2_client.regional_clients[region]
|
||||
return regional_client.enable_ebs_encryption_by_default()[
|
||||
"EbsEncryptionByDefault"
|
||||
]
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
return False
|
||||
```
|
||||
On the other hand, the fixer for the `s3_account_level_public_access_blocks` check, which enables the account-level public access blocks for S3, would look like this:
|
||||
```python
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.providers.aws.services.s3.s3control_client import s3control_client
|
||||
|
||||
|
||||
def fixer(resource_id: str) -> bool:
|
||||
"""
|
||||
Enable S3 Block Public Access for the account. NOTE: By blocking all S3 public access you may break public S3 buckets.
|
||||
Requires the s3:PutAccountPublicAccessBlock permission:
|
||||
{
|
||||
"Version": "2012-10-17",
|
||||
"Statement": [
|
||||
{
|
||||
"Effect": "Allow",
|
||||
"Action": "s3:PutAccountPublicAccessBlock",
|
||||
"Resource": "*"
|
||||
}
|
||||
]
|
||||
}
|
||||
Returns:
|
||||
bool: True if S3 Block Public Access is enabled, False otherwise
|
||||
"""
|
||||
try:
|
||||
s3control_client.client.put_public_access_block(
|
||||
AccountId=resource_id,
|
||||
PublicAccessBlockConfiguration={
|
||||
"BlockPublicAcls": True,
|
||||
"IgnorePublicAcls": True,
|
||||
"BlockPublicPolicy": True,
|
||||
"RestrictPublicBuckets": True,
|
||||
},
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
```
|
||||
|
||||
## Fixer Config file
|
||||
For some fixers, you can have configurable parameters depending on your use case. You can either use the default config file in `prowler/config/fixer_config.yaml` or create a custom config file and pass it to the fixer with the `--fixer-config` flag. The config file should be a YAML file with the following structure:
|
||||
```yaml
|
||||
# Fixer configuration file
|
||||
aws:
|
||||
# ec2_ebs_default_encryption
|
||||
# No configuration needed for this check
|
||||
|
||||
# s3_account_level_public_access_blocks
|
||||
# No configuration needed for this check
|
||||
|
||||
# iam_password_policy_* checks:
|
||||
iam_password_policy:
|
||||
MinimumPasswordLength: 14
|
||||
RequireSymbols: True
|
||||
RequireNumbers: True
|
||||
RequireUppercaseCharacters: True
|
||||
RequireLowercaseCharacters: True
|
||||
AllowUsersToChangePassword: True
|
||||
MaxPasswordAge: 90
|
||||
PasswordReusePrevention: 24
|
||||
HardExpiry: False
|
||||
|
||||
# accessanalyzer_enabled
|
||||
accessanalyzer_enabled:
|
||||
AnalyzerName: "DefaultAnalyzer"
|
||||
AnalyzerType: "ACCOUNT_UNUSED_ACCESS"
|
||||
|
||||
# guardduty_is_enabled
|
||||
# No configuration needed for this check
|
||||
|
||||
# securityhub_enabled
|
||||
securityhub_enabled:
|
||||
EnableDefaultStandards: True
|
||||
|
||||
# cloudtrail_multi_region_enabled
|
||||
cloudtrail_multi_region_enabled:
|
||||
TrailName: "DefaultTrail"
|
||||
S3BucketName: "my-cloudtrail-bucket"
|
||||
IsMultiRegionTrail: True
|
||||
EnableLogFileValidation: True
|
||||
# CloudWatchLogsLogGroupArn: "arn:aws:logs:us-east-1:123456789012:log-group:my-cloudtrail-log-group"
|
||||
# CloudWatchLogsRoleArn: "arn:aws:iam::123456789012:role/my-cloudtrail-role"
|
||||
# KmsKeyId: "arn:aws:kms:us-east-1:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab"
|
||||
|
||||
# kms_cmk_rotation_enabled
|
||||
# No configuration needed for this check
|
||||
|
||||
# ec2_ebs_snapshot_account_block_public_access
|
||||
ec2_ebs_snapshot_account_block_public_access:
|
||||
State: "block-all-sharing"
|
||||
|
||||
# ec2_instance_account_imdsv2_enabled
|
||||
# No configuration needed for this check
|
||||
```
|
||||
28
docs/tutorials/gcp/projects.md
Normal file
@@ -0,0 +1,28 @@
|
||||
# GCP Projects
|
||||
|
||||
By default, Prowler is multi-project, which means that is going to scan all the Google Cloud projects that the authenticated user has access to. If you want to scan a specific project(s), you can use the `--project-ids` argument.
|
||||
|
||||
```console
|
||||
prowler gcp --project-ids project-id1 project-id2
|
||||
```
|
||||
|
||||
???+ note
|
||||
You can use asterisk `*` to scan projects that match a pattern. For example, `prowler gcp --project-ids "prowler*"` will scan all the projects that start with `prowler`.
|
||||
|
||||
???+ note
|
||||
If you want to know the projects that you have access to, you can use the following command:
|
||||
|
||||
```console
|
||||
prowler gcp --list-project-ids
|
||||
```
|
||||
|
||||
### Exclude Projects
|
||||
|
||||
If you want to exclude some projects from the scan, you can use the `--exclude-project-ids` argument.
|
||||
|
||||
```console
|
||||
prowler gcp --exclude-project-ids project-id1 project-id2
|
||||
```
|
||||
|
||||
???+ note
|
||||
You can use asterisk `*` to exclude projects that match a pattern. For example, `prowler gcp --exclude-project-ids "sys*"` will exclude all the projects that start with `sys`.
|
||||
|
Before Width: | Height: | Size: 141 KiB |
BIN
docs/tutorials/img/compliance/compliance-cis-sample1.png
Normal file
|
After Width: | Height: | Size: 40 KiB |
BIN
docs/tutorials/img/compliance/compliance.png
Normal file
|
After Width: | Height: | Size: 42 KiB |
BIN
docs/tutorials/img/dashboard/dashboard-banner.png
Normal file
|
After Width: | Height: | Size: 24 KiB |
BIN
docs/tutorials/img/dashboard/dashboard-compliance.png
Normal file
|
After Width: | Height: | Size: 133 KiB |
BIN
docs/tutorials/img/dashboard/dashboard-files-scanned.png
Normal file
|
After Width: | Height: | Size: 8.9 KiB |
BIN
docs/tutorials/img/dashboard/dashboard-overview.png
Normal file
|
After Width: | Height: | Size: 248 KiB |
BIN
docs/tutorials/img/fixer.png
Normal file
|
After Width: | Height: | Size: 67 KiB |
|
Before Width: | Height: | Size: 10 KiB After Width: | Height: | Size: 10 KiB |