Compare commits
122 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 5e52ed8b60 |  |
|  | c5514fdb63 |  |
|  | c78c3058fd |  |
|  | 10d9ef9906 |  |
|  | 43426041ef |  |
|  | 125eb9ac53 |  |
|  | 681407e0a2 |  |
|  | 082f3a8fe8 |  |
|  | 397cc26b2a |  |
|  | 331ae92843 |  |
|  | 06843cd41a |  |
|  | 28b5ef9ee9 |  |
|  | 63dcc057d3 |  |
|  | 0bc16ee5ff |  |
|  | abcc9c2c80 |  |
|  | daf2ad38bd |  |
|  | 3dc418df39 |  |
|  | 00aaafbc12 |  |
|  | bd49a55f3d |  |
|  | 013975b7a6 |  |
|  | 392026286a |  |
|  | 29ef974565 |  |
|  | 06c8216092 |  |
|  | 03f04d24a5 |  |
|  | 7b45ed63cc |  |
|  | 6e4dd1d69c |  |
|  | 185b4cba0c |  |
|  | 8198ea4a2c |  |
|  | aaf3e8a5cf |  |
|  | ecef56fa8f |  |
|  | 349ce3f2d0 |  |
|  | e3d4741213 |  |
|  | 9d6d5f1d76 |  |
|  | 3152d67f58 |  |
|  | cb41c8d15b |  |
|  | 06590842d6 |  |
|  | d4c22a0ca5 |  |
|  | c6f9936292 |  |
|  | eaa8900758 |  |
|  | e1e95d8879 |  |
|  | ef3a0f4878 |  |
|  | 64cc36e7e2 |  |
|  | 1e001bb0fd |  |
|  | 6ba123a003 |  |
|  | 36d0f2c23f |  |
|  | 63412e3645 |  |
|  | 191cf276c3 |  |
|  | 45978bd0bb |  |
|  | 9666652d18 |  |
|  | ad2716d7c9 |  |
|  | 0a7939bea3 |  |
|  | b8c50a7b45 |  |
|  | 175e8d2b05 |  |
|  | 046069a656 |  |
|  | f9522da48f |  |
|  | c03f959005 |  |
|  | 522aeebe5e |  |
|  | 5312f487f9 |  |
|  | d9b6624d65 |  |
|  | 1506da54fc |  |
|  | 245512d320 |  |
|  | 487190b379 |  |
|  | 74aaeaa95c |  |
|  | 28e8f0de2b |  |
|  | f60b5017e2 |  |
|  | fe80821596 |  |
|  | 628a3c4e7b |  |
|  | 3d59c34ec9 |  |
|  | 35043c2dd6 |  |
|  | ab815123c9 |  |
|  | 69ab84efe1 |  |
|  | 77823afa54 |  |
|  | 63cd6c1290 |  |
|  | cab32d2f94 |  |
|  | 1f4316e9dd |  |
|  | ade762a85e |  |
|  | bda5d62c72 |  |
|  | 2176fff8c3 |  |
|  | 87893bd54b |  |
|  | b539a888b1 |  |
|  | d6b2b0ca13 |  |
|  | 58ee45b702 |  |
|  | c62d97f23a |  |
|  | d618c5ea12 |  |
|  | d8e27f0d33 |  |
|  | 38496ff646 |  |
|  | da1084907e |  |
|  | 3385b630e7 |  |
|  | fc59183045 |  |
|  | 33242079f7 |  |
|  | 086148819c |  |
|  | 5df9fd881c |  |
|  | bd17d36e7f |  |
|  | be55fa22fd |  |
|  | b48b3a5e2e |  |
|  | fc03dd37f1 |  |
|  | d8bb384689 |  |
|  | 0b32a10bb8 |  |
|  | f0c027f54e |  |
|  | b0f2f34d3b |  |
|  | 3e6b76df76 |  |
|  | 6197cf792d |  |
|  | 3c4e5a14f7 |  |
|  | effc743b6e |  |
|  | 364a945d28 |  |
|  | 07b9354d18 |  |
|  | 8b1e537ca5 |  |
|  | 6a20e850bc |  |
|  | 636892bc9a |  |
|  | b40f32ab57 |  |
|  | 14bab496b5 |  |
|  | 3cc367e0a3 |  |
|  | 36fc575e40 |  |
|  | 24efb34d91 |  |
|  | c08e244c95 |  |
|  | c2f8980f1f |  |
|  | 0ef85b3dee |  |
|  | 93a2431211 |  |
|  | 1fe74937c1 |  |
|  | 6ee016e577 |  |
|  | f7248dfb1c |  |
|  | 856afb3966 |  |
.github/CODEOWNERS (6 changes, vendored)

```diff
@@ -1,5 +1 @@
-* @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
-
-# To protect a repository fully against unauthorized changes, you also need to define an owner for the CODEOWNERS file itself.
-# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-and-branch-protection
-/.github/ @prowler-cloud/sdk
+* @prowler-cloud/prowler-oss
```
.github/dependabot.yml (52 changes, vendored)

```diff
@@ -5,63 +5,24 @@
 version: 2
 updates:
-  - package-ecosystem: "pip" # See documentation for possible values
-    directory: "/" # Location of package manifests
+  - package-ecosystem: "pip"
+    directory: "/"
     schedule:
       interval: "weekly"
     target-branch: master
     labels:
       - "dependencies"
       - "pip"

   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
       interval: "weekly"
     target-branch: master
     labels:
       - "dependencies"
       - "github_actions"

-  - package-ecosystem: "npm"
-    directory: "/"
-    schedule:
-      interval: "daily"
-    open-pull-requests-limit: 10
-    target-branch: master
-    labels:
-      - "dependencies"
-      - "npm"
-
-  # v4.6
-  - package-ecosystem: "pip"
-    directory: "/"
-    schedule:
-      interval: "weekly"
-    open-pull-requests-limit: 10
-    target-branch: v4.6
-    labels:
-      - "dependencies"
-      - "pip"
-      - "v4"
-
-  - package-ecosystem: "github-actions"
-    directory: "/"
-    schedule:
-      interval: "weekly"
-    open-pull-requests-limit: 10
-    target-branch: v4.6
-    labels:
-      - "dependencies"
-      - "github_actions"
-      - "v4"
-
   # v3
   - package-ecosystem: "pip"
     directory: "/"
     schedule:
       interval: "monthly"
     open-pull-requests-limit: 10
     target-branch: v3
     labels:
       - "dependencies"
@@ -70,10 +31,9 @@ updates:
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "monthly"
-    open-pull-requests-limit: 10
+      interval: "weekly"
     target-branch: v3
     labels:
       - "dependencies"
       - "github_actions"
       - "v3"
```
||||
26
.github/workflows/build-lint-push-containers.yml
vendored
@@ -43,7 +43,6 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
|
||||
prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
|
||||
env:
|
||||
POETRY_VIRTUALENVS_CREATE: "false"
|
||||
|
||||
@@ -58,15 +57,13 @@ jobs:
|
||||
|
||||
- name: Install Poetry
|
||||
run: |
|
||||
pipx install poetry==1.8.5
|
||||
pipx install poetry
|
||||
pipx inject poetry poetry-bumpversion
|
||||
|
||||
- name: Get Prowler version
|
||||
id: get-prowler-version
|
||||
run: |
|
||||
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
|
||||
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
|
||||
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
# Store prowler version major just for the release
|
||||
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
|
||||
@@ -91,6 +88,13 @@ jobs:
|
||||
;;
|
||||
esac
|
||||
|
||||
- name: Update Prowler version (release)
|
||||
if: github.event_name == 'release'
|
||||
run: |
|
||||
PROWLER_VERSION="${{ github.event.release.tag_name }}"
|
||||
poetry version "${PROWLER_VERSION}"
|
||||
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
@@ -111,7 +115,7 @@ jobs:
|
||||
|
||||
- name: Build and push container image (latest)
|
||||
if: github.event_name == 'push'
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
push: true
|
||||
tags: |
|
||||
@@ -123,7 +127,7 @@ jobs:
|
||||
|
||||
- name: Build and push container image (release)
|
||||
if: github.event_name == 'release'
|
||||
uses: docker/build-push-action@v6
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
# Use local context to get changes
|
||||
# https://github.com/docker/build-push-action#path-context
|
||||
@@ -149,19 +153,19 @@ jobs:
|
||||
echo "LATEST_COMMIT_HASH=${LATEST_COMMIT_HASH}" >> $GITHUB_ENV
|
||||
|
||||
- name: Dispatch event (latest)
|
||||
if: github.event_name == 'push' && needs.container-build-push.outputs.prowler_version_major == '3'
|
||||
if: github.event_name == 'push' && ${{ needs.container-build-push.outputs.prowler_version_major == '3' }}
|
||||
run: |
|
||||
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
--data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
|
||||
|
||||
- name: Dispatch event (release)
|
||||
if: github.event_name == 'release' && needs.container-build-push.outputs.prowler_version_major == '3'
|
||||
if: github.event_name == 'release' && ${{ needs.container-build-push.outputs.prowler_version_major == '3' }}
|
||||
run: |
|
||||
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
--data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'
|
||||
--data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ env.PROWLER_VERSION }}"}}'
|
||||
|
||||
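The `${PROWLER_VERSION%%.*}` expansion used above is easy to misread: it strips the longest suffix matching `.*`, leaving only the major version that the dispatch steps compare against `'3'`. A minimal sketch of the same operation in Python (the version string here is hypothetical):

```python
# Same result as the shell expansion ${PROWLER_VERSION%%.*}: drop everything
# from the first "." onward, keeping only the major version component.
prowler_version = "3.16.2"  # hypothetical value; the workflow reads it from poetry
prowler_version_major = prowler_version.split(".", 1)[0]
assert prowler_version_major == "3"
```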
.github/workflows/find-secrets.yml (3 changes, vendored)

```diff
@@ -11,9 +11,8 @@ jobs:
         with:
           fetch-depth: 0
       - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@v3.88.20
+        uses: trufflesecurity/trufflehog@v3.72.0
         with:
           path: ./
           base: ${{ github.event.repository.default_branch }}
           head: HEAD
           extra_args: --only-verified
```
.github/workflows/pull-request.yml (8 changes, vendored)

```diff
@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@v4
       - name: Test if changes are in not ignored paths
         id: are-non-ignored-files-changed
-        uses: tj-actions/changed-files@v45
+        uses: tj-actions/changed-files@v44
         with:
           files: ./**
           files_ignore: |
@@ -33,7 +33,7 @@
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
           python -m pip install --upgrade pip
-          pipx install poetry==1.8.5
+          pipx install poetry
       - name: Set up Python ${{ matrix.python-version }}
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         uses: actions/setup-python@v5
@@ -73,7 +73,7 @@
       - name: Safety
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
-          poetry run safety check --ignore 70612
+          poetry run safety check
       - name: Vulture
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
@@ -88,6 +88,6 @@
           poetry run pytest -n auto --cov=./prowler --cov-report=xml tests
       - name: Upload coverage reports to Codecov
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
         env:
           CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
```
.github/workflows/pypi-release.yml (33 changes, vendored)

```diff
@@ -8,6 +8,8 @@ env:
   RELEASE_TAG: ${{ github.event.release.tag_name }}
   PYTHON_VERSION: 3.11
   CACHE: "poetry"
+  # TODO: create a bot user for this kind of tasks, like prowler-bot
+  GIT_COMMITTER_EMAIL: "sergio@prowler.com"

 jobs:
   release-prowler-job:
@@ -37,7 +39,8 @@

       - name: Install dependencies
         run: |
-          pipx install poetry==1.8.5
+          pipx install poetry
+          pipx inject poetry poetry-bumpversion

       - name: Setup Python
         uses: actions/setup-python@v5
@@ -45,6 +48,34 @@
           python-version: ${{ env.PYTHON_VERSION }}
           cache: ${{ env.CACHE }}

+      - name: Update Poetry and config version
+        run: |
+          poetry version ${{ env.RELEASE_TAG }}
+
+      - name: Import GPG key
+        uses: crazy-max/ghaction-import-gpg@v6
+        with:
+          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
+          passphrase: ${{ secrets.GPG_PASSPHRASE }}
+          git_user_signingkey: true
+          git_commit_gpgsign: true
+
+      - name: Push updated version to the release tag
+        run: |
+          # Configure Git
+          git config user.name "github-actions"
+          git config user.email "${{ env.GIT_COMMITTER_EMAIL }}"
+
+          # Add the files with the version changed
+          git add prowler/config/config.py pyproject.toml
+          git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify -S
+
+          # Replace the tag with the version updated
+          git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}" --sign
+
+          # Push the tag
+          git push -f origin ${{ env.RELEASE_TAG }}
+
       - name: Build Prowler package
         run: |
           poetry build
```
```diff
@@ -50,12 +50,12 @@

       # Create pull request
       - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v7
+        uses: peter-evans/create-pull-request@v6
         with:
           token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
           commit-message: "feat(regions_update): Update regions for AWS services."
           branch: "aws-services-regions-updated-${{ github.sha }}"
-          labels: "status/waiting-for-revision, severity/low, provider/aws"
+          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
           title: "chore(regions_update): Changes in regions for AWS services."
           body: |
             ### Description
```
.gitignore (6 changes, vendored)

```diff
@@ -9,8 +9,9 @@
 __pycache__
 venv/
 build/
-dist/
+/dist/
 *.egg-info/
+*/__pycache__/*.pyc

 # Session
 Session.vim
@@ -51,3 +52,6 @@ junit-reports/
-.coverage*
+.coverage
+coverage*

+# Node
+node_modules
```
.pre-commit-config.yaml

```diff
@@ -26,6 +26,7 @@ repos:
     rev: v0.9.0
     hooks:
       - id: shellcheck
+        exclude: contrib
   ## PYTHON
   - repo: https://github.com/myint/autoflake
     rev: v2.2.1
@@ -96,7 +97,7 @@ repos:
       - id: safety
         name: safety
        description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
-        entry: bash -c 'safety check --ignore 70612'
+        entry: bash -c 'safety check'
         language: system

       - id: vulture
```
```diff
@@ -10,4 +10,4 @@
 Want some swag as appreciation for your contribution?

 # Prowler Developer Guide
-https://docs.prowler.com/projects/prowler-open-source/en/latest/developer-guide/introduction/
+https://docs.prowler.cloud/en/latest/tutorials/developer-guide/
```
Dockerfile

```diff
@@ -1,4 +1,4 @@
-FROM python:3.12.8-alpine3.20
+FROM python:3.12-alpine

 LABEL maintainer="https://github.com/prowler-cloud/prowler"

@@ -15,7 +15,8 @@ USER prowler

 # Copy necessary files
 WORKDIR /home/prowler
-COPY prowler/ /home/prowler/prowler/
+COPY prowler/ /home/prowler/prowler/
+COPY dashboard/ /home/prowler/dashboard/
 COPY pyproject.toml /home/prowler
 COPY README.md /home/prowler

@@ -26,6 +27,10 @@ ENV PATH="$HOME/.local/bin:$PATH"
 RUN pip install --no-cache-dir --upgrade pip && \
     pip install --no-cache-dir .

+# Remove deprecated dash dependencies
+RUN pip uninstall dash-html-components -y && \
+    pip uninstall dash-core-components -y
+
 # Remove Prowler directory and build files
 USER 0
 RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
```
Makefile (2 changes)

```diff
@@ -27,7 +27,7 @@ lint: ## Lint Code
	@echo "Running black... "
	black --check .
	@echo "Running pylint..."
-	pylint --disable=W,C,R,E -j 0 providers lib util config
+	pylint --disable=W,C,R,E -j 0 prowler util

 ##@ PyPI
 pypi-clean: ## Delete the distribution files
```
README.md (23 changes)

````diff
@@ -50,7 +50,7 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
 | AWS | 304 | 61 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 6 -> `prowler aws --list-categories` |
 | GCP | 75 | 11 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
 | Azure | 127 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
-| Kubernetes | Work In Progress | - | CIS soon | - |
+| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |

 # 📖 Documentation

@@ -99,7 +99,7 @@ python prowler.py -v

 You can run Prowler from your workstation, an EC2 instance, Fargate or any other container, Codebuild, CloudShell and Cloud9.

-![Prowler CLI in CloudShell](docs/img/aws_cloudshell_guide-4.png?raw=True)
+![Prowler CLI in CloudShell](docs/img/aws_cloudshell_guide-3.png?raw=True)

 # 📝 Requirements

@@ -273,6 +273,25 @@ prowler gcp --credentials-file path
 ```
 > By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.

+## Kubernetes
+
+For non in-cluster execution, you can provide the location of the KubeConfig file with the following argument:
+
+```console
+prowler kubernetes --kubeconfig-file path
+```
+
+For in-cluster execution, you can use the supplied yaml to run Prowler as a job:
+```console
+kubectl apply -f job.yaml
+kubectl apply -f prowler-role.yaml
+kubectl apply -f prowler-rolebinding.yaml
+kubectl get pods --> prowler-XXXXX
+kubectl logs prowler-XXXXX
+```
+
+> By default, `prowler` will scan all namespaces in your active Kubernetes context, use flag `--context` to specify the context to be scanned and `--namespaces` to specify the namespaces to be scanned.
+
 # 📃 License

 Prowler is licensed as Apache License 2.0 as specified in each file. You may obtain a copy of the License at
````
```diff
@@ -1,17 +1,8 @@
 #!/bin/bash

-# Install system dependencies
-sudo yum -y install openssl-devel bzip2-devel libffi-devel gcc
-# Upgrade to Python 3.9
-cd /tmp && wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz
-tar zxf Python-3.9.13.tgz
-cd Python-3.9.13/ || exit
-./configure --enable-optimizations
-sudo make altinstall
-python3.9 --version
-# Install Prowler
-cd ~ || exit
-python3.9 -m pip install prowler-cloud
-prowler -v
-# Run Prowler
-prowler
+sudo bash
+adduser prowler
+su prowler
+pip install prowler
+cd /tmp || exit
+prowler aws
```
dashboard/__main__.py (new file, +176 lines)

```python
# Importing Packages
import sys
import warnings

import click
import dash
import dash_bootstrap_components as dbc
from colorama import Fore, Style
from dash import dcc, html
from dash.dependencies import Input, Output

from dashboard.config import folder_path_overview
from prowler.config.config import orange_color
from prowler.lib.banner import print_banner

warnings.filterwarnings("ignore")

cli = sys.modules["flask.cli"]
print_banner(verbose=False)
print(
    f"{Fore.GREEN}Loading all CSV files from the folder {folder_path_overview} ...\n{Style.RESET_ALL}"
)
cli.show_server_banner = lambda *x: click.echo(
    f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are a {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} customer and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
)

# Initialize the app - incorporate css
dashboard = dash.Dash(
    __name__,
    external_stylesheets=[dbc.themes.DARKLY],
    use_pages=True,
    suppress_callback_exceptions=True,
    title="Prowler Dashboard",
)

# Logo
prowler_logo = html.Img(
    src="https://prowler.com/wp-content/uploads/logo-dashboard.png", alt="Prowler Logo"
)

menu_icons = {
    "overview": "/assets/images/icons/overview.svg",
    "compliance": "/assets/images/icons/compliance.svg",
}


# Function to generate navigation links
def generate_nav_links(current_path):
    nav_links = []
    for page in dash.page_registry.values():
        # Gets the icon URL based on the page name
        icon_url = menu_icons.get(page["name"].lower())
        is_active = (
            " bg-prowler-stone-950 border-r-4 border-solid border-prowler-lime"
            if current_path == page["relative_path"]
            else ""
        )
        link_class = f"block hover:bg-prowler-stone-950 hover:border-r-4 hover:border-solid hover:border-prowler-lime{is_active}"

        link_content = html.Span(
            [
                html.Img(src=icon_url, className="w-5"),
                html.Span(page["name"], className="font-medium text-base leading-6"),
            ],
            className="flex justify-center lg:justify-normal items-center gap-x-3 py-2 px-3",
        )

        nav_link = html.Li(
            dcc.Link(link_content, href=page["relative_path"], className=link_class)
        )
        nav_links.append(nav_link)
    return nav_links


def generate_help_menu():
    help_links = [
        {
            "title": "Help",
            "url": "https://github.com/prowler-cloud/prowler/issues",
            "icon": "/assets/images/icons/help.png",
        },
        {
            "title": "Docs",
            "url": "https://docs.prowler.com",
            "icon": "/assets/images/icons/docs.png",
        },
    ]

    link_class = "block hover:bg-prowler-stone-950 hover:border-r-4 hover:border-solid hover:border-prowler-lime"

    menu_items = []
    for link in help_links:
        menu_item = html.Li(
            html.A(
                html.Span(
                    [
                        html.Img(src=link["icon"], className="w-5"),
                        html.Span(
                            link["title"], className="font-medium text-base leading-6"
                        ),
                    ],
                    className="flex items-center gap-x-3 py-2 px-3",
                ),
                href=link["url"],
                target="_blank",
                className=link_class,
            )
        )
        menu_items.append(menu_item)

    return menu_items


# Layout
dashboard.layout = html.Div(
    [
        dcc.Location(id="url", refresh=False),
        html.Link(rel="icon", href="assets/favicon.ico"),
        # Placeholder for dynamic navigation bar
        html.Div(
            [
                html.Div(
                    id="navigation-bar", className="bg-prowler-stone-900 min-w-36 z-10"
                ),
                html.Div(
                    [
                        dash.page_container,
                    ],
                    id="content_select",
                    className="bg-prowler-white w-full col-span-11 h-screen mx-auto overflow-y-scroll no-scrollbar px-10 py-7",
                ),
            ],
            className="grid custom-grid 2xl:custom-grid-large h-screen",
        ),
    ],
    className="h-screen mx-auto",
)


# Callback to update navigation bar
@dashboard.callback(Output("navigation-bar", "children"), [Input("url", "pathname")])
def update_nav_bar(pathname):
    return html.Div(
        [
            html.Div([prowler_logo], className="mb-8 px-3"),
            html.H6(
                "Dashboards",
                className="px-3 text-prowler-stone-500 text-sm opacity-90 font-regular mb-2",
            ),
            html.Nav(
                [html.Ul(generate_nav_links(pathname), className="")],
                className="flex flex-col gap-y-6",
            ),
            html.Nav(
                [
                    html.A(
                        [
                            html.Span(
                                [
                                    html.Img(src="assets/favicon.ico", className="w-5"),
                                    "Subscribe to prowler SaaS",
                                ],
                                className="flex items-center gap-x-3",
                            ),
                        ],
                        href="https://prowler.com/",
                        target="_blank",
                        className="block p-3 uppercase text-xs hover:bg-prowler-stone-950 hover:border-r-4 hover:border-solid hover:border-prowler-lime",
                    ),
                    html.Ul(generate_help_menu(), className=""),
                ],
                className="flex flex-col gap-y-6 mt-auto",
            ),
        ],
        className="flex flex-col bg-prowler-stone-900 py-7 h-full",
    )
```
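The `use_pages=True` flag in this new module is what populates `dash.page_registry`, which `generate_nav_links` iterates to build the sidebar. A minimal, self-contained sketch of that mechanism; the page names and layouts below are illustrative, not taken from the diff:

```python
# Minimal sketch of the Dash "pages" mechanism generate_nav_links() relies on.
# Page names and layouts here are hypothetical.
import dash
from dash import html

app = dash.Dash(__name__, use_pages=True, pages_folder="")

# Registering a page adds an entry to dash.page_registry with, among other
# fields, "name" and "relative_path".
dash.register_page("overview", path="/", layout=html.Div("Overview page"))
dash.register_page("compliance", path="/compliance", layout=html.Div("Compliance page"))

for page in dash.page_registry.values():
    # generate_nav_links() reads exactly these two fields per page.
    print(page["name"], page["relative_path"])
```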
BIN dashboard/assets/favicon.ico (new file, 15 KiB)

dashboard/assets/images/icons/compliance.svg (new file, +4 lines, 650 B)

```xml
<svg xmlns="http://www.w3.org/2000/svg" fill="#FFF" aria-hidden="true" class="h-5 w-5" viewBox="0 0 24 24">
  <path fill-rule="evenodd" d="M9 1.5H5.625c-1.036 0-1.875.84-1.875 1.875v17.25c0 1.035.84 1.875 1.875 1.875h12.75c1.035 0 1.875-.84 1.875-1.875V12.75A3.75 3.75 0 0 0 16.5 9h-1.875a1.875 1.875 0 0 1-1.875-1.875V5.25A3.75 3.75 0 0 0 9 1.5zm6.61 10.936a.75.75 0 1 0-1.22-.872l-3.236 4.53L9.53 14.47a.75.75 0 0 0-1.06 1.06l2.25 2.25a.75.75 0 0 0 1.14-.094l3.75-5.25z" clip-rule="evenodd"/>
  <path d="M12.971 1.816A5.23 5.23 0 0 1 14.25 5.25v1.875c0 .207.168.375.375.375H16.5a5.23 5.23 0 0 1 3.434 1.279 9.768 9.768 0 0 0-6.963-6.963z"/>
</svg>
```

BIN dashboard/assets/images/icons/docs.png (new file, 734 B)
BIN dashboard/assets/images/icons/help-black.png (new file, 441 B)
BIN dashboard/assets/images/icons/help.png (new file, 934 B)

dashboard/assets/images/icons/overview.svg (new file, +4 lines, 435 B)

```xml
<svg xmlns="http://www.w3.org/2000/svg" fill="#FFF" aria-hidden="true" class="h-5 w-5" viewBox="0 0 24 24">
  <path fill-rule="evenodd" d="M2.25 13.5a8.25 8.25 0 0 1 8.25-8.25.75.75 0 0 1 .75.75v6.75H18a.75.75 0 0 1 .75.75 8.25 8.25 0 0 1-16.5 0z" clip-rule="evenodd"/>
  <path fill-rule="evenodd" d="M12.75 3a.75.75 0 0 1 .75-.75 8.25 8.25 0 0 1 8.25 8.25.75.75 0 0 1-.75.75h-7.5a.75.75 0 0 1-.75-.75V3z" clip-rule="evenodd"/>
</svg>
```

BIN dashboard/assets/images/providers/aws_provider.png (new file, 10 KiB)
BIN dashboard/assets/images/providers/azure_provider.png (new file, 6.0 KiB)
BIN dashboard/assets/images/providers/gcp_provider.png (new file, 245 KiB)
BIN dashboard/assets/images/providers/k8s_provider.png (new file, 15 KiB)
BIN dashboard/assets/logo.png (new file, 11 KiB)

dashboard/assets/styles/dist/output.css (new vendored file, 1387 lines)

dashboard/common_methods.py (new file, 2221 lines)
dashboard/compliance/aws_account_security_onboarding_aws.py (new file, +23 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format2

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format2(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```
@@ -0,0 +1,23 @@ (new file)

```python
import warnings

from dashboard.common_methods import get_section_containers_format1

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format1(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

@@ -0,0 +1,23 @@ (new file)

```python
import warnings

from dashboard.common_methods import get_section_containers_format1

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format1(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

@@ -0,0 +1,22 @@ (new file)

```python
import warnings

from dashboard.common_methods import get_section_containers_format2

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ATTRIBUTES_NAME",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]
    return get_section_containers_format2(
        aux, "REQUIREMENTS_ATTRIBUTES_NAME", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```

@@ -0,0 +1,23 @@ (new file)

```python
import warnings

from dashboard.common_methods import get_section_containers_format2

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ATTRIBUTES_NAME",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]

    return get_section_containers_format2(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ATTRIBUTES_NAME"
    )
```
dashboard/compliance/cis_1_4_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```
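These compliance modules all follow one pattern: select the columns the shared renderer needs, then delegate to a `get_section_containers_*` helper from `common_methods.py`. Most call `.copy()` on the column subset so later mutations operate on an independent frame instead of a view of `data`, which would otherwise trigger pandas' `SettingWithCopyWarning`. A toy illustration with fabricated data:

```python
# Why the modules call .copy() after the column subset. Data is fabricated;
# the real frames come from Prowler's compliance CSV output.
import pandas as pd

data = pd.DataFrame(
    {
        "REQUIREMENTS_ID": ["1.1", "1.2"],
        "REQUIREMENTS_ATTRIBUTES_SECTION": ["1. IAM", "1. IAM"],
        "CHECKID": ["iam_check_a", "iam_check_b"],
        "STATUS": ["PASS", "FAIL"],
        "EXTRA_COLUMN": ["dropped", "dropped"],
    }
)

aux = data[
    ["REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION", "CHECKID", "STATUS"]
].copy()  # independent frame: safe to mutate without warnings
aux["STATUS"] = aux["STATUS"].str.lower()
print(aux)
```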
dashboard/compliance/cis_1_5_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```

dashboard/compliance/cis_1_8_kubernetes.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```

dashboard/compliance/cis_2_0_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```

dashboard/compliance/cis_2_0_azure.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```

dashboard/compliance/cis_2_0_gcp.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```

dashboard/compliance/cis_2_1_azure.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```

dashboard/compliance/cis_3_0_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```

dashboard/compliance/cisa_aws.py (new file, +23 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format1

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format1(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/ens_rd2022_aws.py (new file, +29 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_ens

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ATTRIBUTES_MARCO",
            "REQUIREMENTS_ATTRIBUTES_CATEGORIA",
            "REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL",
            "REQUIREMENTS_ATTRIBUTES_TIPO",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]

    return get_section_containers_ens(
        aux,
        "REQUIREMENTS_ATTRIBUTES_MARCO",
        "REQUIREMENTS_ATTRIBUTES_CATEGORIA",
        "REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL",
        "REQUIREMENTS_ATTRIBUTES_TIPO",
    )
```

dashboard/compliance/fedramp_low_revision_4_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/fedramp_moderate_revision_4_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/ffiec_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/gdpr_aws.py (new file, +23 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format1

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format1(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/gxp_21_cfr_part_11_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/gxp_eu_annex_11_aws.py (new file, +23 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format1

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format1(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/hipaa_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/iso27001_2013_aws.py (new file, +23 lines)

```python
import warnings

from dashboard.common_methods import get_section_container_iso

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ATTRIBUTES_CATEGORY",
            "REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID",
            "REQUIREMENTS_ATTRIBUTES_OBJETIVE_NAME",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]
    return get_section_container_iso(
        aux, "REQUIREMENTS_ATTRIBUTES_CATEGORY", "REQUIREMENTS_ATTRIBUTES_OBJETIVE_ID"
    )
```

dashboard/compliance/mitre_attack_aws.py (new file, +23 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format2

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_SUBTECHNIQUES",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format2(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_SUBTECHNIQUES"
    )
```

dashboard/compliance/nist_800_171_revision_2_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/nist_800_53_revision_4_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/nist_800_53_revision_5_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/nist_csf_1_1_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```

dashboard/compliance/pci_3_2_1_aws.py (new file, +20 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_pci

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]

    return get_section_containers_pci(aux, "REQUIREMENTS_ID")
```

dashboard/compliance/rbi_cyber_security_framework_aws.py (new file, +20 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_rbi

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ]
    return get_section_containers_rbi(aux, "REQUIREMENTS_ID")
```

dashboard/compliance/soc2_aws.py (new file, +24 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_format3

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_format3(
        aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
    )
```
dashboard/config.py (new file, +29 lines)

```python
import os

# Emojis to be used in the compliance table
pass_emoji = "✅"
fail_emoji = "❌"
info_emoji = "ℹ️"
manual_emoji = "✋🏽"

# Main colors
fail_color = "#e67272"
pass_color = "#54d283"
info_color = "#2684FF"
manual_color = "#636c78"

# Muted colors
muted_fail_color = "#fca903"
muted_pass_color = "#03fccf"
muted_manual_color = "#b33696"

# Severity colors
critical_color = "#951649"
high_color = "#e11d48"
medium_color = "#ee6f15"
low_color = "#f9f5e6"
informational_color = "#3274d9"

# Folder output path
folder_path_overview = os.getcwd() + "/output"
folder_path_compliance = os.getcwd() + "/output/compliance"
```
dashboard/lib/cards.py (new file, +157 lines)

```python
from typing import List

from dash import html


def create_provider_card(
    provider: str, provider_logo: str, account_type: str, filtered_data
) -> List[html.Div]:
    """
    Card to display the provider's name and icon.
    Args:
        provider (str): Name of the provider.
        provider_icon (str): Icon of the provider.
    Returns:
        html.Div: Card to display the provider's name and icon.
    """
    accounts = len(
        filtered_data[filtered_data["PROVIDER"] == provider]["ACCOUNT_UID"].unique()
    )
    checks_executed = len(
        filtered_data[filtered_data["PROVIDER"] == provider]["CHECK_ID"].unique()
    )
    fails = len(
        filtered_data[
            (filtered_data["PROVIDER"] == provider)
            & (filtered_data["STATUS"] == "FAIL")
        ]
    )
    passes = len(
        filtered_data[
            (filtered_data["PROVIDER"] == provider)
            & (filtered_data["STATUS"] == "PASS")
        ]
    )
    # Take the values in the MUTED colum that are true for the provider
    if "MUTED" in filtered_data.columns:
        muted = len(
            filtered_data[
                (filtered_data["PROVIDER"] == provider)
                & (filtered_data["MUTED"] == "True")
            ]
        )
    else:
        muted = 0

    return [
        html.Div(
            [
                html.Div(
                    [
                        html.Div(
                            [
                                html.Div(
                                    [
                                        html.Div([provider_logo], className="w-8"),
                                    ],
                                    className="p-2 shadow-box-up rounded-full",
                                ),
                                html.H5(
                                    f"{provider.upper()} {account_type}",
                                    className="text-base font-semibold leading-snug tracking-normal text-gray-900",
                                ),
                            ],
                            className="flex justify-between items-center mb-3",
                        ),
                        html.Div(
                            [
                                html.Div(
                                    [
                                        html.Span(
                                            account_type,
                                            className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
                                        ),
                                        html.Div(
                                            accounts,
                                            className="inline-block text-xs text-prowler-stone-900 font-bold shadow-box-down px-4 py-1 rounded-lg text-center col-span-5 col-end-13",
                                        ),
                                    ],
                                    className="grid grid-cols-12",
                                ),
                                html.Div(
                                    [
                                        html.Span(
                                            "Checks",
                                            className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
                                        ),
                                        html.Div(
                                            checks_executed,
                                            className="inline-block text-xs text-prowler-stone-900 font-bold shadow-box-down px-4 py-1 rounded-lg text-center col-span-5 col-end-13",
                                        ),
                                    ],
                                    className="grid grid-cols-12",
                                ),
                                html.Div(
                                    [
                                        html.Span(
                                            "FAILED",
                                            className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
                                        ),
                                        html.Div(
                                            [
                                                html.Div(
                                                    fails,
                                                    className="m-[2px] px-4 py-1 rounded-lg bg-gradient-failed",
                                                ),
                                            ],
                                            className="inline-block text-xs font-bold shadow-box-down rounded-lg text-center col-span-5 col-end-13",
                                        ),
                                    ],
                                    className="grid grid-cols-12",
                                ),
                                html.Div(
                                    [
                                        html.Span(
                                            "PASSED",
                                            className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
                                        ),
                                        html.Div(
                                            [
                                                html.Div(
                                                    passes,
                                                    className="m-[2px] px-4 py-1 rounded-lg bg-gradient-passed",
                                                ),
                                            ],
                                            className="inline-block text-xs font-bold shadow-box-down rounded-lg text-center col-span-5 col-end-13",
                                        ),
                                    ],
                                    className="grid grid-cols-12",
                                ),
                                html.Div(
                                    [
                                        html.Span(
                                            "MUTED",
                                            className="text-prowler-stone-900 inline-block text-3xs font-bold uppercase transition-all rounded-lg text-prowler-stone-900 shadow-box-up px-4 py-1 text-center col-span-6 flex justify-center items-center",
                                        ),
                                        html.Div(
                                            [
                                                html.Div(
                                                    muted,
                                                    className="m-[2px] px-4 py-1 rounded-lg bg-gradient-muted",
                                                ),
                                            ],
                                            className="inline-block text-xs font-bold shadow-box-down rounded-lg text-center col-span-5 col-end-13",
                                        ),
                                    ],
                                    className="grid grid-cols-12",
                                ),
                            ],
                            className="grid gap-x-8 gap-y-4",
                        ),
                    ],
                    className="px-4 py-3",
                ),
            ],
            className="relative flex flex-col bg-white shadow-provider rounded-xl w-full transition ease-in-out delay-100 hover:-translate-y-1 hover:scale-110 hover:z-50 hover:cursor-pointer",
        )
    ]
```
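The counting logic in `create_provider_card` is plain pandas boolean filtering; note that `MUTED` is compared against the string `"True"` because the values come from CSV text rather than a boolean dtype. A standalone sketch with fabricated data:

```python
# Standalone sketch of the counting inside create_provider_card(); the
# DataFrame here is fabricated for illustration.
import pandas as pd

filtered_data = pd.DataFrame(
    {
        "PROVIDER": ["aws", "aws", "gcp"],
        "ACCOUNT_UID": ["111", "111", "proj-1"],
        "CHECK_ID": ["s3_bucket_public", "iam_mfa", "gcs_public"],
        "STATUS": ["FAIL", "PASS", "PASS"],
        "MUTED": ["False", "True", "False"],  # stored as strings in the CSV
    }
)

provider = "aws"
accounts = filtered_data[filtered_data["PROVIDER"] == provider]["ACCOUNT_UID"].nunique()
fails = len(
    filtered_data[
        (filtered_data["PROVIDER"] == provider) & (filtered_data["STATUS"] == "FAIL")
    ]
)
# String comparison, matching what the card code does with CSV-sourced values.
muted = len(
    filtered_data[
        (filtered_data["PROVIDER"] == provider) & (filtered_data["MUTED"] == "True")
    ]
)
print(accounts, fails, muted)  # -> 1 1 1
```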
dashboard/lib/dropdowns.py (new file, +289 lines)

```python
from dash import dcc, html


def create_date_dropdown(assesment_times: list) -> html.Div:
    """
    Dropdown to select the date of the last available scan for each account.
    Args:
        assesment_times (list): List of dates of the last available scan for each account.
    Returns:
        html.Div: Dropdown to select the date of the last available scan for each account.
    """
    return html.Div(
        [
            html.Div(
                [
                    html.Label(
                        "Assessment date (last available scan) ",
                        className="text-prowler-stone-900 font-bold text-sm",
                    ),
                    html.Img(
                        id="info-file-over",
                        src="/assets/images/icons/help-black.png",
                        className="w-5",
                        title="The date of the last available scan for each account is displayed here. If you have not run prowler yet, the date will be empty.",
                    ),
                ],
                style={"display": "inline-flex"},
            ),
            dcc.Dropdown(
                id="report-date-filter",
                options=[
                    {"label": account, "value": account} for account in assesment_times
                ],
                value=assesment_times[0],
                clearable=False,
                multi=False,
                style={"color": "#000000", "width": "100%"},
            ),
        ],
    )


def create_date_dropdown_compliance(assesment_times: list) -> html.Div:
    """
    Dropdown to select the date of the last available scan for each account.
    Args:
        assesment_times (list): List of dates of the last available scan for each account.
    Returns:
        html.Div: Dropdown to select the date of the last available scan for each account.
    """
    return html.Div(
        [
            html.Label(
                "Assesment Date:", className="text-prowler-stone-900 font-bold text-sm"
            ),
            dcc.Dropdown(
                id="date-filter-analytics",
                options=[
                    {"label": account, "value": account} for account in assesment_times
                ],
                value=assesment_times[0],
                clearable=False,
                multi=False,
                style={"color": "#000000", "width": "100%"},
            ),
        ],
    )


def create_region_dropdown(regions: list) -> html.Div:
    """
    Dropdown to select the region of the account.
    Args:
        regions (list): List of regions of the account.
    Returns:
        html.Div: Dropdown to select the region of the account.
    """
    return html.Div(
        [
            html.Label(
                "Region / Location / Namespace :",
                className="text-prowler-stone-900 font-bold text-sm",
            ),
            dcc.Dropdown(
                id="region-filter",
                options=[{"label": region, "value": region} for region in regions],
                value=["All"],  # Initial selection is ALL
                clearable=False,
                multi=True,
                style={"color": "#000000", "width": "100%"},
            ),
        ],
    )


def create_region_dropdown_compliance(regions: list) -> html.Div:
    """
    Dropdown to select the region of the account.
    Args:
        regions (list): List of regions of the account.
    Returns:
        html.Div: Dropdown to select the region of the account.
    """
    return html.Div(
        [
            html.Label(
                "Region / Location / Namespace :",
                className="text-prowler-stone-900 font-bold text-sm",
            ),
            dcc.Dropdown(
                id="region-filter-compliance",
                options=[{"label": region, "value": region} for region in regions],
                value=["All"],  # Initial selection is ALL
                clearable=False,
                multi=True,
                style={"color": "#000000", "width": "100%"},
            ),
        ],
    )


def create_account_dropdown(accounts: list) -> html.Div:
    """
    Dropdown to select the account.
    Args:
        accounts (list): List of accounts.
    Returns:
        html.Div: Dropdown to select the account.
    """
    return html.Div(
        [
            html.Label(
                "Account / Subscription / Project / Cluster :",
                className="text-prowler-stone-900 font-bold text-sm",
            ),
            dcc.Dropdown(
                id="cloud-account-filter",
                options=[{"label": account, "value": account} for account in accounts],
                value=["All"],  # Initial selection is ALL
                clearable=False,
                multi=True,
                style={"color": "#000000", "width": "100%"},
            ),
        ],
    )


def create_account_dropdown_compliance(accounts: list) -> html.Div:
    """
    Dropdown to select the account.
    Args:
        accounts (list): List of accounts.
    Returns:
        html.Div: Dropdown to select the account.
    """
    return html.Div(
        [
            html.Label(
                "Account / Subscription / Project / Cluster :",
                className="text-prowler-stone-900 font-bold text-sm",
            ),
            dcc.Dropdown(
                id="cloud-account-filter-compliance",
                options=[{"label": account, "value": account} for account in accounts],
                value=["All"],  # Initial selection is ALL
                clearable=False,
                multi=True,
                style={"color": "#000000", "width": "100%"},
            ),
        ],
    )


def create_compliance_dropdown(compliance: list) -> html.Div:
    """
    Dropdown to select the compliance.
    Args:
        compliance (list): List of compliance.
    Returns:
        html.Div: Dropdown to select the compliance.
    """
    return html.Div(
        [
            html.Label(
                "Compliance:", className="text-prowler-stone-900 font-bold text-sm"
            ),
            dcc.Dropdown(
                id="report-compliance-filter",
                options=[{"label": i, "value": i} for i in compliance],
                value=compliance[0],
                clearable=False,
                style={"color": "#000000"},
            ),
        ],
    )


def create_severity_dropdown(severity: list) -> html.Div:
    """
    Dropdown to select the severity.
    Args:
        severity (list): List of severity.
    Returns:
        html.Div: Dropdown to select the severity.
    """
    return html.Div(
        [
            html.Label(
                "Severity:", className="text-prowler-stone-900 font-bold text-sm"
            ),
            dcc.Dropdown(
                id="severity-filter",
                options=[{"label": i, "value": i} for i in severity],
                value=["All"],
                clearable=False,
                multi=True,
                style={"color": "#000000"},
            ),
        ],
    )


def create_service_dropdown(services: list) -> html.Div:
    """
    Dropdown to select the service.
    Args:
        services (list): List of services.
    Returns:
        html.Div: Dropdown to select the service.
    """
    return html.Div(
        [
            html.Label(
                "Service:", className="text-prowler-stone-900 font-bold text-sm"
            ),
            dcc.Dropdown(
                id="service-filter",
                options=[{"label": i, "value": i} for i in services],
                value=["All"],
                clearable=False,
                multi=True,
                style={"color": "#000000"},
            ),
        ],
    )


def create_status_dropdown(status: list) -> html.Div:
    """
    Dropdown to select the status.
    Args:
        status (list): List of status.
    Returns:
        html.Div: Dropdown to select the status.
    """
    return html.Div(
        [
            html.Label("Status:", className="text-prowler-stone-900 font-bold text-sm"),
            dcc.Dropdown(
                id="status-filter",
                options=[{"label": i, "value": i} for i in status],
                value=["All"],
                clearable=False,
                multi=True,
                style={"color": "#000000"},
            ),
        ],
    )


def create_table_row_dropdown(table_rows: list) -> html.Div:
    """
    Dropdown to select the number of rows in the table.
    Args:
        table_rows (list): List of number of rows.
    Returns:
        html.Div: Dropdown to select the number of rows in the table.
    """
    return html.Div(
        [
            dcc.Dropdown(
                id="table-rows",
                options=[{"label": i, "value": i} for i in table_rows],
                value=table_rows[0],
```
|
||||
clearable=False,
|
||||
style={"color": "#000000", "margin-right": "10px"},
|
||||
),
|
||||
],
|
||||
)
|
||||
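Each factory above returns the same shape: a labeled `dcc.Dropdown` wrapped in an `html.Div`. A minimal usage sketch follows; the severity values are illustrative, not the dashboard's real data source (on older Dash versions, `app.run_server` replaces `app.run`):

```python
# Minimal mounting sketch for one of the dropdown factories above.
from dash import Dash, html

from dashboard.lib.dropdowns import create_severity_dropdown

app = Dash(__name__)
# "All" is listed first so the multi-select default value ["All"] is a real option.
app.layout = html.Div(
    [create_severity_dropdown(["All", "critical", "high", "medium", "low"])]
)

if __name__ == "__main__":
    app.run(debug=True)
```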
172
dashboard/lib/layouts.py
Normal file
@@ -0,0 +1,172 @@
from dash import dcc, html


def create_layout_overview(
    account_dropdown: html.Div,
    date_dropdown: html.Div,
    region_dropdown: html.Div,
    download_button: html.Button,
    severity_dropdown: html.Div,
    service_dropdown: html.Div,
    table_row_dropdown: html.Div,
    status_dropdown: html.Div,
) -> html.Div:
    """
    Create the layout of the overview dashboard.
    Args:
        account_dropdown (html.Div): Dropdown to select the account.
        date_dropdown (html.Div): Dropdown to select the date of the last available scan for each account.
        region_dropdown (html.Div): Dropdown to select the region of the account.
        download_button (html.Button): Button to download the findings table.
        severity_dropdown (html.Div): Dropdown to select the severity.
        service_dropdown (html.Div): Dropdown to select the service.
        table_row_dropdown (html.Div): Dropdown to select the number of rows in the table.
        status_dropdown (html.Div): Dropdown to select the status.
    Returns:
        html.Div: Layout of the dashboard.
    """
    return html.Div(
        [
            dcc.Location(id="url", refresh=False),
            html.Div(
                [
                    html.H1(
                        "Scan Overview",
                        className="text-prowler-stone-900 text-2xxl font-bold",
                    ),
                    html.Div(className="d-flex flex-wrap", id="subscribe_card"),
                ],
                className="flex justify-between border-b border-prowler-500 pb-3",
            ),
            html.Div(
                [
                    html.Div([date_dropdown], className=""),
                    html.Div([account_dropdown], className=""),
                    html.Div([region_dropdown], className=""),
                ],
                className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-3 lg:gap-y-0",
            ),
            html.Div(
                [
                    html.Div([severity_dropdown], className=""),
                    html.Div([service_dropdown], className=""),
                    html.Div([status_dropdown], className=""),
                ],
                className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-3 lg:gap-y-0",
            ),
            html.Div(
                [
                    html.Div(className="flex", id="aws_card", n_clicks=0),
                    html.Div(className="flex", id="azure_card", n_clicks=0),
                    html.Div(className="flex", id="gcp_card", n_clicks=0),
                    html.Div(className="flex", id="k8s_card", n_clicks=0),
                ],
                className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-4 lg:gap-y-0",
            ),
            html.H4(
                "Count of Findings by severity",
                className="text-prowler-stone-900 text-lg font-bold",
            ),
            html.Div(
                [
                    html.Div(
                        className="flex flex-col col-span-12 sm:col-span-6 lg:col-span-3 gap-y-4",
                        id="status_graph",
                    ),
                    html.Div(
                        className="flex flex-col col-span-12 sm:col-span-6 lg:col-span-3 gap-y-4",
                        id="two_pie_chart",
                    ),
                    html.Div(
                        className="flex flex-col col-span-12 sm:col-span-6 lg:col-span-6 col-end-13 gap-y-4",
                        id="line_plot",
                    ),
                ],
                className="grid gap-x-4 gap-y-4 grid-cols-12 lg:gap-y-0",
            ),
            html.Div(
                [
                    html.H4(
                        "Top Findings by Severity",
                        className="text-prowler-stone-900 text-lg font-bold",
                    ),
                    html.Div(
                        [
                            (
                                html.Label(
                                    "Table Rows:",
                                    className="text-prowler-stone-900 font-bold text-sm",
                                    style={"margin-right": "10px"},
                                )
                            ),
                            table_row_dropdown,
                            download_button,
                        ],
                        className="flex justify-between items-center",
                    ),
                    dcc.Download(id="download-data"),
                ],
                className="flex justify-between items-center",
            ),
            html.Div(id="table", className="grid"),
        ],
        className="grid gap-x-8 gap-y-8 2xl:container mx-auto",
    )


def create_layout_compliance(
    account_dropdown: html.Div,
    date_dropdown: html.Div,
    region_dropdown: html.Div,
    compliance_dropdown: html.Div,
) -> html.Div:
    return html.Div(
        [
            dcc.Location(id="url", refresh=False),
            html.Div(
                [
                    html.H1(
                        "Compliance",
                        className="text-prowler-stone-900 text-2xxl font-bold",
                    ),
                    html.A(
                        [
                            html.Img(src="assets/favicon.ico", className="w-5 mr-3"),
                            html.Span("Subscribe to prowler SaaS"),
                        ],
                        href="https://prowler.pro/",
                        target="_blank",
                        className="text-prowler-stone-900 inline-flex px-4 py-2 text-xs font-bold uppercase transition-all rounded-lg text-gray-900 hover:bg-prowler-stone-900/10 border-solid border-1 hover:border-prowler-stone-900/10 hover:border-solid hover:border-1 border-prowler-stone-900/10",
                    ),
                ],
                className="flex justify-between border-b border-prowler-500 pb-3",
            ),
            html.Div(
                [
                    html.Div([date_dropdown], className=""),
                    html.Div([account_dropdown], className=""),
                    html.Div([region_dropdown], className=""),
                    html.Div([compliance_dropdown], className=""),
                ],
                className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-4 lg:gap-y-0",
            ),
            html.Div(
                [
                    html.Div(
                        className="flex flex-col col-span-12 md:col-span-4 gap-y-4",
                        id="overall_status_result_graph",
                    ),
                    html.Div(
                        className="flex flex-col col-span-12 md:col-span-7 md:col-end-13 gap-y-4",
                        id="security_level_graph",
                    ),
                    html.Div(
                        className="flex flex-col col-span-12 md:col-span-2 gap-y-4",
                        id="",
                    ),
                ],
                className="grid gap-x-4 gap-y-4 grid-cols-12 lg:gap-y-0",
            ),
            html.H4(
                "Details compliance:",
                className="text-prowler-stone-900 text-lg font-bold",
            ),
            html.Div(className="flex flex-wrap", id="output"),
        ],
        className="grid gap-x-8 gap-y-8 2xl:container mx-auto",
    )
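Both layout builders are consumed by page modules that call `dash.register_page(__name__)`. A hypothetical app shell that would serve such pages with Dash's pages feature is sketched below; the real `app.py` is not part of this diff, so the wiring is an assumption:

```python
# Hypothetical app shell: with use_pages=True, every module under
# dashboard/pages/ that calls dash.register_page(__name__) is served
# automatically (Dash >= 2.5 is assumed for the pages feature).
import dash
from dash import Dash

app = Dash(__name__, use_pages=True)
app.layout = dash.page_container  # renders whichever registered page matches the URL

if __name__ == "__main__":
    app.run(debug=True)
```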
592
dashboard/pages/compliance.py
Normal file
@@ -0,0 +1,592 @@
# Standard library imports
import csv
import glob
import importlib
import os
import re
import warnings

# Third-party imports
import dash
import pandas as pd
import plotly.express as px
from dash import callback, dcc, html
from dash.dependencies import Input, Output

# Config import
from dashboard.config import (
    fail_color,
    folder_path_compliance,
    info_color,
    manual_color,
    pass_color,
)
from dashboard.lib.dropdowns import (
    create_account_dropdown_compliance,
    create_compliance_dropdown,
    create_date_dropdown_compliance,
    create_region_dropdown_compliance,
)
from dashboard.lib.layouts import create_layout_compliance

# Suppress warnings
warnings.filterwarnings("ignore")

# Global variables
# TODO: Create a flag to let the user put a custom path

csv_files = []
for file in glob.glob(os.path.join(folder_path_compliance, "*.csv")):
    with open(file, "r", newline="") as csvfile:
        reader = csv.reader(csvfile)
        num_rows = sum(1 for row in reader)
        if num_rows > 1:
            csv_files.append(file)


def load_csv_files(csv_files):
    # Load CSV files into a single pandas DataFrame.
    dfs = []
    results = []
    for file in csv_files:
        df = pd.read_csv(file, sep=";", on_bad_lines="skip")
        if "CHECKID" in df.columns:
            dfs.append(df)
            result = file
            result = result.split("/")[-1]
            result = re.sub(r"^.*?_", "", result)
            result = result.replace(".csv", "")
            result = result.upper()
            if "AWS" in result:
                if "AWS_" in result:
                    result = result.replace("_AWS", "")
                else:
                    result = result.replace("_AWS", " - AWS")
            if "GCP" in result:
                result = result.replace("_GCP", " - GCP")
            if "AZURE" in result:
                result = result.replace("_AZURE", " - AZURE")
            if "KUBERNETES" in result:
                result = result.replace("_KUBERNETES", " - KUBERNETES")
            result = result[result.find("CIS_") :]
            results.append(result)

    unique_results = set(results)
    results = list(unique_results)
    # Check if there is any CIS report in the list and divide it in level 1 and level 2
    new_results = []
    old_results = results.copy()
    for compliance_name in results:
        if "CIS_" in compliance_name:
            old_results.remove(compliance_name)
            new_results.append(compliance_name + " - Level_1")
            new_results.append(compliance_name + " - Level_2")

    results = old_results + new_results
    results.sort()
    # Handle the case where there are no CSV files
    try:
        data = pd.concat(dfs, ignore_index=True)
    except ValueError:
        data = None
    return data, results


data, results = load_csv_files(csv_files)
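The filename munging in `load_csv_files` is easiest to follow with a concrete trace; the filename below is invented for illustration:

```python
# Worked trace of the filename-to-label munging above, using an assumed
# filename; only the shape matters, not the account number or the date.
import re

result = "prowler-output-123456789012-20240101120000_cis_1.5_aws.csv".split("/")[-1]
result = re.sub(r"^.*?_", "", result)       # drop everything up to the first "_"
result = result.replace(".csv", "").upper() # -> "CIS_1.5_AWS"
result = result.replace("_AWS", " - AWS")   # no "AWS_" prefix, so the else-branch applies
result = result[result.find("CIS_"):]
assert result == "CIS_1.5 - AWS"
# The CIS split then offers "CIS_1.5 - AWS - Level_1" and "... - Level_2".
```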
if data is None:
    dash.register_page(__name__)
    layout = html.Div(
        [
            html.Div(
                [
                    html.H5(
                        "No data found, check if the CSV files are in the correct folder.",
                        className="card-title",
                        style={"text-align": "left"},
                    )
                ],
                style={
                    "width": "99%",
                    "margin-right": "0.8%",
                    "margin-bottom": "10px",
                },
            )
        ]
    )
else:
    data["ASSESSMENTDATE"] = pd.to_datetime(data["ASSESSMENTDATE"])
    data["ASSESSMENT_TIME"] = data["ASSESSMENTDATE"].dt.strftime("%Y-%m-%d %H:%M:%S")

    data_values = data["ASSESSMENT_TIME"].unique()
    data_values.sort()
    data_values = data_values[::-1]
    aux = []
    for value in data_values:
        if value.split(" ")[0] not in [aux[i].split(" ")[0] for i in range(len(aux))]:
            aux.append(value)
    data_values = aux

    data = data[data["ASSESSMENT_TIME"].isin(data_values)]
    data["ASSESSMENT_TIME"] = data["ASSESSMENT_TIME"].apply(lambda x: x.split(" ")[0])

    # Select Compliance - Dropdown
    compliance_dropdown = create_compliance_dropdown(results)

    # Select Account - Dropdown
    select_account_dropdown_list = ["All"]
    # Append to the list the unique values of the columns ACCOUNTID, PROJECTID and SUBSCRIPTIONID if they exist
    if "ACCOUNTID" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["ACCOUNTID"].unique()
        )
    if "PROJECTID" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["PROJECTID"].unique()
        )
    if "SUBSCRIPTIONID" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["SUBSCRIPTIONID"].unique()
        )
    if "SUBSCRIPTION" in data.columns:
        select_account_dropdown_list = select_account_dropdown_list + list(
            data["SUBSCRIPTION"].unique()
        )

    list_items = []
    for item in select_account_dropdown_list:
        if item.__class__.__name__ == "str" and "nan" not in item:
            list_items.append(item)

    account_dropdown = create_account_dropdown_compliance(list_items)

    # Select Region - Dropdown
    select_region_dropdown_list = ["All"]
    # Append to the list the unique values of the column REGION or LOCATION if it exists
    if "REGION" in data.columns:
        # Handle the case where the column REGION is empty
        data["REGION"] = data["REGION"].fillna("-")
        select_region_dropdown_list = select_region_dropdown_list + list(
            data["REGION"].unique()
        )
    if "LOCATION" in data.columns:
        # Handle the case where the column LOCATION is empty
        data["LOCATION"] = data["LOCATION"].fillna("-")
        select_region_dropdown_list = select_region_dropdown_list + list(
            data["LOCATION"].unique()
        )

    # Clear the list from None and NaN values
    list_items = []
    for item in select_region_dropdown_list:
        if item.__class__.__name__ == "str":
            list_items.append(item)

    region_dropdown = create_region_dropdown_compliance(list_items)

    # Select Date - Dropdown
    date_dropdown = create_date_dropdown_compliance(
        list(data["ASSESSMENT_TIME"].unique())
    )

    dash.register_page(__name__)

    layout = create_layout_compliance(
        account_dropdown, date_dropdown, region_dropdown, compliance_dropdown
    )
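The `aux` loop above keeps only the most recent scan per calendar day; a small self-contained illustration with assumed timestamps:

```python
# Illustration of the keep-newest-scan-per-day loop above (timestamps assumed).
data_values = sorted(
    ["2024-05-02 09:00:00", "2024-05-02 17:30:00", "2024-05-01 08:00:00"],
    reverse=True,
)
aux = []
for value in data_values:
    if value.split(" ")[0] not in [kept.split(" ")[0] for kept in aux]:
        aux.append(value)
assert aux == ["2024-05-02 17:30:00", "2024-05-01 08:00:00"]
```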
@callback(
    [
        Output("output", "children"),
        Output("overall_status_result_graph", "children"),
        Output("security_level_graph", "children"),
        Output("cloud-account-filter-compliance", "value"),
        Output("cloud-account-filter-compliance", "options"),
        Output("region-filter-compliance", "value"),
        Output("region-filter-compliance", "options"),
        Output("date-filter-analytics", "value"),
        Output("date-filter-analytics", "options"),
    ],
    Input("report-compliance-filter", "value"),
    Input("cloud-account-filter-compliance", "value"),
    Input("region-filter-compliance", "value"),
    Input("date-filter-analytics", "value"),
)
def display_data(
    analytics_input, account_filter, region_filter_analytics, date_filter_analytics
):
    current_compliance = analytics_input
    analytics_input = analytics_input.replace(" - ", "_")
    analytics_input = analytics_input.lower()

    # Check if the compliance selected is the level 1 or level 2 of the CIS
    is_level_1 = "level_1" in analytics_input
    analytics_input = analytics_input.replace("_level_1", "").replace("_level_2", "")

    # Filter the data based on the compliance selected
    files = [file for file in csv_files if analytics_input in file]

    def load_csv_files(files):
        """Load CSV files into a single pandas DataFrame."""
        dfs = []
        for file in files:
            df = pd.read_csv(file, sep=";", on_bad_lines="skip")
            dfs.append(df.astype(str))
        return pd.concat(dfs, ignore_index=True)

    data = load_csv_files(files)
    # Rename the column LOCATION to REGION for GCP or Azure
    if "gcp" in analytics_input or "azure" in analytics_input:
        data = data.rename(columns={"LOCATION": "REGION"})

    # Add the column ACCOUNTID to the data if the provider is kubernetes
    if "kubernetes" in analytics_input:
        data.rename(columns={"CONTEXT": "ACCOUNTID"}, inplace=True)
        data.rename(columns={"NAMESPACE": "REGION"}, inplace=True)
    if "REQUIREMENTS_ATTRIBUTES_PROFILE" in data.columns:
        data["REQUIREMENTS_ATTRIBUTES_PROFILE"] = data[
            "REQUIREMENTS_ATTRIBUTES_PROFILE"
        ].apply(lambda x: x.split(" - ")[0])
        # Filter the chosen level of the CIS
        if is_level_1:
            data = data[data["REQUIREMENTS_ATTRIBUTES_PROFILE"] == "Level 1"]

    # Rename the column PROJECTID to ACCOUNTID for GCP
    if data.columns.str.contains("PROJECTID").any():
        data.rename(columns={"PROJECTID": "ACCOUNTID"}, inplace=True)

    # Rename the column SUBSCRIPTIONID to ACCOUNTID for Azure
    if data.columns.str.contains("SUBSCRIPTIONID").any():
        data.rename(columns={"SUBSCRIPTIONID": "ACCOUNTID"}, inplace=True)
    # Handle v3 azure cis compliance
    if data.columns.str.contains("SUBSCRIPTION").any():
        data.rename(columns={"SUBSCRIPTION": "ACCOUNTID"}, inplace=True)
        data["REGION"] = "-"
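These chained, provider-specific renames all funnel into two canonical columns, `ACCOUNTID` and `REGION`. One possible table-driven refactor is sketched below; the helper name and mapping are illustrative, not part of the dashboard's code:

```python
# Sketch of a table-driven alternative to the chained renames above; the
# helper and the mapping are invented for illustration.
import pandas as pd

PROVIDER_COLUMN_MAP = {
    "PROJECTID": "ACCOUNTID",       # GCP
    "SUBSCRIPTIONID": "ACCOUNTID",  # Azure
    "SUBSCRIPTION": "ACCOUNTID",    # Azure CIS v3 output
    "CONTEXT": "ACCOUNTID",         # Kubernetes
    "LOCATION": "REGION",           # GCP / Azure
    "NAMESPACE": "REGION",          # Kubernetes
}


def normalize_columns(df: pd.DataFrame) -> pd.DataFrame:
    # Rename only the columns actually present in this provider's output.
    present = {old: new for old, new in PROVIDER_COLUMN_MAP.items() if old in df.columns}
    return df.rename(columns=present)
```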
    # Filter ACCOUNT
    if account_filter == ["All"]:
        updated_cloud_account_values = data["ACCOUNTID"].unique()
    elif "All" in account_filter and len(account_filter) > 1:
        # Remove 'All' from the list
        account_filter.remove("All")
        updated_cloud_account_values = account_filter
    elif len(account_filter) == 0:
        updated_cloud_account_values = data["ACCOUNTID"].unique()
        account_filter = ["All"]
    else:
        updated_cloud_account_values = account_filter

    data = data[data["ACCOUNTID"].isin(updated_cloud_account_values)]

    account_filter_options = list(data["ACCOUNTID"].unique())
    account_filter_options = account_filter_options + ["All"]
    for item in account_filter_options:
        if "nan" in item or item.__class__.__name__ != "str" or item is None:
            account_filter_options.remove(item)

    # Filter REGION
    if region_filter_analytics == ["All"]:
        updated_region_account_values = data["REGION"].unique()
    elif "All" in region_filter_analytics and len(region_filter_analytics) > 1:
        # Remove 'All' from the list
        region_filter_analytics.remove("All")
        updated_region_account_values = region_filter_analytics
    elif len(region_filter_analytics) == 0:
        updated_region_account_values = data["REGION"].unique()
        region_filter_analytics = ["All"]
    else:
        updated_region_account_values = region_filter_analytics

    data = data[data["REGION"].isin(updated_region_account_values)]

    region_filter_options = list(data["REGION"].unique())
    region_filter_options = region_filter_options + ["All"]
    for item in region_filter_options:
        if item == "nan" or item.__class__.__name__ != "str":
            region_filter_options.remove(item)

    data["ASSESSMENTDATE"] = pd.to_datetime(data["ASSESSMENTDATE"], errors="coerce")
    data["ASSESSMENTDATE"] = data["ASSESSMENTDATE"].dt.strftime("%Y-%m-%d %H:%M:%S")

    # Choosing the date that is the most recent
    data_values = data["ASSESSMENTDATE"].unique()
    data_values.sort()
    data_values = data_values[::-1]
    aux = []

    data_values = [str(i) for i in data_values]
    for value in data_values:
        if value.split(" ")[0] not in [aux[i].split(" ")[0] for i in range(len(aux))]:
            aux.append(value)
    data_values = [str(i) for i in aux]

    data = data[data["ASSESSMENTDATE"].isin(data_values)]
    data["ASSESSMENTDATE"] = data["ASSESSMENTDATE"].apply(lambda x: x.split(" ")[0])

    options_date = data["ASSESSMENTDATE"].unique()
    options_date.sort()
    options_date = options_date[::-1]

    # Filter DATE
    if date_filter_analytics in options_date:
        data = data[data["ASSESSMENTDATE"] == date_filter_analytics]
    else:
        date_filter_analytics = options_date[0]
        data = data[data["ASSESSMENTDATE"] == date_filter_analytics]
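One caution about the two option-pruning loops above: calling `remove()` on a list while iterating over it skips the element that follows each removal, so consecutive `nan` entries can survive. A comprehension filters the same values safely:

```python
# The remove-while-iterating pattern above can skip elements; a comprehension
# filters the same values without mutating the list mid-loop.
account_filter_options = ["123", "nan", "nan", "456", "All"]
account_filter_options = [
    item
    for item in account_filter_options
    if isinstance(item, str) and "nan" not in item
]
assert account_filter_options == ["123", "456", "All"]
```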
    if data.empty:
        fig = px.pie()
        pie_1 = dcc.Graph(
            figure=fig,
            config={"displayModeBar": False},
            style={"height": "250px", "width": "250px", "right": "0px"},
        )

        return [
            html.Div(
                [
                    html.H5(
                        "No data found for this compliance",
                        className="card-title",
                        style={"text-align": "left"},
                    )
                ],
                style={
                    "width": "99%",
                    "margin-right": "0.8%",
                    "margin-bottom": "10px",
                },
            )
        ]
    else:
        # Check cases where the compliance starts with AWS_
        if "aws_" in analytics_input:
            analytics_input = analytics_input + "_aws"
        try:
            current = analytics_input.replace(".", "_")
            compliance_module = importlib.import_module(
                f"dashboard.compliance.{current}"
            )
            data.drop_duplicates(keep="first", inplace=True)
            table = compliance_module.get_table(data)
        except ModuleNotFoundError:
            table = html.Div(
                [
                    html.H5(
                        "No data found for this compliance",
                        className="card-title",
                        style={"text-align": "left", "color": "black"},
                    )
                ],
                style={
                    "width": "99%",
                    "margin-right": "0.8%",
                    "margin-bottom": "10px",
                },
            )

        df = data.copy()
        df = df.groupby(["STATUS"]).size().reset_index(name="counts")
        df = df.sort_values(by=["counts"], ascending=False)

        # Pie 1
        pie_1 = get_pie(df)

        # Get the pie2 depending on the compliance
        df = data.copy()

        current_filter = ""

        if "pci" in analytics_input:
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ID")
            current_filter = "req_id"
        elif (
            "REQUIREMENTS_ATTRIBUTES_SECTION" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_SECTION"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_SECTION")
            current_filter = "sections"
        elif (
            "REQUIREMENTS_ATTRIBUTES_CATEGORIA" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_CATEGORIA"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_CATEGORIA")
            current_filter = "categorias"
        elif (
            "REQUIREMENTS_ATTRIBUTES_CATEGORY" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_CATEGORY"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_CATEGORY")
            current_filter = "categories"
        elif (
            "REQUIREMENTS_ATTRIBUTES_SERVICE" in df.columns
            and not df["REQUIREMENTS_ATTRIBUTES_SERVICE"].isnull().values.any()
        ):
            pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_SERVICE")
            current_filter = "services"
        else:
            fig = px.pie()
            fig.update_layout(
                margin=dict(l=0, r=0, t=0, b=0),
                autosize=True,
                showlegend=False,
                paper_bgcolor="#303030",
            )
            pie_2 = dcc.Graph(
                figure=fig,
                config={"displayModeBar": False},
                style={"height": "250px", "width": "250px", "right": "0px"},
            )
            current_filter = "none"

        # Analytics table
        if not analytics_input:
            analytics_input = ""

        table_output = get_table(current_compliance, table)

        overall_status_result_graph = get_graph(pie_1, "Overall Status Result")

        security_level_graph = get_graph(
            pie_2, f"Top 5 failed {current_filter} by findings"
        )

        return (
            table_output,
            overall_status_result_graph,
            security_level_graph,
            account_filter,
            account_filter_options,
            region_filter_analytics,
            region_filter_options,
            date_filter_analytics,
            options_date,
        )
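`display_data` resolves its table renderer dynamically: it imports `dashboard.compliance.<compliance_name>` and calls that module's `get_table(data)`. A minimal sketch of such a plug-in module follows; the module name, column, and rendering are assumed for illustration:

```python
# Hypothetical dashboard/compliance/cis_1_5_aws.py plug-in module; the only
# contract display_data relies on is a module-level get_table(data) callable.
import pandas as pd
from dash import html


def get_table(data: pd.DataFrame) -> html.Div:
    # Render one row per requirement; REQUIREMENTS_ID is assumed here.
    rows = [
        html.Li(f"{req_id}: {group['STATUS'].eq('PASS').mean():.0%} passing")
        for req_id, group in data.groupby("REQUIREMENTS_ID")
    ]
    return html.Div(html.Ul(rows))
```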
def get_graph(pie, title):
    return [
        html.Span(
            title,
            className="text-center text-prowler-stone-900 uppercase text-xs font-bold",
        ),
        html.Div(
            [pie],
            className="",
            style={
                "display": "flex",
                "justify-content": "center",
                "align-items": "center",
                "margin-top": "7%",
            },
        ),
    ]


def get_bar_graph(df, column_name):
    df = df[df["STATUS"] == "FAIL"]
    df = df.groupby([column_name, "STATUS"]).size().reset_index(name="counts")
    df = df.sort_values(by=["counts"], ascending=True)
    # take the top 5
    df = df.tail(5)

    colums = df[column_name].unique()

    # Cut the text if it is too long
    for i in range(len(colums)):
        if len(colums[i]) > 15:
            colums[i] = colums[i][:15] + "..."

    fig = px.bar(
        df,
        x="counts",
        y=colums,
        color="STATUS",
        color_discrete_map={"FAIL": fail_color},
        orientation="h",
    )

    fig.update_layout(
        margin=dict(l=0, r=0, t=0, b=0),
        autosize=True,
        showlegend=False,
        xaxis_title=None,
        yaxis_title=None,
        font=dict(size=14, color="#292524"),
        hoverlabel=dict(font_size=12),
        paper_bgcolor="#FFF",
    )

    return dcc.Graph(
        figure=fig,
        config={"displayModeBar": False},
        style={"height": "20rem", "width": "40rem"},
    )


def get_pie(df):
    # Define custom colors
    color_mapping = {
        "FAIL": fail_color,
        "PASS": pass_color,
        "INFO": info_color,
        "WARN": "#260000",
        "MANUAL": manual_color,
    }

    # Use the color_discrete_map parameter to map categories to custom colors
    fig = px.pie(
        df,
        names="STATUS",
        values="counts",
        hole=0.7,
        color="STATUS",
        color_discrete_map=color_mapping,
    )
    fig.update_traces(
        hovertemplate=None,
        textposition="outside",
        textinfo="percent+label",
        rotation=50,
    )

    fig.update_layout(
        margin=dict(l=0, r=0, t=0, b=0),
        autosize=True,
        showlegend=False,
        font=dict(size=14, color="#292524"),
        hoverlabel=dict(font_size=12),
        paper_bgcolor="#FFF",
    )

    pie = dcc.Graph(
        figure=fig,
        config={"displayModeBar": False},
        style={"height": "20rem", "width": "20rem"},
    )

    return pie


def get_table(current_compliance, table):
    return [
        html.Div(
            [
                html.H5(
                    f"{current_compliance}",
                    className="text-prowler-stone-900 text-md font-bold uppercase mb-4",
                ),
                table,
            ],
            className="relative flex flex-col bg-white shadow-provider rounded-xl px-4 py-3 flex-wrap w-full",
        ),
    ]
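A quick note on `get_bar_graph`: sorting ascending and then taking `tail(5)` is what selects the five largest FAIL counts. The idiom in isolation, with made-up data:

```python
# tail(5) after an ascending sort == the five largest counts (illustrative data).
import pandas as pd

df = pd.DataFrame({"section": list("abcdefg"), "counts": [3, 9, 1, 7, 5, 8, 2]})
top5 = df.sort_values(by=["counts"], ascending=True).tail(5)
assert top5["counts"].tolist() == [3, 5, 7, 8, 9]
```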
1074
dashboard/pages/overview.py
Normal file
179
dashboard/src/input.css
Normal file
@@ -0,0 +1,179 @@
/* Use this file to add custom styles using Tailwind's utility classes. */

@tailwind base;
@tailwind components;
@tailwind utilities;

#_dash-app-content {
  @apply bg-prowler-stone-500;
}

@layer components {
  .custom-grid {
    grid-template-columns: minmax(0, 16fr) repeat(11, minmax(0, 11fr));
  }

  .custom-grid-large {
    grid-template-columns: minmax(0, 10fr) repeat(11, minmax(0, 11fr));
  }

  /* Styles for the table in the overview page */
  .table-overview thead {
    display: table;
    width: 100%;
    table-layout: fixed;
  }

  .table-overview tbody {
    -ms-overflow-style: none; /* IE and Edge */
    scrollbar-width: none; /* Firefox */
  }

  .table-overview tbody tr {
    display: table;
    width: 100%;
    table-layout: fixed;
  }
  /* Styles for thead */
  .table-overview th {
    @apply bg-prowler-stone-900 text-sm py-3 font-bold;
  }

  .table-overview td {
    @apply text-prowler-stone-900 bg-prowler-white text-sm py-2 font-bold;
  }

  /* Check ID */
  .table-overview td:nth-child(1),
  .table-overview th:nth-child(1) {
    @apply w-[52%];
  }
  /* Severity */
  .table-overview td:nth-child(2),
  .table-overview th:nth-child(2) {
    @apply w-[8%] capitalize;
  }
  /* Status */
  .table-overview td:nth-child(3),
  .table-overview th:nth-child(3) {
    @apply w-[7%];
  }
  .table-overview td:nth-child(3) {
    @apply font-bold text-prowler-error;
  }
  /* Region */
  .table-overview td:nth-child(4),
  .table-overview th:nth-child(4) {
    @apply w-[9%];
  }
  /* Service */
  .table-overview td:nth-child(5),
  .table-overview th:nth-child(5) {
    @apply w-[6%];
  }
  /* Provider */
  .table-overview td:nth-child(6),
  .table-overview th:nth-child(6) {
    @apply w-[7%];
  }
  /* Account ID */
  .table-overview td:nth-child(7),
  .table-overview th:nth-child(7) {
    @apply w-[11%];
  }
}

/* Styles for the accordion in the compliance page */
#_dash-app-content .accordion .accordion-header .accordion-button {
  @apply text-prowler-stone-900 inline-block px-4 text-xs font-bold uppercase transition-all rounded-lg bg-prowler-stone-300 hover:bg-prowler-stone-900/10;
}

#_dash-app-content .accordion .accordion-item {
  @apply text-prowler-stone-900 bg-prowler-white rounded-lg;
}

#_dash-app-content .accordion .accordion-button:not(.collapsed) {
  @apply text-prowler-stone-900 bg-prowler-stone-500;
}

#_dash-app-content .accordion .dash-table-container {
  @apply grid;
}

#_dash-app-content .accordion table {
  @apply rounded-lg;
}
/* Styles for thead */
#_dash-app-content .accordion th {
  @apply text-prowler-white text-left bg-prowler-stone-900 text-xs py-1 font-bold;
}

/* Styles for td */
#_dash-app-content .accordion td {
  @apply text-prowler-stone-900 text-left bg-prowler-white text-xs py-1 font-light;
}

/* Styles for table cells */
#_dash-app-content .accordion table tbody thead,
#_dash-app-content .accordion table tbody tr {
  @apply w-full;
}

/* Check ID */
#_dash-app-content .accordion table th:nth-child(1) {
  @apply w-[60%];
}
/* Status */
#_dash-app-content .accordion table th:nth-child(2) {
  @apply w-[10%] text-center;
}
#_dash-app-content .accordion table td:nth-child(2) {
  @apply text-center;
}
/* Region */
#_dash-app-content .accordion table th:nth-child(3) {
  @apply w-[10%];
}
/* Account ID */
#_dash-app-content .accordion table th:nth-child(4) {
  @apply w-[10%];
}
/* Resource ID */
#_dash-app-content .accordion table th:nth-child(5) {
  @apply w-[10%];
}

#_dash-app-content .compliance-data-layout,
#_dash-app-content .accordion-body,
#_dash-app-content .compliance-data-layout .accordion.accordion-flush {
  @apply grid gap-y-4;
}

#_dash-app-content .accordion-inner--child,
#_dash-app-content .accordion-inner {
  @apply relative;
}

#_dash-app-content .info-bar {
  @apply absolute left-1/2 transform -translate-x-1/2 top-2 h-8 z-50;
}

#_dash-app-content .info-bar-child {
  @apply absolute right-6 top-2 w-auto h-8 z-50;
}

@layer utilities {
  /* Hide scrollbar for Chrome, Safari and Opera */
  .no-scrollbar::-webkit-scrollbar {
    display: none;
  }
  /* Hide scrollbar for IE, Edge and Firefox */
  .no-scrollbar {
    -ms-overflow-style: none; /* IE and Edge */
    scrollbar-width: none; /* Firefox */
  }
}
90
dashboard/tailwind.config.js
Normal file
@@ -0,0 +1,90 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
  content: [
    "./assets/**/*.{py,html,js}",
    "./components/**/*.{py,html,js}",
    "./pages/**/*.{py,html,js}",
    "./utils/**/*.{py,html,js}",
    "./app.py",
  ],
  theme: {
    extend: {
      colors: {
        prowler: {
          stone: {
            950: "#1C1917",
            900: "#292524",
            500: "#E7E5E4",
            300: "#F5F5F4",
          },
          gray: {
            900: "#9bAACF",
            700: "#BEC8E4",
            500: "#C8D0E7",
            300: "#E4EBF5",
          },
          status: {
            passed: "#1FB53F",
            failed: "#A3231F",
          },
          lime: "#84CC16",
          white: "#FFFFFF",
          error: "#B91C1C",
        },
      },
      fontSize: {
        '3xs': '0.625rem', // 10px
        '2xs': '0.6875rem', // 11px
        xs: '0.75rem', // 12px
        sm: '0.875rem', // 14px
        base: '1rem', // 16px
        lg: '1.125rem', // 18px
        xl: '1.25rem', // 20px
        '2xl': '1.375rem', // 22px
        '2xxl': '1.5rem', // 24px
        '3xl': '1.75rem', // 28px
        '4xl': '2rem', // 32px
        '5xl': '2.25rem', // 36px
        '6xl': '2.75rem', // 44px
        '7xl': '3.5rem' // 56px
      },
      fontWeight: {
        light: 300,
        regular: 400,
        medium: 500,
        bold: 700,
        heavy: 800
      },
      lineHeight: {
        14: "0.875rem", // 14px
        22: "1.375rem", // 22px
        26: "1.625rem", // 26px
        28: "1.75rem", // 28px
        30: "1.875rem", // 30px
        32: "2rem", // 32px
        34: "2.125rem", // 34px
        36: "2.25rem", // 36px
        40: "2.5rem", // 40px
        44: "2.75rem", // 44px
        48: "3rem", // 48px
        56: "3.5rem", // 56px
        68: "4.25rem", // 68px
      },
      boxShadow: {
        "provider": ".3rem .3rem .6rem #c8d0e7, -.2rem -.2rem .5rem #FFF",
        "box-up": "0.3rem 0.3rem 0.6rem #c8d0e7, -0.2rem -0.2rem 0.5rem #FFF",
        "box-down": "inset .2rem .2rem .5rem #c8d0e7, inset -.2rem -.2rem .5rem #FFF",
      },
      backgroundImage: {
        "gradient-passed": "linear-gradient(127.43deg, #F1F5F8 -177.68%, #4ADE80 87.35%)",
        "gradient-failed": "linear-gradient(127.43deg, #F1F5F8 -177.68%, #EF4444 87.35%)",
      },
    },
  },
  plugins: [],
};
@@ -101,8 +101,8 @@ All the checks MUST fill the `report.status` and `report.status_extended` with t

- Status -- `report.status`
    - `PASS` --> If the check is passing against the configured value.
    - `FAIL` --> If the check is failing against the configured value.
    - `INFO` --> This value cannot be used unless a manual operation is required in order to determine if the `report.status` is whether `PASS` or `FAIL`.
    - `FAIL` --> If the check is passing against the configured value.
    - `MANUAL` --> This value cannot be used unless a manual operation is required in order to determine if the `report.status` is whether `PASS` or `FAIL`.
- Status Extended -- `report.status_extended`
    - MUST end in a dot `.`
    - MUST include the service audited with the resource and a brief explanation of the result generated, e.g.: `EC2 AMI ami-0123456789 is not public.`
@@ -230,7 +230,7 @@ Each Prowler check has metadata associated which is stored at the same level of
    # Severity holds the check's severity, always in lowercase (critical, high, medium, low or informational)
    "Severity": "critical",
    # ResourceType only for AWS, holds the type from here
    # https://docs.aws.amazon.com/securityhub/latest/userguide/asff-resources.html
    # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html
    "ResourceType": "Other",
    # Description holds the title of the check, for now is the same as CheckTitle
    "Description": "Ensure there are no EC2 AMIs set as Public.",
@@ -243,11 +243,11 @@ Each Prowler check has metadata associated which is stored at the same level of
    # Code holds different methods to remediate the FAIL finding
    "Code": {
        # CLI holds the command in the provider native CLI to remediate it
        "CLI": "https://docs.prowler.com/checks/public_8#cli-command",
        "CLI": "https://docs.bridgecrew.io/docs/public_8#cli-command",
        # NativeIaC holds the native IaC code to remediate it, use "https://docs.bridgecrew.io/docs"
        "NativeIaC": "",
        # Other holds the other commands, scripts or code to remediate it, use "https://www.trendmicro.com/cloudoneconformity"
        "Other": "https://docs.prowler.com/checks/public_8#aws-console",
        "Other": "https://docs.bridgecrew.io/docs/public_8#aws-console",
        # Terraform holds the Terraform code to remediate it, use "https://docs.bridgecrew.io/docs"
        "Terraform": ""
    },

@@ -4,5 +4,5 @@ We use `mkdocs` to build this Prowler documentation site so you can easily contr

1. Install `mkdocs` with your favorite package manager.
2. Inside the `prowler` repository folder run `mkdocs serve` and point your browser to `http://localhost:8000` and you will see live changes to your local copy of this documentation site.
3. Make all needed changes to docs or add new documents. To do so just edit existing md files inside `prowler/docs` and if you are adding a new section or file please make sure you add it to the `mkdocs.yml` file in the root folder of the Prowler repo.
3. Make all needed changes to docs or add new documents. To do so just edit existing md files inside `prowler/docs` and if you are adding a new section or file please make sure you add it to the `mkdocs.yaml` file in the root folder of the Prowler repo.
4. Once you are done with changes, please send a pull request to us for review and merge. Thank you in advance!
@@ -175,8 +175,6 @@ class <Service>(ServiceParentClass):
                f"{<item>.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
```
???+note
    To avoid fake findings, when Prowler can't retrieve the items because of an Access Denied or similar error, we set that item's value to `None`.
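A self-contained sketch of that convention; the client, resource, and attribute names below are invented for the example:

```python
# Illustrative sketch of the "None on retrieval failure" convention above;
# FakeClient and the attribute names are invented for the example.
class FakeClient:
    def get_encryption(self, bucket_name):
        raise PermissionError("AccessDenied")


class Bucket:
    def __init__(self, name):
        self.name = name
        self.encryption = None  # unknown until retrieved


client = FakeClient()
bucket = Bucket("example-bucket")
try:
    bucket.encryption = client.get_encryption(bucket.name)
except Exception as error:
    # Leave the attribute as None rather than guessing False,
    # so checks can tell "unknown" apart from "disabled".
    print(f"{error.__class__.__name__}: {error}")

assert bucket.encryption is None
```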
### Service Models


@@ -509,113 +509,7 @@ class Test_compute_firewall_rdp_access_from_the_internet_allowed:

### Services

For testing Google Cloud services, we follow the same logic as with the Google Cloud checks. We are still mocking all API calls, but in this case every API call that sets up an attribute is defined in the [fixtures file](https://github.com/prowler-cloud/prowler/blob/master/tests/providers/gcp/gcp_fixtures.py), in the `mock_api_client` function. Remember that EVERY method of a service must be tested.

The following code shows a real example of a testing class, with more comments than usual for educational purposes.

```python title="BigQuery Service Test"
# We need to import unittest.mock.patch to allow us to patch some objects
# so they are not shared between tests, hence isolating each test
from unittest.mock import patch

# Import the class needed from the service file
from prowler.providers.gcp.services.bigquery.bigquery_service import BigQuery

# Necessary constants and functions from the fixtures file
from tests.providers.gcp.gcp_fixtures import (
    GCP_PROJECT_ID,
    mock_api_client,
    mock_is_api_active,
    set_mocked_gcp_audit_info,
)


class TestBigQueryService:
    # Only method needed to test the full service
    def test_service(self):
        # In this case we are mocking __is_api_active__ to ensure our mocked project is used,
        # and the whole client to use our mocked API calls
        with patch(
            "prowler.providers.gcp.lib.service.service.GCPService.__is_api_active__",
            new=mock_is_api_active,
        ), patch(
            "prowler.providers.gcp.lib.service.service.GCPService.__generate_client__",
            new=mock_api_client,
        ):
            # Instantiate an object of the class with the mocked provider
            bigquery_client = BigQuery(
                set_mocked_gcp_audit_info(project_ids=[GCP_PROJECT_ID])
            )
            # Check that all attributes of the tested class are set up according to the API calls mocked in the GCP fixtures file
            assert bigquery_client.service == "bigquery"
            assert bigquery_client.project_ids == [GCP_PROJECT_ID]

            assert len(bigquery_client.datasets) == 2

            assert bigquery_client.datasets[0].name == "unique_dataset1_name"
            assert bigquery_client.datasets[0].id.__class__.__name__ == "str"
            assert bigquery_client.datasets[0].region == "US"
            assert bigquery_client.datasets[0].cmk_encryption
            assert bigquery_client.datasets[0].public
            assert bigquery_client.datasets[0].project_id == GCP_PROJECT_ID

            assert bigquery_client.datasets[1].name == "unique_dataset2_name"
            assert bigquery_client.datasets[1].id.__class__.__name__ == "str"
            assert bigquery_client.datasets[1].region == "EU"
            assert not bigquery_client.datasets[1].cmk_encryption
            assert not bigquery_client.datasets[1].public
            assert bigquery_client.datasets[1].project_id == GCP_PROJECT_ID

            assert len(bigquery_client.tables) == 2

            assert bigquery_client.tables[0].name == "unique_table1_name"
            assert bigquery_client.tables[0].id.__class__.__name__ == "str"
            assert bigquery_client.tables[0].region == "US"
            assert bigquery_client.tables[0].cmk_encryption
            assert bigquery_client.tables[0].project_id == GCP_PROJECT_ID

            assert bigquery_client.tables[1].name == "unique_table2_name"
            assert bigquery_client.tables[1].id.__class__.__name__ == "str"
            assert bigquery_client.tables[1].region == "US"
            assert not bigquery_client.tables[1].cmk_encryption
            assert bigquery_client.tables[1].project_id == GCP_PROJECT_ID
```
As it can be confusing where all these values come from, here is an example to make it clearer. First we need to check which API call is used to obtain the datasets. If we look at the service, the call is
`self.client.datasets().list(projectId=project_id)`.

Now, in the fixtures file, we have to mock this call in our `MagicMock` client inside the `mock_api_client` function. The best way to mock it is to follow the existing format: add one function that receives the client to be changed, named with the format `mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute accessed after *client*).

In the BigQuery example the function is called `mock_api_dataset_calls`, and inside this function we find an assignment to be used by the `__get_datasets__` method of the BigQuery class:

```python
# Mocking datasets
dataset1_id = str(uuid4())
dataset2_id = str(uuid4())

client.datasets().list().execute.return_value = {
    "datasets": [
        {
            "datasetReference": {
                "datasetId": "unique_dataset1_name",
                "projectId": GCP_PROJECT_ID,
            },
            "id": dataset1_id,
            "location": "US",
        },
        {
            "datasetReference": {
                "datasetId": "unique_dataset2_name",
                "projectId": GCP_PROJECT_ID,
            },
            "id": dataset2_id,
            "location": "EU",
        },
    ]
}
```
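To tie the two snippets together: the service's `__get_datasets__` reads `datasetReference` and `location` out of that mocked response, which is what the asserts in the test class verify. A rough, illustrative parse follows (not the service's verbatim code):

```python
# Rough sketch of how the mocked response above maps onto the attributes the
# test asserts; the dict-based Dataset shape here is illustrative only.
response = {
    "datasets": [
        {
            "datasetReference": {"datasetId": "unique_dataset1_name", "projectId": "project-id"},
            "id": "1234",
            "location": "US",
        }
    ]
}

datasets = [
    {
        "name": entry["datasetReference"]["datasetId"],
        "project_id": entry["datasetReference"]["projectId"],
        "id": entry["id"],
        "region": entry["location"],
    }
    for entry in response["datasets"]
]
assert datasets[0]["name"] == "unique_dataset1_name"
assert datasets[0]["region"] == "US"
```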
Coming soon ...

## Azure

@@ -623,8 +517,6 @@ client.datasets().list().execute.return_value = {

For the Azure Provider we don't have any library to mock out the API calls we use. So in this scenario we inject the objects into the service client using [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock).

In essence, we create object instances and run the check that we are testing against that instance. In the test we ensure the check executed correctly and produced results with the expected values.

The following code shows how to use MagicMock to create the service objects for an Azure check test.

```python
@@ -665,8 +557,11 @@ class Test_defender_ensure_defender_for_arm_is_on:

        # In this scenario we also have to mock the Defender service and the defender_client from the check, to enforce that the defender_client used is the one created within this check, because patch != import; if you execute tests in parallel some objects can already be initialised, so the check wouldn't be isolated.
        # In this case we don't use the Moto decorator, we use the mocked Defender client for both objects
        with mock.patch(
            "prowler.providers.azure.services.defender.defender_ensure_defender_for_arm_is_on.defender_ensure_defender_for_arm_is_on.defender_client",
        with mock.patch(
            "prowler.providers.azure.services.defender.defender_service.Defender",
            new=defender_client,
        ), mock.patch(
            "prowler.providers.azure.services.defender.defender_client.defender_client",
            new=defender_client,
        ):

@@ -680,7 +575,7 @@ class Test_defender_ensure_defender_for_arm_is_on:
            check = defender_ensure_defender_for_arm_is_on()

            # And then, call the execute() function to run the check
            # against the Defender client we've set up.
            # against the IAM client we've set up.
            result = check.execute()

            # Last but not least, we need to assert all the fields
@@ -698,171 +593,4 @@ class Test_defender_ensure_defender_for_arm_is_on:

### Services

For the Azure services tests, the idea is similar: we test that the functions we've written for capturing the values of the different objects through the Azure API work correctly. Again, we create an object instance and verify that the values captured for that instance are correct.

The following code shows what a service test looks like.

```python
# We import patch from unittest.mock for simulating the objects that we'll test with.
from unittest.mock import patch

# Importing FlowLog from azure.mgmt.network.models allows us to create objects corresponding
# to flow log settings for Azure networking resources.
from azure.mgmt.network.models import FlowLog

# We import the different classes of the Network service so we can use them.
from prowler.providers.azure.services.network.network_service import (
    BastionHost,
    Network,
    NetworkWatcher,
    PublicIp,
    SecurityGroup,
)

# Azure constants
from tests.providers.azure.azure_fixtures import (
    AZURE_SUBSCRIPTION,
    set_mocked_azure_audit_info,
)


# Mocks the behavior of the function responsible for retrieving security groups from the network service;
# basically this is the SecurityGroup instance that we are going to use
def mock_network_get_security_groups(_):
    return {
        AZURE_SUBSCRIPTION: [
            SecurityGroup(
                id="id",
                name="name",
                location="location",
                security_rules=[],
            )
        ]
    }


# We do the same for all the components we need: BastionHost, NetworkWatcher and PublicIp in this case

def mock_network_get_bastion_hosts(_):
    return {
        AZURE_SUBSCRIPTION: [
            BastionHost(
                id="id",
                name="name",
                location="location",
            )
        ]
    }


def mock_network_get_network_watchers(_):
    return {
        AZURE_SUBSCRIPTION: [
            NetworkWatcher(
                id="id",
                name="name",
                location="location",
                flow_logs=[FlowLog(enabled=True, retention_policy=90)],
            )
        ]
    }


def mock_network_get_public_ip_addresses(_):
    return {
        AZURE_SUBSCRIPTION: [
            PublicIp(
                id="id",
                name="name",
                location="location",
                ip_address="ip_address",
            )
        ]
    }


# We use the 'patch' decorator to replace, during the test, the original get functions with the mock functions.

# In this case we are replacing '__get_security_groups__' with 'mock_network_get_security_groups'.
# We do the same for the rest of the functions.
@patch(
    "prowler.providers.azure.services.network.network_service.Network.__get_security_groups__",
    new=mock_network_get_security_groups,
)
@patch(
    "prowler.providers.azure.services.network.network_service.Network.__get_bastion_hosts__",
    new=mock_network_get_bastion_hosts,
)
@patch(
    "prowler.providers.azure.services.network.network_service.Network.__get_network_watchers__",
    new=mock_network_get_network_watchers,
)
@patch(
    "prowler.providers.azure.services.network.network_service.Network.__get_public_ip_addresses__",
    new=mock_network_get_public_ip_addresses,
)

# We create the class that finally tests the methods
class Test_Network_Service:

    # Verifies that the Network class initializes its client object correctly
    def test__get_client__(self):
        # Creates an instance of the Network class with the audit information provided
        network = Network(set_mocked_azure_audit_info())
        # Checks that the client is initialized correctly
        assert (
            network.clients[AZURE_SUBSCRIPTION].__class__.__name__
            == "NetworkManagementClient"
        )

    # Verifies Security Groups are set correctly
    def test__get_security_groups__(self):
        network = Network(set_mocked_azure_audit_info())
        assert (
            network.security_groups[AZURE_SUBSCRIPTION][0].__class__.__name__
            == "SecurityGroup"
        )
        # As you can see, every field must match the mocking method
        assert network.security_groups[AZURE_SUBSCRIPTION][0].id == "id"
        assert network.security_groups[AZURE_SUBSCRIPTION][0].name == "name"
        assert network.security_groups[AZURE_SUBSCRIPTION][0].location == "location"
        assert network.security_groups[AZURE_SUBSCRIPTION][0].security_rules == []

    # Verifies Network Watchers are set correctly
    def test__get_network_watchers__(self):
        network = Network(set_mocked_azure_audit_info())
        assert (
            network.network_watchers[AZURE_SUBSCRIPTION][0].__class__.__name__
            == "NetworkWatcher"
        )
        assert network.network_watchers[AZURE_SUBSCRIPTION][0].id == "id"
        assert network.network_watchers[AZURE_SUBSCRIPTION][0].name == "name"
        assert network.network_watchers[AZURE_SUBSCRIPTION][0].location == "location"
        assert network.network_watchers[AZURE_SUBSCRIPTION][0].flow_logs == [
            FlowLog(enabled=True, retention_policy=90)
        ]

    # Verifies Flow Logs are set correctly (note: the method must start with "test"
    # for pytest to collect it, and flow_logs is a list, so it is indexed by position)
    def test__get_flow_logs__(self):
        network = Network(set_mocked_azure_audit_info())
        assert (
            network.network_watchers[AZURE_SUBSCRIPTION][0]
            .flow_logs[0]
            .__class__.__name__
            == "FlowLog"
        )
        assert network.network_watchers[AZURE_SUBSCRIPTION][0].flow_logs == [
            FlowLog(enabled=True, retention_policy=90)
        ]
        assert (
            network.network_watchers[AZURE_SUBSCRIPTION][0].flow_logs[0].enabled is True
        )
        assert (
            network.network_watchers[AZURE_SUBSCRIPTION][0]
            .flow_logs[0]
            .retention_policy
            == 90
        )

    ...
```
The code continues with some more verifications in the same way.

Hopefully this will prove useful for understanding and creating new Azure service tests.

Please refer to the [Azure checks tests](./unit-testing.md#azure) for more information on how to create tests, and check the existing services tests [here](https://github.com/prowler-cloud/prowler/tree/master/tests/providers/azure/services).
Coming soon ...
@@ -15,7 +15,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),

* `Python >= 3.9`
* `Python pip >= 3.9`
- * AWS, GCP and/or Azure credentials
+ * AWS, GCP, Azure and/or Kubernetes credentials

_Commands_:

@@ -29,7 +29,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
_Requirements_:

* Have `docker` installed: https://docs.docker.com/get-docker/.
- * AWS, GCP and/or Azure credentials
+ * AWS, GCP, Azure and/or Kubernetes credentials
* In the command below, change `-v` to your local directory path in order to access the reports.

_Commands_:

@@ -46,7 +46,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),

_Requirements for Ubuntu 20.04.3 LTS_:

- * AWS, GCP and/or Azure credentials
+ * AWS, GCP, Azure and/or Kubernetes credentials
* Install python 3.9 with: `sudo apt-get install python3.9`
* Remove python 3.8 to avoid conflicts if you can: `sudo apt-get remove python3.8`
* Make sure you have the python3 distutils package installed: `sudo apt-get install python3-distutils`

@@ -66,7 +66,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),

_Requirements for Developers_:

- * AWS, GCP and/or Azure credentials
+ * AWS, GCP, Azure and/or Kubernetes credentials
* `git`, `Python >= 3.9`, `pip` and `poetry` installed (`pip install poetry`)

_Commands_:

@@ -83,7 +83,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),

_Requirements_:

- * AWS, GCP and/or Azure credentials
+ * AWS, GCP, Azure and/or Kubernetes credentials
* Latest Amazon Linux 2 should come with Python 3.9 already installed, however it may need pip. Install Python pip 3.9 with: `sudo yum install -y python3-pip`.
* Make sure setuptools for python is already installed with: `pip3 install setuptools`

@@ -100,7 +100,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
_Requirements_:

* `Brew` installed in your Mac or Linux
- * AWS, GCP and/or Azure credentials
+ * AWS, GCP, Azure and/or Kubernetes credentials

_Commands_:

@@ -111,7 +111,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),

=== "AWS CloudShell"

- After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
+ After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:

_Requirements_:

@@ -120,8 +120,12 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
_Commands_:
```
sudo bash
adduser prowler
su prowler
pip install prowler
prowler -v
cd /tmp || exit
prowler aws
```

???+ note
@@ -160,7 +164,7 @@ You can run Prowler from your workstation, an EC2 instance, Fargate or any other

![Architecture](https://github.com/prowler-cloud/prowler/blob/62c1ce73bbcdd6b9e5ba03dfcae26dfd165defd9/docs/img/architecture.png?raw=True)

## Basic Usage

- To run Prowler, you will need to specify the provider (e.g. `aws`, `gcp` or `azure`):
+ To run Prowler, you will need to specify the provider (e.g. `aws`, `gcp`, `azure` or `kubernetes`):

???+ note
    If no provider is specified, AWS will be used for backward compatibility with most of the v2 options.
@@ -173,7 +177,7 @@ prowler <provider>
???+ note
    Running the `prowler` command without options will use your environment variable credentials; see the [Requirements](./getting-started/requirements.md) section to review the credentials settings.

- If you miss the former output you can use `--verbose`, but Prowler v3 is smoking fast, so you won't see much ;)
+ If you miss the former output you can use `--verbose`, but Prowler v4 is smoking fast, so you won't see much ;)

By default, Prowler will generate CSV, JSON and HTML reports; however, you can generate a JSON-ASFF report (used by AWS Security Hub) with `-M` or `--output-modes`:
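For example, to produce CSV and JSON-ASFF in one run (an illustrative combination of output modes; any supported modes can be listed):

```console
prowler aws -M csv json-asff
```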
@@ -197,6 +201,7 @@ For executing specific checks or services you can use options `-c`/`checks` or `
prowler azure --checks storage_blob_public_access_level_is_disabled
prowler aws --services s3 ec2
prowler gcp --services iam compute
+ prowler kubernetes --services etcd apiserver
```

Also, checks and services can be excluded with options `-e`/`--excluded-checks` or `--excluded-services`:

@@ -205,6 +210,7 @@ Also, checks and services can be excluded with options `-e`/`--excluded-checks`
prowler aws --excluded-checks s3_bucket_public_access
prowler azure --excluded-services defender iam
prowler gcp --excluded-services kms
+ prowler kubernetes --excluded-services controllermanager
```

More options and execution methods that will save you time in [Miscellaneous](tutorials/misc.md).

@@ -275,5 +281,26 @@ prowler gcp --project-ids <Project ID 1> <Project ID 2> ... <Project ID N>

See more details about GCP Authentication in [Requirements](getting-started/requirements.md)
## Kubernetes

Prowler allows you to scan your Kubernetes Cluster either from within the cluster or from outside the cluster.

For non in-cluster execution, you can provide the location of the KubeConfig file with the following argument:

```console
prowler kubernetes --kubeconfig-file path
```

For in-cluster execution, you can use the supplied yaml to run Prowler as a job:
```console
kubectl apply -f job.yaml
kubectl apply -f prowler-role.yaml
kubectl apply -f prowler-rolebinding.yaml
kubectl get pods --> prowler-XXXXX
kubectl logs prowler-XXXXX
```

> By default, `prowler` will scan all namespaces in your active Kubernetes context; use the flag `--context` to specify the context to be scanned and `--namespaces` to specify the namespaces to be scanned, as in the example below.
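For example, to scan a single context and a specific set of namespaces (the context and namespace names here are hypothetical):

```console
prowler kubernetes --context my-cluster --namespaces kube-system default
```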
## Prowler v2 Documentation
For **Prowler v2 Documentation**, please check it out [here](https://github.com/prowler-cloud/prowler/blob/8818f47333a0c1c1a457453c87af0ea5b89a385f/README.md).

@@ -36,7 +36,3 @@ If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to

- ARN of your MFA device
- TOTP (Time-Based One-Time Password)

- ## STS Endpoint Region
-
- If you are using Prowler in AWS regions that are not enabled by default you need to use the argument `--sts-endpoint-region` to point the AWS STS API calls `assume-role` and `get-caller-identity` to the non-default region, e.g.: `prowler aws --sts-endpoint-region eu-south-2`.

@@ -1,10 +1,14 @@
# AWS CloudShell

## Installation
- After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v3 in AWS CloudShell:
+ After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
```shell
sudo bash
adduser prowler
su prowler
pip install prowler
prowler -v
cd /tmp || exit
prowler aws
```

## Download Files
@@ -15,11 +19,14 @@ To download the results from AWS CloudShell, select Actions -> Download File and

The limited storage that AWS CloudShell provides for the user's home directory causes issues when installing the poetry dependencies to run Prowler from GitHub. Here is a workaround:
```shell
sudo bash
adduser prowler
su prowler
git clone https://github.com/prowler-cloud/prowler.git
cd prowler
pip install poetry
- mkdir /tmp/pypoetry
- poetry config cache-dir /tmp/pypoetry
+ mkdir /tmp/poetry
+ poetry config cache-dir /tmp/poetry
poetry shell
poetry install
python prowler.py -v

@@ -33,13 +33,6 @@ prowler aws --role-session-name <role_session_name>
???+ note
    It defaults to `ProwlerAssessmentSession`.

- ## STS Endpoint Region
-
- If you are using Prowler in AWS regions that are not enabled by default you need to use the argument `--sts-endpoint-region` to point the AWS STS API calls `assume-role` and `get-caller-identity` to the non-default region, e.g.: `prowler aws --sts-endpoint-region eu-south-2`.
-
- ???+ note
-     Since v3.11.0, Prowler uses a regional token in STS sessions so it can scan all AWS regions without needing the `--sts-endpoint-region` argument. Make sure that you have enabled the AWS Region you want to scan in **BOTH** AWS Accounts (assumed role account and account from which you assume the role).

## Role MFA

If your IAM Role has MFA configured you can use `--mfa` along with `-R`/`--role <role_arn>` and Prowler will ask you to input the following values to get a new temporary session for the IAM Role provided:

@@ -95,8 +95,7 @@ checks_v3_to_v2_mapping = {
    "ec2_networkacl_allow_ingress_any_port": "extra7138",
    "ec2_networkacl_allow_ingress_tcp_port_22": "check45",
    "ec2_networkacl_allow_ingress_tcp_port_3389": "check46",
-     "ec2_securitygroup_allow_ingress_from_internet_to_all_ports": "extra748",
-     "ec2_securitygroup_allow_ingress_from_internet_to_any_port": "extra74",
+     "ec2_securitygroup_allow_ingress_from_internet_to_any_port": "extra748",
    "ec2_securitygroup_allow_ingress_from_internet_to_port_mongodb_27017_27018": "extra753",
    "ec2_securitygroup_allow_ingress_from_internet_to_tcp_ftp_port_20_21": "extra7134",
    "ec2_securitygroup_allow_ingress_from_internet_to_tcp_port_22": "check41",

@@ -7,6 +7,7 @@ At the time of writing this documentation the available Azure Clouds from different
- AzureCloud
- AzureChinaCloud
- AzureUSGovernment
- AzureGermanCloud

If you want to change the default one you must include the flag `--azure-region`, i.e.:

@@ -1,5 +1,18 @@
# Compliance
- Prowler allows you to execute checks based on requirements defined in compliance frameworks.
+ Prowler allows you to execute checks based on requirements defined in compliance frameworks. By default, it will execute and give you an overview of the status of each compliance framework:

<img src="../img/compliance/compliance.png"/>

> You can find CSVs containing detailed compliance results inside the compliance folder within Prowler's output folder.

## Execute Prowler based on Compliance Frameworks
Prowler can analyze your environment based on a specific compliance framework and provide more details; to do so, you can use the option `--compliance`:
```sh
prowler <provider> --compliance <compliance_framework>
```
Standard results will be shown, plus the framework information, as in the sample below for CIS AWS 1.5. A CSV file with the details is generated as well.

<img src="../img/compliance/compliance-cis-sample1.png"/>

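For instance, to run that CIS AWS 1.5 assessment (the framework key follows the naming convention used in the list below):

```console
prowler aws --compliance cis_1.5_aws
```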
## List Available Compliance Frameworks
In order to see which compliance frameworks are covered by Prowler, you can use the option `--list-compliance`:
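For example:

```console
prowler aws --list-compliance
```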
@@ -20,6 +33,7 @@ Currently, the available frameworks are:
- `cis_2.0_azure`
- `cis_2.1_azure`
- `cis_3.0_aws`
+ - `cis_1.8_kubernetes`
- `cisa_aws`
- `ens_rd2022_aws`
- `fedramp_low_revision_4_aws`
@@ -47,7 +61,6 @@ prowler <provider> --list-compliance-requirements <compliance_framework(s)>
```

Example for the first requirements of CIS 1.5 for AWS:

```
Listing CIS 1.5 AWS Compliance Requirements:

@@ -80,15 +93,6 @@ Requirement Id: 1.5

```

- ## Execute Prowler based on Compliance Frameworks
- As we mentioned, Prowler can be execute to analyse you environment based on a specific compliance framework, to do it, you can use option `--compliance`:
- ```sh
- prowler <provider> --compliance <compliance_framework>
- ```
- Standard results will be shown and additionally the framework information as the sample below for CIS AWS 1.5. For details a CSV file has been generated as well.
-
- <img src="../img/compliance-cis-sample1.png"/>

## Create and contribute adding other Security Frameworks

This information is part of the Developer Guide and can be found here: https://docs.prowler.cloud/en/latest/tutorials/developer-guide/.

@@ -29,13 +29,16 @@ The following list includes all the AWS checks with configurable variables that can be changed in the configuration yaml file:
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
- | `config_recorder_all_regions_enabled` | `allowlist_non_default_regions` | Boolean |
- | `drs_job_exist` | `allowlist_non_default_regions` | Boolean |
- | `guardduty_is_enabled` | `allowlist_non_default_regions` | Boolean |
- | `securityhub_enabled` | `allowlist_non_default_regions` | Boolean |
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |

+ | `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
+ | `drs_job_exist` | `mute_non_default_regions` | Boolean |
+ | `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
+ | `securityhub_enabled` | `mute_non_default_regions` | Boolean |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
## Azure

### Configurable Checks
@@ -43,7 +46,7 @@ The following list includes all the Azure checks with configurable variables that can be changed in the configuration yaml file:

| Check Name | Value | Type |
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
- | `network_public_ip_shodan` | `shodan_api_key` | String |
+ | `network_public_ip_shodan` | `shodan_api_key` | String |
| `app_ensure_php_version_is_latest` | `php_latest_version` | String |
| `app_ensure_python_version_is_latest` | `python_latest_version` | String |
| `app_ensure_java_version_is_latest` | `java_latest_version` | String |
@@ -53,6 +56,19 @@ The following list includes all the Azure checks with configurable variables that can be changed in the configuration yaml file:

### Configurable Checks

## Kubernetes

### Configurable Checks
The following list includes all the Kubernetes checks with configurable variables that can be changed in the configuration yaml file:

| Check Name | Value | Type |
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
| `audit_log_maxbackup` | `audit_log_maxbackup` | String |
| `audit_log_maxsize` | `audit_log_maxsize` | String |
| `audit_log_maxage` | `audit_log_maxage` | String |
| `apiserver_strong_ciphers` | `apiserver_strong_ciphers` | String |
| `kubelet_strong_ciphers_only` | `kubelet_strong_ciphers` | String |

## Config YAML File Structure

???+ note
@@ -61,9 +77,10 @@ The following list includes all the Azure checks with configurable variables that can be changed in the configuration yaml file:
```yaml title="config.yaml"
# AWS Configuration
aws:

  # AWS Global Configuration
- # aws.allowlist_non_default_regions --> Allowlist Failed Findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
- allowlist_non_default_regions: False
+ # aws.mute_non_default_regions --> Mute Failed Findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
+ mute_non_default_regions: False

  # AWS IAM Configuration
  # aws.iam_user_accesskey_unused --> CIS recommends 45 days
@@ -73,7 +90,6 @@ aws:

  # AWS EC2 Configuration
  # aws.ec2_elastic_ip_shodan
- # TODO: create common config
  shodan_api_key: null
  # aws.ec2_securitygroup_with_many_ingress_egress_rules --> by default is 50 rules
  max_security_group_rules: 50
@@ -81,7 +97,6 @@ aws:
  max_ec2_instance_age_in_days: 180

- # AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
  # AWS SSM Configuration (aws.ssm_documents_set_as_public)
  # Single account environment: No action required. The AWS account number will be automatically added by the checks.
  # Multi account environment: Any additional trusted account number should be added as a space separated list, e.g.
  # trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
@@ -103,67 +118,201 @@ aws:
  # aws.awslambda_function_using_supported_runtimes
  obsolete_lambda_runtimes:
    [
      "java8",
      "go1.x",
      "provided",
      "python3.6",
      "python2.7",
      "python3.7",
      "nodejs4.3",
      "nodejs4.3-edge",
      "nodejs6.10",
      "nodejs",
      "nodejs8.10",
      "nodejs10.x",
      "nodejs12.x",
      "nodejs14.x",
      "dotnet5.0",
      "dotnetcore1.0",
      "dotnetcore2.0",
      "dotnetcore2.1",
      "dotnetcore3.1",
      "ruby2.5",
      "ruby2.7",
    ]

  # AWS Organizations
- # aws.organizations_scp_check_deny_regions
- # aws.organizations_enabled_regions: [
- #   "eu-central-1",
- #   "eu-west-1",
+ # organizations_scp_check_deny_regions
+ # organizations_enabled_regions: [
+ #   'eu-central-1',
+ #   'eu-west-1',
  #   "us-east-1"
  # ]
  organizations_enabled_regions: []
  organizations_trusted_delegated_administrators: []

  # AWS ECR
- # aws.ecr_repositories_scan_vulnerabilities_in_latest_image
+ # ecr_repositories_scan_vulnerabilities_in_latest_image
  # CRITICAL
  # HIGH
  # MEDIUM
  ecr_repository_vulnerability_minimum_severity: "MEDIUM"

  # AWS Trusted Advisor
- # aws.trustedadvisor_premium_support_plan_subscribed
+ # trustedadvisor_premium_support_plan_subscribed
  verify_premium_support_plans: True

  # AWS RDS
  # aws.rds_instance_backup_enabled
  # Whether to check RDS instance replicas or not
  check_rds_instance_replicas: False

  # AWS ACM Configuration
  # aws.acm_certificates_expiration_check
  days_to_expire_threshold: 7
  # AWS CloudTrail Configuration
  # aws.cloudtrail_threat_detection_privilege_escalation
  threat_detection_privilege_escalation_entropy: 0.7 # Percentage of actions found to decide if it is a privilege_escalation attack event; by default it is 0.7 (70%)
  threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks; by default it is 1440 minutes (24 hours)
  threat_detection_privilege_escalation_actions: [
      "AddPermission",
      "AddRoleToInstanceProfile",
      "AddUserToGroup",
      "AssociateAccessPolicy",
      "AssumeRole",
      "AttachGroupPolicy",
      "AttachRolePolicy",
      "AttachUserPolicy",
      "ChangePassword",
      "CreateAccessEntry",
      "CreateAccessKey",
      "CreateDevEndpoint",
      "CreateEventSourceMapping",
      "CreateFunction",
      "CreateGroup",
      "CreateJob",
      "CreateKeyPair",
      "CreateLoginProfile",
      "CreatePipeline",
      "CreatePolicyVersion",
      "CreateRole",
      "CreateStack",
      "DeleteRolePermissionsBoundary",
      "DeleteRolePolicy",
      "DeleteUserPermissionsBoundary",
      "DeleteUserPolicy",
      "DetachRolePolicy",
      "DetachUserPolicy",
      "GetCredentialsForIdentity",
      "GetId",
      "GetPolicyVersion",
      "GetUserPolicy",
      "Invoke",
      "ModifyInstanceAttribute",
      "PassRole",
      "PutGroupPolicy",
      "PutPipelineDefinition",
      "PutRolePermissionsBoundary",
      "PutRolePolicy",
      "PutUserPermissionsBoundary",
      "PutUserPolicy",
      "ReplaceIamInstanceProfileAssociation",
      "RunInstances",
      "SetDefaultPolicyVersion",
      "UpdateAccessKey",
      "UpdateAssumeRolePolicy",
      "UpdateDevEndpoint",
      "UpdateEventSourceMapping",
      "UpdateFunctionCode",
      "UpdateJob",
      "UpdateLoginProfile",
    ]
  # aws.cloudtrail_threat_detection_enumeration
  threat_detection_enumeration_entropy: 0.7 # Percentage of actions found to decide if it is an enumeration attack event; by default it is 0.7 (70%)
  threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks; by default it is 1440 minutes (24 hours)
  threat_detection_enumeration_actions: [
      "DescribeAccessEntry",
      "DescribeAccountAttributes",
      "DescribeAvailabilityZones",
      "DescribeBundleTasks",
      "DescribeCarrierGateways",
      "DescribeClientVpnRoutes",
      "DescribeCluster",
      "DescribeDhcpOptions",
      "DescribeFlowLogs",
      "DescribeImages",
      "DescribeInstanceAttribute",
      "DescribeInstanceInformation",
      "DescribeInstanceTypes",
      "DescribeInstances",
      "DescribeInstances",
      "DescribeKeyPairs",
      "DescribeLogGroups",
      "DescribeLogStreams",
      "DescribeOrganization",
      "DescribeRegions",
      "DescribeSecurityGroups",
      "DescribeSnapshotAttribute",
      "DescribeSnapshotTierStatus",
      "DescribeSubscriptionFilters",
      "DescribeTransitGatewayMulticastDomains",
      "DescribeVolumes",
      "DescribeVolumesModifications",
      "DescribeVpcEndpointConnectionNotifications",
      "DescribeVpcs",
      "GetAccount",
      "GetAccountAuthorizationDetails",
      "GetAccountSendingEnabled",
      "GetBucketAcl",
      "GetBucketLogging",
      "GetBucketPolicy",
      "GetBucketReplication",
      "GetBucketVersioning",
      "GetCallerIdentity",
      "GetCertificate",
      "GetConsoleScreenshot",
      "GetCostAndUsage",
      "GetDetector",
      "GetEbsDefaultKmsKeyId",
      "GetEbsEncryptionByDefault",
      "GetFindings",
      "GetFlowLogsIntegrationTemplate",
      "GetIdentityVerificationAttributes",
      "GetInstances",
      "GetIntrospectionSchema",
      "GetLaunchTemplateData",
      "GetLaunchTemplateData",
      "GetLogRecord",
      "GetParameters",
      "GetPolicyVersion",
      "GetPublicAccessBlock",
      "GetQueryResults",
      "GetRegions",
      "GetSMSAttributes",
      "GetSMSSandboxAccountStatus",
      "GetSendQuota",
      "GetTransitGatewayRouteTableAssociations",
      "GetUserPolicy",
      "HeadObject",
      "ListAccessKeys",
      "ListAccounts",
      "ListAllMyBuckets",
      "ListAssociatedAccessPolicies",
      "ListAttachedUserPolicies",
      "ListClusters",
      "ListDetectors",
      "ListDomains",
      "ListFindings",
      "ListHostedZones",
      "ListIPSets",
      "ListIdentities",
      "ListInstanceProfiles",
      "ListObjects",
      "ListOrganizationalUnitsForParent",
      "ListOriginationNumbers",
      "ListPolicyVersions",
      "ListRoles",
      "ListRoles",
      "ListRules",
      "ListServiceQuotas",
      "ListSubscriptions",
      "ListTargetsByRule",
      "ListTopics",
      "ListUsers",
      "LookupEvents",
      "Search",
    ]

# Azure Configuration
azure:
  # Azure Network Configuration
  # azure.network_public_ip_shodan
- # TODO: create common config
  shodan_api_key: null

- # Azure App Service
+ # Azure App Configuration
  # azure.app_ensure_php_version_is_latest
  php_latest_version: "8.2"
  # azure.app_ensure_python_version_is_latest

@@ -31,6 +31,10 @@ CustomChecksMetadata:
    Checks:
      compute_instance_public_ip:
        Severity: critical
+   kubernetes:
+     Checks:
+       apiserver_anonymous_requests:
+         Severity: low
```

## Usage

docs/tutorials/dashboard.md (new file, 90 lines)
@@ -0,0 +1,90 @@
# Dashboard
Prowler allows you to run your own local dashboard using the CSV outputs generated by Prowler:

```sh
prowler dashboard
```

To run the Prowler local dashboard with Docker, use:

```sh
docker run toniblyx/prowler:latest dashboard
```

The banner and additional info about the dashboard will be shown on your console:
<img src="../img/dashboard/dashboard-banner.png">

## Overview Page

The overview page provides a complete picture of the findings obtained from Prowler:

<img src="../img/dashboard/dashboard-overview.png">

On this page you can perform several actions:
* Apply filters (Assessment Date / Account / Region)
* See which files have been scanned to generate the dashboard by placing your mouse on the `?` icon:
<img src="../img/dashboard/dashboard-files-scanned.png">
* Download the `Top 25 Failed Findings by Severity` table using the button `DOWNLOAD THIS TABLE AS CSV`

## Compliance Page

This page shows all the information related to the selected compliance framework; you can apply multiple filters depending on your preferences.

<img src="../img/dashboard/dashboard-compliance.png">

To add your own compliance framework to the compliance page, add a file named after the framework (using `_` instead of `.`) to the path `/dashboard/compliance`.

In this file, use the format present in the other compliance files to create the table. Example for CIS 2.0:
```python
import warnings

from dashboard.common_methods import get_section_containers_cis

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_DESCRIPTION",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_cis(
        aux, "REQUIREMENTS_ID", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )

```

## S3 Integration

If you are a Prowler SaaS customer and want to use the data from your S3 bucket, you can run:

```sh
aws s3 cp s3://<your-bucket>/output/csv ./output --recursive
```
to load the dashboard with the new files.

## Output Path

Prowler will use the outputs from the folder `/output` (for common Prowler outputs) and `/output/compliance` (for Prowler compliance outputs) to generate the dashboard.

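For example, assuming those default paths, you can place existing reports in the expected folders before launching the dashboard (the file names below are hypothetical):

```sh
cp myscan.csv output/
cp myscan_cis_2.0_aws.csv output/compliance/
prowler dashboard
```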
To change the path, modify the values `folder_path_overview` or `folder_path_compliance` in `/dashboard/config.py`.

## Output Support

The Prowler dashboard supports the following outputs:

| Provider | V3 | V4 | COMPLIANCE-V3 | COMPLIANCE-V4 |
|---|---|---|---|---|
| AWS | ✅ | ✅ | ✅ | ✅ |
| Azure | ❌ | ✅ | ❌ | ✅ |
| Kubernetes | ❌ | ✅ | ❌ | ✅ |
| GCP | ❌ | ✅ | ❌ | ✅ |

BIN docs/tutorials/img/compliance/compliance.png (new file, 93 KiB)
BIN docs/tutorials/img/dashboard/dashboard-banner.png (new file, 24 KiB)
BIN docs/tutorials/img/dashboard/dashboard-compliance.png (new file, 133 KiB)
BIN docs/tutorials/img/dashboard/dashboard-files-scanned.png (new file, 8.9 KiB)
BIN docs/tutorials/img/dashboard/dashboard-overview.png (new file, 248 KiB)
@@ -11,7 +11,7 @@ prowler <provider> --slack
![Prowler Slack Message](../img/slack.png)

???+ note
-     Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_NAME environment variables.
+     Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_ID environment variables.

### Configuration

@@ -35,4 +35,4 @@ To configure the Slack Integration, follow the next steps:

4. Set the following environment variables that Prowler will read:
    - `SLACK_API_TOKEN`: the *Slack App OAuth Token* that was previously obtained.
-     - `SLACK_CHANNEL_NAME`: the name of your Slack Channel where Prowler will send the message.
+     - `SLACK_CHANNEL_ID`: the ID of the Slack Channel where Prowler will send the message.
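For example, before running the integration (the token and channel values below are placeholders):

```console
export SLACK_API_TOKEN="xoxb-your-token"
export SLACK_CHANNEL_ID="C0123456789"
prowler aws --slack
```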
@@ -8,7 +8,7 @@ There are different log levels depending on the logging information that is desired:

- **DEBUG**: It will show low-level logs from Python.
- **INFO**: It will show all the API calls that are being invoked by the provider.
- - **WARNING**: It will show all resources that are being **allowlisted**.
+ - **WARNING**: It will show all resources that are being **muted**.
- **ERROR**: It will show any errors, e.g., not authorized actions.
- **CRITICAL**: The default log level. If a critical log appears, it will **exit** Prowler’s execution.

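To pick one of these levels at runtime, Prowler exposes a `--log-level` option; a quick illustration (the provider and level shown are arbitrary):

```console
prowler aws --log-level WARNING
```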
@@ -9,10 +9,10 @@ Execute Prowler in verbose mode (like in Version 2):
```console
prowler <provider> --verbose
```
- ## Show only Fails
- Prowler can only display the failed findings:
+ ## Filter findings by status
+ Prowler can filter the findings by their status:
```console
- prowler <provider> -q/--quiet
+ prowler <provider> --status [PASS, FAIL, MANUAL]
```
## Disable Exit Code 3
To make Prowler not trigger exit code 3 on failed checks:

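A minimal example using the `-z` flag (the same flag the Kubernetes job manifest later in this diff passes to Prowler):

```console
prowler aws -z
```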
@@ -1,19 +1,38 @@
- # Allowlisting
+ # Mutelisting
Sometimes you may find resources that are intentionally configured in a certain way that may be a bad practice but is fine for your use case, for example an AWS S3 Bucket open to the internet hosting a web site, or an AWS Security Group with an open port needed in your use case.

- The Allowlist option works along with other options and adds a `WARNING` instead of `INFO`, `PASS` or `FAIL` to any output format.
+ The Mutelist option works along with other options and will modify the output in the following way if the finding is muted:

- You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, but first, let's review the syntax.
+ - JSON-OCSF: `status_id` is `Suppressed`.
+ - CSV: `muted` is `True`. The field `status` will keep the original status, `MANUAL`, `PASS` or `FAIL`, of the finding.

- ## Allowlist Yaml File Syntax

+ You can use `-w`/`--mutelist-file` with the path of your mutelist yaml file:
```
prowler <provider> -w mutelist.yaml
```

+ ## Mutelist YAML File Syntax

???+ note
    For the Azure provider, the Account ID is the Subscription Name and the Region is the Location.

???+ note
    For the GCP provider, the Account ID is the Project ID and the Region is the Zone.

???+ note
    For the Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.

The Mutelist file is a YAML file with the following syntax:

```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
- ########################### ALLOWLIST EXAMPLE ###########################
- Allowlist:
+ ########################### MUTELIST EXAMPLE ###########################
+ Mutelist:
  Accounts:
    "123456789012":
      Checks:
@@ -78,11 +97,13 @@ You can use `-w`/`--allowlist-file` with the path of your allowlist yaml file, b
          - "test"
        Tags:
          - "environment=prod" # Will mute every resource in account 123456789012 except the ones containing the string "test" and the tag environment=prod
```

- ## Allowlist specific regions
- If you want to allowlist/mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w allowlist.yaml`:
+ ## AWS Mutelist
+ ### Mute specific AWS regions
+ If you want to mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w mutelist.yaml`:

- Allowlist:
+ Mutelist:
  Accounts:
    "*":
      Checks:
@@ -93,56 +114,49 @@ If you want to allowlist/mute failed findings only in specific regions, create a
        Resources:
          - "*"

- ## Default AWS Allowlist
- Prowler provides you a Default AWS Allowlist with the AWS Resources that should be allowlisted, such as all resources created by AWS Control Tower when setting up a landing zone.
- You can execute Prowler with this allowlist using the following command:
- ```sh
- prowler aws --allowlist prowler/config/aws_allowlist.yaml
- ```
- ## Supported Allowlist Locations
+ ### Default Mutelist
+ For the AWS Provider, Prowler is executed with a Default AWS Mutelist with the AWS Resources that should be muted, such as all resources created by AWS Control Tower when setting up a landing zone.
+ You can see this Mutelist file in [`prowler/config/aws_mutelist.yaml`](https://github.com/prowler-cloud/prowler/blob/master/prowler/config/aws_allowlist.yaml).

- The allowlisting flag supports the following locations:
+ ### Supported Mutelist Locations

- ### Local file
- You will need to pass the local path where your Allowlist YAML file is located:
+ The mutelisting flag supports the following AWS locations when using the AWS Provider:

+ #### AWS S3 URI
+ You will need to pass the S3 URI where your Mutelist YAML file was uploaded to your bucket:
```
- prowler <provider> -w allowlist.yaml
```
- ### AWS S3 URI
- You will need to pass the S3 URI where your Allowlist YAML file was uploaded to your bucket:
```
- prowler aws -w s3://<bucket>/<prefix>/allowlist.yaml
+ prowler aws -w s3://<bucket>/<prefix>/mutelist.yaml
```
???+ note
-     Make sure that the used AWS credentials have s3:GetObject permissions in the S3 path where the allowlist file is located.
+     Make sure that the used AWS credentials have `s3:GetObject` permissions in the S3 path where the mutelist file is located.

|
||||
#### AWS DynamoDB Table ARN
|
||||
|
||||
You will need to pass the DynamoDB Allowlist Table ARN:
|
||||
You will need to pass the DynamoDB Mutelist Table ARN:
|
||||
|
||||
```
|
||||
prowler aws -w arn:aws:dynamodb:<region_name>:<account_id>:table/<table_name>
|
||||
```
|
||||
|
||||
1. The DynamoDB Table must have the following String keys:
|
||||
<img src="../img/allowlist-keys.png"/>
|
||||
<img src="../img/mutelist-keys.png"/>
|
||||
|
||||
- The Allowlist Table must have the following columns:
|
||||
- Accounts (String): This field can contain either an Account ID or an `*` (which applies to all the accounts that use this table as an allowlist).
|
||||
- The Mutelist Table must have the following columns:
|
||||
- Accounts (String): This field can contain either an Account ID or an `*` (which applies to all the accounts that use this table as an mutelist).
|
||||
- Checks (String): This field can contain either a Prowler Check Name or an `*` (which applies to all the scanned checks).
|
||||
- Regions (List): This field contains a list of regions where this allowlist rule is applied (it can also contains an `*` to apply all scanned regions).
|
||||
- Resources (List): This field contains a list of regex expressions that applies to the resources that are wanted to be allowlisted.
|
||||
- Tags (List): -Optional- This field contains a list of tuples in the form of 'key=value' that applies to the resources tags that are wanted to be allowlisted.
|
||||
- Exceptions (Map): -Optional- This field contains a map of lists of accounts/regions/resources/tags that are wanted to be excepted in the allowlist.
|
||||
- Regions (List): This field contains a list of regions where this mutelist rule is applied (it can also contains an `*` to apply all scanned regions).
|
||||
- Resources (List): This field contains a list of regex expressions that applies to the resources that are wanted to be muted.
|
||||
- Tags (List): -Optional- This field contains a list of tuples in the form of 'key=value' that applies to the resources tags that are wanted to be muted.
|
||||
- Exceptions (Map): -Optional- This field contains a map of lists of accounts/regions/resources/tags that are wanted to be excepted in the mutelist.
|
||||
|
||||
The following example will allowlist all resources in all accounts for the EC2 checks in the regions `eu-west-1` and `us-east-1` with the tags `environment=dev` and `environment=prod`, except the resources containing the string `test` in the account `012345678912` and region `eu-west-1` with the tag `environment=prod`:
|
||||
The following example will mute all resources in all accounts for the EC2 checks in the regions `eu-west-1` and `us-east-1` with the tags `environment=dev` and `environment=prod`, except the resources containing the string `test` in the account `012345678912` and region `eu-west-1` with the tag `environment=prod`:
|
||||
|
||||
<img src="../img/allowlist-row.png"/>
|
||||
<img src="../img/mutelist-row.png"/>
|
||||
|
||||
???+ note
|
||||
Make sure that the used AWS credentials have `dynamodb:PartiQLSelect` permissions in the table.
|
||||
|
||||
### AWS Lambda ARN
|
||||
#### AWS Lambda ARN
|
||||
|
||||
You will need to pass the AWS Lambda Function ARN:
|
||||
|
||||
@@ -153,7 +167,7 @@ prowler aws -w arn:aws:lambda:REGION:ACCOUNT_ID:function:FUNCTION_NAME
|
||||
Make sure that the credentials that Prowler uses can invoke the Lambda Function:
|
||||
|
||||
```
|
||||
- PolicyName: GetAllowList
|
||||
- PolicyName: GetMuteList
|
||||
PolicyDocument:
|
||||
Version: '2012-10-17'
|
||||
Statement:
|
||||
@@ -162,14 +176,14 @@ Make sure that the credentials that Prowler uses can invoke the Lambda Function:
|
||||
Resource: arn:aws:lambda:REGION:ACCOUNT_ID:function:FUNCTION_NAME
|
||||
```
|
||||
|
||||
The Lambda Function can then generate an Allowlist dynamically. Here is the code an example Python Lambda Function that
|
||||
generates an Allowlist:
|
||||
The Lambda Function can then generate an Mutelist dynamically. Here is the code an example Python Lambda Function that
|
||||
generates an Mutelist:
|
||||
|
||||
```
|
||||
def handler(event, context):
|
||||
checks = {}
|
||||
checks["vpc_flow_logs_enabled"] = { "Regions": [ "*" ], "Resources": [ "" ], Optional("Tags"): [ "key:value" ] }
|
||||
|
||||
al = { "Allowlist": { "Accounts": { "*": { "Checks": checks } } } }
|
||||
al = { "Mutelist": { "Accounts": { "*": { "Checks": checks } } } }
|
||||
return al
|
||||
```
|
||||
@@ -42,7 +42,7 @@ while read service; do
echo "$(date '+%Y-%m-%d %H:%M:%S'): Starting job for service: ${service}"

# Run the command in the background
- (prowler -p "$profile" -s "$service" -F "${account_id}-${service}" --ignore-unused-services --only-logs; echo "$(date '+%Y-%m-%d %H:%M:%S') - ${service} has completed") &
+ (prowler -p "$profile" -s "$service" -F "${account_id}-${service}" --only-logs; echo "$(date '+%Y-%m-%d %H:%M:%S') - ${service} has completed") &

# Check if we have reached the maximum number of processes
while [ $(jobs -r | wc -l) -ge ${MAX_PROCESSES} ]; do
@@ -98,7 +98,7 @@ $jobs = @()
foreach ($service in $services) {
    # Start the command as a job
    $job = Start-Job -ScriptBlock {
-         prowler -p ${using:profile} -s ${using:service} -F "${using:account_id}-${using:service}" --ignore-unused-services --only-logs
+         prowler -p ${using:profile} -s ${using:service} -F "${using:account_id}-${using:service}" --only-logs
        $endTimestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
        Write-Output "${endTimestamp} - $using:service has completed"
    }

@@ -106,9 +106,14 @@ And then by the provider specific columns:
- RESOURCE_ID
- RESOURCE_NAME

#### KUBERNETES

- NAMESPACE
- RESOURCE_ID
- RESOURCE_NAME

???+ note
-     Since Prowler v3 the CSV column delimiter is the semicolon (`;`)
+     Since Prowler v4 the CSV column delimiter is the semicolon (`;`)

### JSON

@@ -206,9 +211,6 @@ The following code is an example output of the JSON format:
}]
```

- ???+ note
-     Each finding is a `json` object within a list. This has changed in v3 since in v2 the format used was [ndjson](http://ndjson.org/).

### JSON-OCSF

@@ -1,22 +1,16 @@
- # Ignore Unused Services
+ # Scan Unused Services

???+ note
    Currently only available on the AWS provider.

- Prowler allows you to ignore unused services findings, so you can reduce the number of findings in Prowler's reports.
+ By default, Prowler only scans the cloud services that are used (where resources are created) to reduce the number of findings in Prowler's reports. If you want Prowler to also scan unused services, you can use the following command:

```console
- prowler <provider> --ignore-unused-services
+ prowler <provider> --scan-unused-services
```

- ## Services that can be ignored
+ ## Services that are ignored
### AWS
#### ACM
You can have certificates in ACM that are not in use by any AWS resource.
Prowler will check whether every certificate is about to expire; by default, if a certificate is not in use, it will not be checked whether it is expired, about to expire or valid.

- `acm_certificates_expiration_check`

#### Athena
When you create an AWS Account, Athena will create a default primary workgroup for you.
Prowler will check if that workgroup is enabled and if it is being used by checking if there were queries in the last 45 days.
@@ -36,11 +30,9 @@ If EBS default encyption is not enabled, sensitive information at rest is not protected

- `ec2_ebs_default_encryption`

- If your Security groups are not properly configured the attack surface is increased; nonetheless, Prowler will detect those security groups that are being used (they are attached) to only notify on those that are in use. This logic applies to the 15 checks related to open ports in security groups, the check for the default security group and for the security groups that allow ingress and egress traffic.
+ If your Security groups are not properly configured the attack surface is increased; nonetheless, Prowler will detect those security groups that are being used (they are attached) to only notify on those that are in use. This logic applies to the 15 checks related to open ports in security groups.

- `ec2_securitygroup_allow_ingress_from_internet_to_port_X` (15 checks)
- - `ec2_securitygroup_default_restrict_traffic`
- - `ec2_securitygroup_allow_wide_open_public_ipv4`

Prowler will also check for used Network ACLs to only alert on those with open ports that are being used.

@@ -77,15 +69,3 @@ You should enable Public Access Block at the account level to prevent the exposure
VPC Flow Logs provide visibility into network traffic that traverses the VPC and can be used to detect anomalous traffic or provide insight during security workflows. Nevertheless, Prowler will only check if the Flow Logs are enabled for those VPCs that are in use, in other words, only the VPCs where you have ENIs (network interfaces).

- `vpc_flow_logs_enabled`

- VPC subnets must not have public IP addresses by default to prevent the exposure of your resources to the internet. Prowler will only check this configuration for those VPCs that are in use, in other words, only the VPCs where you have ENIs (network interfaces).
-
- - `vpc_subnet_no_public_ip_by_default`
-
- VPCs should have separate private and public subnets to prevent the exposure of your resources to the internet. Prowler will only check this configuration for those VPCs that are in use, in other words, only the VPCs where you have ENIs (network interfaces).
-
- - `vpc_subnet_separate_private_public`
-
- VPCs should have subnets in different availability zones to prevent a single point of failure. Prowler will only check this configuration for those VPCs that are in use, in other words, only the VPCs where you have ENIs (network interfaces).
-
- - `vpc_subnet_different_az`
kubernetes/job.yaml (new file, 92 lines)
@@ -0,0 +1,92 @@
apiVersion: batch/v1
kind: Job
metadata:
  name: prowler
spec:
  template:
    metadata:
      labels:
        app: prowler
    spec:
      containers:
        - name: prowler
          image: toniblyx/prowler:stable
          command: ["prowler"]
          args: ["kubernetes", "-z"]
          imagePullPolicy: Always
          volumeMounts:
            - name: var-lib-cni
              mountPath: /var/lib/cni
              readOnly: true
            - mountPath: /var/lib/etcd
              name: var-lib-etcd
              readOnly: true
            - mountPath: /var/lib/kubelet
              name: var-lib-kubelet
              readOnly: true
            - mountPath: /var/lib/kube-scheduler
              name: var-lib-kube-scheduler
              readOnly: true
            - mountPath: /var/lib/kube-controller-manager
              name: var-lib-kube-controller-manager
              readOnly: true
            - mountPath: /etc/systemd
              name: etc-systemd
              readOnly: true
            - mountPath: /lib/systemd/
              name: lib-systemd
              readOnly: true
            - mountPath: /srv/kubernetes/
              name: srv-kubernetes
              readOnly: true
            - mountPath: /etc/kubernetes
              name: etc-kubernetes
              readOnly: true
            - mountPath: /usr/local/mount-from-host/bin
              name: usr-bin
              readOnly: true
            - mountPath: /etc/cni/net.d/
              name: etc-cni-netd
              readOnly: true
            - mountPath: /opt/cni/bin/
              name: opt-cni-bin
              readOnly: true
      hostPID: true
      restartPolicy: Never
      volumes:
        - name: var-lib-cni
          hostPath:
            path: /var/lib/cni
        - hostPath:
            path: /var/lib/etcd
          name: var-lib-etcd
        - hostPath:
            path: /var/lib/kubelet
          name: var-lib-kubelet
        - hostPath:
            path: /var/lib/kube-scheduler
          name: var-lib-kube-scheduler
        - hostPath:
            path: /var/lib/kube-controller-manager
          name: var-lib-kube-controller-manager
        - hostPath:
            path: /etc/systemd
          name: etc-systemd
        - hostPath:
            path: /lib/systemd
          name: lib-systemd
        - hostPath:
            path: /srv/kubernetes
          name: srv-kubernetes
        - hostPath:
            path: /etc/kubernetes
          name: etc-kubernetes
        - hostPath:
            path: /usr/bin
          name: usr-bin
        - hostPath:
            path: /etc/cni/net.d/
          name: etc-cni-netd
        - hostPath:
            path: /opt/cni/bin/
          name: opt-cni-bin
kubernetes/prowler-role.yaml (new file, 11 lines)
@@ -0,0 +1,11 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: prowler-read-cluster
rules:
  - apiGroups: [""]
    resources: ["pods", "configmaps", "nodes", "namespaces"]
    verbs: ["get", "list", "watch"]
  - apiGroups: ["rbac.authorization.k8s.io"]
    resources: ["clusterrolebindings", "rolebindings", "clusterroles", "roles"]
    verbs: ["get", "list", "watch"]
kubernetes/prowler-rolebinding.yaml (new file, 12 lines)
@@ -0,0 +1,12 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: prowler-read-cluster-default-sa
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: prowler-read-cluster
subjects:
  - kind: ServiceAccount
    name: default
    namespace: default
mkdocs.yml (24 lines changed)
@@ -16,23 +16,8 @@ theme:
    - navigation.sections
    - navigation.top
  palette:
-     # Palette toggle for light mode
-     - media: "(prefers-color-scheme: light)"
-       scheme: default
-       primary: black
-       accent: green
-       toggle:
-         icon: material/weather-night
-         name: Switch to dark mode
-     # Palette toggle for dark mode
-     - media: "(prefers-color-scheme: dark)"
-       scheme: slate
-       primary: black
-       accent: green
-       toggle:
-         icon: material/weather-sunny
-         name: Switch to light mode
+     primary: black
+     accent: green

plugins:
  - search
@@ -52,14 +37,15 @@ nav:
    - Miscellaneous: tutorials/misc.md
    - Reporting: tutorials/reporting.md
    - Compliance: tutorials/compliance.md
+   - Dashboard: tutorials/dashboard.md
    - Quick Inventory: tutorials/quick-inventory.md
    - Slack Integration: tutorials/integrations.md
    - Configuration File: tutorials/configuration_file.md
    - Logging: tutorials/logging.md
-   - Allowlist: tutorials/allowlist.md
+   - Mutelist: tutorials/mutelist.md
    - Check Aliases: tutorials/check-aliases.md
    - Custom Metadata: tutorials/custom-checks-metadata.md
-   - Ignore Unused Services: tutorials/ignore-unused-services.md
+   - Scan Unused Services: tutorials/scan-unused-services.md
    - Pentesting: tutorials/pentesting.md
    - Parallel Execution: tutorials/parallel-execution.md
    - Developer Guide: developer-guide/introduction.md