Compare commits

1 commit

| Author | SHA1 | Message | Date |
|---|---|---|---|
| n4ch04 | 2a30b3bcac | chore(kubernetes mitre): first version of mapping file | 2024-05-03 11:27:00 +02:00 |

2633 changed files with 22278 additions and 155157 deletions

View File

@@ -1,14 +0,0 @@
{
"repoOwner": "prowler-cloud",
"repoName": "prowler",
"targetPRLabels": [
"backport"
],
"sourcePRLabels": [
"was-backported"
],
"copySourcePRLabels": false,
"copySourcePRReviewers": true,
"prTitle": "{{sourcePullRequest.title}}",
"commitConflicts": true
}

.github/CODEOWNERS (vendored, 6 changed lines)
View File

@@ -1,5 +1 @@
* @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
# To protect a repository fully against unauthorized changes, you also need to define an owner for the CODEOWNERS file itself.
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-and-branch-protection
/.github/ @prowler-cloud/sdk
* @prowler-cloud/prowler-oss @prowler-cloud/prowler-dev

View File

@@ -1,5 +1,6 @@
name: 🐞 Bug Report
description: Create a report to help us improve
title: "[Bug]: "
labels: ["bug", "status/needs-triage"]
body:
@@ -26,7 +27,7 @@ body:
id: actual
attributes:
label: Actual Result with Screenshots or Logs
description: If applicable, add screenshots to help explain your problem. You can also add logs (anonymize them first!). Here is a command that may help to share a log: `prowler <your arguments> --log-level ERROR --log-file $(date +%F)_error.log`, then attach the log file here.
description: If applicable, add screenshots to help explain your problem. You can also add logs (anonymize them first!). Here is a command that may help to share a log: `prowler <your arguments> --log-level DEBUG --log-file $(date +%F)_debug.log`, then attach the log file here.
validations:
required: true
- type: dropdown

View File

@@ -1,7 +1,8 @@
name: 💡 Feature Request
name: 💡 Feature Request
description: Suggest an idea for this project
labels: ["feature-request", "status/needs-triage"]
body:
- type: textarea
id: Problem

View File

@@ -8,7 +8,7 @@ updates:
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
interval: "weekly"
open-pull-requests-limit: 10
target-branch: master
labels:
@@ -17,17 +17,14 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
interval: "weekly"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"
- "github_actions"
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v3
labels:
@@ -37,10 +34,9 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
interval: "weekly"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "github_actions"
- "v3"

.github/labeler.yml (vendored, 54 changed lines)
View File

@@ -25,57 +25,3 @@ provider/kubernetes:
github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"
cli:
- changed-files:
- any-glob-to-any-file: "cli/**"
mutelist:
- changed-files:
- any-glob-to-any-file: "prowler/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/kubernetes/lib/mutelist/**"
- any-glob-to-any-file: "tests/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"
integration/s3:
- changed-files:
- any-glob-to-any-file: "prowler/providers/aws/lib/s3/**"
- any-glob-to-any-file: "tests/providers/aws/lib/s3/**"
integration/slack:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/slack/**"
- any-glob-to-any-file: "tests/lib/outputs/slack/**"
integration/security-hub:
- changed-files:
- any-glob-to-any-file: "prowler/providers/aws/lib/security_hub/**"
- any-glob-to-any-file: "tests/providers/aws/lib/security_hub/**"
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
- any-glob-to-any-file: "tests/lib/outputs/asff/**"
output/html:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/html/**"
- any-glob-to-any-file: "tests/lib/outputs/html/**"
output/asff:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
- any-glob-to-any-file: "tests/lib/outputs/asff/**"
output/ocsf:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/ocsf/**"
- any-glob-to-any-file: "tests/lib/outputs/ocsf/**"
output/csv:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/csv/**"
- any-glob-to-any-file: "tests/lib/outputs/csv/**"

View File

@@ -2,19 +2,11 @@
Please include relevant motivation and context for this PR.
If it fixes an issue, please add it with `Fix #XXXX`
### Description
Please include a summary of the change and which issue is fixed. List any dependencies that are required for this change.
### Checklist
- Are there new checks included in this PR? Yes / No
- If so, do we need to update permissions for the provider? Please review this carefully.
- [ ] Review if the code is being covered by tests.
- [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
- [ ] Review if backport is needed.
### License

View File

@@ -1,42 +0,0 @@
name: Automatic Backport
on:
pull_request_target:
branches: ['master']
types: ['labeled', 'closed']
jobs:
backport:
name: Backport PR
if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
runs-on: ubuntu-latest
permissions:
id-token: write
pull-requests: write
contents: write
steps:
# Workaround not to fail the workflow if the PR does not need a backport
# https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
- name: Check for backport labels
id: check_labels
run: |-
labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
echo "$labels"
matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
echo "matched=$matched"
echo "matched=$matched" >> $GITHUB_OUTPUT
- name: Backport Action
if: fromJSON(steps.check_labels.outputs.matched) > 0
uses: sorenlouv/backport-github-action@v9.5.1
with:
github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
auto_backport_label_prefix: backport-to-
- name: Info log
if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
run: cat ~/.backport/backport.debug.log

View File

@@ -16,9 +16,9 @@ jobs:
name: Documentation Link
runs-on: ubuntu-latest
steps:
- name: Leave PR comment with the Prowler Documentation URI
- name: Leave PR comment with the SaaS Documentation URI
uses: peter-evans/create-or-update-comment@v4
with:
issue-number: ${{ env.PR_NUMBER }}
body: |
You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)

View File

@@ -43,7 +43,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
env:
POETRY_VIRTUALENVS_CREATE: "false"
@@ -65,8 +65,6 @@ jobs:
id: get-prowler-version
run: |
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
# Store prowler version major just for the release
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
@@ -91,6 +89,15 @@ jobs:
;;
esac
- name: Update Prowler version (release)
id: update-prowler-version
if: github.event_name == 'release'
run: |
PROWLER_VERSION="${{ github.event.release.tag_name }}"
poetry version "${PROWLER_VERSION}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
- name: Login to DockerHub
uses: docker/login-action@v3
with:
@@ -111,7 +118,7 @@ jobs:
- name: Build and push container image (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
push: true
tags: |
@@ -123,7 +130,7 @@ jobs:
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with:
# Use local context to get changes
# https://github.com/docker/build-push-action#path-context
@@ -153,7 +160,7 @@ jobs:
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
@@ -162,6 +169,6 @@ jobs:
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'

View File

@@ -13,10 +13,10 @@ name: "CodeQL"
on:
push:
branches: [ "master", "v3", "v4.*" ]
branches: [ "master", "v3" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master", "v3", "v4.*" ]
branches: [ "master", "v3" ]
schedule:
- cron: '00 12 * * *'

View File

@@ -11,7 +11,7 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.83.2
uses: trufflesecurity/trufflehog@v3.74.0
with:
path: ./
base: ${{ github.event.repository.default_branch }}

View File

@@ -5,7 +5,6 @@ on:
branches:
- "master"
- "v3"
- "v4.*"
jobs:
labeler:

View File

@@ -5,12 +5,10 @@ on:
branches:
- "master"
- "v3"
- "v4.*"
pull_request:
branches:
- "master"
- "v3"
- "v4.*"
jobs:
build:
runs-on: ubuntu-latest
@@ -22,7 +20,7 @@ jobs:
- uses: actions/checkout@v4
- name: Test if changes are in not ignored paths
id: are-non-ignored-files-changed
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v44
with:
files: ./**
files_ignore: |
@@ -31,7 +29,6 @@ jobs:
docs/**
permissions/**
mkdocs.yml
.backportrc.json
- name: Install poetry
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
@@ -76,7 +73,7 @@ jobs:
- name: Safety
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run safety check --ignore 70612
poetry run safety check
- name: Vulture
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |

View File

@@ -8,6 +8,8 @@ env:
RELEASE_TAG: ${{ github.event.release.tag_name }}
PYTHON_VERSION: 3.11
CACHE: "poetry"
# TODO: create a bot user for this kind of task, like prowler-bot
GIT_COMMITTER_EMAIL: "sergio@prowler.com"
jobs:
release-prowler-job:
@@ -38,6 +40,7 @@ jobs:
- name: Install dependencies
run: |
pipx install poetry
pipx inject poetry poetry-bumpversion
- name: Setup Python
uses: actions/setup-python@v5
@@ -45,6 +48,34 @@ jobs:
python-version: ${{ env.PYTHON_VERSION }}
cache: ${{ env.CACHE }}
- name: Update Poetry and config version
run: |
poetry version ${{ env.RELEASE_TAG }}
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
git_user_signingkey: true
git_commit_gpgsign: true
- name: Push updated version to the release tag
run: |
# Configure Git
git config user.name "github-actions"
git config user.email "${{ env.GIT_COMMITTER_EMAIL }}"
# Add the files with the version changed
git add prowler/config/config.py pyproject.toml
git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify -S
# Replace the tag with the version updated
git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}" --sign
# Push the tag
git push -f origin ${{ env.RELEASE_TAG }}
- name: Build Prowler package
run: |
poetry build

View File

@@ -50,13 +50,13 @@ jobs:
# Create pull request
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services"
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services."
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
title: "chore(regions_update): Changes in regions for AWS services"
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
title: "chore(regions_update): Changes in regions for AWS services."
body: |
### Description

View File

@@ -1,7 +1,7 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
rev: v4.5.0
hooks:
- id: check-merge-conflict
- id: check-yaml
@@ -15,7 +15,7 @@ repos:
## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.13.0
rev: v2.12.0
hooks:
- id: pretty-format-toml
args: [--autofix]
@@ -23,13 +23,13 @@ repos:
## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.10.0
rev: v0.9.0
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/myint/autoflake
rev: v2.3.1
rev: v2.2.1
hooks:
- id: autoflake
args:
@@ -46,7 +46,7 @@ repos:
args: ["--profile", "black"]
- repo: https://github.com/psf/black
rev: 24.4.2
rev: 24.1.1
hooks:
- id: black
@@ -58,14 +58,14 @@ repos:
args: ["--ignore=E266,W503,E203,E501,W605"]
- repo: https://github.com/python-poetry/poetry
rev: 1.8.0
rev: 1.7.0
hooks:
- id: poetry-check
- id: poetry-lock
args: ["--no-update"]
- repo: https://github.com/hadolint/hadolint
rev: v2.13.0-beta
rev: v2.12.1-beta
hooks:
- id: hadolint
args: ["--ignore=DL3013"]
@@ -85,7 +85,7 @@ repos:
# For running trufflehog in docker, use the following entry instead:
# entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail'
language: system
stages: ["pre-commit", "pre-push"]
stages: ["commit", "push"]
- id: bandit
name: bandit
@@ -97,7 +97,7 @@ repos:
- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
entry: bash -c 'safety check --ignore 70612'
entry: bash -c 'safety check'
language: system
- id: vulture

View File

@@ -2,9 +2,9 @@ FROM python:3.12-alpine
LABEL maintainer="https://github.com/prowler-cloud/prowler"
# Update system dependencies and install essential tools
# Update system dependencies
#hadolint ignore=DL3018
RUN apk --no-cache upgrade && apk --no-cache add curl git
RUN apk --no-cache upgrade && apk --no-cache add curl
# Create nonroot user
RUN mkdir -p /home/prowler && \
@@ -13,17 +13,18 @@ RUN mkdir -p /home/prowler && \
chown -R prowler:prowler /home/prowler
USER prowler
# Copy necessary files
# Copy necessary files
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY dashboard/ /home/prowler/dashboard/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler
# Install Python dependencies
# Install dependencies
ENV HOME='/home/prowler'
ENV PATH="$HOME/.local/bin:$PATH"
RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
#hadolint ignore=DL3013
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir .
# Remove deprecated dash dependencies

View File

@@ -1,6 +1,6 @@
<p align="center">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png#gh-light-mode-only" width="50%" height="50%">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png?raw=True#gh-light-mode-only" width="350" height="115">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png?raw=True#gh-dark-mode-only" width="350" height="115">
</p>
<p align="center">
<b><i>Prowler SaaS </b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment they're meant to protect. Trusted by the leaders in security.
@@ -10,10 +10,11 @@
</p>
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/3985464/3617e470-670c-47c9-9794-ce895ebdb627"></a>
<br>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-2oinmgmw6-cl7gOrljSEqo_aoripVPFA">Join our Prowler community!</a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
</p>
<hr>
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
@@ -37,9 +38,6 @@
<a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
</p>
<hr>
<p align="center">
<img align="center" src="/docs/img/prowler-cli-quick.gif" width="100%" height="100%">
</p>
# Description
@@ -63,9 +61,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 553 | 77 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 2 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 138 | 17 -> `prowler azure --list-services` | 3 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| AWS | 304 | 61 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 6 -> `prowler aws --list-categories` |
| GCP | 75 | 11 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 127 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
# 💻 Installation
@@ -77,7 +75,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
pip install prowler
prowler -v
```
>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)
More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)
## Containers
@@ -94,7 +92,7 @@ The container images are available here:
- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
## From GitHub
## From Github
Python >= 3.9, < 3.13 is required with pip and poetry:
@@ -105,7 +103,7 @@ poetry shell
poetry install
python prowler.py -v
```
> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
# 📐✏️ High level architecture
You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.
@@ -121,6 +119,7 @@ You can run Prowler from your workstation, a Kubernetes Job, a Google Compute En
- The CSV output format is common for all the providers.
We have deprecated some of our output formats:
- The HTML output is replaced by the new Prowler Dashboard; run `prowler dashboard`.
- The native JSON is replaced by the JSON [OCSF](https://schema.ocsf.io/) v1.1.0 format, common for all the providers.
## AWS

View File

@@ -12,7 +12,7 @@ As an **AWS Partner** and we have passed the [AWS Foundation Technical Review (F
## Reporting a Vulnerability
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or the ProwlerPro service, please submit the information by contacting https://support.prowler.com.
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or the ProwlerPro service, please submit the information by contacting help@prowler.pro.
The information you share with ProwlerPro as part of this process is kept confidential within ProwlerPro. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.

View File

@@ -14,4 +14,4 @@ cd ~ || exit
python3.9 -m pip install prowler-cloud
prowler -v
# Run Prowler
prowler aws
prowler

View File

@@ -212,7 +212,6 @@ Resources:
- appstream:Describe*
- codeartifact:List*
- codebuild:BatchGet*
- cognito-idp:GetUserPoolMfaConfig
- ds:Get*
- ds:Describe*
- ds:List*

View File

@@ -1,47 +0,0 @@
#!/bin/bash
# List of project IDs
PROJECT_IDS=(
"project-id-1"
"project-id-2"
"project-id-3"
# Add more project IDs as needed
)
# List of Prowler APIs to enable
APIS=(
"apikeys.googleapis.com"
"artifactregistry.googleapis.com"
"bigquery.googleapis.com"
"sqladmin.googleapis.com" # Cloud SQL
"storage.googleapis.com" # Cloud Storage
"compute.googleapis.com"
"dataproc.googleapis.com"
"dns.googleapis.com"
"containerregistry.googleapis.com" # GCR (Google Container Registry)
"container.googleapis.com" # GKE (Google Kubernetes Engine)
"iam.googleapis.com"
"cloudkms.googleapis.com" # KMS (Key Management Service)
"logging.googleapis.com"
)
# Function to enable APIs for a given project
enable_apis_for_project() {
local PROJECT_ID=$1
echo "Enabling APIs for project: ${PROJECT_ID}"
for API in "${APIS[@]}"; do
echo "Enabling API: $API for project: ${PROJECT_ID}"
if gcloud services enable "${API}" --project="${PROJECT_ID}"; then
echo "Successfully enabled API $API for project ${PROJECT_ID}."
else
echo "Failed to enable API $API for project ${PROJECT_ID}."
fi
done
}
# Loop over each project and enable the APIs
for PROJECT_ID in "${PROJECT_IDS[@]}"; do
enable_apis_for_project "${PROJECT_ID}"
done

View File

@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -1,24 +0,0 @@
apiVersion: v2
name: prowler
description: Prowler Security Tool Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"

View File

@@ -1,78 +0,0 @@
# prowler
![Version: 0.1.1](https://img.shields.io/badge/Version-0.1.1-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: 1.16.0](https://img.shields.io/badge/AppVersion-1.16.0-informational?style=flat-square)
Prowler Security Tool Helm chart for Kubernetes
# Prowler Helm Chart Deployment
This guide provides step-by-step instructions for deploying the Prowler Helm chart.
## Prerequisites
Before you begin, ensure you have the following:
1. A running Kubernetes cluster.
2. Helm installed on your local machine. If you don't have Helm installed, you can follow the [Helm installation guide](https://helm.sh/docs/intro/install/).
3. Proper access to your Kubernetes cluster (e.g., `kubectl` is configured and working).
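A quick way to confirm these prerequisites before installing, as a minimal sketch (assumes your kubeconfig already points at the target cluster):
```sh
# Confirm Helm is installed and the cluster is reachable
helm version
kubectl cluster-info
kubectl get nodes
```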
## Deployment Steps
### 1. Clone the Repository
Clone the repository containing the Helm chart to your local machine.
```sh
git clone git@github.com:prowler-cloud/prowler.git
cd prowler/contrib/k8s/helm
```
### 2. Deploy the helm chart
```
helm install prowler .
```
### 3. Verify the deployment
```
helm status prowler
kubectl get all -n prowler-ns
```
### 4. Clean Up
To uninstall the Helm release and clean up the resources, run:
```
helm uninstall prowler
kubectl delete namespace prowler-ns
```
## Values
| Key | Type | Default | Description |
|-----|------|---------|-------------|
| clusterRole.name | string | `"prowler-read-cluster"` | |
| clusterRoleBinding.name | string | `"prowler-read-cluster-binding"` | |
| configMap.name | string | `"prowler-hostpaths"` | |
| configMapData.etcCniNetd | string | `"/etc/cni/net.d"` | |
| configMapData.etcKubernetes | string | `"/etc/kubernetes"` | |
| configMapData.etcSystemd | string | `"/etc/systemd"` | |
| configMapData.libSystemd | string | `"/lib/systemd"` | |
| configMapData.optCniBin | string | `"/opt/cni/bin"` | |
| configMapData.usrBin | string | `"/usr/bin"` | |
| configMapData.varLibCni | string | `"/var/lib/cni"` | |
| configMapData.varLibEtcd | string | `"/var/lib/etcd"` | |
| configMapData.varLibKubeControllerManager | string | `"/var/lib/kube-controller-manager"` | |
| configMapData.varLibKubeScheduler | string | `"/var/lib/kube-scheduler"` | |
| configMapData.varLibKubelet | string | `"/var/lib/kubelet"` | |
| cronjob.hostPID | bool | `true` | |
| cronjob.name | string | `"prowler"` | |
| cronjob.schedule | string | `"0 0 * * *"` | |
| image.pullPolicy | string | `"Always"` | |
| image.repository | string | `"toniblyx/prowler"` | |
| image.tag | string | `"stable"` | |
| namespace.name | string | `"prowler"` | |
| serviceAccount.name | string | `"prowler"` | |
----------------------------------------------
Autogenerated from chart metadata using [helm-docs v1.11.3](https://github.com/norwoodj/helm-docs/releases/v1.11.3)
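Any of the chart's values can be overridden at install time with `--set` (or a custom values file). A small sketch, with illustrative values only:
```sh
# Run the scan at 06:00 instead of midnight and flag the cluster as GKE,
# which makes the CronJob template skip the /srv/kubernetes hostPath mount
helm install prowler . \
  --set cronjob.schedule="0 6 * * *" \
  --set clusterType=gke
```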

View File

@@ -1,11 +0,0 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: {{ .Values.clusterRole.name }}
rules:
- apiGroups: [""]
resources: ["pods", "configmaps", "nodes", "namespaces"]
verbs: ["get", "list", "watch"]
- apiGroups: ["rbac.authorization.k8s.io"]
resources: ["clusterrolebindings", "rolebindings", "clusterroles", "roles"]
verbs: ["get", "list", "watch"]

View File

@@ -1,18 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ .Values.configMap.name }}
namespace: {{ .Values.namespace.name }}
data:
varLibCni: "{{ .Values.configMap.data.varLibCni }}"
varLibEtcd: "{{ .Values.configMap.data.varLibEtcd }}"
varLibKubelet: "{{ .Values.configMap.data.varLibKubelet }}"
varLibKubeScheduler: "{{ .Values.configMap.data.varLibKubeScheduler }}"
varLibKubeControllerManager: "{{ .Values.configMap.data.varLibKubeControllerManager }}"
etcSystemd: "{{ .Values.configMap.data.etcSystemd }}"
libSystemd: "{{ .Values.configMap.data.libSystemd }}"
etcKubernetes: "{{ .Values.configMap.data.etcKubernetes }}"
usrBin: "{{ .Values.configMap.data.usrBin }}"
etcCniNetd: "{{ .Values.configMap.data.etcCniNetd }}"
optCniBin: "{{ .Values.configMap.data.optCniBin }}"
srvKubernetes: "{{ .Values.configMap.data.srvKubernetes }}"

View File

@@ -1,42 +0,0 @@
apiVersion: batch/v1
kind: CronJob
metadata:
name: {{ .Values.cronjob.name }}
namespace: {{ .Values.namespace.name }}
spec:
schedule: "{{ .Values.cronjob.schedule }}"
jobTemplate:
spec:
template:
metadata:
labels:
app: prowler
spec:
serviceAccountName: {{ .Values.serviceAccount.name }}
containers:
- name: prowler
image: {{ .Values.image.repository }}:{{ .Values.image.tag }}
command: ["prowler"]
args: ["kubernetes", "-z", "-b"]
imagePullPolicy: {{ .Values.image.pullPolicy }}
volumeMounts:
{{- range $key, $value := .Values.configMap.data }}
{{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
{{- else }}
- name: {{ $key | lower }}
mountPath: {{ $value }}
readOnly: true
{{- end }}
{{- end }}
hostPID: {{ .Values.cronjob.hostPID }}
restartPolicy: Never
volumes:
{{- range $key, $value := .Values.configMap.data }}
{{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
{{- else }}
- name: {{ $key | lower }}
hostPath:
path: {{ $value }}
{{- end }}
{{- end }}

View File

@@ -1,4 +0,0 @@
apiVersion: v1
kind: Namespace
metadata:
name: {{ .Values.namespace.name }}

View File

@@ -1,12 +0,0 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: {{ .Values.clusterRoleBinding.name }}
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: {{ .Values.clusterRole.name }}
subjects:
- kind: ServiceAccount
name: {{ .Values.serviceAccount.name }}
namespace: {{ .Values.namespace.name }}

View File

@@ -1,5 +0,0 @@
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ .Values.serviceAccount.name }}
namespace: {{ .Values.namespace.name }}

View File

@@ -1,40 +0,0 @@
namespace:
name: prowler-ns
cronjob:
name: prowler
schedule: "0 0 * * *"
hostPID: true
serviceAccount:
name: prowler-sa
image:
repository: toniblyx/prowler
tag: stable
pullPolicy: Always
clusterType:
configMap:
name: prowler-config
data:
varLibCni: "/var/lib/cni"
varLibEtcd: "/var/lib/etcd"
varLibKubelet: "/var/lib/kubelet"
varLibKubeScheduler: "/var/lib/kube-scheduler"
varLibKubeControllerManager: "/var/lib/kube-controller-manager"
etcSystemd: "/etc/systemd"
libSystemd: "/lib/systemd"
etcKubernetes: "/etc/kubernetes"
usrBin: "/usr/bin"
etcCniNetd: "/etc/cni/net.d"
optCniBin: "/opt/cni/bin"
srvKubernetes: "/srv/kubernetes"
clusterRole:
name: prowler-read-cluster
clusterRoleBinding:
name: prowler-read-cluster-binding
roleName: prowler-read-cluster

View File

@@ -12,11 +12,7 @@ Originally based on [org-multi-account](https://github.com/prowler-cloud/prowler
## Architecture Explanation
The solution is designed to be very simple. Prowler is run via an ECS Task definition that launches a single Fargate container. This Task Definition is executed on a schedule using an EventBridge Rule.
## Prerequisites
This solution assumes that you have a VPC architecture with two redundant subnets that can reach the AWS API endpoints (e.g. PrivateLink, NAT Gateway, etc.).
The solution is designed to be very simple. Prowler is run via an ECS Task definition that launches a single Fargate container. This Task Definition is executed on a schedule using an EventBridge Rule.
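The scheduling half of that wiring is simply an EventBridge rule with a schedule expression and the Fargate task as its target; a minimal, illustrative CLI sketch (rule name and expression are examples, the actual wiring lives in the CloudFormation templates described below):
```sh
# Illustrative only: an EventBridge rule firing once a day; in the stack,
# the rule's target is the Prowler Fargate task definition
aws events put-rule \
  --name prowler-daily-scan \
  --schedule-expression "rate(1 day)"
```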
## CloudFormation Templates
@@ -63,9 +59,9 @@ The logs that are generated and sent to Cloudwatch are error logs, and assessmen
## Instructions
1. Create a Private ECR Repository in the account that will host the Prowler container. The Audit account is recommended, but any account can be used.
2. Configure the .awsvariables file. Note the ROLE name chosen as it will be the CrossAccountRole.
3. Follow the steps from "View Push Commands" to build and upload the container image. Substitute step 2 with the build command provided in the Dockerfile. You need to have Docker and the AWS CLI installed, and to use the CLI to log in to the account first. After the upload, note the Image URI, as it is required for the CF-Prowler-ECS template. Pay attention to the architecture when running the docker build command: a common mistake is not specifying the architecture and building on Apple silicon, in which case the task fails with *exec /home/prowler/.local/bin/prowler: exec format error*.
4. Make sure SecurityHub is enabled in every account in AWS Organizations, and that the SecurityHub integration is enabled as explained in [Prowler - Security Hub Integration](https://github.com/prowler-cloud/prowler#security-hub-integration)
2. Configure the .awsvariables file. Note the ROLE name chosen as it will be the CrossAccountRole.
3. Follow the steps from "View Push Commands" to build and upload the container image. You need to have Docker and the AWS CLI installed, and to use the CLI to log in to the account first. After the upload, note the Image URI, as it is required for the CF-Prowler-ECS template.
4. Make sure SecurityHub is enabled in every account in AWS Organizations, and that the SecurityHub integration is enabled as explained in [Prowler - Security Hub Integration](https://github.com/prowler-cloud/prowler#security-hub-integration)
5. Deploy **CF-Prowler-CrossAccountRole.yml** in the Master Account as a single stack. You will have to choose the CrossAccountRole name (ProwlerXA-Role by default) and the ProwlerTaskRoleName (ProwlerECSTask-Role by default)
6. Deploy **CF-Prowler-CrossAccountRole.yml** in every Member Account as a StackSet. Choose the same CrossAccountName and ProwlerTaskRoleName as the previous step.
7. Deploy **CF-Prowler-IAM.yml** in the account that will host the Prowler container (the same from step 1). The following template parameters must be provided:
@@ -95,4 +91,4 @@ If you find permission errors in the CloudWatch logs, the culprit might be a [Se
## Upgrading Prowler
Prowler version is controlled by the PROWLERVER argument in the Dockerfile; change it to the desired version and follow the ECR Push Commands to update the container image.
Old images can be deleted from the ECR Repository after the new image is confirmed to work. They will show as "untagged" as only one image can hold the "latest" tag.
Old images can be deleted from the ECR Repository after the new image is confirmed to work. They will show as "untagged" as only one image can hold the "latest" tag.
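A sketch of that rebuild-and-push flow, assuming PROWLERVER is a standard Docker ARG that can be overridden on the command line, and using a hypothetical version, account ID, region, and repository name:
```sh
# Build for x86_64 (avoids the "exec format error" when building on Apple silicon)
docker build --platform linux/amd64 --build-arg PROWLERVER=3.4.0 -t prowler-ecs .

# Log in to ECR, then tag and push the new image as "latest"
aws ecr get-login-password --region us-east-1 | \
  docker login --username AWS --password-stdin 123456789012.dkr.ecr.us-east-1.amazonaws.com
docker tag prowler-ecs:latest 123456789012.dkr.ecr.us-east-1.amazonaws.com/prowler-ecs:latest
docker push 123456789012.dkr.ecr.us-east-1.amazonaws.com/prowler-ecs:latest
```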

View File

@@ -68,7 +68,7 @@ for accountId in ${ACCOUNTS_IN_ORGS}; do
# Run Prowler
echo -e "Assessing AWS Account: ${accountId}, using Role: ${ROLE} on $(date)"
# Pipe stdout to /dev/null to reduce unnecessary Cloudwatch logs
prowler aws -R arn:"${PARTITION}":iam::"${accountId}":role/"${ROLE}" --security-hub --send-sh-only-fails -f "${REGION}" > /dev/null
prowler aws -R arn:"${PARTITION}":iam::"${accountId}":role/"${ROLE}" -q -S -f "${REGION}" > /dev/null
TOTAL_SEC=$((SECONDS - START_TIME))
printf "Completed AWS Account: ${accountId} in %02dh:%02dm:%02ds" $((TOTAL_SEC / 3600)) $((TOTAL_SEC % 3600 / 60)) $((TOTAL_SEC % 60))
echo ""

View File

@@ -60,42 +60,24 @@ Resources:
Effect: Allow
Resource: "*"
Action:
- account:Get*
- appstream:Describe*
- appstream:List*
- backup:List*
- cloudtrail:GetInsightSelectors
- codeartifact:List*
- codebuild:BatchGet*
- cognito-idp:GetUserPoolMfaConfig
- dlm:Get*
- drs:Describe*
- ds:Describe*
- ds:Get*
- ds:List*
- dynamodb:GetResourcePolicy
- ds:ListAuthorizedApplications
- ec2:GetEbsEncryptionByDefault
- ec2:GetSnapshotBlockPublicAccessState
- ec2:GetInstanceMetadataDefaults
- ecr:Describe*
- ecr:GetRegistryScanningConfiguration
- elasticfilesystem:DescribeBackupPolicy
- glue:GetConnections
- glue:GetSecurityConfiguration*
- glue:GetSecurityConfiguration
- glue:SearchTables
- lambda:GetFunction*
- logs:FilterLogEvents
- lightsail:GetRelationalDatabases
- macie2:GetMacieSession
- lambda:GetFunction
- s3:GetAccountPublicAccessBlock
- shield:DescribeProtection
- shield:GetSubscriptionState
- ssm:GetDocument
- ssm-incidents:List*
- support:Describe*
- tag:GetTagKeys
- wellarchitected:List*
- PolicyName: Prowler-Security-Hub
PolicyDocument:
Version: 2012-10-17
Statement:
- Sid: AllowProwlerSecurityHub
Effect: Allow
Resource: "*"

View File

@@ -62,7 +62,7 @@ Resources:
awslogs-stream-prefix: ecs
Cpu: 1024
ExecutionRoleArn: !Ref ECSExecutionRole
Memory: 8192
Memory: 2048
NetworkMode: awsvpc
TaskRoleArn: !Ref ProwlerTaskRole
Family: SecurityHubProwlerTask

View File

@@ -97,15 +97,9 @@ Outputs:
ECSExecutionRoleARN:
Description: ARN of the ECS Task Execution Role
Value: !GetAtt ECSExecutionRole.Arn
Export:
Name: ECSExecutionRoleArn
ProwlerTaskRoleARN:
Description: ARN of the ECS Prowler Task Role
Value: !GetAtt ProwlerTaskRole.Arn
Export:
Name: ProwlerTaskRoleArn
ECSEventRoleARN:
Description: ARN of the Eventbridge Task Role
Value: !GetAtt ECSEventRole.Arn
Export:
Name: ECSEventRoleARN

View File

@@ -16,18 +16,18 @@ from prowler.lib.banner import print_banner
warnings.filterwarnings("ignore")
cli = sys.modules["flask.cli"]
print_banner()
print_banner(verbose=False)
print(
f"{Fore.GREEN}Loading all CSV files from the folder {folder_path_overview} ...\n{Style.RESET_ALL}"
)
cli.show_server_banner = lambda *x: click.echo(
f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are using {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} with the S3 integration or that integration \nfrom {Fore.CYAN}{Style.BRIGHT}Prowler Open Source{Style.RESET_ALL} and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are a {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} customer and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
)
# Initialize the app - incorporate css
dashboard = dash.Dash(
__name__,
external_stylesheets=[dbc.themes.FLATLY],
external_stylesheets=[dbc.themes.DARKLY],
use_pages=True,
suppress_callback_exceptions=True,
title="Prowler Dashboard",
@@ -60,9 +60,7 @@ def generate_nav_links(current_path):
link_content = html.Span(
[
html.Img(src=icon_url, className="w-5"),
html.Span(
page["name"], className="font-medium text-base leading-6 text-white"
),
html.Span(page["name"], className="font-medium text-base leading-6"),
],
className="flex justify-center lg:justify-normal items-center gap-x-3 py-2 px-3",
)
@@ -98,8 +96,7 @@ def generate_help_menu():
[
html.Img(src=link["icon"], className="w-5"),
html.Span(
link["title"],
className="font-medium text-base leading-6 text-white",
link["title"], className="font-medium text-base leading-6"
),
],
className="flex items-center gap-x-3 py-2 px-3",
@@ -163,7 +160,7 @@ def update_nav_bar(pathname):
html.Img(src="assets/favicon.ico", className="w-5"),
"Subscribe to prowler SaaS",
],
className="flex items-center gap-x-3 text-white",
className="flex items-center gap-x-3",
),
],
href="https://prowler.com/",

Binary image file changed, not shown (15 KiB before and after).

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" shape-rendering="geometricPrecision" text-rendering="geometricPrecision" image-rendering="optimizeQuality" fill-rule="evenodd" clip-rule="evenodd" viewBox="0 0 443 511.62"><path fill-rule="nonzero" d="M152.93 286.97c0 17.1-13.87 30.97-30.97 30.97-17.11 0-30.98-13.87-30.98-30.97v-177.4l-37.45 40.31c-11.63 12.5-31.19 13.2-43.68 1.57-12.49-11.62-13.19-31.18-1.57-43.68L99.33 9.79l2.06-1.94c12.69-11.35 32.2-10.26 43.55 2.43l91.05 101.47c11.35 12.69 10.26 32.2-2.43 43.55-12.68 11.36-32.19 10.27-43.55-2.42l-37.08-41.33v175.42zm236.24 71.77c11.35-12.69 30.86-13.78 43.55-2.43 12.69 11.36 13.78 30.87 2.42 43.56L344.1 501.34c-11.36 12.69-30.87 13.78-43.55 2.42l-2.02-1.97-91.09-97.95c-11.63-12.49-10.93-32.05 1.57-43.67 12.49-11.63 32.05-10.93 43.67 1.57l37.46 40.31V231.53c0-17.11 13.87-30.97 30.97-30.97s30.97 13.86 30.97 30.97v168.54l37.09-41.33z"/></svg>

(Deleted image; previous size 896 B.)

View File

@@ -1 +0,0 @@
<svg class="svg-icon" style="width: 1.001953125em; height: 1em;vertical-align: middle;fill: currentColor;overflow: hidden;" viewBox="0 0 1026 1024" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M1013.7 90.8C997.8 75.5 972.4 76 957.1 92L510.9 557.1 73.2 90.8C58 74.7 32.7 73.9 16.6 89 0.5 104.1-0.3 129.4 14.8 145.5l466.6 497.1 1.5 1.5c0.2 0.2 0.4 0.4 0.7 0.6 0.3 0.3 0.6 0.5 0.9 0.8 0.3 0.3 0.6 0.5 0.9 0.7 0.2 0.2 0.4 0.4 0.7 0.6 0.3 0.2 0.6 0.5 0.9 0.7 0.2 0.2 0.5 0.4 0.7 0.5l0.9 0.6c0.3 0.2 0.5 0.4 0.8 0.5 0.3 0.2 0.6 0.3 0.9 0.5 0.3 0.2 0.6 0.3 0.9 0.5 0.3 0.2 0.5 0.3 0.8 0.4 0.3 0.2 0.6 0.3 1 0.5 0.3 0.1 0.5 0.3 0.8 0.4 0.3 0.2 0.7 0.3 1 0.5 0.2 0.1 0.5 0.2 0.7 0.3 0.4 0.2 0.7 0.3 1.1 0.4 0.2 0.1 0.5 0.2 0.7 0.3 0.4 0.1 0.8 0.3 1.2 0.4 0.2 0.1 0.5 0.1 0.7 0.2l1.2 0.3c0.2 0.1 0.4 0.1 0.7 0.2 0.4 0.1 0.8 0.2 1.3 0.3 0.2 0 0.4 0.1 0.6 0.1 0.4 0.1 0.9 0.2 1.3 0.2 0.2 0 0.4 0.1 0.6 0.1 0.5 0.1 0.9 0.1 1.4 0.2 0.2 0 0.4 0 0.6 0.1 0.5 0 1 0.1 1.5 0.1h4.6c0.5 0 1-0.1 1.5-0.1 0.2 0 0.4 0 0.5-0.1 0.5 0 0.9-0.1 1.4-0.2 0.2 0 0.4-0.1 0.6-0.1 0.4-0.1 0.9-0.1 1.3-0.2 0.2 0 0.4-0.1 0.6-0.1l1.2-0.3c0.2-0.1 0.4-0.1 0.7-0.2l1.2-0.3c0.2-0.1 0.5-0.1 0.7-0.2 0.4-0.1 0.8-0.2 1.1-0.4 0.2-0.1 0.5-0.2 0.7-0.3 0.4-0.1 0.7-0.3 1.1-0.4 0.3-0.1 0.5-0.2 0.8-0.3 0.3-0.1 0.7-0.3 1-0.5 0.3-0.1 0.5-0.2 0.8-0.4 0.3-0.2 0.6-0.3 0.9-0.5 0.3-0.1 0.6-0.3 0.8-0.4 0.3-0.2 0.6-0.3 0.8-0.5 0.3-0.2 0.6-0.3 0.9-0.5 0.3-0.2 0.5-0.3 0.8-0.5l0.9-0.6c0.2-0.2 0.4-0.3 0.7-0.5 0.3-0.2 0.6-0.5 1-0.7 0.2-0.1 0.4-0.3 0.6-0.5 0.3-0.3 0.7-0.5 1-0.8 0.2-0.1 0.3-0.3 0.5-0.5 0.5-0.5 1-0.9 1.5-1.4l0.9-0.9 475.4-495.6c15.3-15.7 14.7-41.1-1.2-56.3z" fill="#898989" /></svg>

(Deleted image; previous size 1.6 KiB.)

View File

@@ -5,7 +5,7 @@
/* Use this file to add custom styles using Tailwind's utility classes. */
/*
! tailwindcss v3.4.3 | MIT License | https://tailwindcss.com */
! tailwindcss v3.4.1 | MIT License | https://tailwindcss.com */
/*
1. Prevent padding and border from affecting element width. (https://github.com/mozdevs/cssremedy/issues/4)
@@ -216,8 +216,6 @@ textarea {
/* 1 */
line-height: inherit;
/* 1 */
letter-spacing: inherit;
/* 1 */
color: inherit;
/* 1 */
margin: 0;
@@ -241,9 +239,9 @@ select {
*/
button,
input:where([type='button']),
input:where([type='reset']),
input:where([type='submit']) {
[type='button'],
[type='reset'],
[type='submit'] {
-webkit-appearance: button;
/* 1 */
background-color: transparent;
@@ -499,10 +497,6 @@ video {
--tw-backdrop-opacity: ;
--tw-backdrop-saturate: ;
--tw-backdrop-sepia: ;
--tw-contain-size: ;
--tw-contain-layout: ;
--tw-contain-paint: ;
--tw-contain-style: ;
}
::backdrop {
@@ -553,18 +547,14 @@ video {
--tw-backdrop-opacity: ;
--tw-backdrop-saturate: ;
--tw-backdrop-sepia: ;
--tw-contain-size: ;
--tw-contain-layout: ;
--tw-contain-paint: ;
--tw-contain-style: ;
}
.custom-grid {
grid-template-columns: minmax(0, 16fr) repeat(11, minmax(0, 11fr));
}
.collapse {
visibility: collapse;
.visible {
visibility: visible;
}
.relative {
@@ -604,10 +594,6 @@ video {
margin-right: auto;
}
.mb-0 {
margin-bottom: 0px;
}
.mb-2 {
margin-bottom: 0.5rem;
}
@@ -632,14 +618,6 @@ video {
margin-top: auto;
}
.mb-\[30px\] {
margin-bottom: 30px;
}
.mt-\[30px\] {
margin-top: 30px;
}
.block {
display: block;
}
@@ -656,6 +634,14 @@ video {
display: inline-flex;
}
.min-w-36 {
min-width: 9rem;
}
.min-w-44 {
min-width: 11rem;
}
.table {
display: table;
}
@@ -676,10 +662,6 @@ video {
max-height: 300px;
}
.w-3 {
width: 0.75rem;
}
.w-5 {
width: 1.25rem;
}
@@ -688,50 +670,6 @@ video {
width: 2rem;
}
.w-\[10\%\] {
width: 10%;
}
.w-\[10\.5\%\] {
width: 10.5%;
}
.w-\[11\%\] {
width: 11%;
}
.w-\[13\.5\%\] {
width: 13.5%;
}
.w-\[14\.5\%\] {
width: 14.5%;
}
.w-\[15\%\] {
width: 15%;
}
.w-\[36\%\] {
width: 36%;
}
.w-\[4\%\] {
width: 4%;
}
.w-\[40\.5\%\] {
width: 40.5%;
}
.w-\[9\%\] {
width: 9%;
}
.w-\[9\.5\%\] {
width: 9.5%;
}
.w-fit {
width: -moz-fit-content;
width: fit-content;
@@ -741,10 +679,6 @@ video {
width: 100%;
}
.min-w-36 {
min-width: 9rem;
}
.grid-cols-12 {
grid-template-columns: repeat(12, minmax(0, 1fr));
}
@@ -862,31 +796,30 @@ video {
}
.bg-gradient-failed {
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #EF4444 87.35%);
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #e67272 87.35%);
}
.bg-gradient-passed {
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #4ADE80 87.35%);
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #54d283 87.35%);
}
.p-2 {
padding: 0.5rem;
.bg-gradient-muted {
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #636c78 87.35%);
}
.p-3 {
padding: 0.75rem;
}
.p-2 {
padding: 0.5rem;
}
.px-10 {
padding-left: 2.5rem;
padding-right: 2.5rem;
}
.px-2 {
padding-left: 0.5rem;
padding-right: 0.5rem;
}
.px-3 {
padding-left: 0.75rem;
padding-right: 0.75rem;
@@ -921,10 +854,6 @@ video {
padding-bottom: 0.75rem;
}
.pr-2 {
padding-right: 0.5rem;
}
.text-center {
text-align: center;
}
@@ -1000,11 +929,6 @@ video {
color: rgb(41 37 36 / var(--tw-text-opacity));
}
.text-white {
--tw-text-opacity: 1;
color: rgb(255 255 255 / var(--tw-text-opacity));
}
.opacity-90 {
opacity: 0.9;
}
@@ -1068,6 +992,19 @@ video {
/* Firefox */
}
/*Styles for previous-next-container from table*/
.previous-next-container {
margin-top: 1rem;
color: #000;
}
/*Style for input in filter table*/
.dash-table-container .dash-spreadsheet-container .dash-spreadsheet-inner input:not([type=radio]):not([type=checkbox]) {
color: #FFF !important;
opacity: 1 !important;
}
#_dash-app-content {
--tw-bg-opacity: 1;
background-color: rgb(231 229 228 / var(--tw-bg-opacity));
@@ -1104,10 +1041,6 @@ video {
color: rgb(41 37 36 / var(--tw-text-opacity));
}
#_dash-app-content .accordion .accordion-collapse.collapse {
visibility: visible;
}
#_dash-app-content .accordion .accordion-button:not(.collapsed) {
--tw-bg-opacity: 1;
background-color: rgb(231 229 228 / var(--tw-bg-opacity));
@@ -1224,10 +1157,6 @@ video {
width: auto;
}
.overview-table .card .collapse {
visibility: visible;
}
@media (min-width: 1536px) {
.\32xl\:container {
width: 100%;
@@ -1370,37 +1299,3 @@ video {
row-gap: 0px;
}
}
@media (min-width: 1536px) {
.\32xl\:w-\[10\%\] {
width: 10%;
}
.\32xl\:w-\[12\.5\%\] {
width: 12.5%;
}
.\32xl\:w-\[14\%\] {
width: 14%;
}
.\32xl\:w-\[15\.5\%\] {
width: 15.5%;
}
.\32xl\:w-\[2\%\] {
width: 2%;
}
.\32xl\:w-\[48\%\] {
width: 48%;
}
.\32xl\:w-\[71\.5\%\] {
width: 71.5%;
}
.\32xl\:w-\[9\%\] {
width: 9%;
}
}

View File

@@ -1535,7 +1535,7 @@ def get_section_container_iso(data, section_1, section_2):
return html.Div(section_containers, className="compliance-data-layout")
def get_section_containers_format4(data, section_1):
def get_section_containers_pci(data, section_1):
data["STATUS"] = data["STATUS"].apply(map_status_to_icon)
data[section_1] = data[section_1].astype(str)
@@ -1654,13 +1654,9 @@ def get_section_containers_format4(data, section_1):
)
graph_div_service = html.Div(graph_service, className="graph-section-req")
if "REQUIREMENTS_NAME" not in specific_data.columns:
title_internal = f"{service}"
else:
title_internal = f"{service} - {specific_data['REQUIREMENTS_NAME'].iloc[0]}"
internal_accordion_item = dbc.AccordionItem(
title=title_internal,
title=service,
children=[html.Div([data_table], className="inner-accordion-content")],
)
@@ -2223,232 +2219,3 @@ def get_section_containers_ens(data, section_1, section_2, section_3, section_4)
section_containers.append(section_container)
return html.Div(section_containers, className="compliance-data-layout")
# This function extracts and compares up to two numeric values, ensuring correct sorting for version-like strings.
def extract_numeric_values(value):
numbers = re.findall(r"\d+", str(value))
if len(numbers) >= 2:
return int(numbers[0]), int(numbers[1])
elif len(numbers) == 1:
return int(numbers[0]), 0
return 0, 0
def get_section_containers_kisa_ismsp(data, section_1, section_2):
data["STATUS"] = data["STATUS"].apply(map_status_to_icon)
data[section_1] = data[section_1].astype(str)
data[section_2] = data[section_2].astype(str)
data.sort_values(
by=section_1,
key=lambda x: x.map(extract_numeric_values),
ascending=True,
inplace=True,
)
findings_counts_section = (
data.groupby([section_2, "STATUS"]).size().unstack(fill_value=0)
)
findings_counts_name = (
data.groupby([section_1, "STATUS"]).size().unstack(fill_value=0)
)
section_containers = []
for name in data[section_1].unique():
success_name = (
findings_counts_name.loc[name, pass_emoji]
if pass_emoji in findings_counts_name.columns
else 0
)
failed_name = (
findings_counts_name.loc[name, fail_emoji]
if fail_emoji in findings_counts_name.columns
else 0
)
fig_name = go.Figure(
data=[
go.Bar(
name="Failed",
x=[failed_name],
y=[""],
orientation="h",
marker=dict(color="#e77676"),
width=[0.8],
),
go.Bar(
name="Success",
x=[success_name],
y=[""],
orientation="h",
marker=dict(color="#45cc6e"),
width=[0.8],
),
]
)
fig_name.update_layout(
barmode="stack",
margin=dict(l=10, r=10, t=10, b=10),
paper_bgcolor="rgba(0,0,0,0)",
plot_bgcolor="rgba(0,0,0,0)",
showlegend=False,
width=350,
height=30,
xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
annotations=[
dict(
x=success_name + failed_name,
y=0,
xref="x",
yref="y",
text=str(success_name),
showarrow=False,
font=dict(color="#45cc6e", size=14),
xanchor="left",
yanchor="middle",
),
dict(
x=0,
y=0,
xref="x",
yref="y",
text=str(failed_name),
showarrow=False,
font=dict(color="#e77676", size=14),
xanchor="right",
yanchor="middle",
),
],
)
graph_name = dcc.Graph(
figure=fig_name, config={"staticPlot": True}, className="info-bar"
)
graph_div = html.Div(graph_name, className="graph-section")
direct_internal_items = []
for section in data[data[section_1] == name][section_2].unique():
specific_data = data[
(data[section_1] == name) & (data[section_2] == section)
]
success_section = (
findings_counts_section.loc[section, pass_emoji]
if pass_emoji in findings_counts_section.columns
else 0
)
failed_section = (
findings_counts_section.loc[section, fail_emoji]
if fail_emoji in findings_counts_section.columns
else 0
)
data_table = dash_table.DataTable(
data=specific_data.to_dict("records"),
columns=[
{"name": i, "id": i}
for i in ["CHECKID", "STATUS", "REGION", "ACCOUNTID", "RESOURCEID"]
],
style_table={"overflowX": "auto"},
style_as_list_view=True,
style_cell={"textAlign": "left", "padding": "5px"},
)
fig_section = go.Figure(
data=[
go.Bar(
name="Failed",
x=[failed_section],
y=[""],
orientation="h",
marker=dict(color="#e77676"),
),
go.Bar(
name="Success",
x=[success_section],
y=[""],
orientation="h",
marker=dict(color="#45cc6e"),
),
]
)
fig_section.update_layout(
barmode="stack",
margin=dict(l=10, r=10, t=10, b=10),
paper_bgcolor="rgba(0,0,0,0)",
plot_bgcolor="rgba(0,0,0,0)",
showlegend=False,
width=350,
height=30,
xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
annotations=[
dict(
x=success_section + failed_section,
y=0,
xref="x",
yref="y",
text=str(success_section),
showarrow=False,
font=dict(color="#45cc6e", size=14),
xanchor="left",
yanchor="middle",
),
dict(
x=0,
y=0,
xref="x",
yref="y",
text=str(failed_section),
showarrow=False,
font=dict(color="#e77676", size=14),
xanchor="right",
yanchor="middle",
),
],
)
graph_section = dcc.Graph(
figure=fig_section,
config={"staticPlot": True},
className="info-bar-child",
)
graph_div_section = html.Div(graph_section, className="graph-section-req")
internal_accordion_item = dbc.AccordionItem(
title=section,
children=[html.Div([data_table], className="inner-accordion-content")],
)
internal_section_container = html.Div(
[
graph_div_section,
dbc.Accordion(
[internal_accordion_item], start_collapsed=True, flush=True
),
],
className="accordion-inner--child",
)
direct_internal_items.append(internal_section_container)
accordion_item = dbc.AccordionItem(
title=f"{name}", children=direct_internal_items
)
section_container = html.Div(
[
graph_div,
dbc.Accordion([accordion_item], start_collapsed=True, flush=True),
],
className="accordion-inner",
)
section_containers.append(section_container)
return html.Div(section_containers, className="compliance-data-layout")

View File

@@ -1,23 +0,0 @@
import warnings
from dashboard.common_methods import get_section_containers_format1
warnings.filterwarnings("ignore")
def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_ATTRIBUTES_SECTION",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
].copy()
return get_section_containers_format1(
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
)

View File

@@ -6,13 +6,6 @@ warnings.filterwarnings("ignore")
def get_table(data):
# append the requirements_description to idgrupocontrol
data["REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL"] = (
data["REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL"]
+ " - "
+ data["REQUIREMENTS_DESCRIPTION"]
)
aux = data[
[
"REQUIREMENTS_ATTRIBUTES_MARCO",

View File

@@ -1,25 +0,0 @@
import warnings
from dashboard.common_methods import get_section_containers_kisa_ismsp
warnings.filterwarnings("ignore")
def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
"REQUIREMENTS_ATTRIBUTES_SECTION",
# "REQUIREMENTS_DESCRIPTION",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
].copy()
return get_section_containers_kisa_ismsp(
aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
)

View File

@@ -1,25 +0,0 @@
import warnings
from dashboard.common_methods import get_section_containers_kisa_ismsp
warnings.filterwarnings("ignore")
def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
"REQUIREMENTS_ATTRIBUTES_SECTION",
# "REQUIREMENTS_DESCRIPTION",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
].copy()
return get_section_containers_kisa_ismsp(
aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
)

View File

@@ -1,6 +1,6 @@
import warnings
from dashboard.common_methods import get_section_containers_format4
from dashboard.common_methods import get_section_containers_format2
warnings.filterwarnings("ignore")
@@ -9,13 +9,15 @@ def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_NAME",
"REQUIREMENTS_SUBTECHNIQUES",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
]
].copy()
return get_section_containers_format4(aux, "REQUIREMENTS_ID")
return get_section_containers_format2(
aux, "REQUIREMENTS_ID", "REQUIREMENTS_SUBTECHNIQUES"
)

View File

@@ -1,6 +1,6 @@
import warnings
from dashboard.common_methods import get_section_containers_format4
from dashboard.common_methods import get_section_containers_format2
warnings.filterwarnings("ignore")
@@ -9,13 +9,15 @@ def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_NAME",
"REQUIREMENTS_SUBTECHNIQUES",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
]
].copy()
return get_section_containers_format4(aux, "REQUIREMENTS_ID")
return get_section_containers_format2(
aux, "REQUIREMENTS_ID", "REQUIREMENTS_SUBTECHNIQUES"
)

View File

@@ -1,23 +0,0 @@
import warnings
from dashboard.common_methods import get_section_containers_format2
warnings.filterwarnings("ignore")
def get_table(data):
aux = data[
[
"REQUIREMENTS_ID",
"REQUIREMENTS_SUBTECHNIQUES",
"CHECKID",
"STATUS",
"REGION",
"ACCOUNTID",
"RESOURCEID",
]
].copy()
return get_section_containers_format2(
aux, "REQUIREMENTS_ID", "REQUIREMENTS_SUBTECHNIQUES"
)

View File

@@ -1,6 +1,6 @@
import warnings
from dashboard.common_methods import get_section_containers_format4
from dashboard.common_methods import get_section_containers_pci
warnings.filterwarnings("ignore")
@@ -17,4 +17,4 @@ def get_table(data):
]
]
return get_section_containers_format4(aux, "REQUIREMENTS_ID")
return get_section_containers_pci(aux, "REQUIREMENTS_ID")

View File

@@ -21,13 +21,12 @@ muted_manual_color = "#b33696"
critical_color = "#951649"
high_color = "#e11d48"
medium_color = "#ee6f15"
low_color = "#fcf45d"
low_color = "#f9f5e6"
informational_color = "#3274d9"
# Folder output path
folder_path_overview = os.getcwd() + "/output"
folder_path_compliance = os.getcwd() + "/output/compliance"
# Encoding
encoding_format = "utf-8"
# Error action, it is recommended to use "ignore" or "replace"
error_action = "ignore"

View File

@@ -11,7 +11,6 @@ def create_layout_overview(
service_dropdown: html.Div,
table_row_dropdown: html.Div,
status_dropdown: html.Div,
table_div_header: html.Div,
) -> html.Div:
"""
Create the layout of the dashboard.
@@ -41,7 +40,7 @@ def create_layout_overview(
html.Div([account_dropdown], className=""),
html.Div([region_dropdown], className=""),
],
className="grid gap-x-4 mt-[30px] mb-[30px] sm:grid-cols-2 lg:grid-cols-3",
className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-3 lg:gap-y-0",
),
html.Div(
[
@@ -49,7 +48,7 @@ def create_layout_overview(
html.Div([service_dropdown], className=""),
html.Div([status_dropdown], className=""),
],
className="grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-3",
className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-3 lg:gap-y-0",
),
html.Div(
[
@@ -58,11 +57,11 @@ def create_layout_overview(
html.Div(className="flex", id="gcp_card", n_clicks=0),
html.Div(className="flex", id="k8s_card", n_clicks=0),
],
className="grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-4",
className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-4 lg:gap-y-0",
),
html.H4(
"Count of Findings by severity",
className="text-prowler-stone-900 text-lg font-bold mb-[30px]",
className="text-prowler-stone-900 text-lg font-bold",
),
html.Div(
[
@@ -79,7 +78,7 @@ def create_layout_overview(
id="line_plot",
),
],
className="grid gap-x-4 grid-cols-12 mb-[30px]",
className="grid gap-x-4 gap-y-4 grid-cols-12 lg:gap-y-0",
),
html.Div(
[
@@ -106,10 +105,9 @@ def create_layout_overview(
],
className="flex justify-between items-center",
),
table_div_header,
html.Div(id="table", className="grid"),
],
className="grid gap-x-8 2xl:container mx-auto",
className="grid gap-x-8 gap-y-8 2xl:container mx-auto",
)

View File

@@ -16,7 +16,6 @@ from dash.dependencies import Input, Output
# Config import
from dashboard.config import (
encoding_format,
error_action,
fail_color,
folder_path_compliance,
info_color,
@@ -30,7 +29,6 @@ from dashboard.lib.dropdowns import (
create_region_dropdown_compliance,
)
from dashboard.lib.layouts import create_layout_compliance
from prowler.lib.logger import logger
# Suppress warnings
warnings.filterwarnings("ignore")
@@ -40,16 +38,11 @@ warnings.filterwarnings("ignore")
csv_files = []
for file in glob.glob(os.path.join(folder_path_compliance, "*.csv")):
try:
with open(
file, "r", newline="", encoding=encoding_format, errors=error_action
) as csvfile:
reader = csv.reader(csvfile)
num_rows = sum(1 for row in reader)
if num_rows > 1:
csv_files.append(file)
except UnicodeDecodeError:
logger.error(f"Error decoding file: {file}")
with open(file, "r", newline="", encoding=encoding_format) as csvfile:
reader = csv.reader(csvfile)
num_rows = sum(1 for row in reader)
if num_rows > 1:
csv_files.append(file)
def load_csv_files(csv_files):
@@ -57,7 +50,7 @@ def load_csv_files(csv_files):
dfs = []
results = []
for file in csv_files:
df = pd.read_csv(file, sep=";", on_bad_lines="skip", encoding=encoding_format)
df = pd.read_csv(file, sep=";", on_bad_lines="skip")
if "CHECKID" in df.columns:
dfs.append(df)
result = file
@@ -245,9 +238,7 @@ def display_data(
"""Load CSV files into a single pandas DataFrame."""
dfs = []
for file in files:
df = pd.read_csv(
file, sep=";", on_bad_lines="skip", encoding=encoding_format
)
df = pd.read_csv(file, sep=";", on_bad_lines="skip")
dfs.append(df.astype(str))
return pd.concat(dfs, ignore_index=True)
@@ -272,7 +263,7 @@ def display_data(
# Rename the column PROJECTID to ACCOUNTID for GCP
if data.columns.str.contains("PROJECTID").any():
data.rename(columns={"PROJECTID": "ACCOUNTID"}, inplace=True)
data["REGION"] = "-"
# Rename the column SUBSCRIPTIONID to ACCOUNTID for Azure
if data.columns.str.contains("SUBSCRIPTIONID").any():
data.rename(columns={"SUBSCRIPTIONID": "ACCOUNTID"}, inplace=True)
@@ -474,7 +465,7 @@ def display_data(
overall_status_result_graph = get_graph(pie_1, "Overall Status Result")
security_level_graph = get_graph(
pie_2, f"Top 5 failed {current_filter} by requirements"
pie_2, f"Top 5 failed {current_filter} by findings"
)
return (

File diff suppressed because it is too large

View File

@@ -8,6 +8,10 @@
@tailwind components;
@tailwind utilities;
#_dash-app-content {
@apply bg-prowler-stone-500;
}
@layer components {
.custom-grid {
grid-template-columns: minmax(0, 16fr) repeat(11, minmax(0, 11fr));
@@ -16,24 +20,6 @@
.custom-grid-large {
grid-template-columns: minmax(0, 10fr) repeat(11, minmax(0, 11fr));
}
}
@layer utilities {
/* Hide scrollbar for Chrome, Safari and Opera */
.no-scrollbar::-webkit-scrollbar {
display: none;
}
/* Hide scrollbar for IE, Edge and Firefox */
.no-scrollbar {
-ms-overflow-style: none; /* IE and Edge */
scrollbar-width: none; /* Firefox */
}
}
#_dash-app-content {
@apply bg-prowler-stone-500;
}
/* Styles for the accordion in the compliance page */
#_dash-app-content .accordion .accordion-header .accordion-button {
@apply text-prowler-stone-900 inline-block px-4 text-xs font-bold uppercase transition-all rounded-lg bg-prowler-stone-300 hover:bg-prowler-stone-900/10;
@@ -43,10 +29,6 @@
@apply text-prowler-stone-900 bg-prowler-white rounded-lg;
}
#_dash-app-content .accordion .accordion-collapse.collapse {
@apply visible
}
#_dash-app-content .accordion .accordion-button:not(.collapsed) {
@apply text-prowler-stone-900 bg-prowler-stone-500;
}
@@ -117,6 +99,14 @@
@apply absolute right-6 top-2 w-auto h-8 z-50;
}
.overview-table .card .collapse {
@apply visible
}
@layer utilities {
/* Hide scrollbar for Chrome, Safari and Opera */
.no-scrollbar::-webkit-scrollbar {
display: none;
}
/* Hide scrollbar for IE, Edge and Firefox */
.no-scrollbar {
-ms-overflow-style: none; /* IE and Edge */
scrollbar-width: none; /* Firefox */
}
}

View File

@@ -1,9 +1,11 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
content: [
"*.{py,html,js}",
"./**/*.{py,html,js}",
"./**/**/*.{py,html,js}",
"./assets/**/*.{py,html,js}",
"./components/**/*.{py,html,js}",
"./pages/**/*.{py,html,js}",
"./utils/**/*.{py,html,js}",
"./app.py",
],
theme: {
extend: {

View File

@@ -120,42 +120,6 @@ All the checks MUST fill the `report.region` with the following criteria:
- If the audited resource is regional, use the `region` (the name changes depending on the provider: `location` in Azure and GCP and `namespace` in K8s) attribute within the resource object.
- If the audited resource is global, use the `service_client.region` within the service client object, as in the sketch below.
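A minimal sketch of both cases, written in the excerpt style used elsewhere in this guide and assuming a hypothetical `db_instance` resource from an AWS check (`Check_Report_AWS` and `iam_client` stand in for the usual report class and service client):
```python
# Regional resource: take the region from the audited resource itself
report = Check_Report_AWS(self.metadata())
report.region = db_instance.region

# Global resource: fall back to the region stored in the service client
report = Check_Report_AWS(self.metadata())
report.region = iam_client.region
```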
### Check Severity
The severity of the checks is defined in the metadata file with the `Severity` field. The severity is always in lowercase and can be one of the following values:
- `critical`
- `high`
- `medium`
- `low`
- `informational`
You may need to change it in the check's code if the check has different scenarios that could change the severity. This can be done by using the `report.check_metadata.Severity` attribute:
```python
if <valid for more than 6 months>:
report.status = "PASS"
report.check_metadata.Severity = "informational"
report.status_extended = f"RDS Instance {db_instance.id} certificate has over 6 months of validity left."
elif <valid for more than 3 months>:
report.status = "PASS"
report.check_metadata.Severity = "low"
report.status_extended = f"RDS Instance {db_instance.id} certificate has between 3 and 6 months of validity."
elif <valid for more than 1 month>:
report.status = "FAIL"
report.check_metadata.Severity = "medium"
report.status_extended = f"RDS Instance {db_instance.id} certificate less than 3 months of validity."
elif <valid for less than 1 month>:
report.status = "FAIL"
report.check_metadata.Severity = "high"
report.status_extended = f"RDS Instance {db_instance.id} certificate less than 1 month of validity."
else:
report.status = "FAIL"
report.check_metadata.Severity = "critical"
report.status_extended = (
f"RDS Instance {db_instance.id} certificate has expired."
)
```
### Resource ID, Name and ARN
All the checks MUST fill the `report.resource_id` and `report.resource_arn` with the following criteria:
@@ -222,7 +186,7 @@ class ec2_securitygroup_with_many_ingress_egress_rules(Check):
max_security_group_rules = ec2_client.audit_config.get(
"max_security_group_rules", 50
)
for security_group_arn, security_group in ec2_client.security_groups.items():
for security_group in ec2_client.security_groups:
```
```yaml title="config.yaml"
@@ -272,7 +236,7 @@ Each Prowler check has metadata associated which is stored at the same level of
# Severity holds the check's severity, always in lowercase (critical, high, medium, low or informational)
"Severity": "critical",
# ResourceType only for AWS, holds the type from here
# https://docs.aws.amazon.com/securityhub/latest/userguide/asff-resources.html
# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html
"ResourceType": "Other",
# Description holds the title of the check, for now it is the same as CheckTitle
"Description": "Ensure there are no EC2 AMIs set as Public.",
@@ -319,7 +283,7 @@ Each Prowler check has metadata associated which is stored at the same level of
For the Remediation Code we use the following knowledge base to fill it:
- Official documentation for the provider
- https://docs.prowler.com/checks/checks-index
- https://docs.bridgecrew.io
- https://www.trendmicro.com/cloudoneconformity
- https://github.com/cloudmatos/matos/tree/master/remediations

View File

@@ -1,11 +1,7 @@
# Debugging
Debugging in Prowler makes things easier!
If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution.
## VSCode
In VSCode you can run the code using the integrated debugger. Please, refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution. To do that, if you are using VSCode you can run the code using the integrated debugger. Please, refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
The following file is an example of the [debugging configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations) file that you can add to [Visual Studio Code](https://code.visualstudio.com/).
This file should be inside the *.vscode* folder and its name has to be *launch.json*:
@@ -15,62 +11,31 @@ This file should inside the *.vscode* folder and its name has to be *launch.json
"version": "0.2.0",
"configurations": [
{
"name": "Debug AWS Check",
"type": "debugpy",
"name": "Python: Current File",
"type": "python",
"request": "launch",
"program": "prowler.py",
"args": [
"aws",
"-f",
"eu-west-1",
"--service",
"cloudwatch",
"--log-level",
"ERROR",
"-c",
"<check_name>"
"-p",
"dev",
],
"console": "integratedTerminal",
"justMyCode": false
},
{
"name": "Debug Azure Check",
"type": "debugpy",
"name": "Python: Debug Tests",
"type": "python",
"request": "launch",
"program": "prowler.py",
"args": [
"azure",
"--sp-env-auth",
"--log-level",
"ERROR",
"-c",
"<check_name>"
],
"console": "integratedTerminal",
"justMyCode": false
},
{
"name": "Debug GCP Check",
"type": "debugpy",
"request": "launch",
"program": "prowler.py",
"args": [
"gcp",
"--log-level",
"ERROR",
"-c",
"<check_name>"
],
"console": "integratedTerminal",
"justMyCode": false
},
{
"name": "Debug K8s Check",
"type": "debugpy",
"request": "launch",
"program": "prowler.py",
"args": [
"kubernetes",
"--log-level",
"ERROR",
"-c",
"<check_name>"
"program": "${file}",
"purpose": [
"debug-test"
],
"console": "integratedTerminal",
"justMyCode": false

View File

@@ -4,18 +4,16 @@ You can extend Prowler Open Source in many different ways, in most cases you wil
## Get the code and install all dependencies
First of all, you need a version of Python 3.9 or higher and also `pip` installed to be able to install all dependencies required.
Then, to start working with the Prowler GitHub repository you need to fork it to be able to propose changes for new features, bug fixes, etc. To fork the Prowler repo please refer to [this guide](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo?tool=webui#forking-a-repository).
Once that is satisfied, go ahead and clone your forked repo:
First of all, you need a version of Python 3.9 or higher and also pip installed to be able to install all dependencies required. Once that is satisfied, go ahead and clone the repo:
```
git clone https://github.com/<your-github-user>/prowler
git clone https://github.com/prowler-cloud/prowler
cd prowler
```
For isolation and to avoid conflicts with other environments, we recommend using `poetry`, a Python dependency management tool. You can install it by following the instructions [here](https://python-poetry.org/docs/#installation).
For isolation and avoid conflicts with other environments, we recommend usage of `poetry`:
```
pip install poetry
```
Then install all dependencies including the ones for developers:
```
poetry install --with dev
@@ -46,13 +44,6 @@ Before we merge any of your pull requests we pass checks to the code, we use the
You can see all dependencies in file `pyproject.toml`.
Moreover, you will need to install the latest version of [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
Additionally, please make sure you follow the code documentation practices outlined in this guide: [Google Python Style Guide - Comments and Docstrings](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings).
???+ note
If you have any trouble when committing to the Prowler repository, add the `--no-verify` flag to the `git commit` command.
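The note above refers to the repository's git hooks. Assuming the standard `pre-commit` tool manages those hooks, they can be installed and run manually before pushing; this is a sketch, not an exact list of the checks that will run:
```
pre-commit install          # set up the git hooks once
pre-commit run --all-files  # run every configured hook locally
```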
## Pull Request Checklist
If you create or review a PR in https://github.com/prowler-cloud/prowler please follow this checklist:

View File

@@ -44,6 +44,7 @@ class Provider(ABC):
Methods:
print_credentials(): Displays the provider's credentials used for auditing in the command-line interface.
setup_session(): Sets up the session for the provider.
get_output_mapping(): Returns the output mapping between the provider and the generic model.
validate_arguments(): Validates the arguments for the provider.
get_checks_to_execute_by_audit_resources(): Returns a set of checks based on the input resources to scan.
@@ -130,6 +131,15 @@ class Provider(ABC):
"""
raise NotImplementedError()
@abstractmethod
def get_output_mapping(self) -> dict:
"""
get_output_mapping returns the output mapping between the provider and the generic model.
This method needs to be created in each provider.
"""
raise NotImplementedError()
def validate_arguments(self) -> None:
"""
validate_arguments validates the arguments for the provider.
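As a hedged illustration of the new abstract method, a concrete provider could implement `get_output_mapping` roughly like this (the class name and mapped fields below are illustrative, not the exact mapping shipped by any provider):
```python
class AwsProvider(Provider):
    def get_output_mapping(self) -> dict:
        # Maps generic output fields to attributes of this provider (illustrative values)
        return {
            "auth_method": "identity.profile",
            "account_uid": "identity.account",
            "partition": "identity.partition",
        }
```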

View File

@@ -23,8 +23,8 @@ The Prowler's service structure is the following and the way to initialise it is
All the Prowler provider services inherit from a base class depending on the provider used.
- [AWS Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/service/service.py)
- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
- [Kubernetes Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/kubernetes/lib/service/service.py)
Each class is used to initialize the credentials and the API clients to be used in the service. If any threading is used it must be coded there (see the sketch below).
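As a minimal sketch, assuming the AWS base class keeps the `AWSService` name and the `(service, provider)` constructor from the file linked above, a new service could start like this (the class and attribute names are placeholders):
```python
from prowler.providers.aws.lib.service.service import AWSService


class MyService(AWSService):  # hypothetical service name
    def __init__(self, provider):
        # The base class stores the provider session, the audited regions and the API clients
        super().__init__(__class__.__name__, provider)
        self.items = []
```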
@@ -225,10 +225,10 @@ Each Prowler service requires a service client to use the service in the checks.
The following is the `<new_service_name>_client.py` containing the initialization of the service's class we have just created so the service's checks can use it:
```python
from prowler.providers.common.provider import Provider
from prowler.providers.common.common import get_global_provider
from prowler.providers.<provider>.services.<new_service_name>.<new_service_name>_service import <Service>
<new_service_name>_client = <Service>(Provider.get_global_provider())
<new_service_name>_client = <Service>(get_global_provider())
```
## Permissions

View File

@@ -115,7 +115,7 @@ class Test_iam_password_policy_uppercase:
# Prowler for AWS uses a shared object called aws_provider where it stores
# the info related with the provider
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
"prowler.providers.common.common.get_global_provider",
return_value=aws_provider,
),
# We have to mock also the iam_client from the check to enforce that the iam_client used is the one
@@ -191,6 +191,9 @@ class Test_iam_password_policy_uppercase:
expiration=True,
)
# We set a mocked aws_provider to unify providers; this isolates each test so it does not step on other tests' configuration
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
# In this scenario we have to mock also the IAM service and the iam_client from the check to enforce
# that the iam_client used is the one created within this check because patch != import, and if you
# execute tests in parallel some objects can be already initialised hence the check won't be isolated.
# In this case we don't use the Moto decorator, we use the mocked IAM client for both objects
with mock.patch(
@@ -313,7 +316,7 @@ If the test your are creating belongs to a check that uses more than one provide
```python
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
"prowler.providers.common.common.get_global_provider",
return_value=set_mocked_aws_provider(
[AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1]
),
@@ -344,10 +347,10 @@ from prowler.providers.<provider>.services.<service>.<service>_client import <se
```
2. `<service>_client.py`:
```python
from prowler.providers.common.provider import Provider
from prowler.providers.common.common import get_global_provider
from prowler.providers.<provider>.services.<service>.<service>_service import <SERVICE>
<service>_client = <SERVICE>(Provider.get_global_provider())
<service>_client = <SERVICE>(mocked_provider)
```
Due to the above import path, it is not the same to patch the following objects: if you run a set of tests, either in parallel or not, some clients may already have been instantiated by another check, so your test execution would be using another test's service instance:
@@ -368,7 +371,7 @@ Mocking a service client using the following code ...
Once the needed attributes are set for the mocked provider, you can use the mocked provider:
```python title="Mocking the service_client"
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
"prowler.providers.common.common.get_global_provider",
new=set_mocked_aws_provider([<region>]),
), mock.patch(
"prowler.providers.<provider>.services.<service>.<check>.<check>.<service>_client",
@@ -390,7 +393,7 @@ Mocking a service client using the following code ...
```python title="Mocking the service and the service_client"
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
"prowler.providers.common.common.get_global_provider",
new=set_mocked_aws_provider([<region>]),
), mock.patch(
"prowler.providers.<provider>.services.<service>.<SERVICE>",
@@ -447,7 +450,7 @@ class Test_compute_project_os_login_enabled:
# In this scenario we have to mock the compute_client from the check to enforce that the compute_client used is the one created above
# The return value of the get_global_provider function is also mocked to return our GCP mocked provider defined in fixtures
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
"prowler.providers.common.common.get_global_provider",
return_value=set_mocked_gcp_provider(),
), mock.patch(
"prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled.compute_client",
@@ -487,7 +490,7 @@ class Test_compute_project_os_login_enabled:
compute_client.projects = [project]
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
"prowler.providers.common.common.get_global_provider",
return_value=set_mocked_gcp_provider(),
), mock.patch(
"prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled.compute_client",
@@ -592,7 +595,7 @@ is following the actual format, add one function where the client is passed to b
`mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute referenced after *client*).
In the example of BigQuery the function is called `mock_api_dataset_calls`. Inside this function there is an assignment to
be used in the `_get_datasets` method in BigQuery class:
be used in the `__get_datasets__` method in BigQuery class:
```python
# Mocking datasets
@@ -655,7 +658,7 @@ class Test_app_ensure_http_is_redirected_to_https:
# In this scenario we have to mock the app_client from the check to enforce that the app_client used is the one created above
# The return value of the get_global_provider function is also mocked to return our Azure mocked provider defined in fixtures
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
"prowler.providers.common.common.get_global_provider",
return_value=set_mocked_azure_provider(),
), mock.patch(
"prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https.app_client",
@@ -705,7 +708,7 @@ class Test_app_ensure_http_is_redirected_to_https:
app_client = mock.MagicMock
with mock.patch(
"prowler.providers.common.provider.Provider.get_global_provider",
"prowler.providers.common.common.get_global_provider",
return_value=set_mocked_azure_provider(),
), mock.patch(
"prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https.app_client",
@@ -765,7 +768,7 @@ from tests.providers.azure.azure_fixtures import (
set_mocked_azure_provider,
)
# Function to mock the service function _get_components; its task is to return a possible value that the real function could return
# Function to mock the service function __get_components__; its task is to return a possible value that the real function could return
def mock_appinsights_get_components(_):
return {
AZURE_SUBSCRIPTION_ID: {
@@ -779,12 +782,12 @@ def mock_appinsights_get_components(_):
# Patch decorator to use the mocked function instead of the function with the real API call
@patch(
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights._get_components",
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights.__get_components__",
new=mock_appinsights_get_components,
)
class Test_AppInsights_Service:
# Mandatory test for every service, this method test the instance of the client is correct
def test_get_client(self):
def test__get_client__(self):
app_insights = AppInsights(set_mocked_azure_provider())
assert (
app_insights.clients[AZURE_SUBSCRIPTION_ID].__class__.__name__
@@ -794,8 +797,8 @@ class Test_AppInsights_Service:
def test__get_subscriptions__(self):
app_insights = AppInsights(set_mocked_azure_provider())
assert app_insights.subscriptions.__class__.__name__ == "dict"
# Test for the function _get_components; inside it the client uses the mocked function
def test_get_components(self):
# Test for the function __get_components__; inside it the client uses the mocked function
def test__get_components__(self):
appinsights = AppInsights(set_mocked_azure_provider())
assert len(appinsights.components) == 1
assert (

Binary file not shown.

Before

Width:  |  Height:  |  Size: 15 KiB

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@@ -40,10 +40,10 @@ If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to
Prowler for Azure supports the following authentication types:
- [Service principal application](https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals?tabs=browser#service-principal-object) by environment variables (recommended)
- Service principal authentication by environment variables (Enterprise Application)
- Current az cli credentials stored
- Interactive browser authentication
- [Managed identity](https://learn.microsoft.com/en-us/entra/identity/managed-identities-azure-resources/overview) authentication
- Managed identity authentication
### Service Principal authentication
@@ -56,8 +56,6 @@ export AZURE_CLIENT_SECRET="XXXXXXX"
```
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
Follow the instructions in the [Create Prowler Service Principal](../tutorials/azure/create-prowler-service-principal.md) section to create a service principal.
### AZ CLI / Browser / Managed Identity authentication
The other three cases do not need additional configuration: `--az-cli-auth` and `--managed-identity-auth` are automated options. To use `--browser-auth` the user needs to authenticate against Azure using the default browser to start the scan, and `--tenant-id` is also required, for example:
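A hedged example of such a run, with a placeholder tenant ID:
```
prowler azure --browser-auth --tenant-id "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
```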
@@ -66,22 +64,55 @@ The other three cases does not need additional configuration, `--az-cli-auth` an
To use each one you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:
- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler and specific Entra checks (not mandatory to have access to execute the tool). The permissions required by the tool are the following:
- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler (not mandatory to have access to execute the tool).
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.
#### Microsoft Entra ID scope
Microsoft Entra ID (formerly Azure AD) permissions required by the tool are the following:
- `Directory.Read.All`
- `Policy.Read.All`
- `UserAuthenticationMethod.Read.All`
The best way to assign them is through the Azure web console:
1. Access to Microsoft Entra ID
2. In the left menu bar, go to "App registrations"
3. Once there, in the menu bar click on "+ New registration" to register a new application
4. Fill the "Name, select the "Supported account types" and click on "Register. You will be redirected to the applications page.
![Register an Application page](../img/register-application.png)
5. Select the new application
6. In the left menu bar, select "API permissions"
7. Then click on "+ Add a permission" and select "Microsoft Graph"
8. Once in the "Microsoft Graph" view, select "Application permissions"
9. Finally, search for "Directory", "Policy" and "UserAuthenticationMethod" and select the following permissions:
- `Directory.Read.All`
- `Policy.Read.All`
- `UserAuthenticationMethod.Read.All`
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool. It is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:
- `Reader`
- `ProwlerRole` (custom role defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json))
![EntraID Permissions](../img/AAD-permissions.png)
To assign the permissions, follow the instructions in the [Microsoft Entra ID permissions](../tutorials/azure/create-prowler-service-principal.md#assigning-the-proper-permissions) section and the [Azure subscriptions permissions](../tutorials/azure/subscriptions.md#assigning-proper-permissions) section, respectively.
#### Checks that require ProwlerRole
#### Subscriptions scope
The following checks require the `ProwlerRole` custom role to be executed. If you want to run them, make sure you have assigned the role to the identity that is going to be assumed by Prowler:
Regarding the subscription scope, Prowler by default scans all the subscriptions that it is able to list, so it is required to add the following RBAC built-in roles per subscription to the entity that is going to be assumed by the tool:
- `app_function_access_keys_configured`
- `app_function_ftps_deployment_disabled`
- `Security Reader`
- `Reader`
To assign these roles, follow the instructions:
1. Access your subscription, then select your subscription.
2. Select "Access control (IAM)".
3. In the overview, select "Roles"
![IAM Page](../img/page-IAM.png)
4. Click on "+ Add" and select "Add role assignment"
5. In the search bar, type `Security Reader`, select it and click on "Next"
6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
7. Click on "Review + assign" to apply the new role.
*Repeat these steps for `Reader` role*
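The same assignment can also be scripted with the Azure CLI; this is a sketch assuming you know the object ID of the identity Prowler will assume and the target subscription (both values are placeholders):
```
az role assignment create \
  --assignee "<prowler-identity-object-id>" \
  --role "Reader" \
  --scope "/subscriptions/<subscription-id>"
```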
## Google Cloud

View File

@@ -0,0 +1 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 240.29 285.79"><defs><style>.cls-1{fill:url(#linear-gradient);}.cls-2{fill:#71be44;}</style><linearGradient id="linear-gradient" x1="157.45" y1="97.85" x2="211.7" y2="97.85" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#5a9b37"/><stop offset="1" stop-color="#71be44"/></linearGradient></defs><circle class="cls-1" cx="148.2" cy="97.85" r="67.45"/><path class="cls-2" d="M66.28,30.4H148.2a0,0,0,0,1,0,0V185.35a81.93,81.93,0,0,1-81.93,81.93h0a0,0,0,0,1,0,0V30.4A0,0,0,0,1,66.28,30.4Z"/></svg>

After

Width:  |  Height:  |  Size: 635 B

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 8.7 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.4 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 357 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 351 KiB

After

Width:  |  Height:  |  Size: 338 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 688 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 746 KiB

After

Width:  |  Height:  |  Size: 214 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 258 KiB

After

Width:  |  Height:  |  Size: 848 KiB

BIN
docs/img/page-IAM.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 348 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 552 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 9.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 22 KiB

After

Width:  |  Height:  |  Size: 11 KiB

BIN
docs/img/prowler-logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 27 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 22 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.2 KiB

Some files were not shown because too many files have changed in this diff