Compare commits

307 Commits (abbreviated SHA1s; the author and date columns are empty in this extract):

a35fbec7ff 11ca3b59bc cfd2165b26 6acf8d6404 ece220a71d 8adc72ad57 9addf86aa5
2913d50a52 c6c06b3354 8242fa883e 6646bae26c 32da86f393 74d02e1da6 8ec6e89e5c
17012ec1a4 8461257428 26a5ffaf82 563ddb3707 2c11c3d6f9 e050f44d63 4fd3405bbf
a1c2caa745 f639dc8bf4 35325d9f40 71503b553a d91a240ea8 b9b5f66073 e3f66840aa
0d6c529a46 5237658047 c00f61ac10 2cd840a2b5 7e630ebe27 2f1c0facfd 603bb03f35
b7af1a06e8 02fc034b1f 40522cdc62 dc11d85451 13c50086eb f7729381e0 d244475578
10dcbaea7b c91bbdcf2b c7dbcb17d6 5a8a9286db 2476a1275a ac680c58cd 68f0916ce4
dc896fc0af 76af71d2df 96f761e4ef 9e16e477e9 2038e30d3e a4dc6975b0 a4a89fa581
fc449bfd7b 2477948ae9 ca98584ded 489830f01a bd56ca2979 04483a9a4f 684f63d398
b528dd44cd dfdeac0a46 b52b67fd4b 5cf7d89aab f5e6b1e438 aa44bde940 ddc927a4ad
fbc99259e2 28f6f0abcc 0933a04239 5185f3a41e 6d20b11394 a01635e9ea 3bf9cd3db1
e15f0b2d0f f2de059ca1 8c8ac95d9c 89159c2111 70eb59185b f97af19860 5ccd8af2a2
b53e8abc87 db4c4fdaeb 44afe2db3e 204d548cd0 3faf80c0d7 5078e4a823 d1b57ebd75
fdab3a737a b6f01b92dd c92537c791 3e7cc2e0a2 b8cfdb590b 577afbd521 d01cc51b6d
ffa60b4ccd d6dd0f7244 4df0dc4904 386a1e1d1a db9d7a4439 5725035e29 96a49e97d2
2a95750525 b868d1a7fe 37ade2a722 c67032e07f 0de8ef032a 027aa9796d a505776227
3be9de376a bd26d74b28 ca27854ff0 abd18dc14d 297f506fd3 78ca4b93a5 c80d51b585
cf9b23c302 ef4b9e8d6a a5a8c2a769 64b21ae2b9 3da4824a1d 2247296cf9 615127f790
42f21a52c9 e9442b2f89 6336b1c0d9 a0603b972e f319884532 d49139c4f4 046c82232d
027aafd9ea 215d5dabd7 f5e2ac7486 6fc24b5435 3d99e6ea28 b23aefadc1 b585a31a14
9c817ae8a9 cd7f19c00e d1a7d19799 d7dffbc44b 0402cc7e2d bf83f38c89 673619c8a1
2345a7384b e387c591c3 47a37c7d0d 7b359cf1eb 35d525b903 b5b193427d e6ae539323
541b907038 040e1eaa5e e23a674277 e73cefdf1a 9ed4e89c60 da547b2bbe ca033745c9
fb49fb83ae 76e0b23365 82ccdc45d2 de777a6417 87d8cda745 64abd0a6d0 096d7c6304
4908e06544 d42cc66d9f 7a5318b936 ffb494f9a4 f515b2b53b a3cf7665ac dbaf72958e
169d1686d2 ba726b205d 630d980861 7d81040eae 4009d96f8a cee5064b11 e5c911abef
ff5c41f363 cf84875355 fc23eccc7b c5fb11e815 fdab1edd3e ea74d82c48 093738c65f
bae224c891 32cded949d 6463dcdde0 0b16dab2ad 825c620e6f 819a5597a3 4bae3d2600
131cb82751 029caf3b10 9ee23a39b5 4837df4352 d173d58a93 af29570fe9 9253cd42dd
836b4ba2cc f28c0578aa 536f0df9d3 465261e1df 3667370604 9ca64e7bdb 95a9f1c458
9fbd627f9a 7203fcf4f1 f10bb343a6 9147a45e2f 5353d515b6 e8a94733bf 625be45742
ecb6cb897f f07bd79442 b7c1fabae1 59d3b2f33e 6c098e98e3 380011fd1e e97bf32a90
ed18ea0ec4 dc897986bc e296d6e5c1 1252e6163b 8ad14c7833 61b9ecc214 f8f2c19454
922438a7a0 920f98c9ef 9b1ad5dd2e d7a97b6e1d 07db051d14 6fec85589d f82aa1c3e1
ee9faedbbe e5dec1251d 692a39b08f 60b3523def e1428bc1ff 0ff8b7e02a 7b84008046
30a092e2aa 11a7ff2977 12ba978361 42182a2b70 26eaec3101 daf6194dee e28300a1db
1a225c334f 1d64ca4372 2a139e3dc7 89d1712ff1 45ea9e1e79 4b46fe9788 28b9e269b7
0a41ec4746 e6472f9bfc c033af6194 4d662dc446 0de10c4742 f7b7ce3b95 7b43b3d31e
84b9c442fe a890895e8b f3c6720a1c 8c29bbfe4e 910c969473 2795673ebc dc510e0683
070edc1693 8645ee20c3 8d4abd7638 f4106f4b72 4087aaf6cf c3ef0d4ca8 a1aed37482
d05a15ef5a ef9d3b902e 366bb91a1e 0c01cf28c4 f895e4df6a 2affed81ad b33b529e74
0bbb762c74 ec5fb035b1 e45a189422 b2b66bd080 b905d73b82 6ed3167e17 3a2fea7136
212ff2439e 7b2a7faf6b 2725d476a4 dfa940440c 862bc8cae8 a51bdef083 52955f9c6e
581cfcc917 4ee29225bc 095b6bc463 bd1fcdd68a 98f6003069 583c3c6ca7
.github/CODEOWNERS (6 changes)
@@ -1 +1,5 @@
-* @prowler-cloud/prowler-oss @prowler-cloud/prowler-dev
+* @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
+
+# To protect a repository fully against unauthorized changes, you also need to define an owner for the CODEOWNERS file itself.
+# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-and-branch-protection
+/.github/ @prowler-cloud/sdk
.github/ISSUE_TEMPLATE/bug_report.yml (1 change; the removed/added markers were lost in this extract)
@@ -1,6 +1,5 @@
 name: 🐞 Bug Report
 description: Create a report to help us improve
 title: "[Bug]: "
 labels: ["bug", "status/needs-triage"]

 body:
.github/ISSUE_TEMPLATE/feature-request.yml (3 changes)
@@ -1,8 +1,7 @@
-name: 💡 Feature Request
+name: 💡 Feature Request
 description: Suggest an idea for this project
 labels: ["feature-request", "status/needs-triage"]

 body:
   - type: textarea
     id: Problem
.github/dependabot.yml (8 changes)
@@ -8,7 +8,7 @@ updates:
   - package-ecosystem: "pip"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: "daily"
     open-pull-requests-limit: 10
     target-branch: master
     labels:
@@ -17,14 +17,14 @@ updates:
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: "daily"
     open-pull-requests-limit: 10
     target-branch: master

   - package-ecosystem: "pip"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: "daily"
     open-pull-requests-limit: 10
     target-branch: v3
     labels:
@@ -34,7 +34,7 @@ updates:
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: "daily"
     open-pull-requests-limit: 10
     target-branch: v3
     labels:
.github/labeler.yml (50 changes)
@@ -29,3 +29,53 @@ github_actions:
 cli:
   - changed-files:
       - any-glob-to-any-file: "cli/**"
+
+mutelist:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/mutelist/**"
+      - any-glob-to-any-file: "prowler/providers/aws/lib/mutelist/**"
+      - any-glob-to-any-file: "prowler/providers/azure/lib/mutelist/**"
+      - any-glob-to-any-file: "prowler/providers/gcp/lib/mutelist/**"
+      - any-glob-to-any-file: "prowler/providers/kubernetes/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/providers/aws/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/providers/azure/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"
+
+integration/s3:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/providers/aws/lib/s3/**"
+      - any-glob-to-any-file: "tests/providers/aws/lib/s3/**"
+
+integration/slack:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/slack/**"
+      - any-glob-to-any-file: "tests/lib/outputs/slack/**"
+
+integration/security-hub:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/providers/aws/lib/security_hub/**"
+      - any-glob-to-any-file: "tests/providers/aws/lib/security_hub/**"
+      - any-glob-to-any-file: "prowler/lib/outputs/asff/**"
+      - any-glob-to-any-file: "tests/lib/outputs/asff/**"
+
+output/html:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/html/**"
+      - any-glob-to-any-file: "tests/lib/outputs/html/**"
+
+output/asff:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/asff/**"
+      - any-glob-to-any-file: "tests/lib/outputs/asff/**"
+
+output/ocsf:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/ocsf/**"
+      - any-glob-to-any-file: "tests/lib/outputs/ocsf/**"
+
+output/csv:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/csv/**"
+      - any-glob-to-any-file: "tests/lib/outputs/csv/**"
.github/pull_request_template.md (7 changes; the removed/added markers were lost in this extract)
@@ -2,11 +2,18 @@
 Please include relevant motivation and context for this PR.

 If fixes an issue please add it with `Fix #XXXX`

 ### Description

 Please include a summary of the change and which issue is fixed. List any dependencies that are required for this change.

 ### Checklist

 - Are there new checks included in this PR? Yes / No
   - If so, do we need to update permissions for the provider? Please review this carefully.
 - [ ] Review if the code is being covered by tests.
 - [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings

 ### License
.github/workflows/build-lint-push-containers.yml (17 changes)
@@ -43,7 +43,7 @@ jobs:
     runs-on: ubuntu-latest
     outputs:
       prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
-      prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
+      prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
     env:
       POETRY_VIRTUALENVS_CREATE: "false"

@@ -65,6 +65,8 @@ jobs:
         id: get-prowler-version
         run: |
           PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
           echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
+          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"

           # Store prowler version major just for the release
           PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
@@ -89,15 +91,6 @@ jobs:
           ;;
           esac

-      - name: Update Prowler version (release)
-        id: update-prowler-version
-        if: github.event_name == 'release'
-        run: |
-          PROWLER_VERSION="${{ github.event.release.tag_name }}"
-          poetry version "${PROWLER_VERSION}"
-          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
-          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
-
       - name: Login to DockerHub
         uses: docker/login-action@v3
         with:
@@ -118,7 +111,7 @@ jobs:
       - name: Build and push container image (latest)
         if: github.event_name == 'push'
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
         with:
           push: true
           tags: |
@@ -130,7 +123,7 @@ jobs:
       - name: Build and push container image (release)
         if: github.event_name == 'release'
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
         with:
           # Use local context to get changes
           # https://github.com/docker/build-push-action#path-context
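The retained version step derives both outputs from Poetry plus shell prefix-stripping. As a quick illustration (the version value is hypothetical):

```console
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"   # e.g. 4.3.0
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"       # strip everything after the first dot
echo "${PROWLER_VERSION_MAJOR}"                      # -> 4
```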
.github/workflows/codeql.yml (4 changes)
@@ -13,10 +13,10 @@ name: "CodeQL"

 on:
   push:
-    branches: [ "master", "v3" ]
+    branches: [ "master", "v3", "v4.*" ]
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: [ "master", "v3" ]
+    branches: [ "master", "v3", "v4.*" ]
   schedule:
     - cron: '00 12 * * *'
.github/workflows/find-secrets.yml (2 changes)
@@ -11,7 +11,7 @@ jobs:
       with:
         fetch-depth: 0
     - name: TruffleHog OSS
-      uses: trufflesecurity/trufflehog@v3.76.3
+      uses: trufflesecurity/trufflehog@3.80.4
      with:
        path: ./
        base: ${{ github.event.repository.default_branch }}
.github/workflows/pull-request.yml (4 changes)
@@ -5,10 +5,12 @@ on:
     branches:
       - "master"
       - "v3"
+      - "v4.*"
   pull_request:
     branches:
       - "master"
       - "v3"
+      - "v4.*"
 jobs:
   build:
     runs-on: ubuntu-latest
@@ -73,7 +75,7 @@ jobs:
       - name: Safety
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
-          poetry run safety check --ignore 67599
+          poetry run safety check --ignore 70612
       - name: Vulture
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
.github/workflows/pypi-release.yml (21 changes)
@@ -40,7 +40,6 @@ jobs:
       - name: Install dependencies
         run: |
           pipx install poetry
-          pipx inject poetry poetry-bumpversion

       - name: Setup Python
         uses: actions/setup-python@v5
@@ -48,10 +47,6 @@ jobs:
           python-version: ${{ env.PYTHON_VERSION }}
           cache: ${{ env.CACHE }}

-      - name: Update Poetry and config version
-        run: |
-          poetry version ${{ env.RELEASE_TAG }}
-
       - name: Import GPG key
         uses: crazy-max/ghaction-import-gpg@v6
         with:
@@ -60,22 +55,6 @@ jobs:
           git_user_signingkey: true
           git_commit_gpgsign: true

-      - name: Push updated version to the release tag
-        run: |
-          # Configure Git
-          git config user.name "github-actions"
-          git config user.email "${{ env.GIT_COMMITTER_EMAIL }}"
-
-          # Add the files with the version changed
-          git add prowler/config/config.py pyproject.toml
-          git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify -S
-
-          # Replace the tag with the version updated
-          git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}" --sign
-
-          # Push the tag
-          git push -f origin ${{ env.RELEASE_TAG }}
-
       - name: Build Prowler package
         run: |
           poetry build

(pre-commit hooks configuration; file path not shown in the extract)
@@ -97,7 +97,7 @@ repos:
   - id: safety
     name: safety
     description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
-    entry: bash -c 'safety check --ignore 67599'
+    entry: bash -c 'safety check --ignore 70612'
     language: system

   - id: vulture
README.md (21 changes)
@@ -1,6 +1,6 @@
 <p align="center">
-  <img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png?raw=True#gh-light-mode-only" width="500" height="83">
-  <img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png?raw=True#gh-dark-mode-only" width="500" height="83">
+  <img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png#gh-light-mode-only" width="50%" height="50%">
+  <img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
 </p>
 <p align="center">
   <b><i>Prowler SaaS</i></b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
@@ -10,11 +10,10 @@
 </p>

 <p align="center">
-  <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/3985464/3617e470-670c-47c9-9794-ce895ebdb627"></a>
+  <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
   <br>
   <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
 </p>
 <hr>
 <p align="center">
   <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>
@@ -38,6 +37,9 @@
   <a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
 </p>
 <hr>
+<p align="center">
+  <img align="center" src="/docs/img/prowler-cli-quick.gif" width="100%" height="100%">
+</p>

 # Description

@@ -61,9 +63,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe...

 | Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
 |---|---|---|---|---|
-| AWS | 356 | 65 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 6 -> `prowler aws --list-categories` |
+| AWS | 385 | 67 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
 | GCP | 77 | 13 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories` |
-| Azure | 127 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
+| Azure | 135 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
 | Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |

 # 💻 Installation
@@ -75,7 +77,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo...
 pip install prowler
 prowler -v
 ```
-More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)
+>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)

 ## Containers
@@ -92,7 +94,7 @@ The container images are available here:
 - [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
 - [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)

-## From Github
+## From GitHub

 Python >= 3.9, < 3.13 is required with pip and poetry:

@@ -103,8 +105,7 @@ poetry shell
 poetry install
 python prowler.py -v
 ```
-???+ note
-    If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
+> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
 # 📐✏️ High level architecture

 You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.
(deleted file; path not shown in the extract — CLI usage notes)
@@ -1,6 +0,0 @@
-# CLI
-To show the banner, use:
-`python cli/cli.py banner`
-## Listing
-List services by provider.
-`python cli/cli.py <provider> list-services`
cli/cli.py (deleted file, 63 lines)
@@ -1,63 +0,0 @@
-import typer
-
-from prowler.lib.banner import print_banner
-from prowler.lib.check.check import (
-    list_fixers,
-    list_services,
-    print_fixers,
-    print_services,
-)
-
-app = typer.Typer()
-aws = typer.Typer(name="aws")
-azure = typer.Typer(name="azure")
-gcp = typer.Typer(name="gcp")
-kubernetes = typer.Typer(name="kubernetes")
-
-app.add_typer(aws, name="aws")
-app.add_typer(azure, name="azure")
-app.add_typer(gcp, name="gcp")
-app.add_typer(kubernetes, name="kubernetes")
-
-
-def list_resources(provider: str, resource_type: str):
-    if resource_type == "services":
-        print_services(list_services(provider))
-    elif resource_type == "fixers":
-        print_fixers(list_fixers(provider))
-
-
-def create_list_commands(provider_typer: typer.Typer):
-    provider_name = provider_typer.info.name
-
-    @provider_typer.command(
-        "list-services",
-        help=f"List the {provider_name} services that are supported by Prowler.",
-    )
-    def list_services_command():
-        list_resources(provider_name, "services")
-
-    @provider_typer.command(
-        "list-fixers",
-        help=f"List the {provider_name} fixers that are supported by Prowler.",
-    )
-    def list_fixers_command():
-        list_resources(provider_name, "fixers")
-
-
-create_list_commands(aws)
-create_list_commands(azure)
-create_list_commands(gcp)
-create_list_commands(kubernetes)
-
-
-@app.command("banner", help="Prints the banner of the tool.")
-def banner(show: bool = True):
-    if show:
-        print_banner(show)
-    else:
-        print("Banner is not shown.")
-
-
-if __name__ == "__main__":
-    app()
contrib/k8s/helm/.helmignore (new file, 23 lines)
@@ -0,0 +1,23 @@
+# Patterns to ignore when building packages.
+# This supports shell glob matching, relative path matching, and
+# negation (prefixed with !). Only one pattern per line.
+.DS_Store
+# Common VCS dirs
+.git/
+.gitignore
+.bzr/
+.bzrignore
+.hg/
+.hgignore
+.svn/
+# Common backup files
+*.swp
+*.bak
+*.tmp
+*.orig
+*~
+# Various IDEs
+.project
+.idea/
+*.tmproj
+.vscode/
contrib/k8s/helm/Chart.yaml (new file, 24 lines)
@@ -0,0 +1,24 @@
+apiVersion: v2
+name: prowler
+description: Prowler Security Tool Helm chart for Kubernetes
+
+# A chart can be either an 'application' or a 'library' chart.
+#
+# Application charts are a collection of templates that can be packaged into versioned archives
+# to be deployed.
+#
+# Library charts provide useful utilities or functions for the chart developer. They're included as
+# a dependency of application charts to inject those utilities and functions into the rendering
+# pipeline. Library charts do not define any templates and therefore cannot be deployed.
+type: application
+
+# This is the chart version. This version number should be incremented each time you make changes
+# to the chart and its templates, including the app version.
+# Versions are expected to follow Semantic Versioning (https://semver.org/)
+version: 0.1.1
+
+# This is the version number of the application being deployed. This version number should be
+# incremented each time you make changes to the application. Versions are not expected to
+# follow Semantic Versioning. They should reflect the version the application is using.
+# It is recommended to use it with quotes.
+appVersion: "1.16.0"
contrib/k8s/helm/README.md (new file, 78 lines)
@@ -0,0 +1,78 @@
+# prowler
+
+Prowler Security Tool Helm chart for Kubernetes
+
+# Prowler Helm Chart Deployment
+
+This guide provides step-by-step instructions for deploying the Prowler Helm chart.
+
+## Prerequisites
+
+Before you begin, ensure you have the following:
+
+1. A running Kubernetes cluster.
+2. Helm installed on your local machine. If you don't have Helm installed, you can follow the [Helm installation guide](https://helm.sh/docs/intro/install/).
+3. Proper access to your Kubernetes cluster (e.g., `kubectl` is configured and working).
+
+## Deployment Steps
+
+### 1. Clone the Repository
+
+Clone the repository containing the Helm chart to your local machine.
+
+```sh
+git clone git@github.com:prowler-cloud/prowler.git
+cd prowler/contrib/k8s/helm
+```
+
+### 2. Deploy the Helm chart
+
+```
+helm install prowler .
+```
+
+### 3. Verify the deployment
+
+```
+helm status prowler
+kubectl get all -n prowler-ns
+```
+
+### 4. Clean Up
+
+To uninstall the Helm release and clean up the resources, run:
+
+```
+helm uninstall prowler
+kubectl delete namespace prowler-ns
+```
+
+## Values
+
+| Key | Type | Default | Description |
+|-----|------|---------|-------------|
+| clusterRole.name | string | `"prowler-read-cluster"` | |
+| clusterRoleBinding.name | string | `"prowler-read-cluster-binding"` | |
+| configMap.name | string | `"prowler-hostpaths"` | |
+| configMapData.etcCniNetd | string | `"/etc/cni/net.d"` | |
+| configMapData.etcKubernetes | string | `"/etc/kubernetes"` | |
+| configMapData.etcSystemd | string | `"/etc/systemd"` | |
+| configMapData.libSystemd | string | `"/lib/systemd"` | |
+| configMapData.optCniBin | string | `"/opt/cni/bin"` | |
+| configMapData.usrBin | string | `"/usr/bin"` | |
+| configMapData.varLibCni | string | `"/var/lib/cni"` | |
+| configMapData.varLibEtcd | string | `"/var/lib/etcd"` | |
+| configMapData.varLibKubeControllerManager | string | `"/var/lib/kube-controller-manager"` | |
+| configMapData.varLibKubeScheduler | string | `"/var/lib/kube-scheduler"` | |
+| configMapData.varLibKubelet | string | `"/var/lib/kubelet"` | |
+| cronjob.hostPID | bool | `true` | |
+| cronjob.name | string | `"prowler"` | |
+| cronjob.schedule | string | `"0 0 * * *"` | |
+| image.pullPolicy | string | `"Always"` | |
+| image.repository | string | `"toniblyx/prowler"` | |
+| image.tag | string | `"stable"` | |
+| namespace.name | string | `"prowler"` | |
+| serviceAccount.name | string | `"prowler"` | |
+
+----------------------------------------------
+Autogenerated from chart metadata using [helm-docs v1.11.3](https://github.com/norwoodj/helm-docs/releases/v1.11.3)
contrib/k8s/helm/templates/cluster-role.yaml (new file, 11 lines)
@@ -0,0 +1,11 @@
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRole
+metadata:
+  name: {{ .Values.clusterRole.name }}
+rules:
+  - apiGroups: [""]
+    resources: ["pods", "configmaps", "nodes", "namespaces"]
+    verbs: ["get", "list", "watch"]
+  - apiGroups: ["rbac.authorization.k8s.io"]
+    resources: ["clusterrolebindings", "rolebindings", "clusterroles", "roles"]
+    verbs: ["get", "list", "watch"]
contrib/k8s/helm/templates/cm.yaml (new file, 18 lines)
@@ -0,0 +1,18 @@
+apiVersion: v1
+kind: ConfigMap
+metadata:
+  name: {{ .Values.configMap.name }}
+  namespace: {{ .Values.namespace.name }}
+data:
+  varLibCni: "{{ .Values.configMap.data.varLibCni }}"
+  varLibEtcd: "{{ .Values.configMap.data.varLibEtcd }}"
+  varLibKubelet: "{{ .Values.configMap.data.varLibKubelet }}"
+  varLibKubeScheduler: "{{ .Values.configMap.data.varLibKubeScheduler }}"
+  varLibKubeControllerManager: "{{ .Values.configMap.data.varLibKubeControllerManager }}"
+  etcSystemd: "{{ .Values.configMap.data.etcSystemd }}"
+  libSystemd: "{{ .Values.configMap.data.libSystemd }}"
+  etcKubernetes: "{{ .Values.configMap.data.etcKubernetes }}"
+  usrBin: "{{ .Values.configMap.data.usrBin }}"
+  etcCniNetd: "{{ .Values.configMap.data.etcCniNetd }}"
+  optCniBin: "{{ .Values.configMap.data.optCniBin }}"
+  srvKubernetes: "{{ .Values.configMap.data.srvKubernetes }}"
contrib/k8s/helm/templates/job.yaml (new file, 42 lines)
@@ -0,0 +1,42 @@
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: {{ .Values.cronjob.name }}
+  namespace: {{ .Values.namespace.name }}
+spec:
+  schedule: "{{ .Values.cronjob.schedule }}"
+  jobTemplate:
+    spec:
+      template:
+        metadata:
+          labels:
+            app: prowler
+        spec:
+          serviceAccountName: {{ .Values.serviceAccount.name }}
+          containers:
+            - name: prowler
+              image: {{ .Values.image.repository }}:{{ .Values.image.tag }}
+              command: ["prowler"]
+              args: ["kubernetes", "-z", "-b"]
+              imagePullPolicy: {{ .Values.image.pullPolicy }}
+              volumeMounts:
+              {{- range $key, $value := .Values.configMap.data }}
+              {{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
+              {{- else }}
+                - name: {{ $key | lower }}
+                  mountPath: {{ $value }}
+                  readOnly: true
+              {{- end }}
+              {{- end }}
+          hostPID: {{ .Values.cronjob.hostPID }}
+          restartPolicy: Never
+          volumes:
+          {{- range $key, $value := .Values.configMap.data }}
+          {{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
+          {{- else }}
+            - name: {{ $key | lower }}
+              hostPath:
+                path: {{ $value }}
+          {{- end }}
+          {{- end }}
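The `range` loop above mounts one read-only hostPath per entry in `configMap.data`, and the `if` guard skips the `srvKubernetes` mount when `clusterType` is `gke`. To preview what the template actually renders, the chart can be rendered locally (a sketch; run from `contrib/k8s/helm`):

```console
helm template prowler . --set clusterType=gke
```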
contrib/k8s/helm/templates/namespace.yaml (new file, 4 lines)
@@ -0,0 +1,4 @@
+apiVersion: v1
+kind: Namespace
+metadata:
+  name: {{ .Values.namespace.name }}
contrib/k8s/helm/templates/role-binding.yaml (new file, 12 lines)
@@ -0,0 +1,12 @@
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+  name: {{ .Values.clusterRoleBinding.name }}
+roleRef:
+  apiGroup: rbac.authorization.k8s.io
+  kind: ClusterRole
+  name: {{ .Values.clusterRole.name }}
+subjects:
+  - kind: ServiceAccount
+    name: {{ .Values.serviceAccount.name }}
+    namespace: {{ .Values.namespace.name }}
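Once the chart is installed, the binding can be sanity-checked by impersonating the service account; with the default names from `values.yaml` below, both queries should answer `yes`:

```console
kubectl auth can-i list pods --as=system:serviceaccount:prowler-ns:prowler-sa
kubectl auth can-i list clusterroles --as=system:serviceaccount:prowler-ns:prowler-sa
```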
contrib/k8s/helm/templates/sa.yaml (new file, 5 lines)
@@ -0,0 +1,5 @@
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: {{ .Values.serviceAccount.name }}
+  namespace: {{ .Values.namespace.name }}
contrib/k8s/helm/values.yaml (new file, 40 lines)
@@ -0,0 +1,40 @@
+namespace:
+  name: prowler-ns
+
+cronjob:
+  name: prowler
+  schedule: "0 0 * * *"
+  hostPID: true
+
+serviceAccount:
+  name: prowler-sa
+
+image:
+  repository: toniblyx/prowler
+  tag: stable
+  pullPolicy: Always
+
+clusterType:
+
+configMap:
+  name: prowler-config
+  data:
+    varLibCni: "/var/lib/cni"
+    varLibEtcd: "/var/lib/etcd"
+    varLibKubelet: "/var/lib/kubelet"
+    varLibKubeScheduler: "/var/lib/kube-scheduler"
+    varLibKubeControllerManager: "/var/lib/kube-controller-manager"
+    etcSystemd: "/etc/systemd"
+    libSystemd: "/lib/systemd"
+    etcKubernetes: "/etc/kubernetes"
+    usrBin: "/usr/bin"
+    etcCniNetd: "/etc/cni/net.d"
+    optCniBin: "/opt/cni/bin"
+    srvKubernetes: "/srv/kubernetes"
+
+clusterRole:
+  name: prowler-read-cluster
+
+clusterRoleBinding:
+  name: prowler-read-cluster-binding
+  roleName: prowler-read-cluster
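Any of these defaults can be overridden at install time with `--set`; for example, to move the daily scan to 03:00 and mark the cluster as GKE:

```console
helm install prowler . --set cronjob.schedule="0 3 * * *" --set clusterType=gke
```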
(dashboard module; file path not shown in the extract)
@@ -16,12 +16,12 @@ from prowler.lib.banner import print_banner
 warnings.filterwarnings("ignore")

 cli = sys.modules["flask.cli"]
-print_banner(verbose=False)
+print_banner()
 print(
     f"{Fore.GREEN}Loading all CSV files from the folder {folder_path_overview} ...\n{Style.RESET_ALL}"
 )
 cli.show_server_banner = lambda *x: click.echo(
-    f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are a {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} customer and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
+    f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are using {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} with the S3 integration or that integration\nfrom {Fore.CYAN}{Style.BRIGHT}Prowler Open Source{Style.RESET_ALL} and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
 )

 # Initialize the app - incorporate css
(image changed: 28 KiB → 15 KiB)
(dashboard module hunks; file paths not shown in the extract)
@@ -21,7 +21,7 @@ muted_manual_color = "#b33696"
 critical_color = "#951649"
 high_color = "#e11d48"
 medium_color = "#ee6f15"
-low_color = "#f9f5e6"
+low_color = "#fcf45d"
 informational_color = "#3274d9"

 # Folder output path
@@ -945,7 +945,7 @@ def filter_data(
     color_mapping_status = {
         "FAIL": fail_color,
         "PASS": pass_color,
         "INFO": info_color,
         "LOW": info_color,
         "MANUAL": manual_color,
         "WARNING": muted_fail_color,
         "MUTED (FAIL)": muted_fail_color,
@@ -1564,7 +1564,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                         data.get(
                             "FINDING_UID", ""
                         )
                     )
                 ),
+                style={
+                    "margin-left": "5px"
+                },
             ),
         ],
         style={"display": "flex"},
@@ -1644,28 +1647,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                             "STATUS_EXTENDED",
                             "",
                         )
                     )
                 ),
             ],
             style={"display": "flex"},
         ),
-        html.Div(
-            [
-                html.P(
-                    html.Strong(
-                        "Risk: ",
-                        style={
-                            "margin-right": "5px"
-                        },
-                    )
-                ),
-                html.P(
-                    str(
-                        data.get(
-                            "RISK",
-                            "",
-                        )
-                    )
-                ),
-            ],
-            style={"display": "flex"},
-        ),
@@ -1689,7 +1674,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                     )
                 ),
                 html.P(
-                    str(data.get("RISK", ""))
+                    str(data.get("RISK", "")),
+                    style={
+                        "margin-left": "5px"
+                    },
                 ),
             ],
             style={"display": "flex"},
@@ -1744,7 +1732,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                             "REMEDIATION_RECOMMENDATION_TEXT",
                             "",
                         )
                     )
                 ),
+                style={
+                    "margin-left": "5px"
+                },
             ),
         ],
         style={"display": "flex"},
@@ -1772,7 +1763,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                             "",
                         )
                     ),
-                    style={"color": "#3182ce"},
+                    style={
+                        "color": "#3182ce",
+                        "margin-left": "5px",
+                    },
                 ),
             ],
             style={"display": "flex"},
@@ -319,7 +319,7 @@ Each Prowler check has metadata associated which is stored at the same level of
 For the Remediation Code we use the following knowledge base to fill it:

 - Official documentation for the provider
 - https://docs.bridgecrew.io
 - https://docs.prowler.com/checks/checks-index
 - https://www.trendmicro.com/cloudoneconformity
 - https://github.com/cloudmatos/matos/tree/master/remediations
@@ -1,7 +1,11 @@
 # Debugging

 Debugging in Prowler makes things easier!
-If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution. To do that, if you are using VSCode you can run the code using the integrated debugger. Please refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
+If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution.
+
+## VSCode
+
+In VSCode you can run the code using the integrated debugger. Please refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
 The following file is an example of the [debugging configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations) file that you can add to [Visual Studio Code](https://code.visualstudio.com/).

 This file should be inside the *.vscode* folder and its name has to be *launch.json*:
@@ -11,31 +15,62 @@ This file should be inside the *.vscode* folder and its name has to be *launch.json
     "version": "0.2.0",
     "configurations": [
         {
-            "name": "Python: Current File",
-            "type": "python",
+            "name": "Debug AWS Check",
+            "type": "debugpy",
             "request": "launch",
             "program": "prowler.py",
+            "args": [
+                "aws",
+                "-f",
+                "eu-west-1",
+                "--service",
+                "cloudwatch",
+                "--log-level",
+                "ERROR",
+                "-p",
+                "dev",
+                "-c",
+                "<check_name>"
+            ],
             "console": "integratedTerminal",
             "justMyCode": false
         },
         {
-            "name": "Python: Debug Tests",
-            "type": "python",
+            "name": "Debug Azure Check",
+            "type": "debugpy",
             "request": "launch",
-            "program": "${file}",
-            "purpose": [
-                "debug-test"
-            ],
+            "program": "prowler.py",
+            "args": [
+                "azure",
+                "--sp-env-auth",
+                "--log-level",
+                "ERROR",
+                "-c",
+                "<check_name>"
+            ],
             "console": "integratedTerminal",
             "justMyCode": false
         },
+        {
+            "name": "Debug GCP Check",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "prowler.py",
+            "args": [
+                "gcp",
+                "--log-level",
+                "ERROR",
+                "-c",
+                "<check_name>"
+            ],
+            "console": "integratedTerminal",
+            "justMyCode": false
+        },
+        {
+            "name": "Debug K8s Check",
+            "type": "debugpy",
+            "request": "launch",
+            "program": "prowler.py",
+            "args": [
+                "kubernetes",
+                "--log-level",
+                "ERROR",
+                "-c",
+                "<check_name>"
+            ],
+            "console": "integratedTerminal",
+            "justMyCode": false
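For reference, the "Debug AWS Check" configuration above is equivalent to running the following from the repository root (the profile `dev` and `<check_name>` are placeholders):

```console
python prowler.py aws -f eu-west-1 --service cloudwatch --log-level ERROR -p dev -c <check_name>
```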
@@ -4,10 +4,14 @@ You can extend Prowler Open Source in many different ways, in most cases you wil...

 ## Get the code and install all dependencies

-First of all, you need a version of Python 3.9 or higher and also pip installed to be able to install all dependencies required. Once that is satisfied go ahead and clone the repo:
+First of all, you need a version of Python 3.9 or higher and also `pip` installed to be able to install all dependencies required.
+
+Then, to start working with the Prowler GitHub repository you need to fork it to be able to propose changes for new features, bug fixes, etc. To fork the Prowler repo please refer to [this guide](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo?tool=webui#forking-a-repository).
+
+Once that is satisfied, go ahead and clone your forked repo:

 ```
-git clone https://github.com/prowler-cloud/prowler
+git clone https://github.com/<your-github-user>/prowler
 cd prowler
 ```
 For isolation and to avoid conflicts with other environments, we recommend usage of `poetry`:
@@ -44,7 +48,10 @@ Before we merge any of your pull requests we pass checks to the code, we use the...

 You can see all dependencies in file `pyproject.toml`.

-Moreover, you would need to install [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
+Moreover, you would need to install the latest version of [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
+
+???+ note
+    If you have any trouble when committing to the Prowler repository, add the `--no-verify` flag to the `git commit` command.

 ## Pull Request Checklist
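The same secrets scan can be run locally before opening a PR; a sketch with TruffleHog v3-style flags (verify them against your installed version):

```console
trufflehog git file://. --since-commit HEAD --only-verified --fail
```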
@@ -23,8 +23,8 @@ The Prowler's service structure is the following and the way to initialise it is...
 All the Prowler provider's services inherit from a base class depending on the provider used.

 - [AWS Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/service/service.py)
-- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
-- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
+- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
+- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
 - [Kubernetes Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/kubernetes/lib/service/service.py)

 Each class is used to initialize the credentials and the API clients to be used in the service. If some threading is used it must be coded there.
docs/favicon.ico (binary; 15 KiB → 15 KiB)
@@ -40,10 +40,10 @@ If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to...

 Prowler for Azure supports the following authentication types:

-- Service principal authentication by environment variables (Enterprise Application)
+- [Service principal application](https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals?tabs=browser#service-principal-object) authentication by environment variables (recommended)
 - Current az cli credentials stored
 - Interactive browser authentication
-- Managed identity authentication
+- [Managed identity](https://learn.microsoft.com/en-us/entra/identity/managed-identities-azure-resources/overview) authentication

 ### Service Principal authentication

@@ -56,6 +56,8 @@
 export AZURE_CLIENT_SECRET="XXXXXXX"
 ```

 If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
+Follow the instructions in the [Create Prowler Service Principal](../tutorials/azure/create-prowler-service-principal.md) section to create a service principal.

 ### AZ CLI / Browser / Managed Identity authentication

 The other three cases do not need additional configuration; `--az-cli-auth` and `--managed-identity-auth` are automated options. To use `--browser-auth` the user needs to authenticate against Azure using the default browser to start the scan, and `tenant-id` is also required.

@@ -64,55 +66,22 @@

 To use each one you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:

-- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler (not mandatory to have access to execute the tool).
-- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.
-
-#### Microsoft Entra ID scope
-
-Microsoft Entra ID (formerly AAD) permissions required by the tool are the following:
-
-- `Directory.Read.All`
-- `Policy.Read.All`
-- `UserAuthenticationMethod.Read.All`
-
-The best way to assign them is through the Azure web console:
-
-1. Access Microsoft Entra ID.
-2. In the left menu bar, go to "App registrations".
-3. Once there, in the menu bar click on "+ New registration" to register a new application.
-4. Fill in the "Name", select the "Supported account types" and click on "Register". You will be redirected to the applications page.
-5. Select the new application.
-6. In the left menu bar, select "API permissions".
-7. Then click on "+ Add a permission" and select "Microsoft Graph".
-8. Once in the "Microsoft Graph" view, select "Application permissions".
-9. Finally, search for "Directory", "Policy" and "UserAuthenticationMethod" and select the permissions above.
+- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler and for specific Entra checks (not mandatory to have access to execute the tool). The permissions required by the tool are the following:
+    - `Directory.Read.All`
+    - `Policy.Read.All`
+    - `UserAuthenticationMethod.Read.All`
+- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool. It is required to add the following RBAC built-in roles per subscription to the entity that is going to be assumed by the tool:
+    - `Reader`
+    - `ProwlerRole` (custom role defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json))
+
+To assign the permissions, follow the instructions in the [Microsoft Entra ID permissions](../tutorials/azure/create-prowler-service-principal.md#assigning-the-proper-permissions) section and the [Azure subscriptions permissions](../tutorials/azure/subscriptions.md#assigning-proper-permissions) section, respectively.

-#### Subscriptions scope
+#### Checks that require ProwlerRole

-Regarding the subscription scope, Prowler by default scans all the subscriptions that it is able to list, so it is required to add the following RBAC built-in roles per subscription to the entity that is going to be assumed by the tool:
+The following checks require the `ProwlerRole` custom role to be executed; if you want to run them, make sure you have assigned the role to the identity that is going to be assumed by Prowler:

-- `Security Reader`
-- `Reader`
-
-To assign these roles, follow the instructions:
-
-1. Access your subscription, then select your subscription.
-2. Select "Access control (IAM)".
-3. In the overview, select "Roles".
-4. Click on "+ Add" and select "Add role assignment".
-5. In the search bar, type `Security Reader`, select it and click on "Next".
-6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
-7. Click on "Review + assign" to apply the new role.
-
-*Repeat these steps for the `Reader` role*
+- `app_function_access_keys_configured`
+- `app_function_ftps_deployment_disabled`

 ## Google Cloud
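For reference, a complete `--sp-env-auth` run exports the three service-principal variables first (values are placeholders; `AZURE_CLIENT_ID` and `AZURE_TENANT_ID` are the standard companions to the client secret shown above):

```console
export AZURE_CLIENT_ID="XXXXXXXXX"
export AZURE_TENANT_ID="XXXXXXXXX"
export AZURE_CLIENT_SECRET="XXXXXXX"
prowler azure --sp-env-auth
```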
docs/img/add-reader-role.gif (new binary file, 1.4 MiB)
docs/img/add-sub-to-management-group.gif (new binary file, 357 KiB)
docs/img/create-management-group.gif (new binary file, 688 KiB)
(image changed: 214 KiB → 746 KiB)
(image deleted: 348 KiB)
docs/img/prowler-cli-quick.gif (new binary file, 552 KiB)
@@ -212,10 +212,10 @@ prowler <provider>

 If you miss the former output you can use `--verbose`, but Prowler v4 is smoking fast, so you won't see much ;)

-By default, Prowler will generate CSV, JSON and HTML reports; however, you can generate a JSON-ASFF (used by AWS Security Hub) report with `-M` or `--output-modes`:
+By default, Prowler generates CSV, JSON-OCSF and HTML reports. However, you can generate a JSON-ASFF report (used by AWS Security Hub) with `-M` or `--output-modes`:

 ```console
-prowler <provider> -M csv json json-asff html
+prowler <provider> -M csv json-asff json-ocsf html
 ```
 The HTML report will be located in the output directory, as the other files, and it will look like:
@@ -85,7 +85,7 @@ prowler --security-hub --region eu-west-1
 ```

 ???+ note
-    It is recommended to send only fails to Security Hub and that is possible adding `-q/--quiet` to the command. You can use, instead of the `-q/--quiet` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just send FAIL findings to AWS Security Hub.
+    It is recommended to send only fails to Security Hub and that is possible adding `--status FAIL` to the command. You can use, instead of the `--status FAIL` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just send FAIL findings to AWS Security Hub.

 Since Prowler performs checks in all regions by default, you may need to filter by region when running the Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f/--region <region>` (if no region is given it will try to push findings to all regions' hubs). Prowler will send findings to the Security Hub in the region where the scanned resource is located.

@@ -121,13 +121,13 @@ prowler --security-hub --role arn:aws:iam::123456789012:role/ProwlerExecutionRol...

 ## Send only failed findings to Security Hub

-When using the **AWS Security Hub** integration you can send only the `FAIL` findings generated by **Prowler**, so the **AWS Security Hub** usage costs would eventually be lower. To follow that recommendation you could add the `-q/--quiet` flag to the Prowler command:
+When using the **AWS Security Hub** integration you can send only the `FAIL` findings generated by **Prowler**, so the **AWS Security Hub** usage costs would eventually be lower. To follow that recommendation you could add the `--status FAIL` flag to the Prowler command:

 ```sh
-prowler --security-hub --quiet
+prowler --security-hub --status FAIL
 ```

-You can use, instead of the `-q/--quiet` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just send FAIL findings to AWS Security Hub:
+You can use, instead of the `--status FAIL` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just send FAIL findings to AWS Security Hub:

 ```sh
 prowler --security-hub --send-sh-only-fails
 ```
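Putting the steps above together, a minimal single-region run might look like this (the region is only an example):

```console
aws securityhub enable-security-hub --region eu-west-1
prowler aws -f eu-west-1 --security-hub --status FAIL
```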
docs/tutorials/azure/create-prowler-service-principal.md (new file, 34 lines)
@@ -0,0 +1,34 @@
+# How to create the Prowler Service Principal
+
+To allow Prowler to assume an identity and start the scan with the required privileges, it is necessary to create a Service Principal. To create one, follow these steps:
+
+1. Access Microsoft Entra ID.
+2. In the left menu bar, go to "App registrations".
+3. Once there, in the menu bar click on "+ New registration" to register a new application.
+4. Fill in the "Name", select the "Supported account types" and click on "Register". You will be redirected to the application page.
+5. Once on the application page, in the left menu bar, select "Certificates & secrets".
+6. In the "Certificates & secrets" view, click on "+ New client secret".
+7. Fill in the "Description" and "Expires" fields and click on "Add".
+8. Copy the value of the secret; it is going to be used as the `AZURE_CLIENT_SECRET` environment variable.
+
+## Assigning the proper permissions
+
+To allow Prowler to retrieve metadata from the assumed identity and run specific Entra checks, the following permissions need to be assigned:
+
+1. Access Microsoft Entra ID.
+2. In the left menu bar, go to "App registrations".
+3. Once there, select the application that you have created.
+4. In the left menu bar, select "API permissions".
+5. Then click on "+ Add a permission" and select "Microsoft Graph".
+6. Once in the "Microsoft Graph" view, select "Application permissions".
+7. Finally, search for "Directory", "Policy" and "UserAuthenticationMethod" and select the following permissions:
+    - `Directory.Read.All`
+    - `Policy.Read.All`
+    - `UserAuthenticationMethod.Read.All`
+8. Click on "Add permissions" to apply the new permissions.
+9. Finally, click on "Grant admin consent for [your tenant]" to apply the permissions.
@@ -1,6 +1,6 @@
|
||||
# Azure subscriptions scope
|
||||
|
||||
By default, Prowler is multisubscription, which means that is going to scan all the subscriptions is able to list. If you only assign permissions to one subscription, it is going to scan a single one.
|
||||
By default, Prowler is multisubscription, which means that is going to scan all the subscriptions is able to list. If you only assign permissions to one subscription, it is going to scan a single one.
|
||||
Prowler also has the ability to limit the subscriptions to scan to a set passed as input argument, to do so:
|
||||
|
||||
```console
|
||||
@@ -8,3 +8,36 @@ prowler azure --az-cli-auth --subscription-ids <subscription ID 1> <subscription
|
||||
```
|
||||
|
||||
Where you can pass from 1 up to N subscriptions to be scanned.
|
||||
|
||||
## Assigning proper permissions
|
||||
|
||||
Regarding the subscription scope, Prowler by default scans all subscriptions that it is able to list, so it is necessary to add the `Reader` RBAC built-in roles per subscription or management group (recommended for multiple subscriptions, see it in the [next section](#recommendation-for-multiple-subscriptions)) to the entity that will be adopted by the tool:
|
||||
|
||||
To assign this roles, follow the instructions:
|
||||
|
||||
1. Access your subscription, then select your subscription.
|
||||
2. Select "Access control (IAM)".
|
||||
3. In the overview, select "Roles".
|
||||
4. Click on "+ Add" and select "Add role assignment".
|
||||
5. In the search bar, type `Reader`, select it and click on "Next".
|
||||
6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
|
||||
7. Click on "Review + assign" to apply the new role.
|
||||
|
||||


Moreover, some checks need additional read-only permissions that are not covered by the built-in roles; for these checks we use a custom role. This role is defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json). Once the custom role is created, repeat the steps mentioned above to assign the new `ProwlerRole` to an identity.
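
As an illustrative alternative to the portal, the custom role can also be created from that JSON definition with the Azure CLI:

```console
az role definition create --role-definition @prowler-azure-custom-role.json
```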

## Recommendation for multiple subscriptions

When scanning multiple subscriptions, it can be tedious to create and assign roles for each one. For this reason, in Prowler we recommend the use of *[management groups](https://learn.microsoft.com/en-us/azure/governance/management-groups/overview)* to group all the subscriptions that are going to be audited by Prowler.

To do this properly, you have to [create a new management group](https://learn.microsoft.com/en-us/azure/governance/management-groups/create-management-group-portal) and add all the roles in the same way that has been done for the subscription scope.

![Create management group](../img/create-management-group.gif)

Once the management group is properly set you can add all the subscriptions that you want to audit.

![Add subscription to management group](../img/add-subscription-to-management-group.gif)

???+ note
    By default, `prowler` will scan all subscriptions in the Azure tenant; use the flag `--subscription-ids` to specify the subscriptions to be scanned.

@@ -29,17 +29,23 @@ The following list includes all the AWS checks with configurable variables that
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
| `drs_job_exist` | `mute_non_default_regions` | Boolean |
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
| `securityhub_enabled` | `mute_non_default_regions` | Boolean |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_instance_owners` | List of Strings |
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |
| `eks_control_plane_logging_all_types_enabled` | `eks_required_log_types` | List of Strings |

## Azure

### Configurable Checks
@@ -78,10 +84,20 @@ The following list includes all the Azure checks with configurable variables tha

```yaml title="config.yaml"
# AWS Configuration
aws:

  # AWS Global Configuration
  # aws.mute_non_default_regions --> Set to True to mute failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
  mute_non_default_regions: False
  # If you want to mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w mutelist.yaml`:
  # Mutelist:
  #   Accounts:
  #     "*":
  #       Checks:
  #         "*":
  #           Regions:
  #             - "ap-southeast-1"
  #             - "ap-southeast-2"
  #           Resources:
  #             - "*"

  # AWS IAM Configuration
  # aws.iam_user_accesskey_unused --> CIS recommends 45 days
@@ -91,11 +107,24 @@ aws:

  # AWS EC2 Configuration
  # aws.ec2_elastic_ip_shodan
  # TODO: create common config
  shodan_api_key: null
  # aws.ec2_securitygroup_with_many_ingress_egress_rules --> by default is 50 rules
  max_security_group_rules: 50
  # aws.ec2_instance_older_than_specific_days --> by default is 6 months (180 days)
  max_ec2_instance_age_in_days: 180
  # aws.ec2_securitygroup_allow_ingress_from_internet_to_any_port
  # allowed network interface types for security groups open to the Internet
  ec2_allowed_interface_types:
    [
      "api_gateway_managed",
      "vpc_endpoint",
    ]
  # allowed network interface owners for security groups open to the Internet
  ec2_allowed_instance_owners:
    [
      "amazon-elb"
    ]

  # AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
  # Single account environment: No action required. The AWS account number will be automatically added by the checks.
@@ -119,205 +148,234 @@ aws:
  # aws.awslambda_function_using_supported_runtimes
  obsolete_lambda_runtimes:
    [
      "java8",
      "go1.x",
      "provided",
      "python3.6",
      "python2.7",
      "python3.7",
      "nodejs4.3",
      "nodejs4.3-edge",
      "nodejs6.10",
      "nodejs",
      "nodejs8.10",
      "nodejs10.x",
      "nodejs12.x",
      "nodejs14.x",
      "dotnet5.0",
      "dotnetcore1.0",
      "dotnetcore2.0",
      "dotnetcore2.1",
      "dotnetcore3.1",
      "ruby2.5",
      "ruby2.7",
    ]

  # AWS Organizations
  # aws.organizations_scp_check_deny_regions
  # aws.organizations_enabled_regions: [
  #   "eu-central-1",
  #   "eu-west-1",
  #   "us-east-1"
  # ]
  organizations_enabled_regions: []
  organizations_trusted_delegated_administrators: []

  # AWS ECR
  # aws.ecr_repositories_scan_vulnerabilities_in_latest_image
  # CRITICAL
  # HIGH
  # MEDIUM
  ecr_repository_vulnerability_minimum_severity: "MEDIUM"

  # AWS Trusted Advisor
  # aws.trustedadvisor_premium_support_plan_subscribed
  verify_premium_support_plans: True

  # AWS CloudTrail Configuration
  # aws.cloudtrail_threat_detection_privilege_escalation
  threat_detection_privilege_escalation_threshold: 0.1 # Percentage of actions found to decide if it is a privilege_escalation attack event, by default is 0.1 (10%)
  threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
  threat_detection_privilege_escalation_actions:
    [
      "AddPermission",
      "AddRoleToInstanceProfile",
      "AddUserToGroup",
      "AssociateAccessPolicy",
      "AssumeRole",
      "AttachGroupPolicy",
      "AttachRolePolicy",
      "AttachUserPolicy",
      "ChangePassword",
      "CreateAccessEntry",
      "CreateAccessKey",
      "CreateDevEndpoint",
      "CreateEventSourceMapping",
      "CreateFunction",
      "CreateGroup",
      "CreateJob",
      "CreateKeyPair",
      "CreateLoginProfile",
      "CreatePipeline",
      "CreatePolicyVersion",
      "CreateRole",
      "CreateStack",
      "DeleteRolePermissionsBoundary",
      "DeleteRolePolicy",
      "DeleteUserPermissionsBoundary",
      "DeleteUserPolicy",
      "DetachRolePolicy",
      "DetachUserPolicy",
      "GetCredentialsForIdentity",
      "GetId",
      "GetPolicyVersion",
      "GetUserPolicy",
      "Invoke",
      "ModifyInstanceAttribute",
      "PassRole",
      "PutGroupPolicy",
      "PutPipelineDefinition",
      "PutRolePermissionsBoundary",
      "PutRolePolicy",
      "PutUserPermissionsBoundary",
      "PutUserPolicy",
      "ReplaceIamInstanceProfileAssociation",
      "RunInstances",
      "SetDefaultPolicyVersion",
      "UpdateAccessKey",
      "UpdateAssumeRolePolicy",
      "UpdateDevEndpoint",
      "UpdateEventSourceMapping",
      "UpdateFunctionCode",
      "UpdateJob",
      "UpdateLoginProfile",
    ]
  # aws.cloudtrail_threat_detection_enumeration
  threat_detection_enumeration_threshold: 0.1 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
  threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
  threat_detection_enumeration_actions:
    [
      "DescribeAccessEntry",
      "DescribeAccountAttributes",
      "DescribeAvailabilityZones",
      "DescribeBundleTasks",
      "DescribeCarrierGateways",
      "DescribeClientVpnRoutes",
      "DescribeCluster",
      "DescribeDhcpOptions",
      "DescribeFlowLogs",
      "DescribeImages",
      "DescribeInstanceAttribute",
      "DescribeInstanceInformation",
      "DescribeInstanceTypes",
      "DescribeInstances",
      "DescribeKeyPairs",
      "DescribeLogGroups",
      "DescribeLogStreams",
      "DescribeOrganization",
      "DescribeRegions",
      "DescribeSecurityGroups",
      "DescribeSnapshotAttribute",
      "DescribeSnapshotTierStatus",
      "DescribeSubscriptionFilters",
      "DescribeTransitGatewayMulticastDomains",
      "DescribeVolumes",
      "DescribeVolumesModifications",
      "DescribeVpcEndpointConnectionNotifications",
      "DescribeVpcs",
      "GetAccount",
      "GetAccountAuthorizationDetails",
      "GetAccountSendingEnabled",
      "GetBucketAcl",
      "GetBucketLogging",
      "GetBucketPolicy",
      "GetBucketReplication",
      "GetBucketVersioning",
      "GetCallerIdentity",
      "GetCertificate",
      "GetConsoleScreenshot",
      "GetCostAndUsage",
      "GetDetector",
      "GetEbsDefaultKmsKeyId",
      "GetEbsEncryptionByDefault",
      "GetFindings",
      "GetFlowLogsIntegrationTemplate",
      "GetIdentityVerificationAttributes",
      "GetInstances",
      "GetIntrospectionSchema",
      "GetLaunchTemplateData",
      "GetLogRecord",
      "GetParameters",
      "GetPolicyVersion",
      "GetPublicAccessBlock",
      "GetQueryResults",
      "GetRegions",
      "GetSMSAttributes",
      "GetSMSSandboxAccountStatus",
      "GetSendQuota",
      "GetTransitGatewayRouteTableAssociations",
      "GetUserPolicy",
      "HeadObject",
      "ListAccessKeys",
      "ListAccounts",
      "ListAllMyBuckets",
      "ListAssociatedAccessPolicies",
      "ListAttachedUserPolicies",
      "ListClusters",
      "ListDetectors",
      "ListDomains",
      "ListFindings",
      "ListHostedZones",
      "ListIPSets",
      "ListIdentities",
      "ListInstanceProfiles",
      "ListObjects",
      "ListOrganizationalUnitsForParent",
      "ListOriginationNumbers",
      "ListPolicyVersions",
      "ListRoles",
      "ListRules",
      "ListServiceQuotas",
      "ListSubscriptions",
      "ListTargetsByRule",
      "ListTopics",
      "ListUsers",
      "LookupEvents",
      "Search",
    ]

  # AWS RDS Configuration
  # aws.rds_instance_backup_enabled
  # Whether to check RDS instance replicas or not
  check_rds_instance_replicas: False

  # AWS ACM Configuration
  # aws.acm_certificates_expiration_check
  days_to_expire_threshold: 7

  # AWS EKS Configuration
  # aws.eks_control_plane_logging_all_types_enabled
  # EKS control plane logging types that must be enabled
  eks_required_log_types:
    [
      "api",
      "audit",
      "authenticator",
      "controllerManager",
      "scheduler",
    ]

# Azure Configuration
azure:
  # Azure Network Configuration
  # azure.network_public_ip_shodan
  # TODO: create common config
  shodan_api_key: null

  # Azure App Configuration
  # Azure App Service
  # azure.app_ensure_php_version_is_latest
  php_latest_version: "8.2"
  # azure.app_ensure_python_version_is_latest
@@ -331,4 +389,34 @@ gcp:
  # gcp.compute_public_address_shodan
  shodan_api_key: null

# Kubernetes Configuration
kubernetes:
  # Kubernetes API Server
  # kubernetes.apiserver_audit_log_maxbackup_set
  audit_log_maxbackup: 10
  # kubernetes.apiserver_audit_log_maxsize_set
  audit_log_maxsize: 100
  # kubernetes.apiserver_audit_log_maxage_set
  audit_log_maxage: 30
  # kubernetes.apiserver_strong_ciphers_only
  apiserver_strong_ciphers:
    [
      "TLS_AES_128_GCM_SHA256",
      "TLS_AES_256_GCM_SHA384",
      "TLS_CHACHA20_POLY1305_SHA256",
    ]
  # Kubelet
  # kubernetes.kubelet_strong_ciphers_only
  kubelet_strong_ciphers:
    [
      "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
      "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
      "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305",
      "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
      "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305",
      "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
      "TLS_RSA_WITH_AES_256_GCM_SHA384",
      "TLS_RSA_WITH_AES_128_GCM_SHA256",
    ]

```
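
Any of the values above can be overridden without editing the bundled file: copy the configuration, adjust the keys you need and point Prowler at your copy with the `--config-file` flag. For example, with a hypothetical `custom_config.yaml`:

```console
prowler aws --config-file ./custom_config.yaml
```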

@@ -54,7 +54,7 @@ CustomChecksMetadata:
      RelatedUrl: https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html
      Remediation:
        Code:
          CLI: aws s3api put-bucket-versioning --bucket <bucket-name> --versioning-configuration Status=Enabled,MFADelete=Enabled
          NativeIaC: https://aws.amazon.com/es/s3/features/versioning/
          Other: https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html
          Terraform: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning

@@ -81,7 +81,7 @@ def get_table(data):

## S3 Integration

If you are using the S3 integration, either from Prowler SaaS or from Prowler Open Source, and you want to use the data from your S3 bucket, you can run:

```sh
aws s3 cp s3://<your-bucket>/output/csv ./output --recursive
```

@@ -25,7 +25,17 @@ Prowler will follow the same credentials search as [Google authentication librar

Those credentials must be associated with a user or service account that has the proper permissions to run all the checks. To make sure, add the `Viewer` role to the member associated with the credentials.
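
For instance, granting that role to a service account can be sketched with the gcloud CLI (the project ID and service account email are placeholders):

```console
gcloud projects add-iam-policy-binding <project-id> \
    --member="serviceAccount:<service-account-email>" \
    --role="roles/viewer"
```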

## Impersonate Service Account

If you want to impersonate a GCP service account, you can use the `--impersonate-service-account` argument:

```console
prowler gcp --impersonate-service-account <service-account-email>
```

This argument will use the default credentials to impersonate the service account provided.

## Service APIs

Prowler will use the Google Cloud APIs to get the information needed to perform the checks. Make sure that the following APIs are enabled in the project:
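
As an illustrative example, each API can be enabled per project with the gcloud CLI; the service below is just one of the APIs Prowler relies on, such as Compute Engine:

```console
gcloud services enable compute.googleapis.com --project <project-id>
```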

BIN
docs/tutorials/img/create-sp.gif
Normal file

After Width: | Height: | Size: 4.5 MiB

Before Width: | Height: | Size: 24 KiB After Width: | Height: | Size: 26 KiB

20
docs/tutorials/kubernetes/in-cluster.md
Normal file
@@ -0,0 +1,20 @@
# In-Cluster Execution

For in-cluster execution, you can use the supplied YAML files inside `/kubernetes`:

* [job.yaml](https://github.com/prowler-cloud/prowler/blob/master/kubernetes/job.yaml)
* [prowler-role.yaml](https://github.com/prowler-cloud/prowler/blob/master/kubernetes/prowler-role.yaml)
* [prowler-rolebinding.yaml](https://github.com/prowler-cloud/prowler/blob/master/kubernetes/prowler-rolebinding.yaml)

They can be used to run Prowler as a job within a new Prowler namespace:

```console
kubectl apply -f kubernetes/job.yaml
kubectl apply -f kubernetes/prowler-role.yaml
kubectl apply -f kubernetes/prowler-rolebinding.yaml
kubectl get pods --namespace prowler-ns --> prowler-XXXXX
kubectl logs prowler-XXXXX --namespace prowler-ns
```

???+ note
    By default, `prowler` will scan all namespaces in your active Kubernetes context. Use the [`--namespace`](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/kubernetes/namespace/) flag to specify the namespace(s) to be scanned.
23
docs/tutorials/kubernetes/misc.md
Normal file
@@ -0,0 +1,23 @@

# Miscellaneous

## Context Filtering

Prowler will scan the active Kubernetes context by default.

To specify the Kubernetes context to be scanned, use the `--context` flag followed by the desired context name. For example:

```console
prowler --context my-context
```

This will ensure that Prowler scans the specified context/cluster for vulnerabilities and misconfigurations.

## Namespace Filtering

By default, `prowler` will scan all namespaces in the context you specify.

To specify the namespace(s) to be scanned, use the `--namespace` flag followed by the desired namespace(s) separated by spaces. For example:

```console
prowler --namespace namespace1 namespace2
```
15
docs/tutorials/kubernetes/outside-cluster.md
Normal file
@@ -0,0 +1,15 @@

# Non in-cluster execution

For non in-cluster execution, you can provide the location of the [kubeconfig](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/) file with the following argument:

```console
prowler kubernetes --kubeconfig-file /path/to/kubeconfig
```
???+ note
    If no `--kubeconfig-file` is provided, Prowler will use the default KubeConfig file location (`~/.kube/config`).

???+ note
    `prowler` will scan the active Kubernetes context by default. Use the [`--context`](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/kubernetes/context/) flag to specify the context to be scanned.

???+ note
    By default, `prowler` will scan all namespaces in your active Kubernetes context. Use the [`--namespace`](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/kubernetes/namespace/) flag to specify the namespace(s) to be scanned.
@@ -10,7 +10,7 @@ Execute Prowler in verbose mode (like in Version 2):
prowler <provider> --verbose
```
## Filter findings by status
Prowler can filter the findings by their status, so that both the CLI and the reports only show the findings with a specific status:
```console
prowler <provider> --status [PASS, FAIL, MANUAL]
```

@@ -7,97 +7,121 @@ Mutelist option works along with other options and will modify the output in the
- CSV: `muted` is `True`. The field `status` will keep the original status, `MANUAL`, `PASS` or `FAIL`, of the finding.

## How the Mutelist Works

The Mutelist uses an "ANDed" and "ORed" logic to determine which resources, checks, regions, and tags should be muted. For each check, the Mutelist checks whether the account, region, and resource match the specified criteria, using an "ANDed" logic. If tags are specified, the Mutelist uses an "ORed" logic to see if at least one tag is present in the resource.

If any of the criteria do not match, the check is not muted.

## Mutelist Specification

???+ note
    - For the Azure provider, the Account ID is the Subscription Name and the Region is the Location.
    - For the GCP provider, the Account ID is the Project ID and the Region is the Zone.
    - For the Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.

The Mutelist file uses the [YAML](https://en.wikipedia.org/wiki/YAML) format with the following syntax:

```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
  Accounts:
    "123456789012":
      Checks:
        "iam_user_hardware_mfa_enabled":
          Regions:
            - "us-east-1"
          Resources:
            - "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
            - "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
        "ec2_*":
          Regions:
            - "*"
          Resources:
            - "*" # Will ignore every EC2 check in every account and region
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
            - "project=test|project=stage" # either of ('project=test' OR 'project=stage') in account 123456789012 and every region

    "*":
      Checks:
        "s3_bucket_object_versioning":
          Regions:
            - "eu-west-1"
            - "us-east-1"
          Resources:
            - "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
            - "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
            - ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
        "ecs_task_definitions_no_environment_secrets":
          Regions:
            - "*"
          Resources:
            - "*"
          Exceptions:
            Accounts:
              - "0123456789012"
            Regions:
              - "eu-west-1"
              - "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Tags:
            - "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region

    "123456789012":
      Checks:
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Exceptions:
            Resources:
              - "test"
            Tags:
              - "environment=prod" # Will ignore every resource in account 123456789012 except the ones containing the string "test" and the tag environment=prod
```

### Account, Check, Region, Resource, and Tag

| Field | Description | Logic |
|----------|----------|----------|
| `<account_id>` | Use `*` to apply the mutelist to all accounts. | `ANDed` |
| `<check_name>` | The name of the Prowler check. Use `*` to apply the mutelist to all checks. | `ANDed` |
| `<region>` | The region identifier. Use `*` to apply the mutelist to all regions. | `ANDed` |
| `<resource>` | The resource identifier. Use `*` to apply the mutelist to all resources. | `ANDed` |
| `<tag>` | The tag value. | `ORed` |

## How to Use the Mutelist

To use the Mutelist, you need to specify the path to the Mutelist YAML file using the `-w` or `--mutelist-file` option when running Prowler:

```
prowler <provider> -w mutelist.yaml
```

Replace `<provider>` with the appropriate provider name.

## Considerations

- The Mutelist can be used in combination with other Prowler options, such as the `--service` or `--checks` option, to further customize the scanning process.
- Make sure to review and update the Mutelist regularly to ensure it reflects the desired exclusions and remains up to date with your infrastructure.

## AWS Mutelist
### Mute specific AWS regions

@@ -125,7 +125,7 @@ The JSON-OCSF output format implements the [Detection Finding](https://schema.oc
    "product": {
      "name": "Prowler",
      "vendor_name": "Prowler",
      "version": "4.2.4"
    },
    "version": "1.1.0"
  },

@@ -333,7 +333,7 @@ The following is the mapping between the native JSON and the Detection Finding f
| --- | --- |
| AssessmentStartTime | event_time |
| FindingUniqueId | finding_info.uid |
| Provider | cloud.provider |
| CheckID | metadata.event_code |
| CheckTitle | finding_info.title |
| CheckType | unmapped.check_type |

@@ -11,6 +11,12 @@ prowler <provider> --scan-unused-services

## Services that are ignored
### AWS
#### ACM
You can have certificates in ACM that are not in use by any AWS resource.
Prowler will check if every certificate is going to expire soon; if a certificate is not in use, by default it is not going to be checked, whether it is expired, about to expire, or still valid.

- `acm_certificates_expiration_check`

#### Athena
When you create an AWS Account, Athena will create a default primary workgroup for you.
Prowler will check if that workgroup is enabled and if it is being used by checking if there were queries in the last 45 days.
@@ -30,9 +36,10 @@ If EBS default encyption is not enabled, sensitive information at rest is not pr

- `ec2_ebs_default_encryption`

If your security groups are not properly configured, the attack surface is increased. Nonetheless, Prowler will detect the security groups that are in use (attached) so as to only report on those. This logic applies to the 15 checks related to open ports in security groups and the check for the default security group.

- `ec2_securitygroup_allow_ingress_from_internet_to_port_X` (15 checks)
- `ec2_securitygroup_default_restrict_traffic`

Prowler will also check for used Network ACLs to only alert on those with open ports that are being used.

@@ -69,3 +76,15 @@ You should enable Public Access Block at the account level to prevent the exposu
VPC Flow Logs provide visibility into network traffic that traverses the VPC and can be used to detect anomalous traffic or give insight during security workflows. Nevertheless, Prowler will only check if the Flow Logs are enabled for those VPCs that are in use, in other words, only the VPCs where you have ENIs (network interfaces).

- `vpc_flow_logs_enabled`

VPC subnets must not have public IP addresses by default to prevent the exposure of your resources to the internet. Prowler will only check this configuration for those VPCs that are in use, in other words, only the VPCs where you have ENIs (network interfaces).

- `vpc_subnet_no_public_ip_by_default`

VPCs should have separate private and public subnets to prevent the exposure of your resources to the internet. Prowler will only check this configuration for those VPCs that are in use, in other words, only the VPCs where you have ENIs (network interfaces).

- `vpc_subnet_separate_private_public`

VPCs should have subnets in different availability zones to prevent a single point of failure. Prowler will only check this configuration for those VPCs that are in use, in other words, only the VPCs where you have ENIs (network interfaces).

- `vpc_subnet_different_az`

@@ -83,9 +83,14 @@ nav:
      - Authentication: tutorials/azure/authentication.md
      - Non default clouds: tutorials/azure/use-non-default-cloud.md
      - Subscriptions: tutorials/azure/subscriptions.md
      - Create Prowler Service Principal: tutorials/azure/create-prowler-service-principal.md
    - Google Cloud:
      - Authentication: tutorials/gcp/authentication.md
      - Projects: tutorials/gcp/projects.md
    - Kubernetes:
      - In-Cluster Execution: tutorials/kubernetes/in-cluster.md
      - Non In-Cluster Execution: tutorials/kubernetes/outside-cluster.md
      - Miscellaneous: tutorials/kubernetes/misc.md
  - Developer Guide:
    - Introduction: developer-guide/introduction.md
    - Provider: developer-guide/provider.md

@@ -58,20 +58,29 @@ Resources:
                  - 'account:Get*'
                  - 'appstream:Describe*'
                  - 'appstream:List*'
                  - 'backup:List*'
                  - 'cloudtrail:GetInsightSelectors'
                  - 'codeartifact:List*'
                  - 'codebuild:BatchGet*'
                  - 'cognito-idp:GetUserPoolMfaConfig'
                  - 'dlm:Get*'
                  - 'drs:Describe*'
                  - 'ds:Get*'
                  - 'ds:Describe*'
                  - 'ds:List*'
                  - 'dynamodb:GetResourcePolicy'
                  - 'ec2:GetEbsEncryptionByDefault'
                  - 'ec2:GetSnapshotBlockPublicAccessState'
                  - 'ec2:GetInstanceMetadataDefaults'
                  - 'ecr:Describe*'
                  - 'ecr:GetRegistryScanningConfiguration'
                  - 'elasticfilesystem:DescribeBackupPolicy'
                  - 'glue:GetConnections'
                  - 'glue:GetSecurityConfiguration*'
                  - 'glue:SearchTables'
                  - 'lambda:GetFunction*'
                  - 'logs:FilterLogEvents'
                  - 'lightsail:GetRelationalDatabases'
                  - 'macie2:GetMacieSession'
                  - 's3:GetAccountPublicAccessBlock'
                  - 'shield:DescribeProtection'
@@ -79,8 +88,10 @@ Resources:
                  - 'securityhub:BatchImportFindings'
                  - 'securityhub:GetFindings'
                  - 'ssm:GetDocument'
                  - 'ssm-incidents:List*'
                  - 'support:Describe*'
                  - 'tag:GetTagKeys'
                  - 'wellarchitected:List*'
                Resource: '*'
        - PolicyName: ProwlerScanRoleAdditionalViewPrivilegesApiGateway
          PolicyDocument:

@@ -16,7 +16,10 @@
        "ds:Get*",
        "ds:Describe*",
        "ds:List*",
        "dynamodb:GetResourcePolicy",
        "ec2:GetEbsEncryptionByDefault",
        "ec2:GetSnapshotBlockPublicAccessState",
        "ec2:GetInstanceMetadataDefaults",
        "ecr:Describe*",
        "ecr:GetRegistryScanningConfiguration",
        "elasticfilesystem:DescribeBackupPolicy",
@@ -25,6 +28,7 @@
        "glue:SearchTables",
        "lambda:GetFunction*",
        "logs:FilterLogEvents",
        "lightsail:GetRelationalDatabases",
        "macie2:GetMacieSession",
        "s3:GetAccountPublicAccessBlock",
        "shield:DescribeProtection",

20
permissions/prowler-azure-custom-role.json
Normal file
@@ -0,0 +1,20 @@
{
    "properties": {
        "roleName": "ProwlerRole",
        "description": "Role used for checks that require read-only access to Azure resources and are not covered by the Reader role.",
        "assignableScopes": [
            "/"
        ],
        "permissions": [
            {
                "actions": [
                    "Microsoft.Web/sites/host/listkeys/action",
                    "Microsoft.Web/sites/config/list/Action"
                ],
                "notActions": [],
                "dataActions": [],
                "notDataActions": []
            }
        ]
    }
}
549
poetry.lock
generated
@@ -6,7 +6,13 @@ from os import environ

from colorama import Fore, Style

from prowler.config.config import get_available_compliance_frameworks
from prowler.config.config import (
    csv_file_suffix,
    get_available_compliance_frameworks,
    html_file_suffix,
    json_asff_file_suffix,
    json_ocsf_file_suffix,
)
from prowler.lib.banner import print_banner
from prowler.lib.check.check import (
    bulk_load_checks_metadata,
@@ -36,19 +42,32 @@ from prowler.lib.check.custom_checks_metadata import (
)
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.compliance.compliance import display_compliance_table
from prowler.lib.outputs.html.html import add_html_footer, fill_html_overview_statistics
from prowler.lib.outputs.json.json import close_json
from prowler.lib.outputs.outputs import extract_findings_statistics
from prowler.lib.outputs.slack import send_slack_message
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.s3.s3 import send_to_s3_bucket
from prowler.providers.aws.lib.security_hub.security_hub import (
    batch_send_to_security_hub,
    prepare_security_hub_findings,
    resolve_security_hub_previous_findings,
    verify_security_hub_integration_enabled_per_region,
from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
    AWSWellArchitected,
)
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
from prowler.lib.outputs.compliance.compliance import display_compliance_table
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
    AzureMitreAttack,
)
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_gcp import GCPMitreAttack
from prowler.lib.outputs.csv.csv import CSV
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.html.html import HTML
from prowler.lib.outputs.ocsf.ocsf import OCSF
from prowler.lib.outputs.outputs import extract_findings_statistics
from prowler.lib.outputs.slack.slack import Slack
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
from prowler.providers.common.provider import Provider
from prowler.providers.common.quick_inventory import run_provider_quick_inventory

@@ -89,7 +108,8 @@ def prowler():
    )

    if not args.no_banner:
        print_banner(args.verbose, getattr(args, "fixer", None))
        legend = args.verbose or getattr(args, "fixer", None)
        print_banner(legend)

    # We treat the compliance framework as another output format
    if compliance_framework:
@@ -179,7 +199,17 @@ def prowler():

    # Import custom checks from folder
    if checks_folder:
        parse_checks_from_folder(global_provider, checks_folder)
        custom_checks = parse_checks_from_folder(global_provider, checks_folder)
        # Workaround to be able to execute custom checks alongside all checks if nothing is explicitly set
        if (
            not checks_file
            and not checks
            and not services
            and not severities
            and not compliance_framework
            and not categories
        ):
            checks_to_execute.update(custom_checks)

    # Exclude checks if -e/--excluded-checks
    if excluded_checks:
@@ -248,116 +278,342 @@ def prowler():
    stats = extract_findings_statistics(findings)

    if args.slack:
        # TODO: this should be also in a config file
        if "SLACK_API_TOKEN" in environ and (
            "SLACK_CHANNEL_NAME" in environ or "SLACK_CHANNEL_ID" in environ
        ):
            _ = send_slack_message(
                environ["SLACK_API_TOKEN"],
                (
                    environ["SLACK_CHANNEL_NAME"]
                    if "SLACK_CHANNEL_NAME" in environ
                    else environ["SLACK_CHANNEL_ID"]
                ),
                stats,
                global_provider,

            token = environ["SLACK_API_TOKEN"]
            channel = (
                environ["SLACK_CHANNEL_NAME"]
                if "SLACK_CHANNEL_NAME" in environ
                else environ["SLACK_CHANNEL_ID"]
            )
            prowler_args = " ".join(sys.argv[1:])
            slack = Slack(token, channel, global_provider)
            _ = slack.send(stats, prowler_args)
        else:
            # Refactor(CLI)
            logger.critical(
                "Slack integration needs SLACK_API_TOKEN and SLACK_CHANNEL_NAME environment variables (see more in https://docs.prowler.cloud/en/latest/tutorials/integrations/#slack)."
            )
            sys.exit(1)

    # Outputs
    # TODO: this part is needed since the checks generates a Check_Report_XXX and the output uses Finding
    # This will be refactored for the outputs generate directly the Finding
    finding_outputs = [
        Finding.generate_output(global_provider, finding) for finding in findings
    ]

    generated_outputs = {"regular": [], "compliance": []}

    if args.output_formats:
        for mode in args.output_formats:
            # Close json file if exists
            if "json" in mode:
                close_json(
                    global_provider.output_options.output_filename,
                    global_provider.output_options.output_directory,
                    mode,
            filename = (
                f"{global_provider.output_options.output_directory}/"
                f"{global_provider.output_options.output_filename}"
            )
            if mode == "csv":
                csv_output = CSV(
                    findings=finding_outputs,
                    create_file_descriptor=True,
                    file_path=f"{filename}{csv_file_suffix}",
                )
                generated_outputs["regular"].append(csv_output)
                # Write CSV Finding Object to file
                csv_output.batch_write_data_to_file()

            if mode == "json-asff":
                asff_output = ASFF(
                    findings=finding_outputs,
                    create_file_descriptor=True,
                    file_path=f"{filename}{json_asff_file_suffix}",
                )
                generated_outputs["regular"].append(asff_output)
                # Write ASFF Finding Object to file
                asff_output.batch_write_data_to_file()

            if mode == "json-ocsf":
                json_output = OCSF(
                    findings=finding_outputs,
                    create_file_descriptor=True,
                    file_path=f"{filename}{json_ocsf_file_suffix}",
                )
                generated_outputs["regular"].append(json_output)
                json_output.batch_write_data_to_file()
            if mode == "html":
                html_output = HTML(
                    findings=finding_outputs,
                    create_file_descriptor=True,
                    file_path=f"{filename}{html_file_suffix}",
                )
                generated_outputs["regular"].append(html_output)
                html_output.batch_write_data_to_file(
                    provider=global_provider, stats=stats
                )

            if "html" in mode:
                add_html_footer(
                    global_provider.output_options.output_filename,
                    global_provider.output_options.output_directory,
    # Compliance Frameworks
    input_compliance_frameworks = set(
        global_provider.output_options.output_modes
    ).intersection(get_available_compliance_frameworks(provider))
    if provider == "aws":
        for compliance_name in input_compliance_frameworks:
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                fill_html_overview_statistics(
                    stats,
                    global_provider.output_options.output_filename,
                    global_provider.output_options.output_directory,
                cis = AWSCIS(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(cis)
                cis.batch_write_data_to_file()
            elif compliance_name == "mitre_attack_aws":
                # Generate MITRE ATT&CK Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                mitre_attack = AWSMitreAttack(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(mitre_attack)
                mitre_attack.batch_write_data_to_file()
            elif compliance_name.startswith("ens_"):
                # Generate ENS Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                ens = AWSENS(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(ens)
                ens.batch_write_data_to_file()
            elif compliance_name.startswith("aws_well_architected_framework"):
                # Generate AWS Well-Architected Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                aws_well_architected = AWSWellArchitected(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(aws_well_architected)
                aws_well_architected.batch_write_data_to_file()
            elif compliance_name.startswith("iso27001_"):
                # Generate ISO27001 Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                iso27001 = AWSISO27001(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(iso27001)
                iso27001.batch_write_data_to_file()
            else:
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(generic_compliance)
                generic_compliance.batch_write_data_to_file()

            # Send output to S3 if needed (-B / -D)
            if provider == "aws" and (
                args.output_bucket or args.output_bucket_no_assume
            ):
                output_bucket = args.output_bucket
                bucket_session = global_provider.session.current_session
                # Check if -D was input
                if args.output_bucket_no_assume:
                    output_bucket = args.output_bucket_no_assume
                    bucket_session = global_provider.session.original_session
                send_to_s3_bucket(
                    global_provider.output_options.output_filename,
                    args.output_directory,
                    mode,
                    output_bucket,
                    bucket_session,
    elif provider == "azure":
        for compliance_name in input_compliance_frameworks:
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                cis = AzureCIS(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(cis)
                cis.batch_write_data_to_file()
            elif compliance_name == "mitre_attack_azure":
                # Generate MITRE ATT&CK Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                mitre_attack = AzureMitreAttack(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(mitre_attack)
                mitre_attack.batch_write_data_to_file()
            else:
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(generic_compliance)
                generic_compliance.batch_write_data_to_file()

    elif provider == "gcp":
        for compliance_name in input_compliance_frameworks:
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                cis = GCPCIS(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(cis)
                cis.batch_write_data_to_file()
            elif compliance_name == "mitre_attack_gcp":
                # Generate MITRE ATT&CK Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                mitre_attack = GCPMitreAttack(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(mitre_attack)
                mitre_attack.batch_write_data_to_file()
            else:
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(generic_compliance)
                generic_compliance.batch_write_data_to_file()

    elif provider == "kubernetes":
        for compliance_name in input_compliance_frameworks:
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                cis = KubernetesCIS(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(cis)
                cis.batch_write_data_to_file()
            else:
                filename = (
                    f"{global_provider.output_options.output_directory}/compliance/"
                    f"{global_provider.output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(generic_compliance)
                generic_compliance.batch_write_data_to_file()

    # AWS Security Hub Integration
    if provider == "aws" and args.security_hub:
        print(
            f"{Style.BRIGHT}\nSending findings to AWS Security Hub, please wait...{Style.RESET_ALL}"
        )
        # Verify where AWS Security Hub is enabled
        aws_security_enabled_regions = []
        security_hub_regions = (
            global_provider.get_available_aws_service_regions("securityhub")
            if not global_provider.identity.audited_regions
            else global_provider.identity.audited_regions
        )
        for region in security_hub_regions:
            # Save the regions where AWS Security Hub is enabled
            if verify_security_hub_integration_enabled_per_region(
                global_provider.identity.partition,
                region,
                global_provider.session.current_session,
                global_provider.identity.account,
            ):
                aws_security_enabled_regions.append(region)

        # Prepare the findings to be sent to Security Hub
        security_hub_findings_per_region = prepare_security_hub_findings(
            findings,
            global_provider,
            global_provider.output_options,
            aws_security_enabled_regions,
        )
|
||||
# Send the findings to Security Hub
|
||||
findings_sent_to_security_hub = batch_send_to_security_hub(
|
||||
security_hub_findings_per_region, global_provider.session.current_session
|
||||
)
|
||||
|
||||
print(
|
||||
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_sent_to_security_hub} findings sent to AWS Security Hub!{Style.RESET_ALL}"
|
||||
)
|
||||
|
||||
# Resolve previous fails of Security Hub
|
||||
if not args.skip_sh_update:
|
||||
if provider == "aws":
|
||||
# Send output to S3 if needed (-B / -D) for all the output formats
|
||||
if args.output_bucket or args.output_bucket_no_assume:
|
||||
output_bucket = args.output_bucket
|
||||
bucket_session = global_provider.session.current_session
|
||||
# Check if -D was input
|
||||
if args.output_bucket_no_assume:
|
||||
output_bucket = args.output_bucket_no_assume
|
||||
bucket_session = global_provider.session.original_session
|
||||
s3 = S3(
|
||||
session=bucket_session,
|
||||
bucket_name=output_bucket,
|
||||
output_directory=args.output_directory,
|
||||
)
|
||||
s3.send_to_bucket(generated_outputs)
|
||||
if args.security_hub:
|
||||
print(
|
||||
f"{Style.BRIGHT}\nArchiving previous findings in AWS Security Hub, please wait...{Style.RESET_ALL}"
|
||||
f"{Style.BRIGHT}\nSending findings to AWS Security Hub, please wait...{Style.RESET_ALL}"
|
||||
)
|
||||
findings_archived_in_security_hub = resolve_security_hub_previous_findings(
|
||||
security_hub_findings_per_region,
|
||||
global_provider,
|
||||
|
||||
security_hub_regions = (
|
||||
global_provider.get_available_aws_service_regions("securityhub")
|
||||
if not global_provider.identity.audited_regions
|
||||
else global_provider.identity.audited_regions
|
||||
)
|
||||
|
||||
security_hub = SecurityHub(
|
||||
aws_account_id=global_provider.identity.account,
|
||||
aws_partition=global_provider.identity.partition,
|
||||
aws_session=global_provider.session.current_session,
|
||||
findings=asff_output.data,
|
||||
send_only_fails=global_provider.output_options.send_sh_only_fails,
|
||||
aws_security_hub_available_regions=security_hub_regions,
|
||||
)
|
||||
# Send the findings to Security Hub
|
||||
findings_sent_to_security_hub = security_hub.batch_send_to_security_hub()
|
||||
print(
|
||||
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_archived_in_security_hub} findings archived in AWS Security Hub!{Style.RESET_ALL}"
|
||||
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_sent_to_security_hub} findings sent to AWS Security Hub!{Style.RESET_ALL}"
|
||||
)
|
||||
|
||||
# Resolve previous fails of Security Hub
|
||||
if not args.skip_sh_update:
|
||||
print(
|
||||
f"{Style.BRIGHT}\nArchiving previous findings in AWS Security Hub, please wait...{Style.RESET_ALL}"
|
||||
)
|
||||
findings_archived_in_security_hub = (
|
||||
security_hub.archive_previous_findings()
|
||||
)
|
||||
print(
|
||||
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_archived_in_security_hub} findings archived in AWS Security Hub!{Style.RESET_ALL}"
|
||||
)
|
||||
|
||||
# Display summary table
|
||||
if not args.only_logs:
|
||||
display_summary_table(
|
||||
|
||||
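The region-selection ternary above only falls back to every region where the `securityhub` service is available when the audit was not restricted to specific regions. A minimal sketch of that selection rule (the helper name is ours, not Prowler's):

```
# Sketch only: mirrors the `security_hub_regions` ternary above.
def select_security_hub_regions(audited_regions, available_regions):
    """Use the user-restricted regions if given, otherwise every region
    where the securityhub service is available."""
    return audited_regions if audited_regions else available_regions

assert select_security_hub_regions(None, ["eu-west-1", "us-east-1"]) == ["eu-west-1", "us-east-1"]
assert select_security_hub_regions(["us-east-1"], ["eu-west-1", "us-east-1"]) == ["us-east-1"]
```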
@@ -1288,7 +1288,7 @@
            ],
            "Attributes": [
                {
                    "Section": "2 etcd",
                    "Section": "2 Etcd",
                    "Profile": "Level 1 - Master Node",
                    "AssessmentStatus": "Automated",
                    "Description": "Configure TLS encryption for the etcd service.",
@@ -1310,7 +1310,7 @@
            ],
            "Attributes": [
                {
                    "Section": "2 etcd",
                    "Section": "2 Etcd",
                    "Profile": "Level 1 - Master Node",
                    "AssessmentStatus": "Automated",
                    "Description": "Enable client authentication on etcd service.",
@@ -1332,7 +1332,7 @@
            ],
            "Attributes": [
                {
                    "Section": "2 etcd",
                    "Section": "2 Etcd",
                    "Profile": "Level 1 - Master Node",
                    "AssessmentStatus": "Automated",
                    "Description": "Do not use self-signed certificates for TLS.",
@@ -1354,7 +1354,7 @@
            ],
            "Attributes": [
                {
                    "Section": "2 etcd",
                    "Section": "2 Etcd",
                    "Profile": "Level 1 - Master Node",
                    "AssessmentStatus": "Automated",
                    "Description": "etcd should be configured to make use of TLS encryption for peer connections.",
@@ -1376,7 +1376,7 @@
            ],
            "Attributes": [
                {
                    "Section": "2 etcd",
                    "Section": "2 Etcd",
                    "Profile": "Level 1 - Master Node",
                    "AssessmentStatus": "Automated",
                    "Description": "etcd should be configured for peer authentication.",
@@ -1398,7 +1398,7 @@
            ],
            "Attributes": [
                {
                    "Section": "2 etcd",
                    "Section": "2 Etcd",
                    "Profile": "Level 1 - Master Node",
                    "AssessmentStatus": "Automated",
                    "Description": "Do not use automatically generated self-signed certificates for TLS connections between peers.",
@@ -1420,7 +1420,7 @@
            ],
            "Attributes": [
                {
                    "Section": "2 etcd",
                    "Section": "2 Etcd",
                    "Profile": "Level 2 - Master Node",
                    "AssessmentStatus": "Manual",
                    "Description": "Use a different certificate authority for etcd from the one used for Kubernetes.",
@@ -2634,7 +2634,7 @@
            ],
            "Attributes": [
                {
                    "Section": "5.4",
                    "Section": "5.4 Secrets Management",
                    "Profile": "Level 2 - Master Node",
                    "AssessmentStatus": "Manual",
                    "Description": "Kubernetes supports mounting secrets as data volumes or as environment variables. Minimize the use of environment variable secrets.",
@@ -1,6 +1,5 @@
import os
import pathlib
import sys
from datetime import datetime, timezone
from os import getcwd

@@ -11,7 +10,7 @@ from prowler.lib.logger import logger

timestamp = datetime.today()
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
prowler_version = "4.1.0"
prowler_version = "4.3.3"
html_logo_url = "https://github.com/prowler-cloud/prowler/"
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"

@@ -65,6 +64,8 @@ default_config_file_path = (
default_fixer_config_file_path = (
    f"{pathlib.Path(os.path.dirname(os.path.realpath(__file__)))}/fixer_config.yaml"
)
encoding_format_utf_8 = "utf-8"
available_output_formats = ["csv", "json-asff", "json-ocsf", "html"]


def get_default_mute_file_path(provider: str):

@@ -99,52 +100,84 @@ def check_current_version():

def load_and_validate_config_file(provider: str, config_file_path: str) -> dict:
    """
    load_and_validate_config_file reads the Prowler config file in YAML format from the default location or the file passed with the --config-file flag
    Reads the Prowler config file in YAML format from the default location or the file passed with the --config-file flag.

    Args:
        provider (str): The provider name (e.g., 'aws', 'gcp', 'azure', 'kubernetes').
        config_file_path (str): The path to the configuration file.

    Returns:
        dict: The configuration dictionary for the specified provider.
    """
    try:
        with open(config_file_path) as f:
        config = {}
        with open(config_file_path, "r", encoding=encoding_format_utf_8) as f:
            config_file = yaml.safe_load(f)

            # Not to introduce a breaking change we have to allow the old format config file without any provider keys
            # and a new format with a key for each provider to include their configuration values within
            # Check if the new format is passed
            if (
                "aws" in config_file
                or "gcp" in config_file
                or "azure" in config_file
                or "kubernetes" in config_file
            ):
            # Not to introduce a breaking change, allow the old format config file without any provider keys
            # and a new format with a key for each provider to include their configuration values within.
            if any(key in config_file for key in ["aws", "gcp", "azure", "kubernetes"]):
                config = config_file.get(provider, {})
            else:
                config = config_file if config_file else {}
                # Not to break Azure, K8s and GCP does not support neither use the old config format
                # Not to break Azure, K8s and GCP does not support or use the old config format
                if provider in ["azure", "gcp", "kubernetes"]:
                    config = {}

        return config

    except Exception as error:
        logger.critical(
    except FileNotFoundError as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        sys.exit(1)
    except yaml.YAMLError as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
    except UnicodeDecodeError as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )

    return {}


def load_and_validate_fixer_config_file(
    provider: str, fixer_config_file_path: str
) -> dict:
    """
    load_and_validate_fixer_config_file reads the Prowler fixer config file in YAML format from the default location or the file passed with the --fixer-config flag
    Reads the Prowler fixer config file in YAML format from the default location or the file passed with the --fixer-config flag.

    Args:
        provider (str): The provider name (e.g., 'aws', 'gcp', 'azure', 'kubernetes').
        fixer_config_file_path (str): The path to the fixer configuration file.

    Returns:
        dict: The fixer configuration dictionary for the specified provider.
    """
    try:
        with open(fixer_config_file_path) as f:
        with open(fixer_config_file_path, "r", encoding=encoding_format_utf_8) as f:
            fixer_config_file = yaml.safe_load(f)

        return fixer_config_file.get(provider, {})

    except Exception as error:
        logger.critical(
    except FileNotFoundError as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
        sys.exit(1)
    except yaml.YAMLError as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
    except UnicodeDecodeError as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
        )

    return {}
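For context, a hedged sketch of the two layouts `load_and_validate_config_file` accepts after this change: the provider-keyed format and the legacy flat format. The file contents and the `max_unused_account_days` key are illustrative, not from the repository:

```
import yaml

new_format = yaml.safe_load(
    """
aws:
  max_security_group_rules: 50
gcp:
  max_unused_account_days: 30
"""
)
old_format = yaml.safe_load("max_security_group_rules: 50")

def extract_provider_config(config_file: dict, provider: str) -> dict:
    # Mirrors the branch above: new format has provider keys, old format is flat.
    if any(key in config_file for key in ["aws", "gcp", "azure", "kubernetes"]):
        return config_file.get(provider, {})
    # The old flat format only ever applied to AWS.
    return {} if provider in ["azure", "gcp", "kubernetes"] else config_file

assert extract_provider_config(new_format, "aws") == {"max_security_group_rules": 50}
assert extract_provider_config(old_format, "gcp") == {}
```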
@@ -29,6 +29,18 @@ aws:
  max_security_group_rules: 50
  # aws.ec2_instance_older_than_specific_days --> by default is 6 months (180 days)
  max_ec2_instance_age_in_days: 180
  # aws.ec2_securitygroup_allow_ingress_from_internet_to_any_port
  # allowed network interface types for security groups open to the Internet
  ec2_allowed_interface_types:
    [
      "api_gateway_managed",
      "vpc_endpoint",
    ]
  # allowed network interface owners for security groups open to the Internet
  ec2_allowed_instance_owners:
    [
      "amazon-elb"
    ]

  # AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
  # Single account environment: No action required. The AWS account number will be automatically added by the checks.
@@ -250,10 +262,28 @@ aws:
      "LookupEvents",
      "Search",
    ]

  # AWS RDS Configuration
  # aws.rds_instance_backup_enabled
  # Whether to check RDS instance replicas or not
  check_rds_instance_replicas: False

  # AWS ACM Configuration
  # aws.acm_certificates_expiration_check
  days_to_expire_threshold: 7

  # AWS EKS Configuration
  # aws.eks_control_plane_logging_all_types_enabled
  # EKS control plane logging types that must be enabled
  eks_required_log_types:
    [
      "api",
      "audit",
      "authenticator",
      "controllerManager",
      "scheduler",
    ]

# Azure Configuration
azure:
  # Azure Network Configuration
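A hedged sketch of how a check might consume `eks_required_log_types` from this configuration; the exact check body is illustrative, only the configured list comes from the YAML above:

```
# Illustrative only: compares a cluster's enabled control-plane log types
# against the configured requirement from the audit config.
eks_required_log_types = ["api", "audit", "authenticator", "controllerManager", "scheduler"]

def control_plane_logging_complete(enabled_log_types: list) -> bool:
    # Every required type must be enabled for the check to PASS.
    return all(log_type in enabled_log_types for log_type in eks_required_log_types)

assert not control_plane_logging_complete(["api", "audit"])
assert control_plane_logging_complete(eks_required_log_types)
```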
@@ -3,7 +3,16 @@ from colorama import Fore, Style
from prowler.config.config import banner_color, orange_color, prowler_version, timestamp


def print_banner(verbose: bool, fixer: bool = False):
def print_banner(legend: bool = False):
    """
    Prints the banner with optional legend for color codes.

    Parameters:
    - legend (bool): Flag to indicate whether to print the color legend or not. Default is False.

    Returns:
    - None
    """
    banner = rf"""{banner_color}                         _
 _ __  _ __ _____      _| | ___ _ __
| '_ \| '__/ _ \ \ /\ / / |/ _ \ '__|
@@ -15,7 +24,7 @@ def print_banner(verbose: bool, fixer: bool = False):
"""
    print(banner)

    if verbose or fixer:
    if legend:
        print(
            f"""
{Style.BRIGHT}Color code for results:{Style.RESET_ALL}
@@ -19,7 +19,6 @@ from prowler.lib.check.compliance_models import load_compliance_framework
from prowler.lib.check.custom_checks_metadata import update_check_metadata
from prowler.lib.check.models import Check, load_check_metadata
from prowler.lib.logger import logger
from prowler.lib.mutelist.mutelist import mutelist_findings
from prowler.lib.outputs.outputs import report
from prowler.lib.utils.utils import open_file, parse_json_file, print_boxes
from prowler.providers.common.models import Audit_Metadata

@@ -126,9 +125,10 @@ def parse_checks_from_file(input_file: str, provider: str) -> set:


# Load checks from custom folder
def parse_checks_from_folder(provider, input_folder: str) -> int:
def parse_checks_from_folder(provider, input_folder: str) -> set:
    # TODO: move the AWS-specific code into the provider
    try:
        imported_checks = 0
        custom_checks = set()
        # Check if input folder is a S3 URI
        if provider.type == "aws" and re.search(
            "^s3://([^/]+)/(.*?([^/]+))/$", input_folder
@@ -156,8 +156,8 @@ def parse_checks_from_folder(provider, input_folder: str) -> int:
            if os.path.exists(prowler_module):
                shutil.rmtree(prowler_module)
            shutil.copytree(check_module, prowler_module)
            imported_checks += 1
        return imported_checks
            custom_checks.add(check.name)
        return custom_checks
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
@@ -438,7 +438,7 @@ def import_check(check_path: str) -> ModuleType:
    return lib


def run_check(check: Check, output_options) -> list:
def run_check(check: Check, verbose: bool = False, only_logs: bool = False) -> list:
    """
    Run the check and return the findings
    Args:
@@ -448,7 +448,7 @@ def run_check(check: Check, output_options) -> list:
        list: list of findings
    """
    findings = []
    if output_options.verbose or output_options.fixer:
    if verbose:
        print(
            f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity}]{Style.RESET_ALL}"
        )
@@ -456,7 +456,7 @@ def run_check(check: Check, output_options) -> list:
    try:
        findings = check.execute()
    except Exception as error:
        if not output_options.only_logs:
        if not only_logs:
            print(
                f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
            )
@@ -611,9 +611,9 @@ def execute_checks(
    else:
        # Prepare your messages
        messages = [f"Config File: {Fore.YELLOW}{config_file}{Style.RESET_ALL}"]
        if global_provider.mutelist_file_path:
        if global_provider.mutelist.mutelist_file_path:
            messages.append(
                f"Mutelist File: {Fore.YELLOW}{global_provider.mutelist_file_path}{Style.RESET_ALL}"
                f"Mutelist File: {Fore.YELLOW}{global_provider.mutelist.mutelist_file_path}{Style.RESET_ALL}"
            )
        if global_provider.type == "aws":
            messages.append(
@@ -698,7 +698,21 @@ def execute(
    )

    # Run check
    check_findings = run_check(check_class, global_provider.output_options)
    verbose = (
        global_provider.output_options.verbose
        or global_provider.output_options.fixer
    )
    check_findings = run_check(
        check_class, verbose, global_provider.output_options.only_logs
    )

    # Exclude findings per status
    if global_provider.output_options.status:
        check_findings = [
            finding
            for finding in check_findings
            if finding.status in global_provider.output_options.status
        ]

    # Update Audit Status
    services_executed.add(service)
@@ -708,11 +722,21 @@ def execute(
    )

    # Mutelist findings
    if hasattr(global_provider, "mutelist") and global_provider.mutelist:
        check_findings = mutelist_findings(
            global_provider,
            check_findings,
        )
    if hasattr(global_provider, "mutelist") and global_provider.mutelist.mutelist:
        # TODO: make this prettier
        is_finding_muted_args = {}
        if global_provider.type == "aws":
            is_finding_muted_args["aws_account_id"] = (
                global_provider.identity.account
            )
        elif global_provider.type == "kubernetes":
            is_finding_muted_args["cluster"] = global_provider.identity.cluster

        for finding in check_findings:
            is_finding_muted_args["finding"] = finding
            finding.muted = global_provider.mutelist.is_finding_muted(
                **is_finding_muted_args
            )

    # Refactor(Outputs)
    # Report the check's findings
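The muting refactor above builds a provider-specific keyword dictionary before calling `is_finding_muted`. A self-contained sketch of that dispatch, with the provider stubbed as plain data (the helper name is ours):

```
# Sketch of the per-provider keyword dispatch used above for muting.
def build_muted_args(provider_type: str, identity: dict) -> dict:
    args = {}
    if provider_type == "aws":
        args["aws_account_id"] = identity["account"]
    elif provider_type == "kubernetes":
        args["cluster"] = identity["cluster"]
    return args

assert build_muted_args("aws", {"account": "123456789012"}) == {"aws_account_id": "123456789012"}
assert build_muted_args("gcp", {}) == {}
```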
@@ -1,16 +1,21 @@
import sys

from pydantic import parse_obj_as

from prowler.lib.check.compliance_models import Compliance_Base_Model
from prowler.lib.check.models import Check_Metadata_Model
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.logger import logger


def update_checks_metadata_with_compliance(
    bulk_compliance_frameworks: dict, bulk_checks_metadata: dict
):
    """Update the check metadata model with the compliance framework"""
) -> dict:
    """
    Update the check metadata model with the compliance framework
    Args:
        bulk_compliance_frameworks (dict): The compliance frameworks
        bulk_checks_metadata (dict): The checks metadata

    Returns:
        dict: The checks metadata with the compliance frameworks
    """
    try:
        for check in bulk_checks_metadata:
            check_compliance = []
@@ -22,7 +27,7 @@ def update_checks_metadata_with_compliance(
                        # Include the requirement into the check's framework requirements
                        compliance_requirements.append(requirement)
                # Create the Compliance_Model
                compliance = Compliance_Base_Model(
                compliance = ComplianceBaseModel(
                    Framework=framework.Framework,
                    Provider=framework.Provider,
                    Version=framework.Version,
@@ -33,53 +38,6 @@ def update_checks_metadata_with_compliance(
                check_compliance.append(compliance)
            # Save it into the check's metadata
            bulk_checks_metadata[check].Compliance = check_compliance

        # Add requirements of Manual Controls
        for framework in bulk_compliance_frameworks.values():
            for requirement in framework.Requirements:
                compliance_requirements = []
                # Verify if requirement is Manual
                if not requirement.Checks:
                    compliance_requirements.append(requirement)
                    # Create the Compliance_Model
                    compliance = Compliance_Base_Model(
                        Framework=framework.Framework,
                        Provider=framework.Provider,
                        Version=framework.Version,
                        Description=framework.Description,
                        Requirements=compliance_requirements,
                    )
                    # Include the compliance framework for the check
                    check_compliance.append(compliance)
        # Create metadata for Manual Control
        manual_check_metadata = {
            "Provider": framework.Provider.lower(),
            "CheckID": "manual_check",
            "CheckTitle": "Manual Check",
            "CheckType": [],
            "ServiceName": "",
            "SubServiceName": "",
            "ResourceIdTemplate": "",
            "Severity": "low",
            "ResourceType": "",
            "Description": "",
            "Risk": "",
            "RelatedUrl": "",
            "Remediation": {
                "Code": {"CLI": "", "NativeIaC": "", "Other": "", "Terraform": ""},
                "Recommendation": {"Text": "", "Url": ""},
            },
            "Categories": [],
            "Tags": {},
            "DependsOn": [],
            "RelatedTo": [],
            "Notes": "",
        }
        manual_check = parse_obj_as(Check_Metadata_Model, manual_check_metadata)
        # Save it into the check's metadata
        bulk_checks_metadata["manual_check"] = manual_check
        bulk_checks_metadata["manual_check"].Compliance = check_compliance

        return bulk_checks_metadata
    except Exception as e:
        logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
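A hedged usage sketch of the function above; the `bulk_load_*` loader names and module paths are assumptions about the surrounding Prowler API, not shown in this diff:

```
# Assumed imports and loader names; the update call matches the function above.
from prowler.lib.check.check import bulk_load_checks_metadata, bulk_load_compliance_frameworks
from prowler.lib.check.compliance import update_checks_metadata_with_compliance

bulk_checks_metadata = bulk_load_checks_metadata("aws")
bulk_compliance_frameworks = bulk_load_compliance_frameworks("aws")
bulk_checks_metadata = update_checks_metadata_with_compliance(
    bulk_compliance_frameworks, bulk_checks_metadata
)
# Requirements with no Checks end up attached to the synthetic
# "manual_check" metadata entry built above.
manual = bulk_checks_metadata.get("manual_check")
```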
@@ -91,7 +91,6 @@ class CIS_Requirement_Attribute(BaseModel):
    AdditionalInformation: str
    DefaultValue: Optional[str]
    References: str
    DefaultValue: Optional[str]


# Well Architected Requirement Attribute
@@ -189,8 +188,8 @@ class Compliance_Requirement(BaseModel):
    Checks: list[str]


class Compliance_Base_Model(BaseModel):
    """Compliance_Base_Model holds the base model for every compliance framework"""
class ComplianceBaseModel(BaseModel):
    """ComplianceBaseModel holds the base model for every compliance framework"""

    Framework: str
    Provider: str
@@ -218,10 +217,10 @@ class Compliance_Base_Model(BaseModel):
# Testing Pending
def load_compliance_framework(
    compliance_specification_file: str,
) -> Compliance_Base_Model:
) -> ComplianceBaseModel:
    """load_compliance_framework loads and parse a Compliance Framework Specification"""
    try:
        compliance_framework = Compliance_Base_Model.parse_file(
        compliance_framework = ComplianceBaseModel.parse_file(
            compliance_specification_file
        )
    except ValidationError as error:
@@ -96,13 +96,15 @@ class Check(ABC, Check_Metadata_Model):
        data = Check_Metadata_Model.parse_file(metadata_file).dict()
        # Calls parents init function
        super().__init__(**data)
        # TODO: verify that the CheckID is the same as the filename and classname
        # to mimic the test done at test_<provider>_checks_metadata_is_valid

    def metadata(self) -> dict:
        """Return the JSON representation of the check's metadata"""
        return self.json()

    @abstractmethod
    def execute(self):
    def execute(self) -> list:
        """Execute the check's logic"""
@@ -5,6 +5,7 @@ from argparse import RawTextHelpFormatter
from dashboard.lib.arguments.arguments import init_dashboard_parser
from prowler.config.config import (
    available_compliance_frameworks,
    available_output_formats,
    check_current_version,
    default_config_file_path,
    default_fixer_config_file_path,
@@ -147,7 +148,7 @@ Detailed documentation at https://docs.prowler.com
    nargs="+",
    help="Output modes, by default csv and json-oscf are saved. When using AWS Security Hub integration, json-asff output is also saved.",
    default=["csv", "json-ocsf", "html"],
    choices=["csv", "json-asff", "json-ocsf", "html"],
    choices=available_output_formats,
)
common_outputs_parser.add_argument(
    "--output-filename",
@@ -262,7 +263,7 @@ Detailed documentation at https://docs.prowler.com
group.add_argument(
    "--compliance",
    nargs="+",
    help="Compliance Framework to check against for. The format should be the following: framework_version_provider (e.g.: ens_rd2022_aws)",
    help="Compliance Framework to check against for. The format should be the following: framework_version_provider (e.g.: cis_3.0_aws)",
    choices=available_compliance_frameworks,
)
group.add_argument(
prowler/lib/mutelist/models.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from schema import Optional, Schema

mutelist_schema = Schema(
    {
        "Accounts": {
            str: {
                "Checks": {
                    str: {
                        "Regions": list,
                        "Resources": list,
                        Optional("Tags"): list,
                        Optional("Exceptions"): {
                            Optional("Accounts"): list,
                            Optional("Regions"): list,
                            Optional("Resources"): list,
                            Optional("Tags"): list,
                        },
                    }
                }
            }
        }
    }
)
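A minimal, hedged example of validating a mutelist document against this schema with the `schema` library (`Schema.validate` returns the validated data and raises `SchemaError` on mismatch); the check id used here is hypothetical:

```
from schema import SchemaError

candidate = {
    "Accounts": {
        "*": {
            "Checks": {
                "s3_bucket_public_access": {  # hypothetical check id
                    "Regions": ["*"],
                    "Resources": ["*"],
                }
            }
        }
    }
}

try:
    mutelist_schema.validate(candidate)
    print("mutelist is well-formed")
except SchemaError as error:
    print(f"malformed mutelist: {error}")
```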
@@ -1,375 +1,331 @@
import re
import sys
from typing import Any
from abc import ABC, abstractmethod

import yaml
from boto3 import Session
from boto3.dynamodb.conditions import Attr
from schema import Optional, Schema

from prowler.lib.logger import logger
from prowler.lib.outputs.utils import unroll_tags

mutelist_schema = Schema(
    {
        "Accounts": {
            str: {
                "Checks": {
                    str: {
                        "Regions": list,
                        "Resources": list,
                        Optional("Tags"): list,
                        Optional("Exceptions"): {
                            Optional("Accounts"): list,
                            Optional("Regions"): list,
                            Optional("Resources"): list,
                            Optional("Tags"): list,
                        },
                    }
                }
            }
        }
    }
)
from prowler.lib.mutelist.models import mutelist_schema


def parse_mutelist_file(
    mutelist_path: str, aws_session: Session = None, aws_account: str = None
):
    try:
        # Check if file is a S3 URI
        if re.search("^s3://([^/]+)/(.*?([^/]+))$", mutelist_path):
            bucket = mutelist_path.split("/")[2]
            key = ("/").join(mutelist_path.split("/")[3:])
            s3_client = aws_session.client("s3")
            mutelist = yaml.safe_load(
                s3_client.get_object(Bucket=bucket, Key=key)["Body"]
            )["Mutelist"]
        # Check if file is a Lambda Function ARN
        elif re.search(r"^arn:(\w+):lambda:", mutelist_path):
            lambda_region = mutelist_path.split(":")[3]
            lambda_client = aws_session.client("lambda", region_name=lambda_region)
            lambda_response = lambda_client.invoke(
                FunctionName=mutelist_path, InvocationType="RequestResponse"
            )
            lambda_payload = lambda_response["Payload"].read()
            mutelist = yaml.safe_load(lambda_payload)["Mutelist"]
        # Check if file is a DynamoDB ARN
        elif re.search(
            r"^arn:aws(-cn|-us-gov)?:dynamodb:[a-z]{2}-[a-z-]+-[1-9]{1}:[0-9]{12}:table\/[a-zA-Z0-9._-]+$",
            mutelist_path,
        ):
            mutelist = {"Accounts": {}}
            table_region = mutelist_path.split(":")[3]
            dynamodb_resource = aws_session.resource(
                "dynamodb", region_name=table_region
            )
            dynamo_table = dynamodb_resource.Table(mutelist_path.split("/")[1])
            response = dynamo_table.scan(
                FilterExpression=Attr("Accounts").is_in([aws_account, "*"])
            )
            dynamodb_items = response["Items"]
            # Paginate through all results
            while "LastEvaluatedKey" in dynamodb_items:
                response = dynamo_table.scan(
                    ExclusiveStartKey=response["LastEvaluatedKey"],
                    FilterExpression=Attr("Accounts").is_in([aws_account, "*"]),
                )
                dynamodb_items.update(response["Items"])
            for item in dynamodb_items:
                # Create mutelist for every item
                mutelist["Accounts"][item["Accounts"]] = {
                    "Checks": {
                        item["Checks"]: {
                            "Regions": item["Regions"],
                            "Resources": item["Resources"],
                        }
                    }
                }
                if "Tags" in item:
                    mutelist["Accounts"][item["Accounts"]]["Checks"][item["Checks"]][
                        "Tags"
                    ] = item["Tags"]
                if "Exceptions" in item:
                    mutelist["Accounts"][item["Accounts"]]["Checks"][item["Checks"]][
                        "Exceptions"
                    ] = item["Exceptions"]
class Mutelist(ABC):
    """
    Abstract base class for managing a mutelist.

    Attributes:
        _mutelist (dict): Dictionary containing information about muted checks for different accounts.
        _mutelist_file_path (str): Path to the mutelist file.
        MUTELIST_KEY (str): Key used to access the mutelist in the mutelist file.

    Methods:
        __init__: Initializes a Mutelist object.
        mutelist: Property that returns the mutelist dictionary.
        mutelist_file_path: Property that returns the mutelist file path.
        is_finding_muted: Abstract method to check if a finding is muted.
        get_mutelist_file_from_local_file: Retrieves the mutelist file from a local file.
        validate_mutelist: Validates the mutelist against a schema.
        is_muted: Checks if a finding is muted for the audited account, check, region, resource, and tags.
        is_muted_in_check: Checks if a check is muted.
        is_excepted: Checks if the account, region, resource, and tags are excepted based on the exceptions.
    """

    _mutelist: dict = {}
    _mutelist_file_path: str = None

    MUTELIST_KEY = "Mutelist"

    def __init__(
        self, mutelist_path: str = "", mutelist_content: dict = {}
    ) -> "Mutelist":
        if mutelist_path:
            self._mutelist_file_path = mutelist_path
            self.get_mutelist_file_from_local_file(mutelist_path)
        else:
        with open(mutelist_path) as f:
            mutelist = yaml.safe_load(f)["Mutelist"]
            self._mutelist = mutelist_content

        if self._mutelist:
            self.validate_mutelist()

    @property
    def mutelist(self) -> dict:
        return self._mutelist

    @property
    def mutelist_file_path(self) -> dict:
        return self._mutelist_file_path

    @abstractmethod
    def is_finding_muted(self) -> bool:
        raise NotImplementedError

    def get_mutelist_file_from_local_file(self, mutelist_path: str):
        try:
        mutelist_schema.validate(mutelist)
            with open(mutelist_path) as f:
                self._mutelist = yaml.safe_load(f)[self.MUTELIST_KEY]
        except Exception as error:
        logger.critical(
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )

    def validate_mutelist(self) -> bool:
        try:
            self._mutelist = mutelist_schema.validate(self._mutelist)
            return True
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- Mutelist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]"
            )
        sys.exit(1)
        return mutelist
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)
            self._mutelist = {}
            return False

    def is_muted(
        self,
        audited_account: str,
        check: str,
        finding_region: str,
        finding_resource: str,
        finding_tags,
    ) -> bool:
        """
        Check if the provided finding is muted for the audited account, check, region, resource and tags.

def mutelist_findings(
    global_provider: Any,
    check_findings: list[Any],
):
    # Check if finding is muted
    for finding in check_findings:
        # TODO: Move this mapping to the execute_check function and pass that output to the mutelist and the report
        if global_provider.type == "aws":
            finding.muted = is_muted(
                global_provider.mutelist,
                global_provider.identity.account,
                finding.check_metadata.CheckID,
                finding.region,
                finding.resource_id,
                unroll_tags(finding.resource_tags),
        The Mutelist works in a way that each field is ANDed, so if a check is muted for an account, region, resource and tags, it will be muted.
        The exceptions are ORed, so if a check is excepted for an account, region, resource or tags, it will not be muted.
        The only particularity is the tags, which are ORed.

        So, for the following Mutelist:
        ```
        Mutelist:
            Accounts:
                '*':
                    Checks:
                        ec2_instance_detailed_monitoring_enabled:
                            Regions: ['*']
                            Resources:
                                - 'i-123456789'
                            Tags:
                                - 'Name=AdminInstance | Environment=Prod'
        ```
        The check `ec2_instance_detailed_monitoring_enabled` will be muted for all accounts and regions and for the resource_id 'i-123456789' with at least one of the tags 'Name=AdminInstance' or 'Environment=Prod'.

        Args:
            mutelist (dict): Dictionary containing information about muted checks for different accounts.
            audited_account (str): The account being audited.
            check (str): The check to be evaluated for muting.
            finding_region (str): The region where the finding occurred.
            finding_resource (str): The resource related to the finding.
            finding_tags: The tags associated with the finding.

        Returns:
            bool: True if the finding is muted for the audited account, check, region, resource and tags, otherwise False.
        """
        try:
            # By default is not muted
            is_finding_muted = False

            # We always check all the accounts present in the mutelist
            # if one mutes the finding we set the finding as muted
            for account in self._mutelist.get("Accounts", []):
                if account == audited_account or account == "*":
                    if self.is_muted_in_check(
                        self._mutelist["Accounts"][account]["Checks"],
                        audited_account,
                        check,
                        finding_region,
                        finding_resource,
                        finding_tags,
                    ):
                        is_finding_muted = True
                        break

            return is_finding_muted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
        elif global_provider.type == "azure":
            finding.muted = is_muted(
                global_provider.mutelist,
                finding.subscription,
                finding.check_metadata.CheckID,
                # TODO: add region to the findings when we add Azure Locations
                # finding.region,
                "",
                finding.resource_name,
                unroll_tags(finding.resource_tags),
            )
        elif global_provider.type == "gcp":
            finding.muted = is_muted(
                global_provider.mutelist,
                finding.project_id,
                finding.check_metadata.CheckID,
                finding.location,
                finding.resource_name,
                unroll_tags(finding.resource_tags),
            )
        elif global_provider.type == "kubernetes":
            finding.muted = is_muted(
                global_provider.mutelist,
                global_provider.identity.cluster,
                finding.check_metadata.CheckID,
                finding.namespace,
                finding.resource_name,
                unroll_tags(finding.resource_tags),
            )
    return check_findings
            return False

    def is_muted_in_check(
        self,
        muted_checks,
        audited_account,
        check,
        finding_region,
        finding_resource,
        finding_tags,
    ) -> bool:
        """
        Check if the provided check is muted.

def is_muted(
    mutelist: dict,
    audited_account: str,
    check: str,
    finding_region: str,
    finding_resource: str,
    finding_tags,
):
    try:
        # By default is not muted
        is_finding_muted = False
        Args:
            muted_checks (dict): Dictionary containing information about muted checks.
            audited_account (str): The account to be audited.
            check (str): The check to be evaluated for muting.
            finding_region (str): The region where the finding occurred.
            finding_resource (str): The resource related to the finding.
            finding_tags (str): The tags associated with the finding.

        # We always check all the accounts present in the mutelist
        # if one mutes the finding we set the finding as muted
        for account in mutelist["Accounts"]:
            if account == audited_account or account == "*":
                if is_muted_in_check(
                    mutelist["Accounts"][account]["Checks"],
                    audited_account,
                    check,
                    finding_region,
                    finding_resource,
                    finding_tags,
        Returns:
            bool: True if the check is muted, otherwise False.
        """
        try:
            # Default value is not muted
            is_check_muted = False

            for muted_check, muted_check_info in muted_checks.items():
                # map lambda to awslambda
                muted_check = re.sub("^lambda", "awslambda", muted_check)

                check_match = (
                    "*" == muted_check
                    or check == muted_check
                    or self.is_item_matched([muted_check], check)
                )

                # Check if the finding is excepted
                exceptions = muted_check_info.get("Exceptions")
                if (
                    self.is_excepted(
                        exceptions,
                        audited_account,
                        finding_region,
                        finding_resource,
                        finding_tags,
                    )
                    and check_match
                ):
                    is_finding_muted = True
                    # Break loop and return default value since is excepted
                    break

        return is_finding_muted
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)
                muted_regions = muted_check_info.get("Regions")
                muted_resources = muted_check_info.get("Resources")
                muted_tags = muted_check_info.get("Tags", "*")
                # We need to set the muted_tags if None, "" or [], so the falsy helps
                if not muted_tags:
                    muted_tags = "*"
                # If there is a *, it affects to all checks
                if check_match:
                    muted_in_check = True
                    muted_in_region = self.is_item_matched(
                        muted_regions, finding_region
                    )
                    muted_in_resource = self.is_item_matched(
                        muted_resources, finding_resource
                    )
                    muted_in_tags = self.is_item_matched(muted_tags, finding_tags)

                    # For a finding to be muted requires the following set to True:
                    # - muted_in_check -> True
                    # - muted_in_region -> True
                    # - muted_in_tags -> True
                    # - muted_in_resource -> True
                    # - excepted -> False

def is_muted_in_check(
    muted_checks,
    audited_account,
    check,
    finding_region,
    finding_resource,
    finding_tags,
):
    try:
        # Default value is not muted
        is_check_muted = False
                    if (
                        muted_in_check
                        and muted_in_region
                        and muted_in_tags
                        and muted_in_resource
                    ):
                        is_check_muted = True

        for muted_check, muted_check_info in muted_checks.items():
            # map lambda to awslambda
            muted_check = re.sub("^lambda", "awslambda", muted_check)

            check_match = (
                "*" == muted_check
                or check == muted_check
                or re.search(muted_check, check)
            return is_check_muted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            # Check if the finding is excepted
            exceptions = muted_check_info.get("Exceptions")
            if (
                is_excepted(
                    exceptions,
                    audited_account,
                    finding_region,
                    finding_resource,
                    finding_tags,
                )
                and check_match
            ):
                # Break loop and return default value since is excepted
                break
            return False

            muted_regions = muted_check_info.get("Regions")
            muted_resources = muted_check_info.get("Resources")
            muted_tags = muted_check_info.get("Tags", "*")
            # We need to set the muted_tags if None, "" or [], so the falsy helps
            if not muted_tags:
                muted_tags = "*"
            # If there is a *, it affects to all checks
            if check_match:
                muted_in_check = True
                muted_in_region = is_muted_in_region(muted_regions, finding_region)
                muted_in_resource = is_muted_in_resource(
                    muted_resources, finding_resource
                )
                muted_in_tags = is_muted_in_tags(muted_tags, finding_tags)
    def is_excepted(
        self,
        exceptions,
        audited_account,
        finding_region,
        finding_resource,
        finding_tags,
    ) -> bool:
        """
        Check if the provided account, region, resource, and tags are excepted based on the exceptions dictionary.

            # For a finding to be muted requires the following set to True:
            # - muted_in_check -> True
            # - muted_in_region -> True
            # - muted_in_tags -> True
            # - muted_in_resource -> True
            # - excepted -> False
        Args:
            exceptions (dict): Dictionary containing exceptions for different attributes like Accounts, Regions, Resources, and Tags.
            audited_account (str): The account to be audited.
            finding_region (str): The region where the finding occurred.
            finding_resource (str): The resource related to the finding.
            finding_tags (str): The tags associated with the finding.

        Returns:
            bool: True if the account, region, resource, and tags are excepted based on the exceptions, otherwise False.
        """
        try:
            excepted = False
            is_account_excepted = False
            is_region_excepted = False
            is_resource_excepted = False
            is_tag_excepted = False
            if exceptions:
                excepted_accounts = exceptions.get("Accounts", [])
                is_account_excepted = self.is_item_matched(
                    excepted_accounts, audited_account
                )

                excepted_regions = exceptions.get("Regions", [])
                is_region_excepted = self.is_item_matched(
                    excepted_regions, finding_region
                )

                excepted_resources = exceptions.get("Resources", [])
                is_resource_excepted = self.is_item_matched(
                    excepted_resources, finding_resource
                )

                excepted_tags = exceptions.get("Tags", [])
                is_tag_excepted = self.is_item_matched(excepted_tags, finding_tags)

            if (
                muted_in_check
                and muted_in_region
                and muted_in_tags
                and muted_in_resource
                    not is_account_excepted
                    and not is_region_excepted
                    and not is_resource_excepted
                    and not is_tag_excepted
            ):
                is_check_muted = True

        return is_check_muted
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)


def is_muted_in_region(
    mutelist_regions,
    finding_region,
):
    try:
        return __is_item_matched__(mutelist_regions, finding_region)
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)


def is_muted_in_tags(muted_tags, finding_tags):
    try:
        return __is_item_matched__(muted_tags, finding_tags)
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)


def is_muted_in_resource(muted_resources, finding_resource):
    try:
        return __is_item_matched__(muted_resources, finding_resource)

    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)


def is_excepted(
    exceptions,
    audited_account,
    finding_region,
    finding_resource,
    finding_tags,
):
    """is_excepted returns True if the account, region, resource and tags are excepted"""
    try:
        excepted = False
        is_account_excepted = False
        is_region_excepted = False
        is_resource_excepted = False
        is_tag_excepted = False
        if exceptions:
            excepted_accounts = exceptions.get("Accounts", [])
            is_account_excepted = __is_item_matched__(
                excepted_accounts, audited_account
                excepted = False
            elif (
                (is_account_excepted or not excepted_accounts)
                and (is_region_excepted or not excepted_regions)
                and (is_resource_excepted or not excepted_resources)
                and (is_tag_excepted or not excepted_tags)
            ):
                excepted = True
            return excepted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False

            excepted_regions = exceptions.get("Regions", [])
            is_region_excepted = __is_item_matched__(excepted_regions, finding_region)
    @staticmethod
    def is_item_matched(matched_items, finding_items):
        """
        Check if any of the items in matched_items are present in finding_items.

            excepted_resources = exceptions.get("Resources", [])
            is_resource_excepted = __is_item_matched__(
                excepted_resources, finding_resource
        Args:
            matched_items (list): List of items to be matched.
            finding_items (str): String to search for matched items.

        Returns:
            bool: True if any of the matched_items are present in finding_items, otherwise False.
        """
        try:
            is_item_matched = False
            if matched_items and (finding_items or finding_items == ""):
                for item in matched_items:
                    if item.startswith("*"):
                        item = ".*" + item[1:]
                    if re.search(item, finding_items):
                        is_item_matched = True
                        break
            return is_item_matched
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )

            excepted_tags = exceptions.get("Tags", [])
            is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags)

            if (
                not is_account_excepted
                and not is_region_excepted
                and not is_resource_excepted
                and not is_tag_excepted
            ):
                excepted = False
            elif (
                (is_account_excepted or not excepted_accounts)
                and (is_region_excepted or not excepted_regions)
                and (is_resource_excepted or not excepted_resources)
                and (is_tag_excepted or not excepted_tags)
            ):
                excepted = True
        return excepted
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)


def __is_item_matched__(matched_items, finding_items):
    """__is_item_matched__ return True if any of the matched_items are present in the finding_items, otherwise returns False."""
    try:
        is_item_matched = False
        if matched_items and (finding_items or finding_items == ""):
            for item in matched_items:
                if item == "*":
                    item = ".*"
                if re.search(item, finding_items):
                    is_item_matched = True
                    break
        return is_item_matched
    except Exception as error:
        logger.critical(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        sys.exit(1)
        return False
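A hedged sketch of how a provider-specific subclass could plug into the `Mutelist` base class above; the subclass, its finding shape, and the expected result are modeled on the AND semantics described in the `is_muted` docstring but are illustrative, not the repository's AWS implementation:

```
# Illustrative subclass; real provider mutelists pass richer finding context.
class SketchMutelist(Mutelist):
    def is_finding_muted(self, finding, account: str) -> bool:
        return self.is_muted(
            account,
            finding["check_id"],
            finding["region"],
            finding["resource_id"],
            finding["tags"],
        )

mutelist = SketchMutelist(
    mutelist_content={
        "Accounts": {
            "*": {
                "Checks": {
                    "ec2_instance_detailed_monitoring_enabled": {
                        "Regions": ["*"],
                        "Resources": ["i-123456789"],
                    }
                }
            }
        }
    }
)
# AND semantics: account, check, region and resource must all match.
assert mutelist.is_finding_muted(
    {
        "check_id": "ec2_instance_detailed_monitoring_enabled",
        "region": "eu-west-1",
        "resource_id": "i-123456789",
        "tags": "",
    },
    "123456789012",
)
```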
401
prowler/lib/outputs/asff/asff.py
Normal file
@@ -0,0 +1,401 @@
|
||||
from json import dump
|
||||
from os import SEEK_SET
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, validator
|
||||
|
||||
from prowler.config.config import prowler_version, timestamp_utc
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.outputs.finding import Finding
|
||||
from prowler.lib.outputs.output import Output
|
||||
from prowler.lib.utils.utils import hash_sha512
|
||||
|
||||
|
||||
class ASFF(Output):
|
||||
"""
|
||||
ASFF class represents a transformation of findings into AWS Security Finding Format (ASFF).
|
||||
|
||||
This class provides methods to transform a list of findings into the ASFF format required by AWS Security Hub. It includes operations such as generating unique identifiers, formatting timestamps, handling compliance frameworks, and ensuring the status values match the allowed values in ASFF.
|
||||
|
||||
Attributes:
|
||||
- _data: A list to store the transformed findings.
|
||||
- _file_descriptor: A file descriptor to write to file.
|
||||
|
||||
Methods:
|
||||
- transform(findings: list[Finding]) -> None: Transforms a list of findings into ASFF format.
|
||||
- batch_write_data_to_file() -> None: Writes the findings data to a file in JSON ASFF format.
|
||||
- generate_status(status: str, muted: bool = False) -> str: Generates the ASFF status based on the provided status and muted flag.
|
||||
|
||||
References:
|
||||
- AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
|
||||
- AWS Security Finding Format Syntax: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html
|
||||
"""

    def transform(self, findings: list[Finding]) -> None:
        """
        Transforms a list of findings into AWS Security Finding Format (ASFF).

        This method iterates over the findings provided and transforms each one into the ASFF format required by AWS Security Hub. For each finding it generates a unique identifier, formats timestamps, handles compliance frameworks, and maps the status to the values allowed in ASFF.

        Parameters:
        - findings (list[Finding]): A list of Finding objects representing the findings to be transformed.

        Returns:
        - None

        Notes:
        - Findings with a status of "MANUAL" are skipped since that status is not valid in Security Hub.
        - A unique identifier is generated for each finding based on specific attributes.
        - Timestamps are formatted in the required ASFF format.
        - Compliance frameworks and associated standards are handled for each finding.
        - The finding status is mapped to the values allowed in ASFF.

        References:
        - AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
        - AWS Security Finding Format Syntax: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html
        """
        try:
            for finding in findings:
                # MANUAL status is not valid in SecurityHub
                # https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
                if finding.status == "MANUAL":
                    continue
                timestamp = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")

                associated_standards, compliance_summary = ASFF.format_compliance(
                    finding.compliance
                )

                # Ensure finding_status matches the allowed values in ASFF
                finding_status = ASFF.generate_status(finding.status, finding.muted)
                self._data.append(
                    AWSSecurityFindingFormat(
                        # The following line cannot be changed because it is the format we use to generate unique findings for AWS Security Hub
                        # If changed, some findings could be lost because the unique identifier will be different
                        Id=f"prowler-{finding.check_id}-{finding.account_uid}-{finding.region}-{hash_sha512(finding.resource_uid)}",
                        ProductArn=f"arn:{finding.partition}:securityhub:{finding.region}::product/prowler/prowler",
                        ProductFields=ProductFields(
                            ProwlerResourceName=finding.resource_uid,
                        ),
                        GeneratorId="prowler-" + finding.check_id,
                        AwsAccountId=finding.account_uid,
                        Types=(
                            finding.check_type.split(",")
                            if finding.check_type
                            else ["Software and Configuration Checks"]
                        ),
                        FirstObservedAt=timestamp,
                        UpdatedAt=timestamp,
                        CreatedAt=timestamp,
                        Severity=Severity(Label=finding.severity.value),
                        Title=finding.check_title,
                        Description=finding.description,
                        Resources=[
                            Resource(
                                Id=finding.resource_uid,
                                Type=finding.resource_type,
                                Partition=finding.partition,
                                Region=finding.region,
                                Tags=finding.resource_tags,
                            )
                        ],
                        Compliance=Compliance(
                            Status=finding_status,
                            AssociatedStandards=associated_standards,
                            RelatedRequirements=compliance_summary,
                        ),
                        Remediation=Remediation(
                            Recommendation=Recommendation(
                                Text=finding.remediation_recommendation_text,
                                Url=finding.remediation_recommendation_url,
                            )
                        ),
                    )
                )

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
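
    # Illustrative sketch (not part of the source): with the Id format above, a
    # transformed finding gets an identifier such as
    #   prowler-<check_id>-<account_uid>-<region>-<sha512 hex of the resource UID>
    # so re-running Prowler against the same resource updates the same Security
    # Hub finding instead of creating a new one.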

    def batch_write_data_to_file(self) -> None:
        """
        Writes the findings data to a file in JSON ASFF format.

        This method iterates over the findings stored in the '_data' attribute and writes them to the file descriptor '_file_descriptor' in JSON format. It writes the JSON opening '[', dumps each finding with an indent of 4 spaces, writes the closing ']' to complete the JSON array, and finally closes the file descriptor.

        Returns:
            None
        """
        try:
            if (
                getattr(self, "_file_descriptor", None)
                and not self._file_descriptor.closed
                and self._data
            ):
                # Write JSON opening/header [
                self._file_descriptor.write("[")

                # Write findings
                for finding in self._data:
                    dump(
                        finding.dict(exclude_none=True),
                        self._file_descriptor,
                        indent=4,
                    )
                    self._file_descriptor.write(",")

                # Write footer/closing ]
                if self._file_descriptor.tell() > 0:
                    if self._file_descriptor.tell() != 1:
                        self._file_descriptor.seek(
                            self._file_descriptor.tell() - 1, SEEK_SET
                        )
                    self._file_descriptor.truncate()
                    self._file_descriptor.write("]")

                # Close file descriptor
                self._file_descriptor.close()
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
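
    # Illustrative sketch of the file produced above, assuming two findings: the
    # seek()/truncate() calls remove the trailing comma written after the last
    # finding before the closing bracket is appended, yielding valid JSON:
    #   [
    #       { ... first ASFF finding ... },
    #       { ... second ASFF finding ... }
    #   ]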

    @staticmethod
    def generate_status(status: str, muted: bool = False) -> str:
        """
        Generates the ASFF status based on the provided status and muted flag.

        Parameters:
        - status (str): The status of the finding.
        - muted (bool): Flag indicating if the finding is muted.

        Returns:
        - str: The ASFF status corresponding to the provided status and muted flag.

        References:
        - AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
        """
        json_asff_status = ""
        if muted:
            # Per AWS Security Hub, "MUTED" is not a valid status
            # https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
            json_asff_status = "WARNING"
        else:
            if status == "PASS":
                json_asff_status = "PASSED"
            elif status == "FAIL":
                json_asff_status = "FAILED"
            else:
                # MANUAL is set to NOT_AVAILABLE
                json_asff_status = "NOT_AVAILABLE"

        return json_asff_status
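
    # Illustrative examples of the mapping above:
    #   ASFF.generate_status("PASS")              -> "PASSED"
    #   ASFF.generate_status("FAIL")              -> "FAILED"
    #   ASFF.generate_status("MANUAL")            -> "NOT_AVAILABLE"
    #   ASFF.generate_status("FAIL", muted=True)  -> "WARNING"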

    @staticmethod
    def format_compliance(compliance: dict) -> tuple[list[dict], list[str]]:
        """
        Transforms a dictionary of compliance data into a tuple of associated standards and compliance summaries.

        Parameters:
        - compliance (dict): A dictionary of compliance data where keys are standards and values are lists of compliance details.

        Returns:
        - tuple[list[dict], list[str]]: A tuple containing a list of associated standards (each as a dictionary with 'StandardsId') and a list of compliance summaries.

        Notes:
        - The method limits the number of associated standards to 20.
        - Each compliance summary is a concatenation of the standard key and its associated compliance details.
        - If a concatenated summary exceeds 64 characters, it is truncated to 63 characters.

        Example:
            format_compliance({"standard1": ["detail1", "detail2"], "standard2": ["detail3"]}) -> ([{"StandardsId": "standard1"}, {"StandardsId": "standard2"}], ["standard1 detail1 detail2", "standard2 detail3"])
        """
        compliance_summary = []
        associated_standards = []
        for key, value in compliance.items():
            if (
                len(associated_standards) < 20
            ):  # AssociatedStandards should NOT have more than 20 items
                associated_standards.append({"StandardsId": key})
                item = f"{key} {' '.join(value)}"
                if len(item) > 64:
                    item = item[0:63]
                compliance_summary.append(item)
        return associated_standards, compliance_summary
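
    # Illustrative example (hypothetical framework key):
    #   ASFF.format_compliance({"cis_2.0_aws": ["2.1.1", "2.1.2"]})
    #   -> ([{"StandardsId": "cis_2.0_aws"}], ["cis_2.0_aws 2.1.1 2.1.2"])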


class ProductFields(BaseModel):
    """
    Class representing the Product Fields of a finding in the AWS Security Finding Format.

    Attributes:
    - ProviderName (str): The name of the provider, default value is "Prowler".
    - ProviderVersion (str): The version of the provider, fetched from prowler_version in config.py.
    - ProwlerResourceName (str): The name of the Prowler resource.
    """

    ProviderName: str = "Prowler"
    ProviderVersion: str = prowler_version
    ProwlerResourceName: str


class Severity(BaseModel):
    """
    Class representing the severity of a finding in the AWS Security Finding Format.

    Attributes:
    - Label (str): A string representing the severity label of the finding.
    """

    Label: str

    @validator("Label", pre=True, always=True)
    def severity_uppercase(severity):
        return severity.upper()
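
    # Illustrative: Severity(Label="high") stores Label as "HIGH", since ASFF
    # expects uppercase severity labels.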


class Resource(BaseModel):
    """
    Class representing a resource in the AWS Security Finding Format.

    Attributes:
    - Type (str): The type of the resource.
    - Id (str): The unique identifier of the resource.
    - Partition (str): The partition where the resource resides.
    - Region (str): The region where the resource is located.
    - Tags (Optional[dict]): Optional dictionary of tags associated with the resource.
    """

    Type: str
    Id: str
    Partition: str
    Region: str
    Tags: Optional[dict]

    @validator("Tags", pre=True, always=True)
    def tags_cannot_be_empty_dict(tags):
        if not tags:
            return None
        return tags
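
    # Illustrative: Resource(..., Tags={}) stores Tags as None, so empty tag
    # maps are dropped from the output (findings are dumped with exclude_none).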


class Compliance(BaseModel):
    """
    Class representing the compliance details of a finding in the AWS Security Finding Format.

    Attributes:
    - Status (str): The compliance status of the finding.
    - RelatedRequirements (list[str]): A list of related compliance requirements for the finding.
    - AssociatedStandards (list[dict]): A list of standards associated with the finding, where each item is a dictionary containing the 'StandardsId'.
    """

    Status: str
    RelatedRequirements: list[str]
    AssociatedStandards: list[dict]

    @validator("Status", pre=True, always=True)
    def status(status):
        if status not in ["PASSED", "WARNING", "FAILED", "NOT_AVAILABLE"]:
            raise ValueError(
                "must be one of PASSED, WARNING, FAILED or NOT_AVAILABLE"
            )
        return status


class Recommendation(BaseModel):
    """
    Class representing a recommendation for remediation in the AWS Security Finding Format.

    Attributes:
    - Text (str): The text description of the recommendation.
    - Url (str): The URL link for additional information related to the recommendation.
    """

    Text: str = ""
    Url: str = ""

    @validator("Text", pre=True, always=True)
    def text_must_not_exceed_512_chars(text):
        text_validated = text
        if len(text) > 512:
            text_validated = text[:509] + "..."
        return text_validated

    @validator("Url", pre=True, always=True)
    def set_default_url_if_empty(url):
        default_url = "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
        if url:
            default_url = url
        return default_url
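
    # Illustrative examples of the two validators above:
    #   Recommendation(Text="x" * 600).Text is 512 characters long and ends in "..."
    #   Recommendation(Url="").Url falls back to the Security Hub documentation URL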


class Remediation(BaseModel):
    """
    Class representing a remediation action in the AWS Security Finding Format.

    Attributes:
    - Recommendation (Recommendation): An instance of the Recommendation class providing details for remediation.
    """

    Recommendation: Recommendation


class AWSSecurityFindingFormat(BaseModel):
    """
    AWSSecurityFindingFormat generates a finding's output in JSON ASFF format: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html

    Attributes:
    - SchemaVersion (str): The version of the ASFF schema being used, default value is "2018-10-08".
    - Id (str): The unique identifier of the finding.
    - ProductArn (str): The ARN of the product generating the finding.
    - RecordState (str): The state of the finding record, default value is "ACTIVE".
    - ProductFields (ProductFields): An instance of the ProductFields class representing the product fields of the finding.
    - GeneratorId (str): The ID of the generator.
    - AwsAccountId (str): The AWS account ID associated with the finding.
    - Types (list[str]): A list of types associated with the finding, default value is None.
    - FirstObservedAt (str): The timestamp when the finding was first observed.
    - UpdatedAt (str): The timestamp when the finding was last updated.
    - CreatedAt (str): The timestamp when the finding was created.
    - Severity (Severity): An instance of the Severity class representing the severity of the finding.
    - Title (str): The title of the finding.
    - Description (str): The description of the finding, truncated to 1024 characters if longer.
    - Resources (list[Resource]): A list of resources associated with the finding, default value is None.
    - Compliance (Compliance): An instance of the Compliance class representing the compliance details of the finding.
    - Remediation (Remediation): An instance of the Remediation class providing details for remediation.
    """

    SchemaVersion: str = "2018-10-08"
    Id: str
    ProductArn: str
    RecordState: str = "ACTIVE"
    ProductFields: ProductFields
    GeneratorId: str
    AwsAccountId: str
    Types: list[str] = None
    FirstObservedAt: str
    UpdatedAt: str
    CreatedAt: str
    Severity: Severity
    Title: str
    Description: str
    Resources: list[Resource] = None
    Compliance: Compliance
    Remediation: Remediation

    @validator("Description", pre=True, always=True)
    def description_must_not_exceed_1024_chars(description):
        description_validated = description
        if len(description) > 1024:
            description_validated = description[:1021] + "..."
        return description_validated
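

# Illustrative sketch (field values are assumptions, not taken from the source):
# a minimal AWSSecurityFindingFormat must provide every required field, e.g.
#
#   AWSSecurityFindingFormat(
#       Id="prowler-<check_id>-123456789012-eu-west-1-<hash>",
#       ProductArn="arn:aws:securityhub:eu-west-1::product/prowler/prowler",
#       ProductFields=ProductFields(ProwlerResourceName="<resource_uid>"),
#       GeneratorId="prowler-<check_id>",
#       AwsAccountId="123456789012",
#       FirstObservedAt="2024-01-01T00:00:00Z",
#       UpdatedAt="2024-01-01T00:00:00Z",
#       CreatedAt="2024-01-01T00:00:00Z",
#       Severity=Severity(Label="high"),
#       Title="<check_title>",
#       Description="<description>",
#       Compliance=Compliance(
#           Status="FAILED", RelatedRequirements=[], AssociatedStandards=[]
#       ),
#       Remediation=Remediation(Recommendation=Recommendation()),
#   )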

@@ -2,7 +2,6 @@ from operator import attrgetter

from prowler.config.config import timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.common_models import FindingOutput
from prowler.lib.outputs.utils import unroll_list, unroll_tags
from prowler.lib.utils.utils import outputs_unix_timestamp


@@ -22,87 +21,6 @@ def get_provider_data_mapping(provider) -> dict:
    return data


def generate_provider_output(provider, finding, csv_data) -> FindingOutput:
    """
    generate_provider_output returns the provider's Finding output model
    """
    # TODO: we have to standardize this between the above mapping and the provider.get_output_mapping()
    try:
        if provider.type == "aws":
            # TODO: probably the Organization UID is without the account id
            csv_data["auth_method"] = f"profile: {csv_data['auth_method']}"
            csv_data["resource_name"] = finding.resource_id
            csv_data["resource_uid"] = finding.resource_arn
            csv_data["region"] = finding.region

        elif provider.type == "azure":
            # TODO: we should show the authentication method used, I think
            csv_data["auth_method"] = (
                f"{provider.identity.identity_type}: {provider.identity.identity_id}"
            )
            # Get the first tenant domain ID, just in case
            csv_data["account_organization_uid"] = csv_data["account_organization_uid"][
                0
            ]
            csv_data["account_uid"] = (
                csv_data["account_organization_uid"]
                if "Tenant:" in finding.subscription
                else provider.identity.subscriptions[finding.subscription]
            )
            csv_data["account_name"] = finding.subscription
            csv_data["resource_name"] = finding.resource_name
            csv_data["resource_uid"] = finding.resource_id
            csv_data["region"] = finding.location

        elif provider.type == "gcp":
            csv_data["auth_method"] = f"Principal: {csv_data['auth_method']}"
            csv_data["account_uid"] = provider.projects[finding.project_id].id
            csv_data["account_name"] = provider.projects[finding.project_id].name
            csv_data["account_tags"] = provider.projects[finding.project_id].labels
            csv_data["resource_name"] = finding.resource_name
            csv_data["resource_uid"] = finding.resource_id
            csv_data["region"] = finding.location

            if (
                provider.projects
                and finding.project_id in provider.projects
                and getattr(provider.projects[finding.project_id], "organization")
            ):
                csv_data["account_organization_uid"] = provider.projects[
                    finding.project_id
                ].organization.id
                # TODO: for now this is None since we don't retrieve that data
                csv_data["account_organization"] = provider.projects[
                    finding.project_id
                ].organization.display_name

        elif provider.type == "kubernetes":
            if provider.identity.context == "In-Cluster":
                csv_data["auth_method"] = "in-cluster"
            else:
                csv_data["auth_method"] = "kubeconfig"
            csv_data["resource_name"] = finding.resource_name
            csv_data["resource_uid"] = finding.resource_id
            csv_data["account_name"] = f"context: {provider.identity.context}"
            csv_data["region"] = f"namespace: {finding.namespace}"

        # Finding Unique ID
        # TODO: move this to a function
        # TODO: in Azure, GCP and K8s there are findings without resource_name
        csv_data["finding_uid"] = (
            f"prowler-{provider.type}-{finding.check_metadata.CheckID}-{csv_data['account_uid']}-{csv_data['region']}-{csv_data['resource_name']}"
        )

        finding_output = FindingOutput(**csv_data)

    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
    else:
        return finding_output
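

# Illustrative: for an AWS finding, the finding_uid built above looks like
#   prowler-aws-<check_id>-<account_uid>-<region>-<resource_name>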


# TODO: add test for outputs_unix_timestamp
def fill_common_finding_data(finding: dict, unix_timestamp: bool) -> dict:
    finding_data = {

@@ -1,77 +0,0 @@
from datetime import datetime
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel

from prowler.config.config import prowler_version


class Status(str, Enum):
    PASS = "PASS"
    FAIL = "FAIL"
    MANUAL = "MANUAL"


class Severity(str, Enum):
    critical = "critical"
    high = "high"
    medium = "medium"
    low = "low"
    informational = "informational"


class FindingOutput(BaseModel):
    """
    FindingOutput generates a finding's output. It can be written to CSV or mapped to another format.

    This is the base finding output model for every provider.
    """

    auth_method: str
    timestamp: Union[int, datetime]
    account_uid: str
    # Optional since it depends on permissions
    account_name: Optional[str]
    # Optional since it depends on permissions
    account_email: Optional[str]
    # Optional since it depends on permissions
    account_organization_uid: Optional[str]
    # Optional since it depends on permissions
    account_organization_name: Optional[str]
    # Optional since it depends on permissions
    account_tags: Optional[list[str]]
    finding_uid: str
    provider: str
    check_id: str
    check_title: str
    check_type: str
    status: Status
    status_extended: str
    muted: bool = False
    service_name: str
    subservice_name: str
    severity: Severity
    resource_type: str
    resource_uid: str
    resource_name: str
    resource_details: str
    resource_tags: str
    # Only present for AWS and Azure
    partition: Optional[str]
    region: str
    description: str
    risk: str
    related_url: str
    remediation_recommendation_text: str
    remediation_recommendation_url: str
    remediation_code_nativeiac: str
    remediation_code_terraform: str
    remediation_code_cli: str
    remediation_code_other: str
    compliance: dict
    categories: str
    depends_on: str
    related_to: str
    notes: str
    prowler_version: str = prowler_version

@@ -0,0 +1,97 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.aws_well_architected.models import (
    AWSWellArchitectedModel,
)
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding


class AWSWellArchitected(ComplianceOutput):
    """
    This class represents the AWS Well-Architected compliance output.

    Attributes:
    - _data (list): A list to store transformed data from findings.
    - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.

    Methods:
    - transform: Transforms findings into AWS Well-Architected compliance format.
    """

    def transform(
        self,
        findings: list[Finding],
        compliance: ComplianceBaseModel,
        compliance_name: str,
    ) -> None:
        """
        Transforms a list of findings into AWS Well-Architected compliance format.

        Parameters:
        - findings (list): A list of findings.
        - compliance (ComplianceBaseModel): A compliance model.
        - compliance_name (str): The name of the compliance model.

        Returns:
        - None
        """
        for finding in findings:
            # Get the compliance requirements for the finding
            finding_requirements = finding.compliance.get(compliance_name, [])
            for requirement in compliance.Requirements:
                if requirement.Id in finding_requirements:
                    for attribute in requirement.Attributes:
                        compliance_row = AWSWellArchitectedModel(
                            Provider=finding.provider,
                            Description=compliance.Description,
                            AccountId=finding.account_uid,
                            Region=finding.region,
                            AssessmentDate=str(finding.timestamp),
                            Requirements_Id=requirement.Id,
                            Requirements_Description=requirement.Description,
                            Requirements_Attributes_Name=attribute.Name,
                            Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
                            Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
                            Requirements_Attributes_Section=attribute.Section,
                            Requirements_Attributes_SubSection=attribute.SubSection,
                            Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
                            Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
                            Requirements_Attributes_Description=attribute.Description,
                            Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
                            Status=finding.status,
                            StatusExtended=finding.status_extended,
                            ResourceId=finding.resource_uid,
                            ResourceName=finding.resource_name,
                            CheckId=finding.check_id,
                            Muted=finding.muted,
                        )
                        self._data.append(compliance_row)
        # Add manual requirements to the compliance output
        for requirement in compliance.Requirements:
            if not requirement.Checks:
                for attribute in requirement.Attributes:
                    compliance_row = AWSWellArchitectedModel(
                        Provider=compliance.Provider.lower(),
                        Description=compliance.Description,
                        AccountId="",
                        Region="",
                        AssessmentDate=str(finding.timestamp),
                        Requirements_Id=requirement.Id,
                        Requirements_Description=requirement.Description,
                        Requirements_Attributes_Name=attribute.Name,
                        Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
                        Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
                        Requirements_Attributes_Section=attribute.Section,
                        Requirements_Attributes_SubSection=attribute.SubSection,
                        Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
                        Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
                        Requirements_Attributes_Description=attribute.Description,
                        Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
                        Status="MANUAL",
                        StatusExtended="Manual check",
                        ResourceId="manual_check",
                        ResourceName="Manual check",
                        CheckId="manual",
                        Muted=False,
                    )
                    self._data.append(compliance_row)
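
        # Illustrative: a finding whose compliance mapping contains an entry for
        # this framework, e.g. {"<compliance_name>": ["SEC01-BP01"]}, yields one
        # row per attribute of each matching requirement, while requirements
        # without checks are emitted once as MANUAL rows.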

@@ -0,0 +1,32 @@
from typing import Optional

from pydantic import BaseModel


class AWSWellArchitectedModel(BaseModel):
    """
    AWSWellArchitectedModel generates a finding's output in AWS Well-Architected Framework format.
    """

    Provider: str
    Description: str
    AccountId: str
    Region: str
    AssessmentDate: str
    Requirements_Id: str
    Requirements_Description: str
    Requirements_Attributes_Name: str
    Requirements_Attributes_WellArchitectedQuestionId: str
    Requirements_Attributes_WellArchitectedPracticeId: str
    Requirements_Attributes_Section: str
    Requirements_Attributes_SubSection: Optional[str]
    Requirements_Attributes_LevelOfRisk: str
    Requirements_Attributes_AssessmentMethod: str
    Requirements_Attributes_Description: str
    Requirements_Attributes_ImplementationGuidanceUrl: str
    Status: str
    StatusExtended: str
    ResourceId: str
    CheckId: str
    Muted: bool
    ResourceName: str

@@ -1,60 +0,0 @@
from csv import DictWriter

from prowler.config.config import timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance.models import Check_Output_CSV_AWS_Well_Architected
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp


def write_compliance_row_aws_well_architected_framework(
    file_descriptors, finding, compliance, output_options, provider
):
    try:
        compliance_output = compliance.Framework
        if compliance.Version != "":
            compliance_output += "_" + compliance.Version
        if compliance.Provider != "":
            compliance_output += "_" + compliance.Provider
        compliance_output = compliance_output.lower().replace("-", "_")
        csv_header = generate_csv_fields(Check_Output_CSV_AWS_Well_Architected)
        csv_writer = DictWriter(
            file_descriptors[compliance_output],
            fieldnames=csv_header,
            delimiter=";",
        )
        for requirement in compliance.Requirements:
            requirement_description = requirement.Description
            requirement_id = requirement.Id
            for attribute in requirement.Attributes:
                compliance_row = Check_Output_CSV_AWS_Well_Architected(
                    Provider=finding.check_metadata.Provider,
                    Description=compliance.Description,
                    AccountId=provider.identity.account,
                    Region=finding.region,
                    AssessmentDate=outputs_unix_timestamp(
                        output_options.unix_timestamp, timestamp
                    ),
                    Requirements_Id=requirement_id,
                    Requirements_Description=requirement_description,
                    Requirements_Attributes_Name=attribute.Name,
                    Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
                    Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
                    Requirements_Attributes_Section=attribute.Section,
                    Requirements_Attributes_SubSection=attribute.SubSection,
                    Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
                    Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
                    Requirements_Attributes_Description=attribute.Description,
                    Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
                    Status=finding.status,
                    StatusExtended=finding.status_extended,
                    ResourceId=finding.resource_id,
                    CheckId=finding.check_metadata.CheckID,
                    Muted=finding.muted,
                )

                csv_writer.writerow(compliance_row.__dict__)
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )

@@ -2,75 +2,6 @@ from colorama import Fore, Style
from tabulate import tabulate

from prowler.config.config import orange_color
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance.cis_aws import generate_compliance_row_cis_aws
from prowler.lib.outputs.compliance.cis_azure import generate_compliance_row_cis_azure
from prowler.lib.outputs.compliance.cis_gcp import generate_compliance_row_cis_gcp
from prowler.lib.outputs.compliance.cis_kubernetes import (
    generate_compliance_row_cis_kubernetes,
)
from prowler.lib.outputs.csv.csv import write_csv


def write_compliance_row_cis(
    file_descriptors,
    finding,
    compliance,
    output_options,
    provider,
    input_compliance_frameworks,
):
    try:
        compliance_output = (
            "cis_" + compliance.Version + "_" + compliance.Provider.lower()
        )

        # Only with the version of CIS that was selected
        if compliance_output in str(input_compliance_frameworks):
            for requirement in compliance.Requirements:
                for attribute in requirement.Attributes:
                    if compliance.Provider == "AWS":
                        (compliance_row, csv_header) = generate_compliance_row_cis_aws(
                            finding,
                            compliance,
                            requirement,
                            attribute,
                            output_options,
                            provider,
                        )
                    elif compliance.Provider == "Azure":
                        (compliance_row, csv_header) = (
                            generate_compliance_row_cis_azure(
                                finding,
                                compliance,
                                requirement,
                                attribute,
                                output_options,
                            )
                        )
                    elif compliance.Provider == "GCP":
                        (compliance_row, csv_header) = generate_compliance_row_cis_gcp(
                            finding, compliance, requirement, attribute, output_options
                        )
                    elif compliance.Provider == "Kubernetes":
                        (compliance_row, csv_header) = (
                            generate_compliance_row_cis_kubernetes(
                                finding,
                                compliance,
                                requirement,
                                attribute,
                                output_options,
                                provider,
                            )
                        )

                    write_csv(
                        file_descriptors[compliance_output], csv_header, compliance_row
                    )
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )


def get_cis_table(

prowler/lib/outputs/compliance/cis/cis_aws.py (Normal file, 97 lines)
@@ -0,0 +1,97 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import AWSCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding


class AWSCIS(ComplianceOutput):
    """
    This class represents the AWS CIS compliance output.

    Attributes:
    - _data (list): A list to store transformed data from findings.
    - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.

    Methods:
    - transform: Transforms findings into AWS CIS compliance format.
    """

    def transform(
        self,
        findings: list[Finding],
        compliance: ComplianceBaseModel,
        compliance_name: str,
    ) -> None:
        """
        Transforms a list of findings into AWS CIS compliance format.

        Parameters:
        - findings (list): A list of findings.
        - compliance (ComplianceBaseModel): A compliance model.
        - compliance_name (str): The name of the compliance model.

        Returns:
        - None
        """
        for finding in findings:
            # Get the compliance requirements for the finding
            finding_requirements = finding.compliance.get(compliance_name, [])
            for requirement in compliance.Requirements:
                if requirement.Id in finding_requirements:
                    for attribute in requirement.Attributes:
                        compliance_row = AWSCISModel(
                            Provider=finding.provider,
                            Description=compliance.Description,
                            AccountId=finding.account_uid,
                            Region=finding.region,
                            AssessmentDate=str(finding.timestamp),
                            Requirements_Id=requirement.Id,
                            Requirements_Description=requirement.Description,
                            Requirements_Attributes_Section=attribute.Section,
                            Requirements_Attributes_Profile=attribute.Profile,
                            Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
                            Requirements_Attributes_Description=attribute.Description,
                            Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
                            Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
                            Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
                            Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
                            Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
                            Requirements_Attributes_References=attribute.References,
                            Status=finding.status,
                            StatusExtended=finding.status_extended,
                            ResourceId=finding.resource_uid,
                            ResourceName=finding.resource_name,
                            CheckId=finding.check_id,
                            Muted=finding.muted,
                        )
                        self._data.append(compliance_row)
        # Add manual requirements to the compliance output
        for requirement in compliance.Requirements:
            if not requirement.Checks:
                for attribute in requirement.Attributes:
                    compliance_row = AWSCISModel(
                        Provider=compliance.Provider.lower(),
                        Description=compliance.Description,
                        AccountId="",
                        Region="",
                        AssessmentDate=str(finding.timestamp),
                        Requirements_Id=requirement.Id,
                        Requirements_Description=requirement.Description,
                        Requirements_Attributes_Section=attribute.Section,
                        Requirements_Attributes_Profile=attribute.Profile,
                        Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
                        Requirements_Attributes_Description=attribute.Description,
                        Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
                        Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
                        Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
                        Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
                        Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
                        Requirements_Attributes_References=attribute.References,
                        Status="MANUAL",
                        StatusExtended="Manual check",
                        ResourceId="manual_check",
                        ResourceName="Manual check",
                        CheckId="manual",
                        Muted=False,
                    )
                    self._data.append(compliance_row)

prowler/lib/outputs/compliance/cis/cis_azure.py (Normal file, 99 lines)
@@ -0,0 +1,99 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import AzureCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding


class AzureCIS(ComplianceOutput):
    """
    This class represents the Azure CIS compliance output.

    Attributes:
    - _data (list): A list to store transformed data from findings.
    - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.

    Methods:
    - transform: Transforms findings into Azure CIS compliance format.
    """

    def transform(
        self,
        findings: list[Finding],
        compliance: ComplianceBaseModel,
        compliance_name: str,
    ) -> None:
        """
        Transforms a list of findings into Azure CIS compliance format.

        Parameters:
        - findings (list): A list of findings.
        - compliance (ComplianceBaseModel): A compliance model.
        - compliance_name (str): The name of the compliance model.

        Returns:
        - None
        """
        for finding in findings:
            # Get the compliance requirements for the finding
            finding_requirements = finding.compliance.get(compliance_name, [])
            for requirement in compliance.Requirements:
                if requirement.Id in finding_requirements:
                    for attribute in requirement.Attributes:
                        compliance_row = AzureCISModel(
                            Provider=finding.provider,
                            Description=compliance.Description,
                            Subscription=finding.account_name,
                            Location=finding.region,
                            AssessmentDate=str(finding.timestamp),
                            Requirements_Id=requirement.Id,
                            Requirements_Description=requirement.Description,
                            Requirements_Attributes_Section=attribute.Section,
                            Requirements_Attributes_Profile=attribute.Profile,
                            Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
                            Requirements_Attributes_Description=attribute.Description,
                            Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
                            Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
                            Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
                            Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
                            Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
                            Requirements_Attributes_DefaultValue=attribute.DefaultValue,
                            Requirements_Attributes_References=attribute.References,
                            Status=finding.status,
                            StatusExtended=finding.status_extended,
                            ResourceId=finding.resource_uid,
                            ResourceName=finding.resource_name,
                            CheckId=finding.check_id,
                            Muted=finding.muted,
                        )
                        self._data.append(compliance_row)
        # Add manual requirements to the compliance output
        for requirement in compliance.Requirements:
            if not requirement.Checks:
                for attribute in requirement.Attributes:
                    compliance_row = AzureCISModel(
                        Provider=compliance.Provider.lower(),
                        Description=compliance.Description,
                        Subscription="",
                        Location="",
                        AssessmentDate=str(finding.timestamp),
                        Requirements_Id=requirement.Id,
                        Requirements_Description=requirement.Description,
                        Requirements_Attributes_Section=attribute.Section,
                        Requirements_Attributes_Profile=attribute.Profile,
                        Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
                        Requirements_Attributes_Description=attribute.Description,
                        Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
                        Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
                        Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
                        Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
                        Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
                        Requirements_Attributes_DefaultValue=attribute.DefaultValue,
                        Requirements_Attributes_References=attribute.References,
                        Status="MANUAL",
                        StatusExtended="Manual check",
                        ResourceId="manual_check",
                        ResourceName="Manual check",
                        CheckId="manual",
                        Muted=False,
                    )
                    self._data.append(compliance_row)

prowler/lib/outputs/compliance/cis/cis_gcp.py (Normal file, 97 lines)
@@ -0,0 +1,97 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import GCPCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding


class GCPCIS(ComplianceOutput):
    """
    This class represents the GCP CIS compliance output.

    Attributes:
    - _data (list): A list to store transformed data from findings.
    - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.

    Methods:
    - transform: Transforms findings into GCP CIS compliance format.
    """

    def transform(
        self,
        findings: list[Finding],
        compliance: ComplianceBaseModel,
        compliance_name: str,
    ) -> None:
        """
        Transforms a list of findings into GCP CIS compliance format.

        Parameters:
        - findings (list): A list of findings.
        - compliance (ComplianceBaseModel): A compliance model.
        - compliance_name (str): The name of the compliance model.

        Returns:
        - None
        """
        for finding in findings:
            # Get the compliance requirements for the finding
            finding_requirements = finding.compliance.get(compliance_name, [])
            for requirement in compliance.Requirements:
                if requirement.Id in finding_requirements:
                    for attribute in requirement.Attributes:
                        compliance_row = GCPCISModel(
                            Provider=finding.provider,
                            Description=compliance.Description,
                            ProjectId=finding.account_uid,
                            Location=finding.region,
                            AssessmentDate=str(finding.timestamp),
                            Requirements_Id=requirement.Id,
                            Requirements_Description=requirement.Description,
                            Requirements_Attributes_Section=attribute.Section,
                            Requirements_Attributes_Profile=attribute.Profile,
                            Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
                            Requirements_Attributes_Description=attribute.Description,
                            Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
                            Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
                            Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
                            Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
                            Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
                            Requirements_Attributes_References=attribute.References,
                            Status=finding.status,
                            StatusExtended=finding.status_extended,
                            ResourceId=finding.resource_uid,
                            ResourceName=finding.resource_name,
                            CheckId=finding.check_id,
                            Muted=finding.muted,
                        )
                        self._data.append(compliance_row)
        # Add manual requirements to the compliance output
        for requirement in compliance.Requirements:
            if not requirement.Checks:
                for attribute in requirement.Attributes:
                    compliance_row = GCPCISModel(
                        Provider=compliance.Provider.lower(),
                        Description=compliance.Description,
                        ProjectId="",
                        Location="",
                        AssessmentDate=str(finding.timestamp),
                        Requirements_Id=requirement.Id,
                        Requirements_Description=requirement.Description,
                        Requirements_Attributes_Section=attribute.Section,
                        Requirements_Attributes_Profile=attribute.Profile,
                        Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
                        Requirements_Attributes_Description=attribute.Description,
                        Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
                        Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
                        Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
                        Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
                        Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
                        Requirements_Attributes_References=attribute.References,
                        Status="MANUAL",
                        StatusExtended="Manual check",
                        ResourceId="manual_check",
                        ResourceName="Manual check",
                        CheckId="manual",
                        Muted=False,
                    )
                    self._data.append(compliance_row)

prowler/lib/outputs/compliance/cis/cis_kubernetes.py (Normal file, 101 lines)
@@ -0,0 +1,101 @@
from datetime import datetime

from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.cis.models import KubernetesCISModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding


class KubernetesCIS(ComplianceOutput):
    """
    This class represents the Kubernetes CIS compliance output.

    Attributes:
    - _data (list): A list to store transformed data from findings.
    - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.

    Methods:
    - transform: Transforms findings into Kubernetes CIS compliance format.
    """

    def transform(
        self,
        findings: list[Finding],
        compliance: ComplianceBaseModel,
        compliance_name: str,
    ) -> None:
        """
        Transforms a list of findings into Kubernetes CIS compliance format.

        Parameters:
        - findings (list): A list of findings.
        - compliance (ComplianceBaseModel): A compliance model.
        - compliance_name (str): The name of the compliance model.

        Returns:
        - None
        """
        for finding in findings:
            # Get the compliance requirements for the finding
            finding_requirements = finding.compliance.get(compliance_name, [])
            for requirement in compliance.Requirements:
                if requirement.Id in finding_requirements:
                    for attribute in requirement.Attributes:
                        compliance_row = KubernetesCISModel(
                            Provider=finding.provider,
                            Description=compliance.Description,
                            Context=finding.account_name,
                            Namespace=finding.region,
                            AssessmentDate=str(finding.timestamp),
                            Requirements_Id=requirement.Id,
                            Requirements_Description=requirement.Description,
                            Requirements_Attributes_Section=attribute.Section,
                            Requirements_Attributes_Profile=attribute.Profile,
                            Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
                            Requirements_Attributes_Description=attribute.Description,
                            Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
                            Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
                            Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
                            Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
                            Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
                            Requirements_Attributes_References=attribute.References,
                            Requirements_Attributes_DefaultValue=attribute.DefaultValue,
                            Status=finding.status,
                            StatusExtended=finding.status_extended,
                            ResourceId=finding.resource_uid,
                            ResourceName=finding.resource_name,
                            CheckId=finding.check_id,
                            Muted=finding.muted,
                        )
                        self._data.append(compliance_row)
        # Add manual requirements to the compliance output
        for requirement in compliance.Requirements:
            if not requirement.Checks:
                for attribute in requirement.Attributes:
                    compliance_row = KubernetesCISModel(
                        Provider=compliance.Provider.lower(),
                        Description=compliance.Description,
                        Context="",
                        Namespace="",
                        AssessmentDate=str(datetime.now()),
                        Requirements_Id=requirement.Id,
                        Requirements_Description=requirement.Description,
                        Requirements_Attributes_Section=attribute.Section,
                        Requirements_Attributes_Profile=attribute.Profile,
                        Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
                        Requirements_Attributes_Description=attribute.Description,
                        Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
                        Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
                        Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
                        Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
                        Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
                        Requirements_Attributes_References=attribute.References,
                        Requirements_Attributes_DefaultValue=attribute.DefaultValue,
                        Status="MANUAL",
                        StatusExtended="Manual check",
                        ResourceId="manual_check",
                        ResourceName="Manual check",
                        CheckId="manual",
                        Muted=False,
                    )
                    self._data.append(compliance_row)

prowler/lib/outputs/compliance/cis/models.py (Normal file, 162 lines)
@@ -0,0 +1,162 @@
from pydantic import BaseModel


class AWSCISModel(BaseModel):
    """
    AWSCISModel generates a finding's output in AWS CIS Compliance format.
    """

    Provider: str
    Description: str
    AccountId: str
    Region: str
    AssessmentDate: str
    Requirements_Id: str
    Requirements_Description: str
    Requirements_Attributes_Section: str
    Requirements_Attributes_Profile: str
    Requirements_Attributes_AssessmentStatus: str
    Requirements_Attributes_Description: str
    Requirements_Attributes_RationaleStatement: str
    Requirements_Attributes_ImpactStatement: str
    Requirements_Attributes_RemediationProcedure: str
    Requirements_Attributes_AuditProcedure: str
    Requirements_Attributes_AdditionalInformation: str
    Requirements_Attributes_References: str
    Status: str
    StatusExtended: str
    ResourceId: str
    ResourceName: str
    CheckId: str
    Muted: bool


class AzureCISModel(BaseModel):
    """
    AzureCISModel generates a finding's output in Azure CIS Compliance format.
    """

    Provider: str
    Description: str
    Subscription: str
    Location: str
    AssessmentDate: str
    Requirements_Id: str
    Requirements_Description: str
    Requirements_Attributes_Section: str
    Requirements_Attributes_Profile: str
    Requirements_Attributes_AssessmentStatus: str
    Requirements_Attributes_Description: str
    Requirements_Attributes_RationaleStatement: str
    Requirements_Attributes_ImpactStatement: str
    Requirements_Attributes_RemediationProcedure: str
    Requirements_Attributes_AuditProcedure: str
    Requirements_Attributes_AdditionalInformation: str
    Requirements_Attributes_DefaultValue: str
    Requirements_Attributes_References: str
    Status: str
    StatusExtended: str
    ResourceId: str
    ResourceName: str
    CheckId: str
    Muted: bool


class GCPCISModel(BaseModel):
    """
    GCPCISModel generates a finding's output in GCP CIS Compliance format.
    """

    Provider: str
    Description: str
    ProjectId: str
    Location: str
    AssessmentDate: str
    Requirements_Id: str
    Requirements_Description: str
    Requirements_Attributes_Section: str
    Requirements_Attributes_Profile: str
    Requirements_Attributes_AssessmentStatus: str
    Requirements_Attributes_Description: str
    Requirements_Attributes_RationaleStatement: str
    Requirements_Attributes_ImpactStatement: str
    Requirements_Attributes_RemediationProcedure: str
    Requirements_Attributes_AuditProcedure: str
    Requirements_Attributes_AdditionalInformation: str
    Requirements_Attributes_References: str
    Status: str
    StatusExtended: str
    ResourceId: str
    ResourceName: str
    CheckId: str
    Muted: bool


class KubernetesCISModel(BaseModel):
    """
    KubernetesCISModel generates a finding's output in Kubernetes CIS Compliance format.
    """

    Provider: str
    Description: str
    Context: str
    Namespace: str
    AssessmentDate: str
    Requirements_Id: str
    Requirements_Description: str
    Requirements_Attributes_Section: str
    Requirements_Attributes_Profile: str
    Requirements_Attributes_AssessmentStatus: str
    Requirements_Attributes_Description: str
    Requirements_Attributes_RationaleStatement: str
    Requirements_Attributes_ImpactStatement: str
    Requirements_Attributes_RemediationProcedure: str
    Requirements_Attributes_AuditProcedure: str
    Requirements_Attributes_AdditionalInformation: str
    Requirements_Attributes_References: str
    Requirements_Attributes_DefaultValue: str
    Status: str
    StatusExtended: str
    ResourceId: str
    ResourceName: str
    CheckId: str
    Muted: bool


# TODO: Create a parent class for the common fields of CIS and have the provider-specific classes inherit from it.
# It is not done yet because the current order of the fields in the output file must be preserved.

# class AWS(CIS):
#     """
#     AWS CIS Compliance format.
#     """

#     AccountId: str
#     Region: str


# class Azure(CIS):
#     """
#     Azure CIS Compliance format.
#     """

#     Subscription: str
#     Location: str


# class GCP(CIS):
#     """
#     GCP CIS Compliance format.
#     """

#     ProjectId: str
#     Location: str


# class Kubernetes(CIS):
#     """
#     Kubernetes CIS Compliance format.
#     """

#     Context: str
#     Namespace: str

@@ -1,36 +0,0 @@
from prowler.config.config import timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_AWS_CIS
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp


def generate_compliance_row_cis_aws(
    finding, compliance, requirement, attribute, output_options, provider
):
    compliance_row = Check_Output_CSV_AWS_CIS(
        Provider=finding.check_metadata.Provider,
        Description=compliance.Description,
        AccountId=provider.identity.account,
        Region=finding.region,
        AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp),
        Requirements_Id=requirement.Id,
        Requirements_Description=requirement.Description,
        Requirements_Attributes_Section=attribute.Section,
        Requirements_Attributes_Profile=attribute.Profile,
        Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
        Requirements_Attributes_Description=attribute.Description,
        Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
        Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
        Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
        Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
        Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
        Requirements_Attributes_References=attribute.References,
        Status=finding.status,
        StatusExtended=finding.status_extended,
        ResourceId=finding.resource_id,
        CheckId=finding.check_metadata.CheckID,
        Muted=finding.muted,
    )
    csv_header = generate_csv_fields(Check_Output_CSV_AWS_CIS)

    return compliance_row, csv_header

@@ -1,37 +0,0 @@
from prowler.config.config import timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_AZURE_CIS
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp


def generate_compliance_row_cis_azure(
    finding, compliance, requirement, attribute, output_options
):
    compliance_row = Check_Output_CSV_AZURE_CIS(
        Provider=finding.check_metadata.Provider,
        Description=compliance.Description,
        Subscription=finding.subscription,
        AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp),
        Requirements_Id=requirement.Id,
        Requirements_Description=requirement.Description,
        Requirements_Attributes_Section=attribute.Section,
        Requirements_Attributes_Profile=attribute.Profile,
        Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
        Requirements_Attributes_Description=attribute.Description,
        Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
        Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
        Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
        Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
        Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
        Requirements_Attributes_DefaultValue=attribute.DefaultValue,
        Requirements_Attributes_References=attribute.References,
        Status=finding.status,
        StatusExtended=finding.status_extended,
        ResourceId=finding.resource_id,
        ResourceName=finding.resource_name,
        CheckId=finding.check_metadata.CheckID,
        Muted=finding.muted,
    )
    csv_header = generate_csv_fields(Check_Output_CSV_AZURE_CIS)

    return compliance_row, csv_header

@@ -1,37 +0,0 @@
from prowler.config.config import timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_GCP_CIS
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp


def generate_compliance_row_cis_gcp(
    finding, compliance, requirement, attribute, output_options
):
    compliance_row = Check_Output_CSV_GCP_CIS(
        Provider=finding.check_metadata.Provider,
        Description=compliance.Description,
        ProjectId=finding.project_id,
        Location=finding.location.lower(),
        AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp),
        Requirements_Id=requirement.Id,
        Requirements_Description=requirement.Description,
        Requirements_Attributes_Section=attribute.Section,
        Requirements_Attributes_Profile=attribute.Profile,
        Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
        Requirements_Attributes_Description=attribute.Description,
        Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
        Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
        Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
        Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
        Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
        Requirements_Attributes_References=attribute.References,
        Status=finding.status,
        StatusExtended=finding.status_extended,
        ResourceId=finding.resource_id,
        ResourceName=finding.resource_name,
        CheckId=finding.check_metadata.CheckID,
        Muted=finding.muted,
    )
    csv_header = generate_csv_fields(Check_Output_CSV_GCP_CIS)

    return compliance_row, csv_header
@@ -1,37 +0,0 @@
from prowler.config.config import timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_KUBERNETES_CIS
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp


def generate_compliance_row_cis_kubernetes(
    finding, compliance, requirement, attribute, output_options, provider
):
    compliance_row = Check_Output_CSV_KUBERNETES_CIS(
        Provider=finding.check_metadata.Provider,
        Description=compliance.Description,
        Context=provider.identity.context,
        Namespace=finding.namespace,
        AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp),
        Requirements_Id=requirement.Id,
        Requirements_Description=requirement.Description,
        Requirements_Attributes_Section=attribute.Section,
        Requirements_Attributes_Profile=attribute.Profile,
        Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus,
        Requirements_Attributes_Description=attribute.Description,
        Requirements_Attributes_RationaleStatement=attribute.RationaleStatement,
        Requirements_Attributes_ImpactStatement=attribute.ImpactStatement,
        Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure,
        Requirements_Attributes_AuditProcedure=attribute.AuditProcedure,
        Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation,
        Requirements_Attributes_References=attribute.References,
        Requirements_Attributes_DefaultValue=attribute.DefaultValue,
        Status=finding.status,
        StatusExtended=finding.status_extended,
        ResourceId=finding.resource_id,
        CheckId=finding.check_metadata.CheckID,
        Muted=finding.muted,
    )
    csv_header = generate_csv_fields(Check_Output_CSV_KUBERNETES_CIS)

    return compliance_row, csv_header
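The Azure, GCP, and Kubernetes generators above share the same shape; as a reading aid (a summary, not prowler code), the identity columns are the only pieces that differ between them:

# Reading aid: the per-provider CIS generators above are identical except
# for these identity columns taken from the finding or provider.
PROVIDER_IDENTITY_COLUMNS = {
    "azure": ["Subscription"],
    "gcp": ["ProjectId", "Location"],
    "kubernetes": ["Context", "Namespace"],
}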
@@ -2,144 +2,16 @@ import sys
from prowler.lib.check.models import Check_Report
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance.aws_well_architected_framework import (
    write_compliance_row_aws_well_architected_framework,
)
from prowler.lib.outputs.compliance.cis import get_cis_table, write_compliance_row_cis
from prowler.lib.outputs.compliance.ens_rd2022_aws import (
    get_ens_rd2022_aws_table,
    write_compliance_row_ens_rd2022_aws,
)
from prowler.lib.outputs.compliance.generic import (
from prowler.lib.outputs.compliance.cis.cis import get_cis_table
from prowler.lib.outputs.compliance.ens.ens import get_ens_table
from prowler.lib.outputs.compliance.generic.generic_table import (
    get_generic_compliance_table,
    write_compliance_row_generic,
)
from prowler.lib.outputs.compliance.iso27001_2013_aws import (
    write_compliance_row_iso27001_2013_aws,
)
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack import (
    get_mitre_attack_table,
    write_compliance_row_mitre_attack,
)
def add_manual_controls(
    output_options, provider, file_descriptors, input_compliance_frameworks
):
    try:
        # Check if MANUAL control was already added to output
        if "manual_check" in output_options.bulk_checks_metadata:
            manual_finding = Check_Report(
                output_options.bulk_checks_metadata["manual_check"].json()
            )
            manual_finding.status = "MANUAL"
            manual_finding.status_extended = "Manual check"
            manual_finding.resource_id = "manual_check"
            manual_finding.resource_name = "Manual check"
            manual_finding.region = ""
            manual_finding.location = ""
            manual_finding.project_id = ""
            manual_finding.subscription = ""
            manual_finding.namespace = ""
            fill_compliance(
                output_options,
                manual_finding,
                provider,
                file_descriptors,
                input_compliance_frameworks,
            )
            del output_options.bulk_checks_metadata["manual_check"]
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
def get_check_compliance_frameworks_in_input(
    check_id, bulk_checks_metadata, input_compliance_frameworks
):
    """get_check_compliance_frameworks_in_input returns the list of Compliance objects for the given check whose framework is present in the input compliance frameworks to execute"""
    check_compliances = []
    if bulk_checks_metadata and bulk_checks_metadata[check_id]:
        for compliance in bulk_checks_metadata[check_id].Compliance:
            compliance_name = ""
            if compliance.Version:
                compliance_name = (
                    compliance.Framework.lower()
                    + "_"
                    + compliance.Version.lower()
                    + "_"
                    + compliance.Provider.lower()
                )
            else:
                compliance_name = (
                    compliance.Framework.lower() + "_" + compliance.Provider.lower()
                )
            if compliance_name.replace("-", "_") in input_compliance_frameworks:
                check_compliances.append(compliance)
    return check_compliances
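Concretely, the key built above lower-cases and underscore-joins the framework metadata, and the dash normalization makes hyphenated framework names comparable too. A worked example (the values are illustrative):

# Worked example of the name construction above:
framework, version, provider = "CIS", "1.5", "AWS"
compliance_name = framework.lower() + "_" + version.lower() + "_" + provider.lower()
print(compliance_name)                       # cis_1.5_aws
print("mitre-attack_aws".replace("-", "_"))  # mitre_attack_aws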
def fill_compliance(
    output_options, finding, provider, file_descriptors, input_compliance_frameworks
):
    try:
        # We have to retrieve all the check's compliance requirements and keep the ones matching the input frameworks
        check_compliances = get_check_compliance_frameworks_in_input(
            finding.check_metadata.CheckID,
            output_options.bulk_checks_metadata,
            input_compliance_frameworks,
        )

        for compliance in check_compliances:
            if compliance.Framework == "ENS" and compliance.Version == "RD2022":
                write_compliance_row_ens_rd2022_aws(
                    file_descriptors, finding, compliance, output_options, provider
                )

            elif compliance.Framework == "CIS":
                write_compliance_row_cis(
                    file_descriptors,
                    finding,
                    compliance,
                    output_options,
                    provider,
                    input_compliance_frameworks,
                )

            elif (
                "AWS-Well-Architected-Framework" in compliance.Framework
                and compliance.Provider == "AWS"
            ):
                write_compliance_row_aws_well_architected_framework(
                    file_descriptors, finding, compliance, output_options, provider
                )

            elif (
                compliance.Framework == "ISO27001"
                and compliance.Version == "2013"
                and compliance.Provider == "AWS"
            ):
                write_compliance_row_iso27001_2013_aws(
                    file_descriptors, finding, compliance, output_options, provider
                )

            elif compliance.Framework == "MITRE-ATTACK" and compliance.Version == "":
                write_compliance_row_mitre_attack(
                    file_descriptors, finding, compliance, provider
                )

            else:
                write_compliance_row_generic(
                    file_descriptors, finding, compliance, output_options, provider
                )

    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
def display_compliance_table(
    findings: list,
    bulk_checks_metadata: dict,
@@ -147,10 +19,24 @@ def display_compliance_table(
    output_filename: str,
    output_directory: str,
    compliance_overview: bool,
):
) -> None:
    """
    display_compliance_table generates the compliance table for the given compliance framework.

    Args:
        findings (list): The list of findings
        bulk_checks_metadata (dict): The bulk checks metadata
        compliance_framework (str): The compliance framework to generate the table for
        output_filename (str): The output filename
        output_directory (str): The output directory
        compliance_overview (bool): Whether to display the compliance overview

    Returns:
        None
    """
    try:
        if "ens_rd2022_aws" == compliance_framework:
            get_ens_rd2022_aws_table(
        if "ens_" in compliance_framework:
            get_ens_table(
                findings,
                bulk_checks_metadata,
                compliance_framework,
@@ -192,7 +78,10 @@ def display_compliance_table(
            sys.exit(1)
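The dispatch change above widens the ENS branch from the exact key ens_rd2022_aws to any framework name containing ens_. For instance (the framework keys below are illustrative):

for framework in ("ens_rd2022_aws", "ens_rd2022_azure", "cis_1.5_aws"):
    print(framework, "->", "ENS table" if "ens_" in framework else "other")
# ens_rd2022_aws -> ENS table
# ens_rd2022_azure -> ENS table
# cis_1.5_aws -> other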
|
||||
def get_check_compliance(finding, provider_type, output_options) -> dict:
|
||||
# TODO: this should be in the Check class
|
||||
def get_check_compliance(
|
||||
finding: Check_Report, provider_type: str, bulk_checks_metadata: dict
|
||||
) -> dict:
|
||||
"""get_check_compliance returns a map with the compliance framework as key and the requirements where the finding's check is present.
|
||||
|
||||
Example:
|
||||
@@ -201,12 +90,20 @@ def get_check_compliance(finding, provider_type, output_options) -> dict:
|
||||
"CIS-1.4": ["2.1.3"],
|
||||
"CIS-1.5": ["2.1.3"],
|
||||
}
|
||||
|
||||
Args:
|
||||
finding (Any): The Check_Report finding
|
||||
provider_type (str): The provider type
|
||||
bulk_checks_metadata (dict): The bulk checks metadata
|
||||
|
||||
Returns:
|
||||
dict: The compliance framework as key and the requirements where the finding's check is present.
|
||||
"""
|
||||
try:
|
||||
check_compliance = {}
|
||||
# We have to retrieve all the check's compliance requirements
|
||||
if finding.check_metadata.CheckID in output_options.bulk_checks_metadata:
|
||||
for compliance in output_options.bulk_checks_metadata[
|
||||
if finding.check_metadata.CheckID in bulk_checks_metadata:
|
||||
for compliance in bulk_checks_metadata[
|
||||
finding.check_metadata.CheckID
|
||||
].Compliance:
|
||||
compliance_fw = compliance.Framework
|
||||
@@ -221,7 +118,7 @@ def get_check_compliance(finding, provider_type, output_options) -> dict:
|
||||
check_compliance[compliance_fw].append(requirement.Id)
|
||||
return check_compliance
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit(1)
|
||||
return {}
|
||||
|
||||
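The hunk elides how check_compliance is populated in the middle of the function; below is a hypothetical, self-contained reconstruction of that grouping pattern, consistent with the docstring example and the surviving append line (all names here are illustrative, not prowler's):

from dataclasses import dataclass, field


@dataclass
class Requirement:
    Id: str


@dataclass
class Compliance:
    Framework: str
    Version: str
    Requirements: list = field(default_factory=list)


def group_requirements(compliances: list) -> dict:
    # Build {"<Framework>-<Version>": [requirement ids...]}, matching the
    # docstring example above (e.g. {"CIS-1.4": ["2.1.3"]}).
    check_compliance = {}
    for compliance in compliances:
        key = (
            f"{compliance.Framework}-{compliance.Version}"
            if compliance.Version
            else compliance.Framework
        )
        check_compliance.setdefault(key, [])
        for requirement in compliance.Requirements:
            check_compliance[key].append(requirement.Id)
    return check_compliance


print(group_requirements(
    [Compliance("CIS", "1.4", [Requirement("2.1.3")]),
     Compliance("CIS", "1.5", [Requirement("2.1.3")])]
))  # {'CIS-1.4': ['2.1.3'], 'CIS-1.5': ['2.1.3']}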
prowler/lib/outputs/compliance/compliance_output.py (new file, 81 lines)
@@ -0,0 +1,81 @@
from csv import DictWriter
from pathlib import Path
from typing import List

from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output


class ComplianceOutput(Output):
    """
    This class represents an abstract base class for defining different types of outputs for findings.

    Attributes:
        _data (list): A list to store transformed data from findings.
        _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.

    Methods:
        __init__: Initializes the Output class with findings and optionally creates a file descriptor.
        data: Property to access the transformed data.
        file_descriptor: Property to access the file descriptor.
        transform: Abstract method to transform findings into a specific format.
        batch_write_data_to_file: Abstract method to write data to a file in batches.
        create_file_descriptor: Method to create a file descriptor for writing data to a file.
    """

    def __init__(
        self,
        findings: List[Finding],
        compliance: ComplianceBaseModel,
        create_file_descriptor: bool = False,
        file_path: str = None,
        file_extension: str = "",
    ) -> None:
        self._data = []

        if not file_extension and file_path:
            self._file_extension = "".join(Path(file_path).suffixes)
        if file_extension:
            self._file_extension = file_extension

        if findings:
            # Get the compliance name of the model
            compliance_name = (
                compliance.Framework + "-" + compliance.Version
                if compliance.Version
                else compliance.Framework
            )
            self.transform(findings, compliance, compliance_name)
            if create_file_descriptor:
                self.create_file_descriptor(file_path)

    def batch_write_data_to_file(self) -> None:
        """
        Writes the findings data to a CSV file in the specific compliance format.

        Returns:
            - None
        """
        try:
            if (
                getattr(self, "_file_descriptor", None)
                and not self._file_descriptor.closed
                and self._data
            ):
                csv_writer = DictWriter(
                    self._file_descriptor,
                    fieldnames=[field.upper() for field in self._data[0].dict().keys()],
                    delimiter=";",
                )
                csv_writer.writeheader()
                for finding in self._data:
                    csv_writer.writerow(
                        {k.upper(): v for k, v in finding.dict().items()}
                    )
                self._file_descriptor.close()
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
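To see batch_write_data_to_file's header handling concretely, here is a small self-contained sketch that mirrors the writer logic above (Row is an illustrative stand-in for a compliance model, not part of this diff):

from csv import DictWriter
from io import StringIO

from pydantic import BaseModel


class Row(BaseModel):  # illustrative stand-in for e.g. AWSENSModel
    Provider: str
    Status: str


# Headers and row keys are upper-cased, values are ';'-delimited,
# just like in batch_write_data_to_file above.
data = [Row(Provider="aws", Status="PASS")]
buffer = StringIO()
writer = DictWriter(
    buffer,
    fieldnames=[field.upper() for field in data[0].dict().keys()],
    delimiter=";",
)
writer.writeheader()
for row in data:
    writer.writerow({k.upper(): v for k, v in row.dict().items()})
print(buffer.getvalue())
# PROVIDER;STATUS
# aws;PASS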
@@ -1,58 +1,10 @@
from csv import DictWriter

from colorama import Fore, Style
from tabulate import tabulate

from prowler.config.config import orange_color, timestamp
from prowler.lib.outputs.compliance.models import Check_Output_CSV_ENS_RD2022
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp
from prowler.config.config import orange_color


def write_compliance_row_ens_rd2022_aws(
    file_descriptors, finding, compliance, output_options, provider
):
    compliance_output = "ens_rd2022_aws"
    csv_header = generate_csv_fields(Check_Output_CSV_ENS_RD2022)
    csv_writer = DictWriter(
        file_descriptors[compliance_output],
        fieldnames=csv_header,
        delimiter=";",
    )
    for requirement in compliance.Requirements:
        requirement_description = requirement.Description
        requirement_id = requirement.Id
        for attribute in requirement.Attributes:
            compliance_row = Check_Output_CSV_ENS_RD2022(
                Provider=finding.check_metadata.Provider,
                Description=compliance.Description,
                AccountId=provider.identity.account,
                Region=finding.region,
                AssessmentDate=outputs_unix_timestamp(
                    output_options.unix_timestamp, timestamp
                ),
                Requirements_Id=requirement_id,
                Requirements_Description=requirement_description,
                Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl,
                Requirements_Attributes_Marco=attribute.Marco,
                Requirements_Attributes_Categoria=attribute.Categoria,
                Requirements_Attributes_DescripcionControl=attribute.DescripcionControl,
                Requirements_Attributes_Nivel=attribute.Nivel,
                Requirements_Attributes_Tipo=attribute.Tipo,
                Requirements_Attributes_Dimensiones=",".join(attribute.Dimensiones),
                Requirements_Attributes_ModoEjecucion=attribute.ModoEjecucion,
                Requirements_Attributes_Dependencias=",".join(attribute.Dependencias),
                Status=finding.status,
                StatusExtended=finding.status_extended,
                ResourceId=finding.resource_id,
                CheckId=finding.check_metadata.CheckID,
                Muted=finding.muted,
            )

            csv_writer.writerow(compliance_row.__dict__)


def get_ens_rd2022_aws_table(
def get_ens_table(
    findings: list,
    bulk_checks_metadata: dict,
    compliance_framework: str,
@@ -78,11 +30,7 @@ def get_ens_rd2022_aws_table(
    check = bulk_checks_metadata[finding.check_metadata.CheckID]
    check_compliances = check.Compliance
    for compliance in check_compliances:
        if (
            compliance.Framework == "ENS"
            and compliance.Provider == "AWS"
            and compliance.Version == "RD2022"
        ):
        if compliance.Framework == "ENS" and compliance.Provider == "AWS":
            for requirement in compliance.Requirements:
                for attribute in requirement.Attributes:
                    marco_categoria = f"{attribute.Marco}/{attribute.Categoria}"
prowler/lib/outputs/compliance/ens/ens_aws.py (new file, 103 lines)
@@ -0,0 +1,103 @@
from prowler.lib.check.compliance_models import ComplianceBaseModel
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.compliance.ens.models import AWSENSModel
from prowler.lib.outputs.finding import Finding


class AWSENS(ComplianceOutput):
    """
    This class represents the AWS ENS compliance output.

    Attributes:
        - _data (list): A list to store transformed data from findings.
        - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.

    Methods:
        - transform: Transforms findings into AWS ENS compliance format.
    """

    def transform(
        self,
        findings: list[Finding],
        compliance: ComplianceBaseModel,
        compliance_name: str,
    ) -> None:
        """
        Transforms a list of findings into AWS ENS compliance format.

        Parameters:
            - findings (list): A list of findings.
            - compliance (ComplianceBaseModel): A compliance model.
            - compliance_name (str): The name of the compliance model.

        Returns:
            - None
        """
        for finding in findings:
            # Get the compliance requirements for the finding
            finding_requirements = finding.compliance.get(compliance_name, [])
            for requirement in compliance.Requirements:
                if requirement.Id in finding_requirements:
                    for attribute in requirement.Attributes:
                        compliance_row = AWSENSModel(
                            Provider=finding.provider,
                            Description=compliance.Description,
                            AccountId=finding.account_uid,
                            Region=finding.region,
                            AssessmentDate=str(finding.timestamp),
                            Requirements_Id=requirement.Id,
                            Requirements_Description=requirement.Description,
                            Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl,
                            Requirements_Attributes_Marco=attribute.Marco,
                            Requirements_Attributes_Categoria=attribute.Categoria,
                            Requirements_Attributes_DescripcionControl=attribute.DescripcionControl,
                            Requirements_Attributes_Nivel=attribute.Nivel,
                            Requirements_Attributes_Tipo=attribute.Tipo,
                            Requirements_Attributes_Dimensiones=",".join(
                                attribute.Dimensiones
                            ),
                            Requirements_Attributes_ModoEjecucion=attribute.ModoEjecucion,
                            Requirements_Attributes_Dependencias=",".join(
                                attribute.Dependencias
                            ),
                            Status=finding.status,
                            StatusExtended=finding.status_extended,
                            ResourceId=finding.resource_uid,
                            ResourceName=finding.resource_name,
                            CheckId=finding.check_id,
                            Muted=finding.muted,
                        )
                        self._data.append(compliance_row)
        # Add manual requirements to the compliance output
        for requirement in compliance.Requirements:
            if not requirement.Checks:
                for attribute in requirement.Attributes:
                    compliance_row = AWSENSModel(
                        Provider=compliance.Provider.lower(),
                        Description=compliance.Description,
                        AccountId="",
                        Region="",
                        # Note: reuses the timestamp of the last finding iterated above
                        AssessmentDate=str(finding.timestamp),
                        Requirements_Id=requirement.Id,
                        Requirements_Description=requirement.Description,
                        Requirements_Attributes_IdGrupoControl=attribute.IdGrupoControl,
                        Requirements_Attributes_Marco=attribute.Marco,
                        Requirements_Attributes_Categoria=attribute.Categoria,
                        Requirements_Attributes_DescripcionControl=attribute.DescripcionControl,
                        Requirements_Attributes_Nivel=attribute.Nivel,
                        Requirements_Attributes_Tipo=attribute.Tipo,
                        Requirements_Attributes_Dimensiones=",".join(
                            attribute.Dimensiones
                        ),
                        Requirements_Attributes_ModoEjecucion=attribute.ModoEjecucion,
                        Requirements_Attributes_Dependencias=",".join(
                            attribute.Dependencias
                        ),
                        Status="MANUAL",
                        StatusExtended="Manual check",
                        ResourceId="manual_check",
                        ResourceName="Manual check",
                        CheckId="manual",
                        Muted=False,
                    )
                    self._data.append(compliance_row)
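A minimal sketch of the matching step inside transform, with illustrative data (the requirement IDs below are made up, not taken from the ENS framework file):

# Each finding carries a map of compliance names to the requirement IDs it
# satisfies; transform emits one AWSENSModel row per matching attribute.
finding_compliance = {"ENS-RD2022": ["op.exp.8.r1"]}
requirement_ids = ["op.exp.8.r1", "op.exp.9.r2"]

compliance_name = "ENS-RD2022"
finding_requirements = finding_compliance.get(compliance_name, [])
matched = [rid for rid in requirement_ids if rid in finding_requirements]
print(matched)  # ['op.exp.8.r1'] -> rows are generated only for this one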
prowler/lib/outputs/compliance/ens/models.py (new file, 30 lines)
@@ -0,0 +1,30 @@
from pydantic import BaseModel


class AWSENSModel(BaseModel):
    """
    AWSENSModel generates a finding's output in CSV ENS format for AWS.
    """

    Provider: str
    Description: str
    AccountId: str
    Region: str
    AssessmentDate: str
    Requirements_Id: str
    Requirements_Description: str
    Requirements_Attributes_IdGrupoControl: str
    Requirements_Attributes_Marco: str
    Requirements_Attributes_Categoria: str
    Requirements_Attributes_DescripcionControl: str
    Requirements_Attributes_Nivel: str
    Requirements_Attributes_Tipo: str
    Requirements_Attributes_Dimensiones: str
    Requirements_Attributes_ModoEjecucion: str
    Requirements_Attributes_Dependencias: str
    Status: str
    StatusExtended: str
    ResourceId: str
    CheckId: str
    Muted: bool
    ResourceName: str
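Since pydantic preserves declaration order, the model's field order drives the CSV column order, which is why ResourceName (declared last here) lands as the last column of the output. A tiny demonstration (Mini is an illustrative stand-in):

from pydantic import BaseModel


class Mini(BaseModel):  # illustrative stand-in for AWSENSModel
    Status: str
    Muted: bool
    ResourceName: str


# The same upper-casing as batch_write_data_to_file applies to the header:
print([f.upper() for f in Mini.__fields__.keys()])
# ['STATUS', 'MUTED', 'RESOURCENAME']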