Compare commits
460 Commits
.backportrc.json (new file, 14 lines)

```json
{
  "repoOwner": "prowler-cloud",
  "repoName": "prowler",
  "targetPRLabels": [
    "backport"
  ],
  "sourcePRLabels": [
    "was-backported"
  ],
  "copySourcePRLabels": false,
  "copySourcePRReviewers": true,
  "prTitle": "{{sourcePullRequest.title}}",
  "commitConflicts": true
}
```
.github/CODEOWNERS (6 lines changed)

```diff
@@ -1 +1,5 @@
-* @prowler-cloud/prowler-oss @prowler-cloud/prowler-dev
+* @prowler-cloud/sdk @prowler-cloud/detection-and-remediation
+
+# To protect a repository fully against unauthorized changes, you also need to define an owner for the CODEOWNERS file itself.
+# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-and-branch-protection
+/.github/ @prowler-cloud/sdk
```
.github/ISSUE_TEMPLATE/bug_report.yml (1 line changed)

```diff
@@ -1,6 +1,5 @@
 name: 🐞 Bug Report
 description: Create a report to help us improve
 title: "[Bug]: "
 labels: ["bug", "status/needs-triage"]
-
 body:
```
.github/ISSUE_TEMPLATE/feature-request.yml (3 lines changed)

```diff
@@ -1,8 +1,7 @@
-name: 💡 Feature Request
+name: 💡 Feature Request
 description: Suggest an idea for this project
 labels: ["feature-request", "status/needs-triage"]
-
 
 body:
   - type: textarea
     id: Problem
```
.github/dependabot.yml (8 lines changed)

```diff
@@ -8,7 +8,7 @@ updates:
   - package-ecosystem: "pip"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: "daily"
     open-pull-requests-limit: 10
     target-branch: master
     labels:
@@ -17,14 +17,14 @@ updates:
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: "daily"
     open-pull-requests-limit: 10
     target-branch: master
 
   - package-ecosystem: "pip"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: "daily"
     open-pull-requests-limit: 10
     target-branch: v3
     labels:
@@ -34,7 +34,7 @@ updates:
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
-      interval: "weekly"
+      interval: "daily"
     open-pull-requests-limit: 10
     target-branch: v3
     labels:
```
.github/labeler.yml (50 lines changed)

```diff
@@ -29,3 +29,53 @@ github_actions:
 cli:
   - changed-files:
       - any-glob-to-any-file: "cli/**"
+
+mutelist:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/mutelist/**"
+      - any-glob-to-any-file: "prowler/providers/aws/lib/mutelist/**"
+      - any-glob-to-any-file: "prowler/providers/azure/lib/mutelist/**"
+      - any-glob-to-any-file: "prowler/providers/gcp/lib/mutelist/**"
+      - any-glob-to-any-file: "prowler/providers/kubernetes/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/providers/aws/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/providers/azure/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
+      - any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"
+
+integration/s3:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/providers/aws/lib/s3/**"
+      - any-glob-to-any-file: "tests/providers/aws/lib/s3/**"
+
+integration/slack:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/slack/**"
+      - any-glob-to-any-file: "tests/lib/outputs/slack/**"
+
+integration/security-hub:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/providers/aws/lib/security_hub/**"
+      - any-glob-to-any-file: "tests/providers/aws/lib/security_hub/**"
+      - any-glob-to-any-file: "prowler/lib/outputs/asff/**"
+      - any-glob-to-any-file: "tests/lib/outputs/asff/**"
+
+output/html:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/html/**"
+      - any-glob-to-any-file: "tests/lib/outputs/html/**"
+
+output/asff:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/asff/**"
+      - any-glob-to-any-file: "tests/lib/outputs/asff/**"
+
+output/ocsf:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/ocsf/**"
+      - any-glob-to-any-file: "tests/lib/outputs/ocsf/**"
+
+output/csv:
+  - changed-files:
+      - any-glob-to-any-file: "prowler/lib/outputs/csv/**"
+      - any-glob-to-any-file: "tests/lib/outputs/csv/**"
```
.github/pull_request_template.md (8 lines changed)

```diff
@@ -2,11 +2,19 @@
 
+Please include relevant motivation and context for this PR.
+
+If fixes an issue please add it with `Fix #XXXX`
+
 ### Description
 
 Please include a summary of the change and which issue is fixed. List any dependencies that are required for this change.
 
 ### Checklist
 
+- Are there new checks included in this PR? Yes / No
+  - If so, do we need to update permissions for the provider? Please review this carefully.
 - [ ] Review if the code is being covered by tests.
 - [ ] Review if code is being documented following this specification https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings
+- [ ] Review if backport is needed.
 
 ### License
 
```
.github/workflows/backport.yml (new file, 42 lines)

```yaml
name: Automatic Backport

on:
  pull_request_target:
    branches: ['master']
    types: ['labeled', 'closed']

jobs:
  backport:
    name: Backport PR
    if: github.event.pull_request.merged == true && !(contains(github.event.pull_request.labels.*.name, 'backport'))
    runs-on: ubuntu-latest
    permissions:
      id-token: write
      pull-requests: write
      contents: write
    steps:
      # Workaround not to fail the workflow if the PR does not need a backport
      # https://github.com/sorenlouv/backport-github-action/issues/127#issuecomment-2258561266
      - name: Check for backport labels
        id: check_labels
        run: |-
          labels='${{ toJSON(github.event.pull_request.labels.*.name) }}'
          echo "$labels"
          matched=$(echo "${labels}" | jq '. | map(select(startswith("backport-to-"))) | length')
          echo "matched=$matched"
          echo "matched=$matched" >> $GITHUB_OUTPUT

      - name: Backport Action
        if: fromJSON(steps.check_labels.outputs.matched) > 0
        uses: sorenlouv/backport-github-action@v9.5.1
        with:
          github_token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
          auto_backport_label_prefix: backport-to-

      - name: Info log
        if: ${{ success() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
        run: cat ~/.backport/backport.info.log

      - name: Debug log
        if: ${{ failure() && fromJSON(steps.check_labels.outputs.matched) > 0 }}
        run: cat ~/.backport/backport.debug.log
```
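The `Check for backport labels` step counts PR labels that begin with the `backport-to-` prefix before the action runs. A minimal Python sketch of the same filter, with a made-up label list standing in for the GitHub event payload:

```python
# Sketch of the jq filter used in the workflow step above:
# jq '. | map(select(startswith("backport-to-"))) | length'
labels = ["backport-to-v3", "provider/aws", "severity/low"]  # hypothetical PR labels

# Count the labels carrying the auto-backport prefix; the Backport Action step
# is gated on this count being greater than zero.
matched = len([label for label in labels if label.startswith("backport-to-")])
print(matched)  # 1
```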
```diff
@@ -16,9 +16,9 @@ jobs:
     name: Documentation Link
     runs-on: ubuntu-latest
     steps:
-      - name: Leave PR comment with the SaaS Documentation URI
+      - name: Leave PR comment with the Prowler Documentation URI
        uses: peter-evans/create-or-update-comment@v4
        with:
          issue-number: ${{ env.PR_NUMBER }}
          body: |
-            You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
+            You can check the documentation for this PR here -> [Prowler Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/)
```
.github/workflows/build-lint-push-containers.yml (21 lines changed)

```diff
@@ -43,7 +43,7 @@ jobs:
     runs-on: ubuntu-latest
     outputs:
       prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
-      prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
+      prowler_version: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION }}
     env:
       POETRY_VIRTUALENVS_CREATE: "false"
 
@@ -65,6 +65,8 @@ jobs:
         id: get-prowler-version
         run: |
           PROWLER_VERSION="$(poetry version -s 2>/dev/null)"
+          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
+          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
 
           # Store prowler version major just for the release
           PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
@@ -89,15 +91,6 @@ jobs:
           ;;
           esac
 
-      - name: Update Prowler version (release)
-        id: update-prowler-version
-        if: github.event_name == 'release'
-        run: |
-          PROWLER_VERSION="${{ github.event.release.tag_name }}"
-          poetry version "${PROWLER_VERSION}"
-          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
-          echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"
-
       - name: Login to DockerHub
         uses: docker/login-action@v3
         with:
@@ -118,7 +111,7 @@ jobs:
 
       - name: Build and push container image (latest)
         if: github.event_name == 'push'
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
         with:
           push: true
           tags: |
@@ -130,7 +123,7 @@ jobs:
 
       - name: Build and push container image (release)
         if: github.event_name == 'release'
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
         with:
           # Use local context to get changes
           # https://github.com/docker/build-push-action#path-context
@@ -160,7 +153,7 @@ jobs:
         run: |
           curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
           -H "Accept: application/vnd.github+json" \
-          -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
+          -H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
           -H "X-GitHub-Api-Version: 2022-11-28" \
           --data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
 
@@ -169,6 +162,6 @@ jobs:
         run: |
           curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
           -H "Accept: application/vnd.github+json" \
-          -H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
+          -H "Authorization: Bearer ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}" \
           -H "X-GitHub-Api-Version: 2022-11-28" \
           --data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'
```
.github/workflows/codeql.yml (4 lines changed)

```diff
@@ -13,10 +13,10 @@ name: "CodeQL"
 
 on:
   push:
-    branches: [ "master", "v3" ]
+    branches: [ "master", "v3", "v4.*" ]
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: [ "master", "v3" ]
+    branches: [ "master", "v3", "v4.*" ]
   schedule:
     - cron: '00 12 * * *'
```
.github/workflows/find-secrets.yml (2 lines changed)

```diff
@@ -11,7 +11,7 @@ jobs:
         with:
           fetch-depth: 0
       - name: TruffleHog OSS
-        uses: trufflesecurity/trufflehog@v3.77.0
+        uses: trufflesecurity/trufflehog@v3.82.6
         with:
           path: ./
           base: ${{ github.event.repository.default_branch }}
```
.github/workflows/labeler.yml (1 line changed)

```diff
@@ -5,6 +5,7 @@ on:
     branches:
       - "master"
       - "v3"
+      - "v4.*"
 
 jobs:
   labeler:
```
.github/workflows/pull-request.yml (7 lines changed)

```diff
@@ -5,10 +5,12 @@ on:
     branches:
       - "master"
       - "v3"
+      - "v4.*"
   pull_request:
     branches:
       - "master"
       - "v3"
+      - "v4.*"
 jobs:
   build:
     runs-on: ubuntu-latest
@@ -20,7 +22,7 @@ jobs:
       - uses: actions/checkout@v4
       - name: Test if changes are in not ignored paths
         id: are-non-ignored-files-changed
-        uses: tj-actions/changed-files@v44
+        uses: tj-actions/changed-files@v45
         with:
           files: ./**
           files_ignore: |
@@ -29,6 +31,7 @@ jobs:
             docs/**
             permissions/**
             mkdocs.yml
+            .backportrc.json
       - name: Install poetry
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
@@ -73,7 +76,7 @@ jobs:
       - name: Safety
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
-          poetry run safety check --ignore 67599 --ignore 70612
+          poetry run safety check --ignore 70612
       - name: Vulture
         if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
         run: |
```
.github/workflows/pypi-release.yml (31 lines changed)

```diff
@@ -8,8 +8,6 @@ env:
   RELEASE_TAG: ${{ github.event.release.tag_name }}
   PYTHON_VERSION: 3.11
   CACHE: "poetry"
-  # TODO: create a bot user for this kind of tasks, like prowler-bot
-  GIT_COMMITTER_EMAIL: "sergio@prowler.com"
 
 jobs:
   release-prowler-job:
@@ -40,7 +38,6 @@ jobs:
       - name: Install dependencies
         run: |
           pipx install poetry
-          pipx inject poetry poetry-bumpversion
 
       - name: Setup Python
         uses: actions/setup-python@v5
@@ -48,34 +45,6 @@ jobs:
           python-version: ${{ env.PYTHON_VERSION }}
           cache: ${{ env.CACHE }}
 
-      - name: Update Poetry and config version
-        run: |
-          poetry version ${{ env.RELEASE_TAG }}
-
-      - name: Import GPG key
-        uses: crazy-max/ghaction-import-gpg@v6
-        with:
-          gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
-          passphrase: ${{ secrets.GPG_PASSPHRASE }}
-          git_user_signingkey: true
-          git_commit_gpgsign: true
-
-      - name: Push updated version to the release tag
-        run: |
-          # Configure Git
-          git config user.name "github-actions"
-          git config user.email "${{ env.GIT_COMMITTER_EMAIL }}"
-
-          # Add the files with the version changed
-          git add prowler/config/config.py pyproject.toml
-          git commit -m "chore(release): ${{ env.RELEASE_TAG }}" --no-verify -S
-
-          # Replace the tag with the version updated
-          git tag -fa ${{ env.RELEASE_TAG }} -m "chore(release): ${{ env.RELEASE_TAG }}" --sign
-
-          # Push the tag
-          git push -f origin ${{ env.RELEASE_TAG }}
-
       - name: Build Prowler package
         run: |
           poetry build
```
```diff
@@ -50,13 +50,13 @@ jobs:
 
       # Create pull request
       - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v6
+        uses: peter-evans/create-pull-request@v7
         with:
-          token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
-          commit-message: "feat(regions_update): Update regions for AWS services."
+          token: ${{ secrets.PROWLER_BOT_ACCESS_TOKEN }}
+          commit-message: "feat(regions_update): Update regions for AWS services"
           branch: "aws-services-regions-updated-${{ github.sha }}"
-          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
-          title: "chore(regions_update): Changes in regions for AWS services."
+          labels: "status/waiting-for-revision, severity/low, provider/aws, backport-to-v3"
+          title: "chore(regions_update): Changes in regions for AWS services"
           body: |
             ### Description
```
```diff
@@ -97,7 +97,7 @@ repos:
       - id: safety
         name: safety
         description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
-        entry: bash -c 'safety check --ignore 67599 --ignore 70612'
+        entry: bash -c 'safety check --ignore 70612'
         language: system
 
       - id: vulture
```
Dockerfile (11 lines changed)

```diff
@@ -2,9 +2,9 @@ FROM python:3.12-alpine
 
 LABEL maintainer="https://github.com/prowler-cloud/prowler"
 
-# Update system dependencies
+# Update system dependencies and install essential tools
 #hadolint ignore=DL3018
-RUN apk --no-cache upgrade && apk --no-cache add curl
+RUN apk --no-cache upgrade && apk --no-cache add curl git
 
 # Create nonroot user
 RUN mkdir -p /home/prowler && \
@@ -13,18 +13,17 @@ RUN mkdir -p /home/prowler && \
     chown -R prowler:prowler /home/prowler
 USER prowler
 
-# Copy necessary files
 # Copy necessary files
 WORKDIR /home/prowler
 COPY prowler/ /home/prowler/prowler/
 COPY dashboard/ /home/prowler/dashboard/
 COPY pyproject.toml /home/prowler
 COPY README.md /home/prowler
 
-# Install dependencies
+# Install Python dependencies
 ENV HOME='/home/prowler'
 ENV PATH="$HOME/.local/bin:$PATH"
 #hadolint ignore=DL3013
-RUN pip install --no-cache-dir --upgrade pip && \
+RUN pip install --no-cache-dir --upgrade pip setuptools wheel && \
     pip install --no-cache-dir .
 
 # Remove deprecated dash dependencies
```
README.md (18 lines changed)

````diff
@@ -12,7 +12,7 @@
 <p align="center">
   <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
   <br>
-  <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
+  <a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-2oinmgmw6-cl7gOrljSEqo_aoripVPFA">Join our Prowler community!</a>
 </p>
 <hr>
 <p align="center">
@@ -37,6 +37,9 @@
   <a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
 </p>
 <hr>
+<p align="center">
+  <img align="center" src="/docs/img/prowler-cli-quick.gif" width="100%" height="100%">
+</p>
 
 # Description
 
@@ -60,9 +63,9 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
 
 | Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
 |---|---|---|---|---|
-| AWS | 359 | 66 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
-| GCP | 77 | 13 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
-| Azure | 127 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
+| AWS | 457 | 67 -> `prowler aws --list-services` | 30 -> `prowler aws --list-compliance` | 9 -> `prowler aws --list-categories` |
+| GCP | 77 | 13 -> `prowler gcp --list-services` | 2 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
+| Azure | 136 | 17 -> `prowler azure --list-services` | 3 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
 | Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |
 
 # 💻 Installation
@@ -74,7 +77,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
 pip install prowler
 prowler -v
 ```
-More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)
+>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)
 
 ## Containers
 
@@ -91,7 +94,7 @@ The container images are available here:
 - [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
 - [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)
 
-## From Github
+## From GitHub
 
 Python >= 3.9, < 3.13 is required with pip and poetry:
 
@@ -102,8 +105,7 @@ poetry shell
 poetry install
 python prowler.py -v
 ```
-???+ note
-    If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
+> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
 # 📐✏️ High level architecture
 
 You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.
````
```diff
@@ -1,6 +0,0 @@
-# CLI
-To show the banner, use:
-`python cli/cli.py banner`
-## Listing
-List services by provider.
-`python cli/cli.py <provider> list-services`
```
cli/cli.py (deleted file, 63 lines)

```python
import typer

from prowler.lib.banner import print_banner
from prowler.lib.check.check import (
    list_fixers,
    list_services,
    print_fixers,
    print_services,
)

app = typer.Typer()
aws = typer.Typer(name="aws")
azure = typer.Typer(name="azure")
gcp = typer.Typer(name="gcp")
kubernetes = typer.Typer(name="kubernetes")

app.add_typer(aws, name="aws")
app.add_typer(azure, name="azure")
app.add_typer(gcp, name="gcp")
app.add_typer(kubernetes, name="kubernetes")


def list_resources(provider: str, resource_type: str):
    if resource_type == "services":
        print_services(list_services(provider))
    elif resource_type == "fixers":
        print_fixers(list_fixers(provider))


def create_list_commands(provider_typer: typer.Typer):
    provider_name = provider_typer.info.name

    @provider_typer.command(
        "list-services",
        help=f"List the {provider_name} services that are supported by Prowler.",
    )
    def list_services_command():
        list_resources(provider_name, "services")

    @provider_typer.command(
        "list-fixers",
        help=f"List the {provider_name} fixers that are supported by Prowler.",
    )
    def list_fixers_command():
        list_resources(provider_name, "fixers")


create_list_commands(aws)
create_list_commands(azure)
create_list_commands(gcp)
create_list_commands(kubernetes)


@app.command("banner", help="Prints the banner of the tool.")
def banner(show: bool = True):
    if show:
        print_banner(show)
    else:
        print("Banner is not shown.")


if __name__ == "__main__":
    app()
```
contrib/k8s/helm/.helmignore (new file, 23 lines)

```
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/
```
contrib/k8s/helm/Chart.yaml (new file, 24 lines)

```yaml
apiVersion: v2
name: prowler
description: Prowler Security Tool Helm chart for Kubernetes

# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application

# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"
```
contrib/k8s/helm/README.md (new file, 78 lines)

````markdown
# prowler

Version: 0.1.1 · Type: application · AppVersion: 1.16.0

Prowler Security Tool Helm chart for Kubernetes

# Prowler Helm Chart Deployment

This guide provides step-by-step instructions for deploying the Prowler Helm chart.

## Prerequisites

Before you begin, ensure you have the following:

1. A running Kubernetes cluster.
2. Helm installed on your local machine. If you don't have Helm installed, you can follow the [Helm installation guide](https://helm.sh/docs/intro/install/).
3. Proper access to your Kubernetes cluster (e.g., `kubectl` is configured and working).

## Deployment Steps

### 1. Clone the Repository

Clone the repository containing the Helm chart to your local machine.

```sh
git clone git@github.com:prowler-cloud/prowler.git
cd prowler/contrib/k8s/helm
```

### 2. Deploy the helm chart

```
helm install prowler .
```

### 3. Verify the deployment

```
helm status prowler
kubectl get all -n prowler-ns
```

### 4. Clean Up
To uninstall the Helm release and clean up the resources, run:

```
helm uninstall prowler
kubectl delete namespace prowler-ns
```

## Values

| Key | Type | Default | Description |
|-----|------|---------|-------------|
| clusterRole.name | string | `"prowler-read-cluster"` | |
| clusterRoleBinding.name | string | `"prowler-read-cluster-binding"` | |
| configMap.name | string | `"prowler-hostpaths"` | |
| configMapData.etcCniNetd | string | `"/etc/cni/net.d"` | |
| configMapData.etcKubernetes | string | `"/etc/kubernetes"` | |
| configMapData.etcSystemd | string | `"/etc/systemd"` | |
| configMapData.libSystemd | string | `"/lib/systemd"` | |
| configMapData.optCniBin | string | `"/opt/cni/bin"` | |
| configMapData.usrBin | string | `"/usr/bin"` | |
| configMapData.varLibCni | string | `"/var/lib/cni"` | |
| configMapData.varLibEtcd | string | `"/var/lib/etcd"` | |
| configMapData.varLibKubeControllerManager | string | `"/var/lib/kube-controller-manager"` | |
| configMapData.varLibKubeScheduler | string | `"/var/lib/kube-scheduler"` | |
| configMapData.varLibKubelet | string | `"/var/lib/kubelet"` | |
| cronjob.hostPID | bool | `true` | |
| cronjob.name | string | `"prowler"` | |
| cronjob.schedule | string | `"0 0 * * *"` | |
| image.pullPolicy | string | `"Always"` | |
| image.repository | string | `"toniblyx/prowler"` | |
| image.tag | string | `"stable"` | |
| namespace.name | string | `"prowler"` | |
| serviceAccount.name | string | `"prowler"` | |

----------------------------------------------
Autogenerated from chart metadata using [helm-docs v1.11.3](https://github.com/norwoodj/helm-docs/releases/v1.11.3)
````
contrib/k8s/helm/templates/cluster-role.yaml (new file, 11 lines)

```yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {{ .Values.clusterRole.name }}
rules:
  - apiGroups: [""]
    resources: ["pods", "configmaps", "nodes", "namespaces"]
    verbs: ["get", "list", "watch"]
  - apiGroups: ["rbac.authorization.k8s.io"]
    resources: ["clusterrolebindings", "rolebindings", "clusterroles", "roles"]
    verbs: ["get", "list", "watch"]
```
contrib/k8s/helm/templates/cm.yaml (new file, 18 lines)

```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ .Values.configMap.name }}
  namespace: {{ .Values.namespace.name }}
data:
  varLibCni: "{{ .Values.configMap.data.varLibCni }}"
  varLibEtcd: "{{ .Values.configMap.data.varLibEtcd }}"
  varLibKubelet: "{{ .Values.configMap.data.varLibKubelet }}"
  varLibKubeScheduler: "{{ .Values.configMap.data.varLibKubeScheduler }}"
  varLibKubeControllerManager: "{{ .Values.configMap.data.varLibKubeControllerManager }}"
  etcSystemd: "{{ .Values.configMap.data.etcSystemd }}"
  libSystemd: "{{ .Values.configMap.data.libSystemd }}"
  etcKubernetes: "{{ .Values.configMap.data.etcKubernetes }}"
  usrBin: "{{ .Values.configMap.data.usrBin }}"
  etcCniNetd: "{{ .Values.configMap.data.etcCniNetd }}"
  optCniBin: "{{ .Values.configMap.data.optCniBin }}"
  srvKubernetes: "{{ .Values.configMap.data.srvKubernetes }}"
```
contrib/k8s/helm/templates/job.yaml (new file, 42 lines)

```yaml
apiVersion: batch/v1
kind: CronJob
metadata:
  name: {{ .Values.cronjob.name }}
  namespace: {{ .Values.namespace.name }}
spec:
  schedule: "{{ .Values.cronjob.schedule }}"
  jobTemplate:
    spec:
      template:
        metadata:
          labels:
            app: prowler
        spec:
          serviceAccountName: {{ .Values.serviceAccount.name }}
          containers:
            - name: prowler
              image: {{ .Values.image.repository }}:{{ .Values.image.tag }}
              command: ["prowler"]
              args: ["kubernetes", "-z", "-b"]
              imagePullPolicy: {{ .Values.image.pullPolicy }}
              volumeMounts:
              {{- range $key, $value := .Values.configMap.data }}
              {{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
              {{- else }}
                - name: {{ $key | lower }}
                  mountPath: {{ $value }}
                  readOnly: true
              {{- end }}
              {{- end }}
          hostPID: {{ .Values.cronjob.hostPID }}
          restartPolicy: Never
          volumes:
          {{- range $key, $value := .Values.configMap.data }}
          {{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
          {{- else }}
            - name: {{ $key | lower }}
              hostPath:
                path: {{ $value }}
          {{- end }}
          {{- end }}
```
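The template above builds one read-only hostPath mount per entry in `configMap.data`, skipping `srvKubernetes` when `clusterType` is `gke` (presumably because GKE nodes do not expose that path). A small Python sketch of that selection logic, using a trimmed-down copy of the values:

```python
# Sketch of the Helm template's mount selection, assuming a reduced copy of
# the configMap.data mapping from values.yaml.
CONFIG_MAP_DATA = {
    "varLibEtcd": "/var/lib/etcd",
    "etcKubernetes": "/etc/kubernetes",
    "srvKubernetes": "/srv/kubernetes",
}

def volume_mounts(cluster_type):
    mounts = []
    for key, path in CONFIG_MAP_DATA.items():
        # Mirror of: {{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
        if cluster_type == "gke" and key == "srvKubernetes":
            continue
        mounts.append({"name": key.lower(), "mountPath": path, "readOnly": True})
    return mounts

print(volume_mounts("gke"))   # two mounts, srvKubernetes skipped
print(volume_mounts(None))    # all three mounts
```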
contrib/k8s/helm/templates/namespace.yaml (new file, 4 lines)

```yaml
apiVersion: v1
kind: Namespace
metadata:
  name: {{ .Values.namespace.name }}
```
contrib/k8s/helm/templates/role-binding.yaml (new file, 12 lines)

```yaml
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: {{ .Values.clusterRoleBinding.name }}
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: {{ .Values.clusterRole.name }}
subjects:
  - kind: ServiceAccount
    name: {{ .Values.serviceAccount.name }}
    namespace: {{ .Values.namespace.name }}
```
contrib/k8s/helm/templates/sa.yaml (new file, 5 lines)

```yaml
apiVersion: v1
kind: ServiceAccount
metadata:
  name: {{ .Values.serviceAccount.name }}
  namespace: {{ .Values.namespace.name }}
```
contrib/k8s/helm/values.yaml (new file, 40 lines)

```yaml
namespace:
  name: prowler-ns

cronjob:
  name: prowler
  schedule: "0 0 * * *"
  hostPID: true

serviceAccount:
  name: prowler-sa

image:
  repository: toniblyx/prowler
  tag: stable
  pullPolicy: Always

clusterType:

configMap:
  name: prowler-config
  data:
    varLibCni: "/var/lib/cni"
    varLibEtcd: "/var/lib/etcd"
    varLibKubelet: "/var/lib/kubelet"
    varLibKubeScheduler: "/var/lib/kube-scheduler"
    varLibKubeControllerManager: "/var/lib/kube-controller-manager"
    etcSystemd: "/etc/systemd"
    libSystemd: "/lib/systemd"
    etcKubernetes: "/etc/kubernetes"
    usrBin: "/usr/bin"
    etcCniNetd: "/etc/cni/net.d"
    optCniBin: "/opt/cni/bin"
    srvKubernetes: "/srv/kubernetes"

clusterRole:
  name: prowler-read-cluster

clusterRoleBinding:
  name: prowler-read-cluster-binding
  roleName: prowler-read-cluster
```
```diff
@@ -21,7 +21,7 @@ print(
     f"{Fore.GREEN}Loading all CSV files from the folder {folder_path_overview} ...\n{Style.RESET_ALL}"
 )
 cli.show_server_banner = lambda *x: click.echo(
-    f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are a {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} customer and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
+    f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are using {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} with the S3 integration or that integration \nfrom {Fore.CYAN}{Style.BRIGHT}Prowler Open Source{Style.RESET_ALL} and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
 )
 
 # Initialize the app - incorporate css
@@ -2223,3 +2223,232 @@ def get_section_containers_ens(data, section_1, section_2, section_3, section_4)
         section_containers.append(section_container)
 
     return html.Div(section_containers, className="compliance-data-layout")
+
+
+# This function extracts and compares up to two numeric values, ensuring correct sorting for version-like strings.
+def extract_numeric_values(value):
+    numbers = re.findall(r"\d+", str(value))
+    if len(numbers) >= 2:
+        return int(numbers[0]), int(numbers[1])
+    elif len(numbers) == 1:
+        return int(numbers[0]), 0
+    return 0, 0
+
+
+def get_section_containers_kisa_ismsp(data, section_1, section_2):
+    data["STATUS"] = data["STATUS"].apply(map_status_to_icon)
+    data[section_1] = data[section_1].astype(str)
+    data[section_2] = data[section_2].astype(str)
+    data.sort_values(
+        by=section_1,
+        key=lambda x: x.map(extract_numeric_values),
+        ascending=True,
+        inplace=True,
+    )
+
+    findings_counts_section = (
+        data.groupby([section_2, "STATUS"]).size().unstack(fill_value=0)
+    )
+    findings_counts_name = (
+        data.groupby([section_1, "STATUS"]).size().unstack(fill_value=0)
+    )
+
+    section_containers = []
+
+    for name in data[section_1].unique():
+        success_name = (
+            findings_counts_name.loc[name, pass_emoji]
+            if pass_emoji in findings_counts_name.columns
+            else 0
+        )
+        failed_name = (
+            findings_counts_name.loc[name, fail_emoji]
+            if fail_emoji in findings_counts_name.columns
+            else 0
+        )
+
+        fig_name = go.Figure(
+            data=[
+                go.Bar(
+                    name="Failed",
+                    x=[failed_name],
+                    y=[""],
+                    orientation="h",
+                    marker=dict(color="#e77676"),
+                    width=[0.8],
+                ),
+                go.Bar(
+                    name="Success",
+                    x=[success_name],
+                    y=[""],
+                    orientation="h",
+                    marker=dict(color="#45cc6e"),
+                    width=[0.8],
+                ),
+            ]
+        )
+
+        fig_name.update_layout(
+            barmode="stack",
+            margin=dict(l=10, r=10, t=10, b=10),
+            paper_bgcolor="rgba(0,0,0,0)",
+            plot_bgcolor="rgba(0,0,0,0)",
+            showlegend=False,
+            width=350,
+            height=30,
+            xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
+            yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
+            annotations=[
+                dict(
+                    x=success_name + failed_name,
+                    y=0,
+                    xref="x",
+                    yref="y",
+                    text=str(success_name),
+                    showarrow=False,
+                    font=dict(color="#45cc6e", size=14),
+                    xanchor="left",
+                    yanchor="middle",
+                ),
+                dict(
+                    x=0,
+                    y=0,
+                    xref="x",
+                    yref="y",
+                    text=str(failed_name),
+                    showarrow=False,
+                    font=dict(color="#e77676", size=14),
+                    xanchor="right",
+                    yanchor="middle",
+                ),
+            ],
+        )
+
+        graph_name = dcc.Graph(
+            figure=fig_name, config={"staticPlot": True}, className="info-bar"
+        )
+
+        graph_div = html.Div(graph_name, className="graph-section")
+
+        direct_internal_items = []
+
+        for section in data[data[section_1] == name][section_2].unique():
+            specific_data = data[
+                (data[section_1] == name) & (data[section_2] == section)
+            ]
+            success_section = (
+                findings_counts_section.loc[section, pass_emoji]
+                if pass_emoji in findings_counts_section.columns
+                else 0
+            )
+            failed_section = (
+                findings_counts_section.loc[section, fail_emoji]
+                if fail_emoji in findings_counts_section.columns
+                else 0
+            )
+
+            data_table = dash_table.DataTable(
+                data=specific_data.to_dict("records"),
+                columns=[
+                    {"name": i, "id": i}
+                    for i in ["CHECKID", "STATUS", "REGION", "ACCOUNTID", "RESOURCEID"]
+                ],
+                style_table={"overflowX": "auto"},
+                style_as_list_view=True,
+                style_cell={"textAlign": "left", "padding": "5px"},
+            )
+
+            fig_section = go.Figure(
+                data=[
+                    go.Bar(
+                        name="Failed",
+                        x=[failed_section],
+                        y=[""],
+                        orientation="h",
+                        marker=dict(color="#e77676"),
+                    ),
+                    go.Bar(
+                        name="Success",
+                        x=[success_section],
+                        y=[""],
+                        orientation="h",
+                        marker=dict(color="#45cc6e"),
+                    ),
+                ]
+            )
+
+            fig_section.update_layout(
+                barmode="stack",
+                margin=dict(l=10, r=10, t=10, b=10),
+                paper_bgcolor="rgba(0,0,0,0)",
+                plot_bgcolor="rgba(0,0,0,0)",
+                showlegend=False,
+                width=350,
+                height=30,
+                xaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
+                yaxis=dict(showticklabels=False, showgrid=False, zeroline=False),
+                annotations=[
+                    dict(
+                        x=success_section + failed_section,
+                        y=0,
+                        xref="x",
+                        yref="y",
+                        text=str(success_section),
+                        showarrow=False,
+                        font=dict(color="#45cc6e", size=14),
+                        xanchor="left",
+                        yanchor="middle",
+                    ),
+                    dict(
+                        x=0,
+                        y=0,
+                        xref="x",
+                        yref="y",
+                        text=str(failed_section),
+                        showarrow=False,
+                        font=dict(color="#e77676", size=14),
+                        xanchor="right",
+                        yanchor="middle",
+                    ),
+                ],
+            )
+
+            graph_section = dcc.Graph(
+                figure=fig_section,
+                config={"staticPlot": True},
+                className="info-bar-child",
+            )
+
+            graph_div_section = html.Div(graph_section, className="graph-section-req")
+
+            internal_accordion_item = dbc.AccordionItem(
+                title=section,
+                children=[html.Div([data_table], className="inner-accordion-content")],
+            )
+
+            internal_section_container = html.Div(
+                [
+                    graph_div_section,
+                    dbc.Accordion(
+                        [internal_accordion_item], start_collapsed=True, flush=True
+                    ),
+                ],
+                className="accordion-inner--child",
+            )
+
+            direct_internal_items.append(internal_section_container)
+
+        accordion_item = dbc.AccordionItem(
+            title=f"{name}", children=direct_internal_items
+        )
+        section_container = html.Div(
+            [
+                graph_div,
+                dbc.Accordion([accordion_item], start_collapsed=True, flush=True),
+            ],
+            className="accordion-inner",
+        )
+
+        section_containers.append(section_container)
+
+    return html.Div(section_containers, className="compliance-data-layout")
```
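`extract_numeric_values` exists so that section IDs like `2.10` sort after `2.9` instead of lexicographically. A quick standalone demonstration using the same function:

```python
import re

# Same helper as above: pull up to two numeric components out of a
# version-like string so it can be sorted numerically.
def extract_numeric_values(value):
    numbers = re.findall(r"\d+", str(value))
    if len(numbers) >= 2:
        return int(numbers[0]), int(numbers[1])
    elif len(numbers) == 1:
        return int(numbers[0]), 0
    return 0, 0

sections = ["2.10", "1.2", "2.9", "1.1"]
print(sorted(sections))                              # ['1.1', '1.2', '2.10', '2.9'] (string order)
print(sorted(sections, key=extract_numeric_values))  # ['1.1', '1.2', '2.9', '2.10']
```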
dashboard/compliance/kisa_isms_p_2023_aws.py (new file, 25 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_kisa_ismsp

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            # "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_kisa_ismsp(
        aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```
dashboard/compliance/kisa_isms_p_2023_korean_aws.py (new file, 25 lines)

```python
import warnings

from dashboard.common_methods import get_section_containers_kisa_ismsp

warnings.filterwarnings("ignore")


def get_table(data):
    aux = data[
        [
            "REQUIREMENTS_ID",
            "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN",
            "REQUIREMENTS_ATTRIBUTES_SECTION",
            # "REQUIREMENTS_DESCRIPTION",
            "CHECKID",
            "STATUS",
            "REGION",
            "ACCOUNTID",
            "RESOURCEID",
        ]
    ].copy()

    return get_section_containers_kisa_ismsp(
        aux, "REQUIREMENTS_ATTRIBUTES_SUBDOMAIN", "REQUIREMENTS_ATTRIBUTES_SECTION"
    )
```
```diff
@@ -21,7 +21,7 @@ muted_manual_color = "#b33696"
 critical_color = "#951649"
 high_color = "#e11d48"
 medium_color = "#ee6f15"
-low_color = "#f9f5e6"
+low_color = "#fcf45d"
 informational_color = "#3274d9"
 
 # Folder output path
@@ -945,7 +945,7 @@ def filter_data(
     color_mapping_status = {
         "FAIL": fail_color,
         "PASS": pass_color,
-        "INFO": info_color,
+        "LOW": info_color,
         "MANUAL": manual_color,
         "WARNING": muted_fail_color,
         "MUTED (FAIL)": muted_fail_color,
@@ -1564,7 +1564,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                                 data.get(
                                     "FINDING_UID", ""
                                 )
-                            )
+                            ),
+                            style={
+                                "margin-left": "5px"
+                            },
                         ),
                     ],
                     style={"display": "flex"},
@@ -1644,28 +1647,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                                     "STATUS_EXTENDED",
                                     "",
                                 )
-                            )
+                            ),
+                            style={
+                                "margin-left": "5px"
+                            },
                         ),
                     ],
                     style={"display": "flex"},
                 ),
-                html.Div(
-                    [
-                        html.P(
-                            html.Strong(
-                                "Risk: ",
-                                style={
-                                    "margin-right": "5px"
-                                },
-                            )
-                        ),
-                        html.P(
-                            str(
-                                data.get(
-                                    "RISK",
-                                    "",
-                                )
-                            )
-                        ),
-                    ],
-                    style={"display": "flex"},
-                ),
@@ -1689,7 +1674,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                             )
                         ),
                         html.P(
-                            str(data.get("RISK", ""))
+                            str(data.get("RISK", "")),
+                            style={
+                                "margin-left": "5px"
+                            },
                         ),
                     ],
                     style={"display": "flex"},
@@ -1744,7 +1732,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                                     "REMEDIATION_RECOMMENDATION_TEXT",
                                     "",
                                 )
-                            )
+                            ),
+                            style={
+                                "margin-left": "5px"
+                            },
                         ),
                     ],
                     style={"display": "flex"},
@@ -1772,7 +1763,10 @@ def generate_table(data, index, color_mapping_severity, color_mapping_status):
                                     "",
                                 )
                             ),
-                            style={"color": "#3182ce"},
+                            style={
+                                "color": "#3182ce",
+                                "margin-left": "5px",
+                            },
                         ),
                     ],
                     style={"display": "flex"},
```
````diff
@@ -222,7 +222,7 @@ class ec2_securitygroup_with_many_ingress_egress_rules(Check):
         max_security_group_rules = ec2_client.audit_config.get(
             "max_security_group_rules", 50
         )
-        for security_group in ec2_client.security_groups:
+        for security_group_arn, security_group in ec2_client.security_groups.items():
 ```
 
 ```yaml title="config.yaml"
@@ -272,7 +272,7 @@ Each Prowler check has metadata associated which is stored at the same level of
         # Severity holds the check's severity, always in lowercase (critical, high, medium, low or informational)
         "Severity": "critical",
         # ResourceType only for AWS, holds the type from here
-        # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-template-resource-type-ref.html
+        # https://docs.aws.amazon.com/securityhub/latest/userguide/asff-resources.html
         "ResourceType": "Other",
         # Description holds the title of the check, for now is the same as CheckTitle
         "Description": "Ensure there are no EC2 AMIs set as Public.",
@@ -319,7 +319,7 @@ Each Prowler check has metadata associated which is stored at the same level of
 For the Remediation Code we use the following knowledge base to fill it:
 
 - Official documentation for the provider
-- https://docs.bridgecrew.io
+- https://docs.prowler.com/checks/checks-index
 - https://www.trendmicro.com/cloudoneconformity
 - https://github.com/cloudmatos/matos/tree/master/remediations
 
````
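The `ec2_securitygroup_with_many_ingress_egress_rules` fix above switches from iterating a list to iterating `.items()` so each security group's ARN is available alongside its data, while the rule threshold stays configurable through `audit_config`. A self-contained sketch of that pattern with hypothetical data:

```python
# Hypothetical audit configuration and inventory; the real check reads these
# from ec2_client.audit_config and ec2_client.security_groups.
audit_config = {"max_security_group_rules": 60}
security_groups = {
    "arn:aws:ec2:eu-west-1:123456789012:security-group/sg-1": {"rules": 75},
    "arn:aws:ec2:eu-west-1:123456789012:security-group/sg-2": {"rules": 10},
}

# Fall back to the default of 50 when the key is absent from the config.
max_rules = audit_config.get("max_security_group_rules", 50)

for sg_arn, sg in security_groups.items():
    status = "FAIL" if sg["rules"] > max_rules else "PASS"
    print(sg_arn, status)
```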
@@ -1,7 +1,11 @@
|
||||
# Debugging
|
||||
|
||||
Debugging in Prowler make things easier!
|
||||
If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution. To do that, if you are using VSCode you can run the code using the integrated debugger. Please, refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
|
||||
If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution.
|
||||
|
||||
## VSCode
|
||||
|
||||
In VSCode you can run the code using the integrated debugger. Please, refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
|
||||
The following file is an example of the [debugging configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations) file that you can add to [Virtual Studio Code](https://code.visualstudio.com/).
|
||||
|
||||
This file should be inside the *.vscode* folder and its name has to be *launch.json*:
|
||||
@@ -11,31 +15,62 @@ This file should inside the *.vscode* folder and its name has to be *launch.json
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python: Current File",
|
||||
"type": "python",
|
||||
"name": "Debug AWS Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"aws",
|
||||
"-f",
|
||||
"eu-west-1",
|
||||
"--service",
|
||||
"cloudwatch",
|
||||
"--log-level",
|
||||
"ERROR",
|
||||
"-p",
|
||||
"dev",
|
||||
"-c",
|
||||
"<check_name>"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": "Python: Debug Tests",
|
||||
"type": "python",
|
||||
"name": "Debug Azure Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "${file}",
|
||||
"purpose": [
|
||||
"debug-test"
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"azure",
|
||||
"--sp-env-auth",
|
||||
"--log-level",
|
||||
"ERROR",
|
||||
"-c",
|
||||
"<check_name>"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": "Debug GCP Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"gcp",
|
||||
"--log-level",
|
||||
"ERROR",
|
||||
"-c",
|
||||
"<check_name>"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": "Debug K8s Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"kubernetes",
|
||||
"--log-level",
|
||||
"ERROR",
|
||||
"-c",
|
||||
"<check_name>"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": false
|
||||
|
||||
@@ -4,16 +4,18 @@ You can extend Prowler Open Source in many different ways, in most cases you wil
|
||||
|
||||
## Get the code and install all dependencies
|
||||
|
||||
First of all, you need a version of Python 3.9 or higher and also pip installed to be able to install all dependencies required. Once that is satisfied go a head and clone the repo:
|
||||
First of all, you need Python 3.9 or higher and `pip` installed in order to install all the required dependencies.
|
||||
|
||||
Then, to start working with the Prowler GitHub repository, you need to fork it so that you can propose changes for new features, bug fixes, etc. To fork the Prowler repo, please refer to [this guide](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo?tool=webui#forking-a-repository).
|
||||
|
||||
Once that is satisfied go ahead and clone your forked repo:
|
||||
|
||||
```
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
git clone https://github.com/<your-github-user>/prowler
|
||||
cd prowler
|
||||
```
|
||||
For isolation and avoid conflicts with other environments, we recommend usage of `poetry`:
|
||||
```
|
||||
pip install poetry
|
||||
```
|
||||
For isolation and to avoid conflicts with other environments, we recommend using `poetry`, a Python dependency management tool. You can install it by following the instructions [here](https://python-poetry.org/docs/#installation).
|
||||
|
||||
Then install all dependencies including the ones for developers:
|
||||
```
|
||||
poetry install --with dev
|
||||
@@ -44,7 +46,12 @@ Before we merge any of your pull requests we pass checks to the code, we use the
|
||||
|
||||
You can see all dependencies in file `pyproject.toml`.
|
||||
|
||||
Moreover, you would need to install [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
|
||||
Moreover, you will need to install the latest version of [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
|
||||
|
||||
Additionally, please ensure to follow the code documentation practices outlined in this guide: [Google Python Style Guide - Comments and Docstrings](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings).
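For instance, a function documented in that style might look like this (an illustrative sketch; the function and its arguments are hypothetical):

```python
def get_findings(check_id: str, region: str) -> list:
    """Return the findings produced by a check in a given region.

    Args:
        check_id: Identifier of the check, e.g. "ec2_ami_public".
        region: Region to filter the findings by.

    Returns:
        A list of findings for the given check and region.
    """
```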
|
||||
|
||||
???+ note
|
||||
If you have any trouble when committing to the Prowler repository, add the `--no-verify` flag to the `git commit` command.
|
||||
|
||||
## Pull Request Checklist
|
||||
|
||||
|
||||
@@ -23,8 +23,8 @@ The Prowler's service structure is the following and the way to initialise it is
|
||||
All the Prowler providers' services inherit from a base class that depends on the provider used.
|
||||
|
||||
- [AWS Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/service/service.py)
|
||||
- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
|
||||
- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
|
||||
- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
|
||||
- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
|
||||
- [Kubernetes Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/kubernetes/lib/service/service.py)
|
||||
|
||||
Each class is used to initialize the credentials and the API clients to be used in the service. If any threading is used, it must be coded there.
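As an illustrative sketch of that structure (the service and its API calls are hypothetical; `AWSService` and `__threading_call__` refer to the AWS base class linked above, so verify them against the current code base):

```python
from prowler.providers.aws.lib.service.service import AWSService


class MyService(AWSService):
    def __init__(self, provider):
        # The base class initialises the credentials and one API client per region
        super().__init__(__class__.__name__, provider)
        self.resources = []
        # Threading, when needed, lives in the service: one thread per regional client
        self.__threading_call__(self._describe_resources)

    def _describe_resources(self, regional_client):
        # Populate self.resources using the regional API client (hypothetical API)
        response = regional_client.describe_resources()
        self.resources.extend(response.get("Resources", []))
```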
|
||||
|
||||
@@ -592,7 +592,7 @@ is following the actual format, add one function where the client is passed to b
|
||||
`mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute accessed after *client*).
|
||||
|
||||
In the example of BigQuery the function is called `mock_api_dataset_calls`. Inside this function there is an assignment to
|
||||
be used in the `__get_datasets__` method in BigQuery class:
|
||||
be used in the `_get_datasets` method of the BigQuery class:
|
||||
|
||||
```python
|
||||
# Mocking datasets
|
||||
@@ -765,7 +765,7 @@ from tests.providers.azure.azure_fixtures import (
|
||||
set_mocked_azure_provider,
|
||||
)
|
||||
|
||||
# Function to mock the service function __get_components__, this function task is to return a possible value that real function could returns
|
||||
# Function to mock the service function _get_components; its task is to return a possible value that the real function could return
|
||||
def mock_appinsights_get_components(_):
|
||||
return {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
@@ -779,12 +779,12 @@ def mock_appinsights_get_components(_):
|
||||
|
||||
# Patch decorator to use the mocked function instead of the function with the real API call
|
||||
@patch(
|
||||
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights.__get_components__",
|
||||
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights._get_components",
|
||||
new=mock_appinsights_get_components,
|
||||
)
|
||||
class Test_AppInsights_Service:
|
||||
# Mandatory test for every service; this method tests that the client instance is correct
|
||||
def test__get_client__(self):
|
||||
def test_get_client(self):
|
||||
app_insights = AppInsights(set_mocked_azure_provider())
|
||||
assert (
|
||||
app_insights.clients[AZURE_SUBSCRIPTION_ID].__class__.__name__
|
||||
@@ -794,8 +794,8 @@ class Test_AppInsights_Service:
|
||||
def test__get_subscriptions__(self):
|
||||
app_insights = AppInsights(set_mocked_azure_provider())
|
||||
assert app_insights.subscriptions.__class__.__name__ == "dict"
|
||||
# Test for the function __get_components__, inside this client is used the mocked function
|
||||
def test__get_components__(self):
|
||||
# Test for the function _get_components; inside it the mocked function is used instead of the real one
|
||||
def test_get_components(self):
|
||||
appinsights = AppInsights(set_mocked_azure_provider())
|
||||
assert len(appinsights.components) == 1
|
||||
assert (
|
||||
|
||||
@@ -40,10 +40,10 @@ If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to
|
||||
|
||||
Prowler for Azure supports the following authentication types:
|
||||
|
||||
- Service principal authentication by environment variables (Enterprise Application)
|
||||
- [Service principal application](https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals?tabs=browser#service-principal-object) by environment variables (recommended)
|
||||
- Current az cli credentials stored
|
||||
- Interactive browser authentication
|
||||
- Managed identity authentication
|
||||
- [Managed identity](https://learn.microsoft.com/en-us/entra/identity/managed-identities-azure-resources/overview) authentication
|
||||
|
||||
### Service Principal authentication
|
||||
|
||||
@@ -56,6 +56,8 @@ export AZURE_CLIENT_SECRET="XXXXXXX"
|
||||
```
|
||||
|
||||
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
|
||||
Follow the instructions in the [Create Prowler Service Principal](../tutorials/azure/create-prowler-service-principal.md) section to create a service principal.
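With the three variables exported, the scan can then be launched with:

```console
prowler azure --sp-env-auth
```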
|
||||
|
||||
### AZ CLI / Browser / Managed Identity authentication
|
||||
|
||||
The other three options do not need additional configuration: `--az-cli-auth` and `--managed-identity-auth` are automated options. To use `--browser-auth` the user needs to authenticate against Azure using the default browser to start the scan; the `tenant-id` is also required.
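For example, a browser-based scan could be launched as follows (the `--tenant-id` flag name is an assumption based on the requirement above, so verify it with `prowler azure --help`):

```console
prowler azure --browser-auth --tenant-id "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX"
```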
|
||||
@@ -64,55 +66,22 @@ The other three cases does not need additional configuration, `--az-cli-auth` an
|
||||
|
||||
To use each one you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:
|
||||
|
||||
- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler (not mandatory to have access to execute the tool).
|
||||
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.
|
||||
|
||||
|
||||
#### Microsoft Entra ID scope
|
||||
|
||||
Microsoft Entra ID (AAD earlier) permissions required by the tool are the following:
|
||||
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `UserAuthenticationMethod.Read.All`
|
||||
|
||||
The best way to assign it is through the Azure web console:
|
||||
|
||||
1. Access to Microsoft Entra ID
|
||||
2. In the left menu bar, go to "App registrations"
|
||||
3. Once there, in the menu bar click on "+ New registration" to register a new application
|
||||
4. Fill the "Name, select the "Supported account types" and click on "Register. You will be redirected to the applications page.
|
||||

|
||||
4. Select the new application
|
||||
5. In the left menu bar, select "API permissions"
|
||||
6. Then click on "+ Add a permission" and select "Microsoft Graph"
|
||||
7. Once in the "Microsoft Graph" view, select "Application permissions"
|
||||
8. Finally, search for "Directory", "Policy" and "UserAuthenticationMethod" select the following permissions:
|
||||
- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler and for specific Entra checks (not mandatory to execute the tool). The permissions required by the tool are the following:
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `UserAuthenticationMethod.Read.All`
|
||||

|
||||
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool. It is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:
|
||||
- `Reader`
|
||||
- `ProwlerRole` (custom role defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json))
|
||||
|
||||
To assign the permissions, follow the instructions in the [Microsoft Entra ID permissions](../tutorials/azure/create-prowler-service-principal.md#assigning-the-proper-permissions) section and the [Azure subscriptions permissions](../tutorials/azure/subscriptions.md#assigning-proper-permissions) section, respectively.
|
||||
|
||||
#### Subscriptions scope
|
||||
#### Checks that require ProwlerRole
|
||||
|
||||
Regarding the subscription scope, Prowler by default scans all the subscriptions that is able to list, so it is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:
|
||||
The following checks require the `ProwlerRole` custom role to be executed. If you want to run them, make sure you have assigned the role to the identity that is going to be assumed by Prowler (see the example command after the list):
|
||||
|
||||
- `Security Reader`
|
||||
- `Reader`
|
||||
|
||||
To assign this roles, follow the instructions:
|
||||
|
||||
1. Access your subscription, then select your subscription.
|
||||
2. Select "Access control (IAM)".
|
||||
3. In the overview, select "Roles"
|
||||

|
||||
4. Click on "+ Add" and select "Add role assignment"
|
||||
5. In the search bar, type `Security Reader`, select it and click on "Next"
|
||||
6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
|
||||
7. Click on "Review + assign" to apply the new role.
|
||||
|
||||
*Repeat these steps for `Reader` role*
|
||||
- `app_function_access_keys_configured`
|
||||
- `app_function_ftps_deployment_disabled`
|
||||
|
||||
## Google Cloud
|
||||
|
||||
|
||||
BIN
docs/img/add-reader-role.gif
Normal file
|
After Width: | Height: | Size: 1.4 MiB |
BIN
docs/img/add-sub-to-management-group.gif
Normal file
|
After Width: | Height: | Size: 357 KiB |
BIN
docs/img/create-management-group.gif
Normal file
|
After Width: | Height: | Size: 688 KiB |
|
Before Width: | Height: | Size: 214 KiB After Width: | Height: | Size: 746 KiB |
|
Before Width: | Height: | Size: 348 KiB |
BIN
docs/img/prowler-cli-quick.gif
Normal file
|
After Width: | Height: | Size: 552 KiB |
109
docs/index.md
@@ -19,14 +19,40 @@ It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, Fe
|
||||
## Quick Start
|
||||
### Installation
|
||||
|
||||
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus can be installed using pip with `Python >= 3.9`:
|
||||
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/), thus it can be installed as a Python package with `Python >= 3.9`:
|
||||
|
||||
=== "Generic"
|
||||
=== "pipx"
|
||||
|
||||
[pipx](https://pipx.pypa.io/stable/) is a tool to install Python applications in isolated environments. It is recommended to use `pipx` for a global installation.
|
||||
|
||||
_Requirements_:
|
||||
|
||||
* `Python >= 3.9`
|
||||
* `Python pip >= 3.9`
|
||||
* `pipx` installed: [pipx installation](https://pipx.pypa.io/stable/installation/).
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
|
||||
_Commands_:
|
||||
|
||||
``` bash
|
||||
pipx install prowler
|
||||
prowler -v
|
||||
```
|
||||
|
||||
To upgrade Prowler to the latest version, run:
|
||||
|
||||
``` bash
|
||||
pipx upgrade prowler
|
||||
```
|
||||
|
||||
=== "pip"
|
||||
|
||||
???+ warning
|
||||
This method is not recommended because it will modify the environment in which you choose to install Prowler. Consider using [pipx](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) for a global installation.
|
||||
|
||||
_Requirements_:
|
||||
|
||||
* `Python >= 3.9`
|
||||
* `Python pip >= 21.0.0`
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
|
||||
_Commands_:
|
||||
@@ -36,13 +62,19 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
prowler -v
|
||||
```
|
||||
|
||||
To upgrade Prowler to the latest version, run:
|
||||
|
||||
``` bash
|
||||
pip install --upgrade prowler
|
||||
```
|
||||
|
||||
=== "Docker"
|
||||
|
||||
_Requirements_:
|
||||
|
||||
* Have `docker` installed: https://docs.docker.com/get-docker/.
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
* In the command below, change `-v` to your local directory path in order to access the reports.
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
|
||||
_Commands_:
|
||||
|
||||
@@ -54,41 +86,21 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
--env AWS_SESSION_TOKEN toniblyx/prowler:latest
|
||||
```
|
||||
|
||||
=== "Ubuntu"
|
||||
|
||||
_Requirements for Ubuntu 20.04.3 LTS_:
|
||||
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
* Install python 3.9 with: `sudo apt-get install python3.9`
|
||||
* Remove python 3.8 to avoid conflicts if you can: `sudo apt-get remove python3.8`
|
||||
* Make sure you have the python3 distutils package installed: `sudo apt-get install python3-distutils`
|
||||
* To make sure you use pip for 3.9 get the get-pip script with: `curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py`
|
||||
* Execute it with the proper python version: `sudo python3.9 get-pip.py`
|
||||
* Now you should have pip for 3.9 ready: `pip3.9 --version`
|
||||
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
pip3.9 install prowler
|
||||
export PATH=$PATH:/home/$HOME/.local/bin/
|
||||
prowler -v
|
||||
```
|
||||
|
||||
=== "GitHub"
|
||||
|
||||
_Requirements for Developers_:
|
||||
|
||||
* `git`
|
||||
* `poetry` installed: [poetry installation](https://python-poetry.org/docs/#installation).
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
* `git`, `Python >= 3.9`, `pip` and `poetry` installed (`pip install poetry`)
|
||||
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
cd prowler
|
||||
poetry shell
|
||||
poetry install
|
||||
python prowler.py -v
|
||||
poetry run python prowler.py -v
|
||||
```
|
||||
???+ note
|
||||
If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
|
||||
@@ -97,15 +109,33 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
_Requirements_:
|
||||
|
||||
* `Python >= 3.9`
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
* The latest Amazon Linux 2 should come with Python 3.9 already installed; however, it may need pip. Install pip for Python 3.9 with: `sudo yum install -y python3-pip`.
|
||||
* Make sure setuptools for python is already installed with: `pip3 install setuptools`
|
||||
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
pip3.9 install prowler
|
||||
export PATH=$PATH:/home/$HOME/.local/bin/
|
||||
python3 -m pip install --user pipx
|
||||
python3 -m pipx ensurepath
|
||||
pipx install prowler
|
||||
prowler -v
|
||||
```
|
||||
|
||||
=== "Ubuntu"
|
||||
|
||||
_Requirements_:
|
||||
|
||||
* `Ubuntu 23.04` or above; if you are using an older version of Ubuntu, check [pipx installation](https://docs.prowler.com/projects/prowler-open-source/en/latest/#__tabbed_1_1) and ensure you have `Python >= 3.9`.
|
||||
* `Python >= 3.9`
|
||||
* AWS, GCP, Azure and/or Kubernetes credentials
|
||||
|
||||
_Commands_:
|
||||
|
||||
``` bash
|
||||
sudo apt update
|
||||
sudo apt install pipx
|
||||
pipx ensurepath
|
||||
pipx install prowler
|
||||
prowler -v
|
||||
```
|
||||
|
||||
@@ -125,7 +155,7 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
=== "AWS CloudShell"
|
||||
|
||||
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [2](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
|
||||
After the migration of AWS CloudShell from Amazon Linux 2 to Amazon Linux 2023 [[1]](https://aws.amazon.com/about-aws/whats-new/2023/12/aws-cloudshell-migrated-al2023/) [[2]](https://docs.aws.amazon.com/cloudshell/latest/userguide/cloudshell-AL2023-migration.html), there is no longer a need to manually compile Python 3.9 as it's already included in AL2023. Prowler can thus be easily installed following the Generic method of installation via pip. Follow the steps below to successfully execute Prowler v4 in AWS CloudShell:
|
||||
|
||||
_Requirements_:
|
||||
|
||||
@@ -133,11 +163,13 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
```bash
|
||||
sudo bash
|
||||
adduser prowler
|
||||
su prowler
|
||||
pip install prowler
|
||||
python3 -m pip install --user pipx
|
||||
python3 -m pipx ensurepath
|
||||
pipx install prowler
|
||||
cd /tmp
|
||||
prowler aws
|
||||
```
|
||||
@@ -153,9 +185,12 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler/),
|
||||
|
||||
_Commands_:
|
||||
|
||||
```
|
||||
pip install prowler
|
||||
prowler -v
|
||||
```bash
|
||||
python3 -m pip install --user pipx
|
||||
python3 -m pipx ensurepath
|
||||
pipx install prowler
|
||||
cd /tmp
|
||||
prowler azure --az-cli-auth
|
||||
```
|
||||
|
||||
## Prowler container versions
|
||||
|
||||
@@ -85,7 +85,7 @@ prowler --security-hub --region eu-west-1
|
||||
```
|
||||
|
||||
???+ note
|
||||
It is recommended to send only fails to Security Hub and that is possible adding `-q/--quiet` to the command. You can use, instead of the `-q/--quiet` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just to send FAIL findings to AWS Security Hub.
|
||||
It is recommended to send only fails to Security Hub, and that is possible by adding `--status FAIL` to the command. Instead of the `--status FAIL` argument, you can use the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but send only FAIL findings to AWS Security Hub.
|
||||
|
||||
Since Prowler performs checks in all regions by default, you may need to filter by region when running the Security Hub integration, as shown in the example above. Remember to enable Security Hub in the region or regions you need by calling `aws securityhub enable-security-hub --region <region>` and run Prowler with the option `-f/--region <region>` (if no region is given it will try to push findings to the hubs of all regions). Prowler will send findings to the Security Hub in the region where the scanned resource is located.
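For example, to enable Security Hub in `eu-west-1` and push only that region's findings (commands assembled from the guidance above):

```sh
aws securityhub enable-security-hub --region eu-west-1
prowler --security-hub --region eu-west-1
```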
|
||||
|
||||
@@ -121,13 +121,13 @@ prowler --security-hub --role arn:aws:iam::123456789012:role/ProwlerExecutionRol
|
||||
|
||||
## Send only failed findings to Security Hub
|
||||
|
||||
When using the **AWS Security Hub** integration you can send only the `FAIL` findings generated by **Prowler**. Therefore, the **AWS Security Hub** usage costs eventually would be lower. To follow that recommendation you could add the `-q/--quiet` flag to the Prowler command:
|
||||
When using the **AWS Security Hub** integration you can send only the `FAIL` findings generated by **Prowler**. Therefore, the **AWS Security Hub** usage costs would eventually be lower. To follow that recommendation you can add the `--status FAIL` flag to the Prowler command:
|
||||
|
||||
```sh
|
||||
prowler --security-hub --quiet
|
||||
prowler --security-hub --status FAIL
|
||||
```
|
||||
|
||||
You can use, instead of the `-q/--quiet` argument, the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but just to send FAIL findings to AWS Security Hub:
|
||||
Instead of the `--status FAIL` argument, you can use the `--send-sh-only-fails` argument to save all the findings in the Prowler outputs but send only FAIL findings to AWS Security Hub:
|
||||
|
||||
```sh
|
||||
prowler --security-hub --send-sh-only-fails
|
||||
|
||||
34
docs/tutorials/azure/create-prowler-service-principal.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# How to create Prowler Service Principal
|
||||
|
||||
To allow Prowler to assume an identity and start the scan with the required privileges, it is necessary to create a Service Principal. To create one, follow these steps:
|
||||
|
||||
1. Access to Microsoft Entra ID
|
||||
2. In the left menu bar, go to "App registrations"
|
||||
3. Once there, in the menu bar click on "+ New registration" to register a new application
|
||||
4. Fill the "Name, select the "Supported account types" and click on "Register. You will be redirected to the applications page.
|
||||
5. Once in the application page, in the left menu bar, select "Certificates & secrets"
|
||||
6. In the "Certificates & secrets" view, click on "+ New client secret"
|
||||
7. Fill the "Description" and "Expires" fields and click on "Add"
|
||||
8. Copy the value of the secret; it is going to be used as the `AZURE_CLIENT_SECRET` environment variable, as shown below.
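Once you have the application (client) ID, the tenant ID and the secret value, they can be exported as the environment variables used by `--sp-env-auth`:

```console
export AZURE_CLIENT_ID="XXXXXXX"
export AZURE_TENANT_ID="XXXXXXX"
export AZURE_CLIENT_SECRET="XXXXXXX"
```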
|
||||
|
||||

|
||||
|
||||
## Assigning the proper permissions
|
||||
|
||||
To allow Prowler to retrieve metadata from the assumed identity and to run specific Entra checks, the following permissions need to be assigned:
|
||||
|
||||
1. Access to Microsoft Entra ID
|
||||
2. In the left menu bar, go to "App registrations"
|
||||
3. Once there, select the application that you have created
|
||||
4. In the left menu bar, select "API permissions"
|
||||
5. Then click on "+ Add a permission" and select "Microsoft Graph"
|
||||
6. Once in the "Microsoft Graph" view, select "Application permissions"
|
||||
7. Finally, search for "Directory", "Policy" and "UserAuthenticationMethod" select the following permissions:
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `UserAuthenticationMethod.Read.All`
|
||||
8. Click on "Add permissions" to apply the new permissions.
|
||||
9. Finally, click on "Grant admin consent for [your tenant]" to apply the permissions.
|
||||
|
||||
|
||||

|
||||
@@ -1,6 +1,6 @@
|
||||
# Azure subscriptions scope
|
||||
|
||||
By default, Prowler is multisubscription, which means that is going to scan all the subscriptions is able to list. If you only assign permissions to one subscription, it is going to scan a single one.
|
||||
By default, Prowler is multisubscription, which means that it is going to scan all the subscriptions it is able to list. If you only assign permissions to one subscription, it is going to scan just that one.
|
||||
Prowler also has the ability to limit the scan to a set of subscriptions passed as an input argument, as follows:
|
||||
|
||||
```console
|
||||
@@ -8,3 +8,36 @@ prowler azure --az-cli-auth --subscription-ids <subscription ID 1> <subscription
|
||||
```
|
||||
|
||||
Where you can pass from 1 up to N subscriptions to be scanned.
|
||||
|
||||
## Assigning proper permissions
|
||||
|
||||
Regarding the subscription scope, Prowler by default scans all subscriptions that it is able to list, so it is necessary to add the `Reader` RBAC built-in role per subscription or management group (recommended for multiple subscriptions, see the [next section](#recommendation-for-multiple-subscriptions)) to the entity that will be assumed by the tool.
|
||||
|
||||
To assign this role, follow these instructions:
|
||||
|
||||
1. Access your subscription, then select your subscription.
|
||||
2. Select "Access control (IAM)".
|
||||
3. In the overview, select "Roles".
|
||||
4. Click on "+ Add" and select "Add role assignment".
|
||||
5. In the search bar, type `Reader`, select it and click on "Next".
|
||||
6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
|
||||
7. Click on "Review + assign" to apply the new role.
|
||||
|
||||

|
||||
|
||||
Moreover, some additional read-only permissions are needed for certain checks; for those checks that are not covered by built-in roles we use a custom role. This role is defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json). Once the custom role is created, repeat the steps mentioned above to assign the new `ProwlerRole` to an identity.
|
||||
|
||||
## Recommendation for multiple subscriptions
|
||||
|
||||
Scanning multiple subscriptions can be tedious, since roles have to be created and assigned for each one. For this reason, in Prowler we recommend the usage of *[management groups](https://learn.microsoft.com/en-us/azure/governance/management-groups/overview)* to group all the subscriptions that are going to be audited by Prowler.
|
||||
|
||||
To do this properly, you have to [create a new management group](https://learn.microsoft.com/en-us/azure/governance/management-groups/create-management-group-portal) and assign the roles in the same way as was done at the subscription scope.
|
||||
|
||||

|
||||
|
||||
Once the management group is properly set, you can add all the subscriptions that you want to audit.
|
||||
|
||||

|
||||
|
||||
???+ note
|
||||
By default, `prowler` will scan all subscriptions in the Azure tenant; use the flag `--subscription-ids` to specify the subscriptions to be scanned.
|
||||
|
||||
@@ -7,7 +7,6 @@ At the time of writing this documentation the available Azure Clouds from differ
|
||||
- AzureCloud
|
||||
- AzureChinaCloud
|
||||
- AzureUSGovernment
|
||||
- AzureGermanCloud
|
||||
|
||||
If you want to change the default one you must include the flag `--azure-region`, i.e.:
|
||||
|
||||
|
||||
@@ -13,35 +13,55 @@ The following list includes all the AWS checks with configurable variables that
|
||||
|
||||
| Check Name | Value | Type |
|
||||
|---------------------------------------------------------------|--------------------------------------------------|-----------------|
|
||||
| `acm_certificates_expiration_check` | `days_to_expire_threshold` | Integer |
|
||||
| `appstream_fleet_maximum_session_duration` | `max_session_duration_seconds` | Integer |
|
||||
| `appstream_fleet_session_disconnect_timeout` | `max_disconnect_timeout_in_seconds` | Integer |
|
||||
| `appstream_fleet_session_idle_disconnect_timeout` | `max_idle_disconnect_timeout_in_seconds` | Integer |
|
||||
| `autoscaling_find_secrets_ec2_launch_configuration` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `awslambda_function_no_secrets_in_code` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `awslambda_function_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `awslambda_function_using_supported_runtimes` | `obsolete_lambda_runtimes` | Integer |
|
||||
| `awslambda_function_vpc_is_in_multi_azs` | `lambda_min_azs` | Integer |
|
||||
| `cloudformation_stack_outputs_find_secrets` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
|
||||
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
|
||||
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
|
||||
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
|
||||
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
|
||||
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
|
||||
| `cloudwatch_log_group_no_secrets_in_logs` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `cloudwatch_log_group_retention_policy_specific_days_enabled` | `log_group_retention_days` | Integer |
|
||||
| `codebuild_project_no_secrets_in_variables` | `excluded_sensitive_environment_variables` | List of Strings |
|
||||
| `codebuild_project_no_secrets_in_variables` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `drs_job_exist` | `mute_non_default_regions` | Boolean |
|
||||
| `ec2_elastic_ip_shodan` | `shodan_api_key` | String |
|
||||
| `ec2_instance_older_than_specific_days` | `max_ec2_instance_age_in_days` | Integer |
|
||||
| `ec2_instance_secrets_user_data` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `ec2_launch_template_no_secrets` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_instance_owners` | List of Strings |
|
||||
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
|
||||
| `ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports`| `ec2_sg_high_risk_ports` | List of Integer |
|
||||
| `ec2_securitygroup_with_many_ingress_egress_rules` | `max_security_group_rules` | Integer |
|
||||
| `ecs_task_definitions_no_environment_secrets` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
|
||||
| `eks_cluster_uses_a_supported_version` | `eks_cluster_oldest_version_supported` | String |
|
||||
| `eks_control_plane_logging_all_types_enabled` | `eks_required_log_types` | List of Strings |
|
||||
| `elb_is_in_multiple_az` | `elb_min_azs` | Integer |
|
||||
| `elbv2_is_in_multiple_az` | `elbv2_min_azs` | Integer |
|
||||
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `iam_user_accesskey_unused` | `max_unused_access_keys_days` | Integer |
|
||||
| `iam_user_console_access_unused` | `max_console_access_days` | Integer |
|
||||
| `ec2_elastic_ip_shodan` | `shodan_api_key` | String |
|
||||
| `ec2_securitygroup_with_many_ingress_egress_rules` | `max_security_group_rules` | Integer |
|
||||
| `ec2_instance_older_than_specific_days` | `max_ec2_instance_age_in_days` | Integer |
|
||||
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
|
||||
| `organizations_scp_check_deny_regions` | `organizations_enabled_regions` | List of Strings |
|
||||
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
|
||||
| `securityhub_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `ssm_document_secrets` | `secrets_ignore_patterns` | List of Strings |
|
||||
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
|
||||
| `vpc_endpoint_connections_trust_boundaries` | `trusted_account_ids` | List of Strings |
|
||||
| `vpc_endpoint_services_allowed_principals_trust_boundaries` | `trusted_account_ids` | List of Strings |
|
||||
| `cloudwatch_log_group_retention_policy_specific_days_enabled` | `log_group_retention_days` | Integer |
|
||||
| `appstream_fleet_session_idle_disconnect_timeout` | `max_idle_disconnect_timeout_in_seconds` | Integer |
|
||||
| `appstream_fleet_session_disconnect_timeout` | `max_disconnect_timeout_in_seconds` | Integer |
|
||||
| `appstream_fleet_maximum_session_duration` | `max_session_duration_seconds` | Integer |
|
||||
| `awslambda_function_using_supported_runtimes` | `obsolete_lambda_runtimes` | Integer |
|
||||
| `organizations_scp_check_deny_regions` | `organizations_enabled_regions` | List of Strings |
|
||||
| `organizations_delegated_administrators` | `organizations_trusted_delegated_administrators` | List of Strings |
|
||||
| `ecr_repositories_scan_vulnerabilities_in_latest_image` | `ecr_repository_vulnerability_minimum_severity` | String |
|
||||
| `trustedadvisor_premium_support_plan_subscribed` | `verify_premium_support_plans` | Boolean |
|
||||
| `config_recorder_all_regions_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `drs_job_exist` | `mute_non_default_regions` | Boolean |
|
||||
| `guardduty_is_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `securityhub_enabled` | `mute_non_default_regions` | Boolean |
|
||||
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_entropy` | Integer |
|
||||
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_minutes` | Integer |
|
||||
| `cloudtrail_threat_detection_privilege_escalation` | `threat_detection_privilege_escalation_actions` | List of Strings |
|
||||
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_entropy` | Integer |
|
||||
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_minutes` | Integer |
|
||||
| `cloudtrail_threat_detection_enumeration` | `threat_detection_enumeration_actions` | List of Strings |
|
||||
| `rds_instance_backup_enabled` | `check_rds_instance_replicas` | Boolean |
|
||||
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_interface_types` | List of Strings |
|
||||
| `ec2_securitygroup_allow_ingress_from_internet_to_any_port` | `ec2_allowed_instance_owners` | List of Strings |
|
||||
|
||||
|
||||
## Azure
|
||||
|
||||
### Configurable Checks
|
||||
@@ -80,10 +100,20 @@ The following list includes all the Azure checks with configurable variables tha
|
||||
```yaml title="config.yaml"
|
||||
# AWS Configuration
|
||||
aws:
|
||||
|
||||
# AWS Global Configuration
|
||||
# aws.mute_non_default_regions --> Mute Failed Findings in non-default regions for GuardDuty, SecurityHub, DRS and Config
|
||||
# aws.mute_non_default_regions --> Set to True to muted failed findings in non-default regions for AccessAnalyzer, GuardDuty, SecurityHub, DRS and Config
|
||||
mute_non_default_regions: False
|
||||
# If you want to mute failed findings only in specific regions, create a file with the following syntax and run it with `prowler aws -w mutelist.yaml`:
|
||||
# Mutelist:
|
||||
# Accounts:
|
||||
# "*":
|
||||
# Checks:
|
||||
# "*":
|
||||
# Regions:
|
||||
# - "ap-southeast-1"
|
||||
# - "ap-southeast-2"
|
||||
# Resources:
|
||||
# - "*"
|
||||
|
||||
# AWS IAM Configuration
|
||||
# aws.iam_user_accesskey_unused --> CIS recommends 45 days
|
||||
@@ -93,6 +123,7 @@ aws:
|
||||
|
||||
# AWS EC2 Configuration
|
||||
# aws.ec2_elastic_ip_shodan
|
||||
# TODO: create common config
|
||||
shodan_api_key: null
|
||||
# aws.ec2_securitygroup_with_many_ingress_egress_rules --> by default is 50 rules
|
||||
max_security_group_rules: 50
|
||||
@@ -102,16 +133,32 @@ aws:
|
||||
# allowed network interface types for security groups open to the Internet
|
||||
ec2_allowed_interface_types:
|
||||
[
|
||||
"api_gateway_managed",
|
||||
"vpc_endpoint",
|
||||
"api_gateway_managed",
|
||||
"vpc_endpoint",
|
||||
]
|
||||
# allowed network interface owners for security groups open to the Internet
|
||||
ec2_allowed_instance_owners:
|
||||
[
|
||||
"amazon-elb"
|
||||
"amazon-elb"
|
||||
]
|
||||
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
|
||||
ec2_sg_high_risk_ports:
|
||||
[
|
||||
25,
|
||||
110,
|
||||
135,
|
||||
143,
|
||||
445,
|
||||
3000,
|
||||
4333,
|
||||
5000,
|
||||
5500,
|
||||
8080,
|
||||
8088,
|
||||
]
|
||||
|
||||
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
|
||||
# AWS SSM Configuration (aws.ssm_documents_set_as_public)
|
||||
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
|
||||
# Multi account environment: Any additional trusted account number should be added as a space separated list, e.g.
|
||||
# trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
|
||||
@@ -133,205 +180,246 @@ aws:
|
||||
# aws.awslambda_function_using_supported_runtimes
|
||||
obsolete_lambda_runtimes:
|
||||
[
|
||||
"java8",
|
||||
"go1.x",
|
||||
"provided",
|
||||
"python3.6",
|
||||
"python2.7",
|
||||
"python3.7",
|
||||
"nodejs4.3",
|
||||
"nodejs4.3-edge",
|
||||
"nodejs6.10",
|
||||
"nodejs",
|
||||
"nodejs8.10",
|
||||
"nodejs10.x",
|
||||
"nodejs12.x",
|
||||
"nodejs14.x",
|
||||
"dotnet5.0",
|
||||
"dotnetcore1.0",
|
||||
"dotnetcore2.0",
|
||||
"dotnetcore2.1",
|
||||
"dotnetcore3.1",
|
||||
"ruby2.5",
|
||||
"ruby2.7",
|
||||
]
|
||||
|
||||
# AWS Organizations
|
||||
# organizations_scp_check_deny_regions
|
||||
# organizations_enabled_regions: [
|
||||
# 'eu-central-1',
|
||||
# 'eu-west-1',
|
||||
# aws.organizations_scp_check_deny_regions
|
||||
# aws.organizations_enabled_regions: [
|
||||
# "eu-central-1",
|
||||
# "eu-west-1",
|
||||
# "us-east-1"
|
||||
# ]
|
||||
organizations_enabled_regions: []
|
||||
organizations_trusted_delegated_administrators: []
|
||||
|
||||
# AWS ECR
|
||||
# ecr_repositories_scan_vulnerabilities_in_latest_image
|
||||
# aws.ecr_repositories_scan_vulnerabilities_in_latest_image
|
||||
# CRITICAL
|
||||
# HIGH
|
||||
# MEDIUM
|
||||
ecr_repository_vulnerability_minimum_severity: "MEDIUM"
|
||||
|
||||
# AWS Trusted Advisor
|
||||
# trustedadvisor_premium_support_plan_subscribed
|
||||
# aws.trustedadvisor_premium_support_plan_subscribed
|
||||
verify_premium_support_plans: True
|
||||
|
||||
# AWS CloudTrail Configuration
|
||||
# aws.cloudtrail_threat_detection_privilege_escalation
|
||||
threat_detection_privilege_escalation_entropy: 0.7 # Percentage of actions found to decide if it is an privilege_escalation attack event, by default is 0.7 (70%)
|
||||
threat_detection_privilege_escalation_threshold: 0.1 # Percentage of actions found to decide if it is a privilege_escalation attack event, by default 0.1 (10%)
|
||||
threat_detection_privilege_escalation_minutes: 1440 # Past minutes to search from now for privilege_escalation attacks, by default is 1440 minutes (24 hours)
|
||||
threat_detection_privilege_escalation_actions: [
|
||||
"AddPermission",
|
||||
"AddRoleToInstanceProfile",
|
||||
"AddUserToGroup",
|
||||
"AssociateAccessPolicy",
|
||||
"AssumeRole",
|
||||
"AttachGroupPolicy",
|
||||
"AttachRolePolicy",
|
||||
"AttachUserPolicy",
|
||||
"ChangePassword",
|
||||
"CreateAccessEntry",
|
||||
"CreateAccessKey",
|
||||
"CreateDevEndpoint",
|
||||
"CreateEventSourceMapping",
|
||||
"CreateFunction",
|
||||
"CreateGroup",
|
||||
"CreateJob",
|
||||
"CreateKeyPair",
|
||||
"CreateLoginProfile",
|
||||
"CreatePipeline",
|
||||
"CreatePolicyVersion",
|
||||
"CreateRole",
|
||||
"CreateStack",
|
||||
"DeleteRolePermissionsBoundary",
|
||||
"DeleteRolePolicy",
|
||||
"DeleteUserPermissionsBoundary",
|
||||
"DeleteUserPolicy",
|
||||
"DetachRolePolicy",
|
||||
"DetachUserPolicy",
|
||||
"GetCredentialsForIdentity",
|
||||
"GetId",
|
||||
"GetPolicyVersion",
|
||||
"GetUserPolicy",
|
||||
"Invoke",
|
||||
"ModifyInstanceAttribute",
|
||||
"PassRole",
|
||||
"PutGroupPolicy",
|
||||
"PutPipelineDefinition",
|
||||
"PutRolePermissionsBoundary",
|
||||
"PutRolePolicy",
|
||||
"PutUserPermissionsBoundary",
|
||||
"PutUserPolicy",
|
||||
"ReplaceIamInstanceProfileAssociation",
|
||||
"RunInstances",
|
||||
"SetDefaultPolicyVersion",
|
||||
"UpdateAccessKey",
|
||||
"UpdateAssumeRolePolicy",
|
||||
"UpdateDevEndpoint",
|
||||
"UpdateEventSourceMapping",
|
||||
"UpdateFunctionCode",
|
||||
"UpdateJob",
|
||||
"UpdateLoginProfile",
|
||||
]
|
||||
threat_detection_privilege_escalation_actions:
|
||||
[
|
||||
"AddPermission",
|
||||
"AddRoleToInstanceProfile",
|
||||
"AddUserToGroup",
|
||||
"AssociateAccessPolicy",
|
||||
"AssumeRole",
|
||||
"AttachGroupPolicy",
|
||||
"AttachRolePolicy",
|
||||
"AttachUserPolicy",
|
||||
"ChangePassword",
|
||||
"CreateAccessEntry",
|
||||
"CreateAccessKey",
|
||||
"CreateDevEndpoint",
|
||||
"CreateEventSourceMapping",
|
||||
"CreateFunction",
|
||||
"CreateGroup",
|
||||
"CreateJob",
|
||||
"CreateKeyPair",
|
||||
"CreateLoginProfile",
|
||||
"CreatePipeline",
|
||||
"CreatePolicyVersion",
|
||||
"CreateRole",
|
||||
"CreateStack",
|
||||
"DeleteRolePermissionsBoundary",
|
||||
"DeleteRolePolicy",
|
||||
"DeleteUserPermissionsBoundary",
|
||||
"DeleteUserPolicy",
|
||||
"DetachRolePolicy",
|
||||
"DetachUserPolicy",
|
||||
"GetCredentialsForIdentity",
|
||||
"GetId",
|
||||
"GetPolicyVersion",
|
||||
"GetUserPolicy",
|
||||
"Invoke",
|
||||
"ModifyInstanceAttribute",
|
||||
"PassRole",
|
||||
"PutGroupPolicy",
|
||||
"PutPipelineDefinition",
|
||||
"PutRolePermissionsBoundary",
|
||||
"PutRolePolicy",
|
||||
"PutUserPermissionsBoundary",
|
||||
"PutUserPolicy",
|
||||
"ReplaceIamInstanceProfileAssociation",
|
||||
"RunInstances",
|
||||
"SetDefaultPolicyVersion",
|
||||
"UpdateAccessKey",
|
||||
"UpdateAssumeRolePolicy",
|
||||
"UpdateDevEndpoint",
|
||||
"UpdateEventSourceMapping",
|
||||
"UpdateFunctionCode",
|
||||
"UpdateJob",
|
||||
"UpdateLoginProfile",
|
||||
]
|
||||
# aws.cloudtrail_threat_detection_enumeration
|
||||
threat_detection_enumeration_entropy: 0.7 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.7 (70%)
|
||||
threat_detection_enumeration_threshold: 0.1 # Percentage of actions found to decide if it is an enumeration attack event, by default is 0.1 (10%)
|
||||
threat_detection_enumeration_minutes: 1440 # Past minutes to search from now for enumeration attacks, by default is 1440 minutes (24 hours)
|
||||
threat_detection_enumeration_actions: [
|
||||
"DescribeAccessEntry",
|
||||
"DescribeAccountAttributes",
|
||||
"DescribeAvailabilityZones",
|
||||
"DescribeBundleTasks",
|
||||
"DescribeCarrierGateways",
|
||||
"DescribeClientVpnRoutes",
|
||||
"DescribeCluster",
|
||||
"DescribeDhcpOptions",
|
||||
"DescribeFlowLogs",
|
||||
"DescribeImages",
|
||||
"DescribeInstanceAttribute",
|
||||
"DescribeInstanceInformation",
|
||||
"DescribeInstanceTypes",
|
||||
"DescribeInstances",
|
||||
"DescribeInstances",
|
||||
"DescribeKeyPairs",
|
||||
"DescribeLogGroups",
|
||||
"DescribeLogStreams",
|
||||
"DescribeOrganization",
|
||||
"DescribeRegions",
|
||||
"DescribeSecurityGroups",
|
||||
"DescribeSnapshotAttribute",
|
||||
"DescribeSnapshotTierStatus",
|
||||
"DescribeSubscriptionFilters",
|
||||
"DescribeTransitGatewayMulticastDomains",
|
||||
"DescribeVolumes",
|
||||
"DescribeVolumesModifications",
|
||||
"DescribeVpcEndpointConnectionNotifications",
|
||||
"DescribeVpcs",
|
||||
"GetAccount",
|
||||
"GetAccountAuthorizationDetails",
|
||||
"GetAccountSendingEnabled",
|
||||
"GetBucketAcl",
|
||||
"GetBucketLogging",
|
||||
"GetBucketPolicy",
|
||||
"GetBucketReplication",
|
||||
"GetBucketVersioning",
|
||||
"GetCallerIdentity",
|
||||
"GetCertificate",
|
||||
"GetConsoleScreenshot",
|
||||
"GetCostAndUsage",
|
||||
"GetDetector",
|
||||
"GetEbsDefaultKmsKeyId",
|
||||
"GetEbsEncryptionByDefault",
|
||||
"GetFindings",
|
||||
"GetFlowLogsIntegrationTemplate",
|
||||
"GetIdentityVerificationAttributes",
|
||||
"GetInstances",
|
||||
"GetIntrospectionSchema",
|
||||
"GetLaunchTemplateData",
|
||||
"GetLaunchTemplateData",
|
||||
"GetLogRecord",
|
||||
"GetParameters",
|
||||
"GetPolicyVersion",
|
||||
"GetPublicAccessBlock",
|
||||
"GetQueryResults",
|
||||
"GetRegions",
|
||||
"GetSMSAttributes",
|
||||
"GetSMSSandboxAccountStatus",
|
||||
"GetSendQuota",
|
||||
"GetTransitGatewayRouteTableAssociations",
|
||||
"GetUserPolicy",
|
||||
"HeadObject",
|
||||
"ListAccessKeys",
|
||||
"ListAccounts",
|
||||
"ListAllMyBuckets",
|
||||
"ListAssociatedAccessPolicies",
|
||||
"ListAttachedUserPolicies",
|
||||
"ListClusters",
|
||||
"ListDetectors",
|
||||
"ListDomains",
|
||||
"ListFindings",
|
||||
"ListHostedZones",
|
||||
"ListIPSets",
|
||||
"ListIdentities",
|
||||
"ListInstanceProfiles",
|
||||
"ListObjects",
|
||||
"ListOrganizationalUnitsForParent",
|
||||
"ListOriginationNumbers",
|
||||
"ListPolicyVersions",
|
||||
"ListRoles",
|
||||
"ListRoles",
|
||||
"ListRules",
|
||||
"ListServiceQuotas",
|
||||
"ListSubscriptions",
|
||||
"ListTargetsByRule",
|
||||
"ListTopics",
|
||||
"ListUsers",
|
||||
"LookupEvents",
|
||||
"Search",
|
||||
]
|
||||
threat_detection_enumeration_actions:
|
||||
[
|
||||
"DescribeAccessEntry",
|
||||
"DescribeAccountAttributes",
|
||||
"DescribeAvailabilityZones",
|
||||
"DescribeBundleTasks",
|
||||
"DescribeCarrierGateways",
|
||||
"DescribeClientVpnRoutes",
|
||||
"DescribeCluster",
|
||||
"DescribeDhcpOptions",
|
||||
"DescribeFlowLogs",
|
||||
"DescribeImages",
|
||||
"DescribeInstanceAttribute",
|
||||
"DescribeInstanceInformation",
|
||||
"DescribeInstanceTypes",
|
||||
"DescribeInstances",
|
||||
"DescribeInstances",
|
||||
"DescribeKeyPairs",
|
||||
"DescribeLogGroups",
|
||||
"DescribeLogStreams",
|
||||
"DescribeOrganization",
|
||||
"DescribeRegions",
|
||||
"DescribeSecurityGroups",
|
||||
"DescribeSnapshotAttribute",
|
||||
"DescribeSnapshotTierStatus",
|
||||
"DescribeSubscriptionFilters",
|
||||
"DescribeTransitGatewayMulticastDomains",
|
||||
"DescribeVolumes",
|
||||
"DescribeVolumesModifications",
|
||||
"DescribeVpcEndpointConnectionNotifications",
|
||||
"DescribeVpcs",
|
||||
"GetAccount",
|
||||
"GetAccountAuthorizationDetails",
|
||||
"GetAccountSendingEnabled",
|
||||
"GetBucketAcl",
|
||||
"GetBucketLogging",
|
||||
"GetBucketPolicy",
|
||||
"GetBucketReplication",
|
||||
"GetBucketVersioning",
|
||||
"GetCallerIdentity",
|
||||
"GetCertificate",
|
||||
"GetConsoleScreenshot",
|
||||
"GetCostAndUsage",
|
||||
"GetDetector",
|
||||
"GetEbsDefaultKmsKeyId",
|
||||
"GetEbsEncryptionByDefault",
|
||||
"GetFindings",
|
||||
"GetFlowLogsIntegrationTemplate",
|
||||
"GetIdentityVerificationAttributes",
|
||||
"GetInstances",
|
||||
"GetIntrospectionSchema",
|
||||
"GetLaunchTemplateData",
|
||||
"GetLaunchTemplateData",
|
||||
"GetLogRecord",
|
||||
"GetParameters",
|
||||
"GetPolicyVersion",
|
||||
"GetPublicAccessBlock",
|
||||
"GetQueryResults",
|
||||
"GetRegions",
|
||||
"GetSMSAttributes",
|
||||
"GetSMSSandboxAccountStatus",
|
||||
"GetSendQuota",
|
||||
"GetTransitGatewayRouteTableAssociations",
|
||||
"GetUserPolicy",
|
||||
"HeadObject",
|
||||
"ListAccessKeys",
|
||||
"ListAccounts",
|
||||
"ListAllMyBuckets",
|
||||
"ListAssociatedAccessPolicies",
|
||||
"ListAttachedUserPolicies",
|
||||
"ListClusters",
|
||||
"ListDetectors",
|
||||
"ListDomains",
|
||||
"ListFindings",
|
||||
"ListHostedZones",
|
||||
"ListIPSets",
|
||||
"ListIdentities",
|
||||
"ListInstanceProfiles",
|
||||
"ListObjects",
|
||||
"ListOrganizationalUnitsForParent",
|
||||
"ListOriginationNumbers",
|
||||
"ListPolicyVersions",
|
||||
"ListRoles",
|
||||
"ListRoles",
|
||||
"ListRules",
|
||||
"ListServiceQuotas",
|
||||
"ListSubscriptions",
|
||||
"ListTargetsByRule",
|
||||
"ListTopics",
|
||||
"ListUsers",
|
||||
"LookupEvents",
|
||||
"Search",
|
||||
]
|
||||
|
||||
# AWS RDS Configuration
|
||||
# aws.rds_instance_backup_enabled
|
||||
# Whether to check RDS instance replicas or not
|
||||
check_rds_instance_replicas: False
|
||||
|
||||
# AWS ACM Configuration
|
||||
# aws.acm_certificates_expiration_check
|
||||
days_to_expire_threshold: 7
|
||||
|
||||
# AWS EKS Configuration
|
||||
# aws.eks_control_plane_logging_all_types_enabled
|
||||
# EKS control plane logging types that must be enabled
|
||||
eks_required_log_types:
|
||||
[
|
||||
"api",
|
||||
"audit",
|
||||
"authenticator",
|
||||
"controllerManager",
|
||||
"scheduler",
|
||||
]
|
||||
|
||||
# aws.eks_cluster_uses_a_supported_version
|
||||
# EKS clusters must be version 1.28 or higher
|
||||
eks_cluster_oldest_version_supported: "1.28"
|
||||
|
||||
# AWS CodeBuild Configuration
|
||||
# aws.codebuild_project_no_secrets_in_variables
|
||||
# CodeBuild sensitive variables that are excluded from the check
|
||||
excluded_sensitive_environment_variables:
|
||||
[
|
||||
|
||||
]
|
||||
|
||||
# Azure Configuration
|
||||
azure:
|
||||
# Azure Network Configuration
|
||||
# azure.network_public_ip_shodan
|
||||
# TODO: create common config
|
||||
shodan_api_key: null
|
||||
|
||||
# Azure App Configuration
|
||||
# Azure App Service
|
||||
# azure.app_ensure_php_version_is_latest
|
||||
php_latest_version: "8.2"
|
||||
# azure.app_ensure_python_version_is_latest
|
||||
@@ -345,4 +433,34 @@ gcp:
|
||||
# gcp.compute_public_address_shodan
|
||||
shodan_api_key: null
|
||||
|
||||
# Kubernetes Configuration
|
||||
kubernetes:
|
||||
# Kubernetes API Server
|
||||
# kubernetes.apiserver_audit_log_maxbackup_set
|
||||
audit_log_maxbackup: 10
|
||||
# kubernetes.apiserver_audit_log_maxsize_set
|
||||
audit_log_maxsize: 100
|
||||
# kubernetes.apiserver_audit_log_maxage_set
|
||||
audit_log_maxage: 30
|
||||
# kubernetes.apiserver_strong_ciphers_only
|
||||
apiserver_strong_ciphers:
|
||||
[
|
||||
"TLS_AES_128_GCM_SHA256",
|
||||
"TLS_AES_256_GCM_SHA384",
|
||||
"TLS_CHACHA20_POLY1305_SHA256",
|
||||
]
|
||||
# Kubelet
|
||||
# kubernetes.kubelet_strong_ciphers_only
|
||||
kubelet_strong_ciphers:
|
||||
[
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256",
|
||||
"TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305",
|
||||
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305",
|
||||
"TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_RSA_WITH_AES_256_GCM_SHA384",
|
||||
"TLS_RSA_WITH_AES_128_GCM_SHA256",
|
||||
]
|
||||
|
||||
```
|
||||
|
||||
@@ -54,7 +54,7 @@ CustomChecksMetadata:
|
||||
RelatedUrl: https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html
|
||||
Remediation:
|
||||
Code:
|
||||
CLI: aws s3api put-bucket-versioning --bucket <bucket-name> --versioning-configuration Status=Enabled
|
||||
CLI: aws s3api put-bucket-versioning --bucket <bucket-name> --versioning-configuration Status=Enabled,MFADelete=Enabled
|
||||
NativeIaC: https://aws.amazon.com/es/s3/features/versioning/
|
||||
Other: https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html
|
||||
Terraform: https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning
|
||||
|
||||
@@ -10,9 +10,11 @@ prowler dashboard
|
||||
To run Prowler local dashboard with Docker, use:
|
||||
|
||||
```sh
|
||||
docker run --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
|
||||
docker run -v /your/local/dir/prowler-output:/home/prowler/output --env HOST=0.0.0.0 --publish 127.0.0.1:11666:11666 toniblyx/prowler:latest dashboard
|
||||
```
|
||||
|
||||
Make sure you update `/your/local/dir/prowler-output` to match the path that contains your Prowler output.
|
||||
|
||||
???+ note
|
||||
**Remember that the `dashboard` server is not authenticated, if you expose it to the internet, you are running it at your own risk.**
|
||||
|
||||
@@ -81,7 +83,7 @@ def get_table(data):
|
||||
|
||||
## S3 Integration
|
||||
|
||||
If you are a Prowler Saas customer and you want to use your data from your S3 bucket, you can run:
|
||||
If you are using the S3 integration from Prowler SaaS or from Prowler Open Source and you want to use the data from your S3 bucket, you can run:
|
||||
|
||||
```sh
|
||||
aws s3 cp s3://<your-bucket>/output/csv ./output --recursive
|
||||
|
||||
@@ -13,7 +13,7 @@ prowler <provider> -c <check_to_fix_1> <check_to_fix_2> ... --fixer
|
||||
```sh
|
||||
prowler <provider> --list-fixers
|
||||
```
|
||||
|
||||
It's important to note that using the fixers for `Access Analyzer`, `GuardDuty`, and `SecurityHub` may incur additional costs. These AWS services might trigger actions or deploy resources that can lead to charges on your AWS account.
|
||||
## Writing a Fixer
|
||||
To write a fixer, you need to create a file called `<check_id>_fixer.py` inside the check folder, with a function called `fixer` that receives either the region or the resource to be fixed as a parameter, and returns a boolean value indicating if the fix was successful or not.
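For example, a minimal regional fixer might look like the following sketch. The check, the `ec2_client.regional_clients` attribute and the error handling are assumptions based on the structure described above, so compare them with an existing fixer in the code base:

```python
# <check_id>_fixer.py - a sketch of a fixer that receives a region
from prowler.lib.logger import logger
from prowler.providers.aws.services.ec2.ec2_client import ec2_client


def fixer(region: str) -> bool:
    """Enable EBS encryption by default in the given region.

    Returns True if the fix was applied successfully, False otherwise.
    """
    try:
        regional_client = ec2_client.regional_clients[region]
        regional_client.enable_ebs_encryption_by_default()
        return True
    except Exception as error:
        logger.error(f"{region} -- {error.__class__.__name__}[{error}]")
        return False
```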
|
||||
|
||||
|
||||
@@ -25,7 +25,17 @@ Prowler will follow the same credentials search as [Google authentication librar
|
||||
|
||||
Those credentials must be associated with a user or service account that has the proper permissions to run all checks. To make sure of that, add the `Viewer` role to the member associated with the credentials.
|
||||
|
||||
# GCP Service APIs
## Impersonate Service Account

If you want to impersonate a GCP service account, you can use the `--impersonate-service-account` argument:

```console
prowler gcp --impersonate-service-account <service-account-email>
```

This argument will use the default credentials to impersonate the service account provided.

## Service APIs

Prowler will use the Google Cloud APIs to get the information needed to perform the checks. Make sure that the following APIs are enabled in the project:
BIN  docs/tutorials/img/create-sp.gif (new file; after: 4.5 MiB)
BIN  (modified image; before: 24 KiB, after: 26 KiB)
20  docs/tutorials/kubernetes/in-cluster.md (new file)
@@ -0,0 +1,20 @@
# In-Cluster Execution

For in-cluster execution, you can use the supplied YAML files inside `/kubernetes`:

* [job.yaml](https://github.com/prowler-cloud/prowler/blob/master/kubernetes/job.yaml)
* [prowler-role.yaml](https://github.com/prowler-cloud/prowler/blob/master/kubernetes/prowler-role.yaml)
* [prowler-rolebinding.yaml](https://github.com/prowler-cloud/prowler/blob/master/kubernetes/prowler-rolebinding.yaml)

They can be used to run Prowler as a job within a new Prowler namespace:

```console
kubectl apply -f kubernetes/job.yaml
kubectl apply -f kubernetes/prowler-role.yaml
kubectl apply -f kubernetes/prowler-rolebinding.yaml
kubectl get pods --namespace prowler-ns --> prowler-XXXXX
kubectl logs prowler-XXXXX --namespace prowler-ns
```

???+ note
    By default, `prowler` will scan all namespaces in your active Kubernetes context. Use the [`--namespace`](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/kubernetes/namespace/) flag to specify the namespace(s) to be scanned.
23  docs/tutorials/kubernetes/misc.md (new file)
@@ -0,0 +1,23 @@
# Miscellaneous

## Context Filtering

Prowler will scan the active Kubernetes context by default.

To specify the Kubernetes context to be scanned, use the `--context` flag followed by the desired context name. For example:

```console
prowler --context my-context
```

This will ensure that Prowler scans the specified context/cluster for vulnerabilities and misconfigurations.

## Namespace Filtering

By default, `prowler` will scan all namespaces in the context you specify.

To specify the namespace(s) to be scanned, use the `--namespace` flag followed by the desired namespace(s) separated by spaces. For example:

```console
prowler --namespace namespace1 namespace2
```
15  docs/tutorials/kubernetes/outside-cluster.md (new file)
@@ -0,0 +1,15 @@
# Non In-Cluster Execution

For non in-cluster execution, you can provide the location of the [kubeconfig](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/) file with the following argument:

```console
prowler kubernetes --kubeconfig-file /path/to/kubeconfig
```

???+ note
    If no `--kubeconfig-file` is provided, Prowler will use the default kubeconfig file location (`~/.kube/config`).

???+ note
    `prowler` will scan the active Kubernetes context by default. Use the [`--context`](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/kubernetes/context/) flag to specify the context to be scanned.

???+ note
    By default, `prowler` will scan all namespaces in your active Kubernetes context. Use the [`--namespace`](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/kubernetes/namespace/) flag to specify the namespace(s) to be scanned.
@@ -10,7 +10,7 @@ Execute Prowler in verbose mode (like in Version 2):
prowler <provider> --verbose
```

## Filter findings by status
Prowler can filter the findings by their status:
Prowler can filter the findings by their status, so that both the CLI and the reports show only the findings with a specific status:
```console
prowler <provider> --status [PASS, FAIL, MANUAL]
```
@@ -7,97 +7,155 @@ Mutelist option works along with other options and will modify the output in the

- CSV: `muted` is `True`. The `status` field will keep the original status, `MANUAL`, `PASS` or `FAIL`, of the finding.

You can use `-w`/`--mutelist-file` with the path of your mutelist YAML file:
## How the Mutelist Works

The **Mutelist** uses both "AND" and "OR" logic to determine which resources, checks, regions, and tags should be muted. For each check, the Mutelist evaluates whether the account, region, and resource match the specified criteria using "AND" logic. If tags are specified, the Mutelist can apply either "AND" or "OR" logic.

If any of the criteria do not match, the check is not muted.

???+ note
    Remember that the Mutelist supports regular expressions.
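As a minimal sketch of that logic (the account ID, check name, and values below are purely illustrative): the account, region, and resource must all match for a finding to be muted, while an alternation regex in a tag entry makes either tag sufficient.

```yaml
Mutelist:
  Accounts:
    "123456789012":                  # AND: only this account
      Checks:
        "s3_bucket_public_access":   # AND: an illustrative check name
          Regions:
            - "us-east-1"            # AND: only this region
          Resources:
            - "my-bucket"            # AND: only resources matching this string
          Tags:
            - "env=dev|env=test"     # OR: either tag is enough to mute
```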
## Mutelist Specification

???+ note
    - For the Azure provider, the Account ID is the Subscription Name and the Region is the Location.
    - For the GCP provider, the Account ID is the Project ID and the Region is the Zone.
    - For the Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.

The Mutelist file uses the [YAML](https://en.wikipedia.org/wiki/YAML) format with the following syntax:
```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
  Accounts:
    "123456789012":
      Checks:
        "iam_user_hardware_mfa_enabled":
          Regions:
            - "us-east-1"
          Resources:
            - "user-1" # Will mute user-1 in check iam_user_hardware_mfa_enabled
            - "user-2" # Will mute user-2 in check iam_user_hardware_mfa_enabled
        "ec2_*":
          Regions:
            - "*"
          Resources:
            - "*" # Will mute every EC2 check in every account and region
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test" # Will mute every resource containing the string "test" and the tags 'test=test' and
            - "project=test|project=stage" # either of ('project=test' OR 'project=stage') in account 123456789012 and every region
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test"
            - "project=test" # This will mute every resource containing the string "test" and BOTH tags at the same time.
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags: # This will mute every resource containing the string "test" and the ones that contain EITHER the `test=test` OR `project=test` OR `project=dev` tags
            - "test=test|project=(test|dev)"
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test" # This will mute every resource containing the string "test" and the tags `test=test` and either `project=test` OR `project=stage` in every account and region.
            - "project=test|project=stage"

    "*":
      Checks:
        "s3_bucket_object_versioning":
          Regions:
            - "eu-west-1"
            - "us-east-1"
          Resources:
            - "ci-logs" # Will mute bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
            - "logs" # Will mute EVERY BUCKET containing the string "logs" in specified check and regions
            - ".+-logs" # Will mute all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
        "ecs_task_definitions_no_environment_secrets":
          Regions:
            - "*"
          Resources:
            - "*"
          Exceptions:
            Accounts:
              - "0123456789012"
            Regions:
              - "eu-west-1"
              - "eu-south-2" # Will mute every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Tags:
            - "environment=dev" # Will mute every resource containing the tag 'environment=dev' in every account and region

    "123456789012":
      Checks:
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Exceptions:
            Resources:
              - "test"
            Tags:
              - "environment=prod" # Will mute every resource in account 123456789012 except the ones containing the string "test" and the tag environment=prod

    "*":
      Checks:
        "ec2_*":
          Regions:
            - "*"
          Resources:
            - "test-resource" # Will mute the resource "test-resource" in all accounts and regions for any check of the EC2 service
```

### Account, Check, Region, Resource, and Tag

| Field | Description | Logic |
|----------|----------|----------|
| `account_id` | Use `*` to apply the mutelist to all accounts. | `ANDed` |
| `check_name` | The name of the Prowler check. Use `*` to apply the mutelist to all checks, or `service_*` to apply it to all of a service's checks. | `ANDed` |
| `region` | The region identifier. Use `*` to apply the mutelist to all regions. | `ANDed` |
| `resource` | The resource identifier. Use `*` to apply the mutelist to all resources. | `ANDed` |
| `tag` | The tag value. | `ORed` |

## How to Use the Mutelist

To use the Mutelist, you need to specify the path to the Mutelist YAML file using the `-w` or `--mutelist-file` option when running Prowler:

```
prowler <provider> -w mutelist.yaml
```

## Mutelist YAML File Syntax
Replace `<provider>` with the appropriate provider name.

???+ note
    For the Azure provider, the Account ID is the Subscription Name and the Region is the Location.
## Considerations

???+ note
    For the GCP provider, the Account ID is the Project ID and the Region is the Zone.
- The Mutelist can be used in combination with other Prowler options, such as the `--service` or `--checks` option, to further customize the scanning process.
- Make sure to review and update the Mutelist regularly to ensure it reflects the desired exclusions and remains up to date with your infrastructure.

???+ note
    For the Kubernetes provider, the Account ID is the Cluster Name and the Region is the Namespace.

The Mutelist file is a YAML file with the following syntax:
```yaml
### Account, Check and/or Region can be * to apply for all the cases.
### Resources and tags are lists that can have either Regex or Keywords.
### Tags is an optional list that matches on tuples of 'key=value' and are "ANDed" together.
### Use an alternation Regex to match one of multiple tags with "ORed" logic.
### For each check you can except Accounts, Regions, Resources and/or Tags.
########################### MUTELIST EXAMPLE ###########################
Mutelist:
  Accounts:
    "123456789012":
      Checks:
        "iam_user_hardware_mfa_enabled":
          Regions:
            - "us-east-1"
          Resources:
            - "user-1" # Will ignore user-1 in check iam_user_hardware_mfa_enabled
            - "user-2" # Will ignore user-2 in check iam_user_hardware_mfa_enabled
        "ec2_*":
          Regions:
            - "*"
          Resources:
            - "*" # Will ignore every EC2 check in every account and region
        "*":
          Regions:
            - "*"
          Resources:
            - "test"
          Tags:
            - "test=test" # Will ignore every resource containing the string "test" and the tags 'test=test' and
            - "project=test|project=stage" # either of ('project=test' OR 'project=stage') in account 123456789012 and every region

    "*":
      Checks:
        "s3_bucket_object_versioning":
          Regions:
            - "eu-west-1"
            - "us-east-1"
          Resources:
            - "ci-logs" # Will ignore bucket "ci-logs" AND ALSO bucket "ci-logs-replica" in specified check and regions
            - "logs" # Will ignore EVERY BUCKET containing the string "logs" in specified check and regions
            - ".+-logs" # Will ignore all buckets containing the terms ci-logs, qa-logs, etc. in specified check and regions
        "ecs_task_definitions_no_environment_secrets":
          Regions:
            - "*"
          Resources:
            - "*"
          Exceptions:
            Accounts:
              - "0123456789012"
            Regions:
              - "eu-west-1"
              - "eu-south-2" # Will ignore every resource in check ecs_task_definitions_no_environment_secrets except the ones in account 0123456789012 located in eu-south-2 or eu-west-1
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Tags:
            - "environment=dev" # Will ignore every resource containing the tag 'environment=dev' in every account and region

    "123456789012":
      Checks:
        "*":
          Regions:
            - "*"
          Resources:
            - "*"
          Exceptions:
            Resources:
              - "test"
            Tags:
              - "environment=prod" # Will ignore every resource in account 123456789012 except the ones containing the string "test" and the tag environment=prod
```

## AWS Mutelist

### Mute specific AWS regions

@@ -125,7 +125,7 @@ The JSON-OCSF output format implements the [Detection Finding](https://schema.oc
    "product": {
      "name": "Prowler",
      "vendor_name": "Prowler",
      "version": "4.2.2"
      "version": "4.2.4"
    },
    "version": "1.1.0"
  },

@@ -11,6 +11,12 @@ prowler <provider> --scan-unused-services

## Services that are ignored
### AWS
#### ACM
You can have certificates in ACM that are not in use by any AWS resource.
Prowler will check whether every certificate is going to expire soon; by default, a certificate that is not in use is not checked at all, whether it is expired, about to expire, or still valid. The affected check is listed below, followed by an example.

- `acm_certificates_expiration_check`
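For example, a run such as the following (the flag and check name are taken from this page) would also evaluate certificates that are not in use:

```console
prowler aws --scan-unused-services -c acm_certificates_expiration_check
```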
#### Athena
When you create an AWS account, Athena will create a default primary workgroup for you.
Prowler will check if that workgroup is enabled and if it is being used by checking whether there were queries in the last 45 days.
@@ -30,10 +36,11 @@ If EBS default encryption is not enabled, sensitive information at rest is not pr

- `ec2_ebs_default_encryption`

If your security groups are not properly configured, the attack surface is increased; nonetheless, Prowler will detect which security groups are in use (attached) so that it only notifies about those. This logic applies to the 15 checks related to open ports in security groups and the check for the default security group.
If your security groups are not properly configured, the attack surface is increased; nonetheless, Prowler will detect which security groups are in use (attached) so that it only notifies about those. This logic applies to the 15 checks related to open ports in security groups, the check for the default security group, and the check for security groups that allow both ingress and egress traffic.

- `ec2_securitygroup_allow_ingress_from_internet_to_port_X` (15 checks)
- `ec2_securitygroup_default_restrict_traffic`
- `ec2_securitygroup_allow_wide_open_public_ipv4`

Prowler will also check for Network ACLs that are in use, so that it only alerts on those with open ports that are actually being used.

@@ -83,9 +83,14 @@ nav:
        - Authentication: tutorials/azure/authentication.md
        - Non default clouds: tutorials/azure/use-non-default-cloud.md
        - Subscriptions: tutorials/azure/subscriptions.md
        - Create Prowler Service Principal: tutorials/azure/create-prowler-service-principal.md
    - Google Cloud:
        - Authentication: tutorials/gcp/authentication.md
        - Projects: tutorials/gcp/projects.md
    - Kubernetes:
        - In-Cluster Execution: tutorials/kubernetes/in-cluster.md
        - Non In-Cluster Execution: tutorials/kubernetes/outside-cluster.md
        - Miscellaneous: tutorials/kubernetes/misc.md
    - Developer Guide:
        - Introduction: developer-guide/introduction.md
        - Provider: developer-guide/provider.md

@@ -58,20 +58,29 @@ Resources:
              - 'account:Get*'
              - 'appstream:Describe*'
              - 'appstream:List*'
              - 'backup:List*'
              - 'cloudtrail:GetInsightSelectors'
              - 'codeartifact:List*'
              - 'codebuild:BatchGet*'
              - 'cognito-idp:GetUserPoolMfaConfig'
              - 'dlm:Get*'
              - 'drs:Describe*'
              - 'ds:Get*'
              - 'ds:Describe*'
              - 'ds:List*'
              - 'dynamodb:GetResourcePolicy'
              - 'ec2:GetEbsEncryptionByDefault'
              - 'ec2:GetSnapshotBlockPublicAccessState'
              - 'ec2:GetInstanceMetadataDefaults'
              - 'ecr:Describe*'
              - 'ecr:GetRegistryScanningConfiguration'
              - 'elasticfilesystem:DescribeBackupPolicy'
              - 'glue:GetConnections'
              - 'glue:GetSecurityConfiguration*'
              - 'glue:SearchTables'
              - 'lambda:GetFunction*'
              - 'logs:FilterLogEvents'
              - 'lightsail:GetRelationalDatabases'
              - 'macie2:GetMacieSession'
              - 's3:GetAccountPublicAccessBlock'
              - 'shield:DescribeProtection'
@@ -79,8 +88,10 @@ Resources:
              - 'securityhub:BatchImportFindings'
              - 'securityhub:GetFindings'
              - 'ssm:GetDocument'
              - 'ssm-incidents:List*'
              - 'support:Describe*'
              - 'tag:GetTagKeys'
              - 'wellarchitected:List*'
            Resource: '*'
      - PolicyName: ProwlerScanRoleAdditionalViewPrivilegesApiGateway
        PolicyDocument:

@@ -16,7 +16,10 @@
        "ds:Get*",
        "ds:Describe*",
        "ds:List*",
        "dynamodb:GetResourcePolicy",
        "ec2:GetEbsEncryptionByDefault",
        "ec2:GetSnapshotBlockPublicAccessState",
        "ec2:GetInstanceMetadataDefaults",
        "ecr:Describe*",
        "ecr:GetRegistryScanningConfiguration",
        "elasticfilesystem:DescribeBackupPolicy",
@@ -25,6 +28,7 @@
        "glue:SearchTables",
        "lambda:GetFunction*",
        "logs:FilterLogEvents",
        "lightsail:GetRelationalDatabases",
        "macie2:GetMacieSession",
        "s3:GetAccountPublicAccessBlock",
        "shield:DescribeProtection",

20  permissions/prowler-azure-custom-role.json (new file)
@@ -0,0 +1,20 @@
{
  "properties": {
    "roleName": "ProwlerRole",
    "description": "Role used for checks that require read-only access to Azure resources and are not covered by the Reader role.",
    "assignableScopes": [
      "/"
    ],
    "permissions": [
      {
        "actions": [
          "Microsoft.Web/sites/host/listkeys/action",
          "Microsoft.Web/sites/config/list/Action"
        ],
        "notActions": [],
        "dataActions": [],
        "notDataActions": []
      }
    ]
  }
}
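Assuming this JSON is saved as `prowler-azure-custom-role.json`, one way to create the role with the standard Azure CLI is the following sketch (creating a role with an `assignableScopes` of `/` requires sufficient privileges at the root scope):

```console
az role definition create --role-definition @prowler-azure-custom-role.json
```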
2741  poetry.lock (generated)
@@ -6,11 +6,15 @@ from os import environ

from colorama import Fore, Style

from prowler.config.config import get_available_compliance_frameworks
from prowler.config.config import (
    csv_file_suffix,
    get_available_compliance_frameworks,
    html_file_suffix,
    json_asff_file_suffix,
    json_ocsf_file_suffix,
)
from prowler.lib.banner import print_banner
from prowler.lib.check.check import (
    bulk_load_checks_metadata,
    bulk_load_compliance_frameworks,
    exclude_checks_to_run,
    exclude_services_to_run,
    execute_checks,
@@ -30,27 +34,47 @@ from prowler.lib.check.check import (
)
from prowler.lib.check.checks_loader import load_checks_to_execute
from prowler.lib.check.compliance import update_checks_metadata_with_compliance
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.check.custom_checks_metadata import (
    parse_custom_checks_metadata_file,
    update_checks_metadata,
)
from prowler.lib.check.models import CheckMetadata
from prowler.lib.cli.parser import ProwlerArgumentParser
from prowler.lib.logger import logger, set_logging_config
from prowler.lib.outputs.asff.asff import ASFF
from prowler.lib.outputs.compliance.aws_well_architected.aws_well_architected import (
    AWSWellArchitected,
)
from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS
from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS
from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS
from prowler.lib.outputs.compliance.compliance import display_compliance_table
from prowler.lib.outputs.html.html import add_html_footer, fill_html_overview_statistics
from prowler.lib.outputs.json.json import close_json
from prowler.lib.outputs.compliance.ens.ens_aws import AWSENS
from prowler.lib.outputs.compliance.generic.generic import GenericCompliance
from prowler.lib.outputs.compliance.iso27001.iso27001_aws import AWSISO27001
from prowler.lib.outputs.compliance.kisa_ismsp.kisa_ismsp_aws import AWSKISAISMSP
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_aws import AWSMitreAttack
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_azure import (
    AzureMitreAttack,
)
from prowler.lib.outputs.compliance.mitre_attack.mitre_attack_gcp import GCPMitreAttack
from prowler.lib.outputs.csv.csv import CSV
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.html.html import HTML
from prowler.lib.outputs.ocsf.ocsf import OCSF
from prowler.lib.outputs.outputs import extract_findings_statistics
from prowler.lib.outputs.slack.slack import Slack
from prowler.lib.outputs.summary_table import display_summary_table
from prowler.providers.aws.lib.s3.s3 import send_to_s3_bucket
from prowler.providers.aws.lib.security_hub.security_hub import (
    batch_send_to_security_hub,
    prepare_security_hub_findings,
    resolve_security_hub_previous_findings,
    verify_security_hub_integration_enabled_per_region,
)
from prowler.providers.aws.lib.s3.s3 import S3
from prowler.providers.aws.lib.security_hub.security_hub import SecurityHub
from prowler.providers.aws.models import AWSOutputOptions
from prowler.providers.azure.models import AzureOutputOptions
from prowler.providers.common.provider import Provider
from prowler.providers.common.quick_inventory import run_provider_quick_inventory
from prowler.providers.gcp.models import GCPOutputOptions
from prowler.providers.kubernetes.models import KubernetesOutputOptions


def prowler():
@@ -112,7 +136,7 @@ def prowler():

    # Load checks metadata
    logger.debug("Loading checks metadata from .metadata.json files")
    bulk_checks_metadata = bulk_load_checks_metadata(provider)
    bulk_checks_metadata = CheckMetadata.get_bulk(provider)

    if args.list_categories:
        print_categories(list_categories(bulk_checks_metadata))
@@ -122,7 +146,7 @@ def prowler():
    # Load compliance frameworks
    logger.debug("Loading compliance frameworks from .json files")

    bulk_compliance_frameworks = bulk_load_compliance_frameworks(provider)
    bulk_compliance_frameworks = Compliance.get_bulk(provider)
    # Complete checks metadata with the compliance framework specification
    bulk_checks_metadata = update_checks_metadata_with_compliance(
        bulk_compliance_frameworks, bulk_checks_metadata
@@ -171,7 +195,7 @@ def prowler():
        sys.exit()

    # Provider to scan
    Provider.set_global_provider(args)
    Provider.init_global_provider(args)
    global_provider = Provider.get_global_provider()

    # Print Provider Credentials
@@ -180,7 +204,17 @@ def prowler():

    # Import custom checks from folder
    if checks_folder:
        parse_checks_from_folder(global_provider, checks_folder)
        custom_checks = parse_checks_from_folder(global_provider, checks_folder)
        # Workaround to be able to execute custom checks alongside all checks if nothing is explicitly set
        if (
            not checks_file
            and not checks
            and not services
            and not severities
            and not compliance_framework
            and not categories
        ):
            checks_to_execute.update(custom_checks)

    # Exclude checks if -e/--excluded-checks
    if excluded_checks:
@@ -195,7 +229,8 @@ def prowler():
    # Once the provider is set and we have the eventual checks based on the resource identifier,
    # it is time to check what Prowler's checks are going to be executed
    checks_from_resources = global_provider.get_checks_to_execute_by_audit_resources()
    if checks_from_resources:
    # Intersect checks from resources with checks to execute so we only run the checks that apply to the resources with the specified ARNs or tags
    if getattr(args, "resource_arn", None) or getattr(args, "resource_tag", None):
        checks_to_execute = checks_to_execute.intersection(checks_from_resources)

    # Sort final check list
@@ -205,7 +240,22 @@ def prowler():
    global_provider.mutelist = args.mutelist_file

    # Setup Output Options
    global_provider.output_options = (args, bulk_checks_metadata)
    if provider == "aws":
        output_options = AWSOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )
    elif provider == "azure":
        output_options = AzureOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )
    elif provider == "gcp":
        output_options = GCPOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )
    elif provider == "kubernetes":
        output_options = KubernetesOutputOptions(
            args, bulk_checks_metadata, global_provider.identity
        )

    # Run the quick inventory for the provider if available
    if hasattr(args, "quick_inventory") and args.quick_inventory:
@@ -221,6 +271,7 @@ def prowler():
            global_provider,
            custom_checks_metadata,
            args.config_file,
            output_options,
        )
    else:
        logger.error(
@@ -228,7 +279,7 @@ def prowler():
        )

    # Prowler Fixer
    if global_provider.output_options.fixer:
    if output_options.fixer:
        print(f"{Style.BRIGHT}\nRunning Prowler Fixer, please wait...{Style.RESET_ALL}")
        # Check if there are any FAIL findings
        if any("FAIL" in finding.status for finding in findings):
@@ -270,103 +321,342 @@ def prowler():
        )
        sys.exit(1)

    # Outputs
    # TODO: this part is needed since the checks generate a Check_Report_XXX and the output uses Finding
    # This will be refactored so that the outputs generate the Finding directly
    finding_outputs = [
        Finding.generate_output(global_provider, finding, output_options)
        for finding in findings
    ]

    generated_outputs = {"regular": [], "compliance": []}

    if args.output_formats:
        for mode in args.output_formats:
            # Close json file if exists
            if "json" in mode:
                close_json(
                    global_provider.output_options.output_filename,
                    global_provider.output_options.output_directory,
                    mode,
            filename = (
                f"{output_options.output_directory}/"
                f"{output_options.output_filename}"
            )
if mode == "csv":
|
||||
csv_output = CSV(
|
||||
findings=finding_outputs,
|
||||
create_file_descriptor=True,
|
||||
file_path=f"{filename}{csv_file_suffix}",
|
||||
)
|
||||
generated_outputs["regular"].append(csv_output)
|
||||
# Write CSV Finding Object to file
|
||||
csv_output.batch_write_data_to_file()
|
||||
|
||||
if mode == "json-asff":
|
||||
asff_output = ASFF(
|
||||
findings=finding_outputs,
|
||||
create_file_descriptor=True,
|
||||
file_path=f"{filename}{json_asff_file_suffix}",
|
||||
)
|
||||
generated_outputs["regular"].append(asff_output)
|
||||
# Write ASFF Finding Object to file
|
||||
asff_output.batch_write_data_to_file()
|
||||
|
||||
if mode == "json-ocsf":
|
||||
json_output = OCSF(
|
||||
findings=finding_outputs,
|
||||
create_file_descriptor=True,
|
||||
file_path=f"{filename}{json_ocsf_file_suffix}",
|
||||
)
|
||||
generated_outputs["regular"].append(json_output)
|
||||
json_output.batch_write_data_to_file()
|
||||
if mode == "html":
|
||||
html_output = HTML(
|
||||
findings=finding_outputs,
|
||||
create_file_descriptor=True,
|
||||
file_path=f"{filename}{html_file_suffix}",
|
||||
)
|
||||
generated_outputs["regular"].append(html_output)
|
||||
html_output.batch_write_data_to_file(
|
||||
provider=global_provider, stats=stats
|
||||
)
|
||||
|
||||
if "html" in mode:
|
||||
add_html_footer(
|
||||
global_provider.output_options.output_filename,
|
||||
global_provider.output_options.output_directory,
|
||||
    # Compliance Frameworks
    input_compliance_frameworks = set(output_options.output_modes).intersection(
        get_available_compliance_frameworks(provider)
    )
    if provider == "aws":
        for compliance_name in input_compliance_frameworks:
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                fill_html_overview_statistics(
                    stats,
                    global_provider.output_options.output_filename,
                    global_provider.output_options.output_directory,
                cis = AWSCIS(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(cis)
                cis.batch_write_data_to_file()
            elif compliance_name == "mitre_attack_aws":
                # Generate MITRE ATT&CK Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                mitre_attack = AWSMitreAttack(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(mitre_attack)
                mitre_attack.batch_write_data_to_file()
            elif compliance_name.startswith("ens_"):
                # Generate ENS Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                ens = AWSENS(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(ens)
                ens.batch_write_data_to_file()
            elif compliance_name.startswith("aws_well_architected_framework"):
                # Generate AWS Well-Architected Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                aws_well_architected = AWSWellArchitected(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(aws_well_architected)
                aws_well_architected.batch_write_data_to_file()
            elif compliance_name.startswith("iso27001_"):
                # Generate ISO27001 Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                iso27001 = AWSISO27001(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(iso27001)
                iso27001.batch_write_data_to_file()
            elif compliance_name.startswith("kisa"):
                # Generate KISA-ISMS-P Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                kisa_ismsp = AWSKISAISMSP(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(kisa_ismsp)
                kisa_ismsp.batch_write_data_to_file()
            else:
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(generic_compliance)
                generic_compliance.batch_write_data_to_file()

            # Send output to S3 if needed (-B / -D)
            if provider == "aws" and (
                args.output_bucket or args.output_bucket_no_assume
            ):
                output_bucket = args.output_bucket
                bucket_session = global_provider.session.current_session
                # Check if -D was input
                if args.output_bucket_no_assume:
                    output_bucket = args.output_bucket_no_assume
                    bucket_session = global_provider.session.original_session
                send_to_s3_bucket(
                    global_provider.output_options.output_filename,
                    args.output_directory,
                    mode,
                    output_bucket,
                    bucket_session,
    elif provider == "azure":
        for compliance_name in input_compliance_frameworks:
            if compliance_name.startswith("cis_"):
                # Generate CIS Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                cis = AzureCIS(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(cis)
                cis.batch_write_data_to_file()
            elif compliance_name == "mitre_attack_azure":
                # Generate MITRE ATT&CK Finding Object
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                mitre_attack = AzureMitreAttack(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(mitre_attack)
                mitre_attack.batch_write_data_to_file()
            else:
                filename = (
                    f"{output_options.output_directory}/compliance/"
                    f"{output_options.output_filename}_{compliance_name}.csv"
                )
                generic_compliance = GenericCompliance(
                    findings=finding_outputs,
                    compliance=bulk_compliance_frameworks[compliance_name],
                    create_file_descriptor=True,
                    file_path=filename,
                )
                generated_outputs["compliance"].append(generic_compliance)
                generic_compliance.batch_write_data_to_file()

elif provider == "gcp":
|
||||
for compliance_name in input_compliance_frameworks:
|
||||
if compliance_name.startswith("cis_"):
|
||||
# Generate CIS Finding Object
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
f"{output_options.output_filename}_{compliance_name}.csv"
|
||||
)
|
||||
cis = GCPCIS(
|
||||
findings=finding_outputs,
|
||||
compliance=bulk_compliance_frameworks[compliance_name],
|
||||
create_file_descriptor=True,
|
||||
file_path=filename,
|
||||
)
|
||||
generated_outputs["compliance"].append(cis)
|
||||
cis.batch_write_data_to_file()
|
||||
elif compliance_name == "mitre_attack_gcp":
|
||||
# Generate MITRE ATT&CK Finding Object
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
f"{output_options.output_filename}_{compliance_name}.csv"
|
||||
)
|
||||
mitre_attack = GCPMitreAttack(
|
||||
findings=finding_outputs,
|
||||
compliance=bulk_compliance_frameworks[compliance_name],
|
||||
create_file_descriptor=True,
|
||||
file_path=filename,
|
||||
)
|
||||
generated_outputs["compliance"].append(mitre_attack)
|
||||
mitre_attack.batch_write_data_to_file()
|
||||
else:
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
f"{output_options.output_filename}_{compliance_name}.csv"
|
||||
)
|
||||
generic_compliance = GenericCompliance(
|
||||
findings=finding_outputs,
|
||||
compliance=bulk_compliance_frameworks[compliance_name],
|
||||
create_file_descriptor=True,
|
||||
file_path=filename,
|
||||
)
|
||||
generated_outputs["compliance"].append(generic_compliance)
|
||||
generic_compliance.batch_write_data_to_file()
|
||||
|
||||
elif provider == "kubernetes":
|
||||
for compliance_name in input_compliance_frameworks:
|
||||
if compliance_name.startswith("cis_"):
|
||||
# Generate CIS Finding Object
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
f"{output_options.output_filename}_{compliance_name}.csv"
|
||||
)
|
||||
cis = KubernetesCIS(
|
||||
findings=finding_outputs,
|
||||
compliance=bulk_compliance_frameworks[compliance_name],
|
||||
create_file_descriptor=True,
|
||||
file_path=filename,
|
||||
)
|
||||
generated_outputs["compliance"].append(cis)
|
||||
cis.batch_write_data_to_file()
|
||||
else:
|
||||
filename = (
|
||||
f"{output_options.output_directory}/compliance/"
|
||||
f"{output_options.output_filename}_{compliance_name}.csv"
|
||||
)
|
||||
generic_compliance = GenericCompliance(
|
||||
findings=finding_outputs,
|
||||
compliance=bulk_compliance_frameworks[compliance_name],
|
||||
create_file_descriptor=True,
|
||||
file_path=filename,
|
||||
)
|
||||
generated_outputs["compliance"].append(generic_compliance)
|
||||
generic_compliance.batch_write_data_to_file()
|
||||
|
||||
    # AWS Security Hub Integration
    if provider == "aws" and args.security_hub:
        print(
            f"{Style.BRIGHT}\nSending findings to AWS Security Hub, please wait...{Style.RESET_ALL}"
        )
        # Verify where AWS Security Hub is enabled
        aws_security_enabled_regions = []
        security_hub_regions = (
            global_provider.get_available_aws_service_regions("securityhub")
            if not global_provider.identity.audited_regions
            else global_provider.identity.audited_regions
        )
        for region in security_hub_regions:
            # Save the regions where AWS Security Hub is enabled
            if verify_security_hub_integration_enabled_per_region(
                global_provider.identity.partition,
                region,
                global_provider.session.current_session,
                global_provider.identity.account,
            ):
                aws_security_enabled_regions.append(region)

        # Prepare the findings to be sent to Security Hub
        security_hub_findings_per_region = prepare_security_hub_findings(
            findings,
            global_provider,
            global_provider.output_options,
            aws_security_enabled_regions,
        )

        # Send the findings to Security Hub
        findings_sent_to_security_hub = batch_send_to_security_hub(
            security_hub_findings_per_region, global_provider.session.current_session
        )

        print(
            f"{Style.BRIGHT}{Fore.GREEN}\n{findings_sent_to_security_hub} findings sent to AWS Security Hub!{Style.RESET_ALL}"
        )

        # Resolve previous fails of Security Hub
        if not args.skip_sh_update:
if provider == "aws":
|
||||
# Send output to S3 if needed (-B / -D) for all the output formats
|
||||
if args.output_bucket or args.output_bucket_no_assume:
|
||||
output_bucket = args.output_bucket
|
||||
bucket_session = global_provider.session.current_session
|
||||
# Check if -D was input
|
||||
if args.output_bucket_no_assume:
|
||||
output_bucket = args.output_bucket_no_assume
|
||||
bucket_session = global_provider.session.original_session
|
||||
s3 = S3(
|
||||
session=bucket_session,
|
||||
bucket_name=output_bucket,
|
||||
output_directory=args.output_directory,
|
||||
)
|
||||
s3.send_to_bucket(generated_outputs)
|
||||
if args.security_hub:
|
||||
print(
|
||||
f"{Style.BRIGHT}\nArchiving previous findings in AWS Security Hub, please wait...{Style.RESET_ALL}"
|
||||
f"{Style.BRIGHT}\nSending findings to AWS Security Hub, please wait...{Style.RESET_ALL}"
|
||||
)
|
||||
findings_archived_in_security_hub = resolve_security_hub_previous_findings(
|
||||
security_hub_findings_per_region,
|
||||
global_provider,
|
||||
|
||||
security_hub_regions = (
|
||||
global_provider.get_available_aws_service_regions("securityhub")
|
||||
if not global_provider.identity.audited_regions
|
||||
else global_provider.identity.audited_regions
|
||||
)
|
||||
|
||||
security_hub = SecurityHub(
|
||||
aws_account_id=global_provider.identity.account,
|
||||
aws_partition=global_provider.identity.partition,
|
||||
aws_session=global_provider.session.current_session,
|
||||
findings=asff_output.data,
|
||||
send_only_fails=output_options.send_sh_only_fails,
|
||||
aws_security_hub_available_regions=security_hub_regions,
|
||||
)
|
||||
# Send the findings to Security Hub
|
||||
findings_sent_to_security_hub = security_hub.batch_send_to_security_hub()
|
||||
print(
|
||||
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_archived_in_security_hub} findings archived in AWS Security Hub!{Style.RESET_ALL}"
|
||||
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_sent_to_security_hub} findings sent to AWS Security Hub!{Style.RESET_ALL}"
|
||||
)
|
||||
|
||||
# Resolve previous fails of Security Hub
|
||||
if not args.skip_sh_update:
|
||||
print(
|
||||
f"{Style.BRIGHT}\nArchiving previous findings in AWS Security Hub, please wait...{Style.RESET_ALL}"
|
||||
)
|
||||
findings_archived_in_security_hub = (
|
||||
security_hub.archive_previous_findings()
|
||||
)
|
||||
print(
|
||||
f"{Style.BRIGHT}{Fore.GREEN}\n{findings_archived_in_security_hub} findings archived in AWS Security Hub!{Style.RESET_ALL}"
|
||||
)
|
||||
|
||||
    # Display summary table
    if not args.only_logs:
        display_summary_table(
            findings,
            global_provider,
            global_provider.output_options,
            output_options,
        )
    # Only display compliance table if there are findings (not all MANUAL) and it is a default execution
    if (
@@ -385,13 +675,13 @@ def prowler():
            findings,
            bulk_checks_metadata,
            compliance,
            global_provider.output_options.output_filename,
            global_provider.output_options.output_directory,
            output_options.output_filename,
            output_options.output_directory,
            compliance_overview,
        )
        if compliance_overview:
            print(
                f"\nDetailed compliance results are in {Fore.YELLOW}{global_provider.output_options.output_directory}/compliance/{Style.RESET_ALL}\n"
                f"\nDetailed compliance results are in {Fore.YELLOW}{output_options.output_directory}/compliance/{Style.RESET_ALL}\n"
            )

    # If custom checks were passed, remove the modules

4335  prowler/compliance/aws/kisa_isms_p_2023_aws.json (new file)
4335  prowler/compliance/aws/kisa_isms_p_2023_korean_aws.json (new file)
@@ -3044,7 +3044,7 @@
      "Id": "9.4",
      "Description": "Ensure that Register with Entra ID is enabled on App Service",
      "Checks": [
        "app_client_certificates_on"
        ""
      ],
      "Attributes": [
        {
@@ -3066,7 +3066,7 @@
      "Id": "9.5",
      "Description": "Ensure That 'PHP version' is the Latest, If Used to Run the Web App",
      "Checks": [
        "app_register_with_identity"
        "app_ensure_php_version_is_latest"
      ],
      "Attributes": [
        {
@@ -3088,7 +3088,7 @@
      "Id": "9.6",
      "Description": "Ensure that 'Python version' is the Latest Stable Version, if Used to Run the Web App",
      "Checks": [
        "app_ensure_php_version_is_latest"
        "app_ensure_python_version_is_latest"
      ],
      "Attributes": [
        {
@@ -3110,7 +3110,7 @@
      "Id": "9.7",
      "Description": "Ensure that 'Java version' is the latest, if used to run the Web App",
      "Checks": [
        "app_ensure_python_version_is_latest"
        "app_ensure_java_version_is_latest"
      ],
      "Attributes": [
        {
@@ -3132,7 +3132,7 @@
      "Id": "9.8",
      "Description": "Ensure that 'HTTP Version' is the Latest, if Used to Run the Web App",
      "Checks": [
        "app_ensure_java_version_is_latest"
        "app_ensure_using_http20"
      ],
      "Attributes": [
        {
@@ -3154,7 +3154,7 @@
      "Id": "9.9",
      "Description": "Ensure FTP deployments are Disabled",
      "Checks": [
        "app_ensure_using_http20"
        "app_ftp_deployment_disabled"
      ],
      "Attributes": [
        {
@@ -3176,7 +3176,7 @@
      "Id": "9.10",
      "Description": "Ensure Azure Key Vaults are Used to Store Secrets",
      "Checks": [
        "app_ftp_deployment_disabled"
        ""
      ],
      "Attributes": [
        {
@@ -3213,66 +3213,6 @@
"References": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://learn.microsoft.com/en-us/security/benchmark/azure/mcsb-asset-management#am-4-limit-access-to-asset-management"
}
]
},
{
"Id": "9.10",
"Description": "Ensure FTP deployments are Disabled",
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
"Profile": "Level 1",
"AssessmentStatus": "Automated",
"Description": "By default, Azure Functions, Web, and API Services can be deployed over FTP. If FTP is required for an essential deployment workflow, FTPS should be required for FTP login for all App Service Apps and Functions.",
"RationaleStatement": "Azure FTP deployment endpoints are public. An attacker listening to traffic on a wifi network used by a remote employee or a corporate network could see login traffic in clear-text which would then grant them full control of the code base of the app or service. This finding is more severe if User Credentials for deployment are set at the subscription level rather than using the default Application Credentials which are unique per App.",
"ImpactStatement": "Any deployment workflows that rely on FTP or FTPs rather than the WebDeploy or HTTPs endpoints may be affected.",
"RemediationProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should be set to `Disabled` or `FTPS Only` **From Azure CLI** For each out of compliance application, run the following choosing either 'disabled' or 'FtpsOnly' as appropriate: ``` az webapp config set --resource-group <resource group name> --name <app name> --ftps-state [disabled|FtpsOnly] ``` **From PowerShell** For each out of compliance application, run the following: ``` Set-AzWebApp -ResourceGroupName <resource group name> -Name <app name> -FtpsState <Disabled or FtpsOnly> ```",
"AuditProcedure": "**From Azure Portal** 1. Go to the Azure Portal 2. Select `App Services` 3. Click on an app 4. Select `Settings` and then `Configuration` 5. Under `General Settings`, for the `Platform Settings`, the `FTP state` should not be set to `All allowed` **From Azure CLI** List webapps to obtain the ids. ``` az webapp list ``` List the publish profiles to obtain the username, password and ftp server url. ``` az webapp deployment list-publishing-profiles --ids <ids> { publishUrl: <URL_FOR_WEB_APP>, userName: <USER_NAME>, userPWD: <USER_PASSWORD>, } ``` **From PowerShell** List all Web Apps: ``` Get-AzWebApp ``` For each app: ``` Get-AzWebApp -ResourceGroupName <resource group name> -Name <app name> | Select-Object -ExpandProperty SiteConfig ``` In the output, look for the value of **FtpsState**. If its value is **AllAllowed** the setting is out of compliance. Any other value is considered in compliance with this check.",
"AdditionalInformation": "",
"DefaultValue": "[Azure Web Service Deploy via FTP](https://docs.microsoft.com/en-us/azure/app-service/deploy-ftp):[Azure Web Service Deployment](https://docs.microsoft.com/en-us/azure/app-service/overview-security):https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-data-protection#dp-4-encrypt-sensitive-information-in-transit:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-posture-vulnerability-management#pv-7-rapidly-and-automatically-remediate-software-vulnerabilities",
"References": "TA0008, T1570, M1031"
}
]
},
{
"Id": "9.11",
"Description": "Ensure Azure Key Vaults are Used to Store Secrets",
"Checks": [],
"Attributes": [
{
"Section": "9. AppService",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Azure Key Vault will store multiple types of sensitive information such as encryption keys, certificate thumbprints, and Managed Identity Credentials. Access to these 'Secrets' can be controlled through granular permissions.",
"RationaleStatement": "The credentials given to an application have permissions to create, delete, or modify data stored within the systems they access. If these credentials are stored within the application itself, anyone with access to the application or a copy of the code has access to them. Storing within Azure Key Vault as secrets increases security by controlling access. This also allows for updates of the credentials without redeploying the entire application.",
"ImpactStatement": "Integrating references to secrets within the key vault are required to be specifically integrated within the application code. This will require additional configuration to be made during the writing of an application, or refactoring of an already written one. There are also additional costs that are charged per 10000 requests to the Key Vault.",
"RemediationProcedure": "Remediation has 2 steps 1. Setup the Key Vault 2. Setup the App Service to use the Key Vault **Step 1: Set up the Key Vault** **From Azure CLI** ``` az keyvault create --name <name> --resource-group <myResourceGroup> --location myLocation ``` **From Powershell** ``` New-AzKeyvault -name <name> -ResourceGroupName <myResourceGroup> -Location <myLocation> ``` **Step 2: Set up the App Service to use the Key Vault** Sample JSON Template for App Service Configuration: ``` { //... resources: [ { type: Microsoft.Storage/storageAccounts, name: [variables('storageAccountName')], //... }, { type: Microsoft.Insights/components, name: [variables('appInsightsName')], //... }, { type: Microsoft.Web/sites, name: [variables('functionAppName')], identity: { type: SystemAssigned }, //... resources: [ { type: config, name: appsettings, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('storageConnectionStringName'))], [resourceId('Microsoft.KeyVault/vaults/secrets', variables('keyVaultName'), variables('appInsightsKeyName'))] ], properties: { AzureWebJobsStorage: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], WEBSITE_CONTENTAZUREFILECONNECTIONSTRING: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('storageConnectionStringResourceId')).secretUriWithVersion, ')')], APPINSIGHTS_INSTRUMENTATIONKEY: [concat('@Microsoft.KeyVault(SecretUri=', reference(variables('appInsightsKeyResourceId')).secretUriWithVersion, ')')], WEBSITE_ENABLE_SYNC_UPDATE_SITE: true //... } }, { type: sourcecontrols, name: web, //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))], [resourceId('Microsoft.Web/sites/config', variables('functionAppName'), 'appsettings')] ], } ] }, { type: Microsoft.KeyVault/vaults, name: [variables('keyVaultName')], //... dependsOn: [ [resourceId('Microsoft.Web/sites', variables('functionAppName'))] ], properties: { //... accessPolicies: [ { tenantId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').tenantId], objectId: [reference(concat('Microsoft.Web/sites/', variables('functionAppName'), '/providers/Microsoft.ManagedIdentity/Identities/default'), '2015-08-31-PREVIEW').principalId], permissions: { secrets: [ get ] } } ] }, resources: [ { type: secrets, name: [variables('storageConnectionStringName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName'))] ], properties: { value: [concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccountName'), ';AccountKey=', listKeys(variables('storageAccountResourceId'),'2015-05-01-preview').key1)] } }, { type: secrets, name: [variables('appInsightsKeyName')], //... dependsOn: [ [resourceId('Microsoft.KeyVault/vaults/', variables('keyVaultName'))], [resourceId('Microsoft.Insights/components', variables('appInsightsName'))] ], properties: { value: [reference(resourceId('microsoft.insights/components/', variables('appInsightsName')), '2015-05-01').InstrumentationKey] } } ] } ] } ```",
"AuditProcedure": "**From Azure Portal** 1. Login to Azure Portal 2. In the expandable menu on the left go to `Key Vaults` 3. View the Key Vaults listed. **From Azure CLI** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list the secrets within these key vaults run the following command: ``` Get-AzKeyVaultSecret [-VaultName] <vault name> ``` **From Powershell** To list key vaults within a subscription run the following command: ``` Get-AzKeyVault ``` To list all secrets in a key vault run the following command: ``` Get-AzKeyVaultSecret -VaultName '<vaultName' ```",
"AdditionalInformation": "",
"DefaultValue": "https://docs.microsoft.com/en-us/azure/app-service/app-service-key-vault-references:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-identity-management#im-2-manage-application-identities-securely-and-automatically:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest:https://docs.microsoft.com/en-us/cli/azure/keyvault?view=azure-cli-latest",
"References": "TA0006, T1552, M1041"
}
]
},
{
"Id": "10.1",
"Description": "Ensure that Resource Locks are set for Mission-Critical Azure Resources",
"Checks": [],
"Attributes": [
{
"Section": "10. Miscellaneous",
"Profile": "Level 2",
"AssessmentStatus": "Manual",
"Description": "Resource Manager Locks provide a way for administrators to lock down Azure resources to prevent deletion of, or modifications to, a resource. These locks sit outside of the Role Based Access Controls (RBAC) hierarchy and, when applied, will place restrictions on the resource for all users. These locks are very useful when there is an important resource in a subscription that users should not be able to delete or change. Locks can help prevent accidental and malicious changes or deletion.",
|
||||
"RationaleStatement": "As an administrator, it may be necessary to lock a subscription, resource group, or resource to prevent other users in the organization from accidentally deleting or modifying critical resources. The lock level can be set to to `CanNotDelete` or `ReadOnly` to achieve this purpose. - `CanNotDelete` means authorized users can still read and modify a resource, but they cannot delete the resource. - `ReadOnly` means authorized users can read a resource, but they cannot delete or update the resource. Applying this lock is similar to restricting all authorized users to the permissions granted by the Reader role.",
|
||||
"ImpactStatement": "There can be unintended outcomes of locking a resource. Applying a lock to a parent service will cause it to be inherited by all resources within. Conversely, applying a lock to a resource may not apply to connected storage, leaving it unlocked. Please see the documentation for further information.",
|
||||
"RemediationProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. For each mission critical resource, click on `Locks` 3. Click `Add` 4. Give the lock a name and a description, then select the type, `Read-only` or `Delete` as appropriate 5. Click OK **From Azure CLI** To lock a resource, provide the name of the resource, its resource type, and its resource group name. ``` az lock create --name <LockName> --lock-type <CanNotDelete/Read-only> --resource-group <resourceGroupName> --resource-name <resourceName> --resource-type <resourceType> ``` **From Powershell** ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> -Locktype <CanNotDelete/Read-only> ```",
|
||||
"AuditProcedure": "**From Azure Portal** 1. Navigate to the specific Azure Resource or Resource Group 2. Click on `Locks` 3. Ensure the lock is defined with name and description, with type `Read-only` or `Delete` as appropriate. **From Azure CLI** Review the list of all locks set currently: ``` az lock list --resource-group <resourcegroupname> --resource-name <resourcename> --namespace <Namespace> --resource-type <type> --parent ``` **From Powershell** Run the following command to list all resources. ``` Get-AzResource ``` For each resource, run the following command to check for Resource Locks. ``` Get-AzResourceLock -ResourceName <Resource Name> -ResourceType <Resource Type> -ResourceGroupName <Resource Group Name> ``` Review the output of the `Properties` setting. Compliant settings will have the `CanNotDelete` or `ReadOnly` value.",
|
||||
"AdditionalInformation": "",
|
||||
"DefaultValue": "https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-lock-resources:https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-subscription-governance#azure-resource-locks:https://docs.microsoft.com/en-us/azure/governance/blueprints/concepts/resource-locking:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-asset-management#am-4-limit-access-to-asset-management",
|
||||
"References": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1288,7 +1288,7 @@
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "2 etcd",
|
||||
"Section": "2 Etcd",
|
||||
"Profile": "Level 1 - Master Node",
|
||||
"AssessmentStatus": "Automated",
|
||||
"Description": "Configure TLS encryption for the etcd service.",
|
||||
@@ -1310,7 +1310,7 @@
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "2 etcd",
|
||||
"Section": "2 Etcd",
|
||||
"Profile": "Level 1 - Master Node",
|
||||
"AssessmentStatus": "Automated",
|
||||
"Description": "Enable client authentication on etcd service.",
|
||||
@@ -1332,7 +1332,7 @@
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "2 etcd",
|
||||
"Section": "2 Etcd",
|
||||
"Profile": "Level 1 - Master Node",
|
||||
"AssessmentStatus": "Automated",
|
||||
"Description": "Do not use self-signed certificates for TLS.",
|
||||
@@ -1354,7 +1354,7 @@
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "2 etcd",
|
||||
"Section": "2 Etcd",
|
||||
"Profile": "Level 1 - Master Node",
|
||||
"AssessmentStatus": "Automated",
|
||||
"Description": "etcd should be configured to make use of TLS encryption for peer connections.",
|
||||
@@ -1376,7 +1376,7 @@
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "2 etcd",
|
||||
"Section": "2 Etcd",
|
||||
"Profile": "Level 1 - Master Node",
|
||||
"AssessmentStatus": "Automated",
|
||||
"Description": "etcd should be configured for peer authentication.",
|
||||
@@ -1398,7 +1398,7 @@
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "2 etcd",
|
||||
"Section": "2 Etcd",
|
||||
"Profile": "Level 1 - Master Node",
|
||||
"AssessmentStatus": "Automated",
|
||||
"Description": "Do not use automatically generated self-signed certificates for TLS connections between peers.",
|
||||
@@ -1420,7 +1420,7 @@
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "2 etcd",
|
||||
"Section": "2 Etcd",
|
||||
"Profile": "Level 2 - Master Node",
|
||||
"AssessmentStatus": "Manual",
|
||||
"Description": "Use a different certificate authority for etcd from the one used for Kubernetes.",
|
||||
@@ -2634,7 +2634,7 @@
|
||||
],
|
||||
"Attributes": [
|
||||
{
|
||||
"Section": "5.4",
|
||||
"Section": "5.4 Secrets Management",
|
||||
"Profile": "Level 2 - Master Node",
|
||||
"AssessmentStatus": "Manual",
|
||||
"Description": "Kubernetes supports mounting secrets as data volumes or as environment variables. Minimize the use of environment variable secrets.",
|
||||
|
||||
@@ -19,8 +19,11 @@ Mutelist:
|
||||
- "StackSet-AWSControlTowerSecurityResources-*"
|
||||
- "StackSet-AWSControlTowerLoggingResources-*"
|
||||
- "StackSet-AWSControlTowerExecutionRole-*"
|
||||
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER"
|
||||
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER"
|
||||
- "AWSControlTowerBP-BASELINE-CLOUDTRAIL-MASTER*"
|
||||
- "AWSControlTowerBP-BASELINE-CONFIG-MASTER*"
|
||||
- "StackSet-AWSControlTower*"
|
||||
- "CLOUDTRAIL-ENABLED-ON-SHARED-ACCOUNTS-*"
|
||||
- "AFT-Backend*"
|
||||
"cloudtrail_*":
|
||||
Regions:
|
||||
- "*"
|
||||
|
||||
@@ -1,17 +1,17 @@
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from os import getcwd
|
||||
|
||||
import requests
|
||||
import yaml
|
||||
from packaging import version
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
timestamp = datetime.today()
|
||||
timestamp_utc = datetime.now(timezone.utc).replace(tzinfo=timezone.utc)
|
||||
prowler_version = "4.2.2"
|
||||
prowler_version = "4.4.2"
|
||||
html_logo_url = "https://github.com/prowler-cloud/prowler/"
|
||||
square_logo_img = "https://prowler.com/wp-content/uploads/logo-html.png"
|
||||
aws_logo = "https://user-images.githubusercontent.com/38561120/235953920-3e3fba08-0795-41dc-b480-9bea57db9f2e.png"
|
||||
@@ -65,6 +65,8 @@ default_config_file_path = (
|
||||
default_fixer_config_file_path = (
|
||||
f"{pathlib.Path(os.path.dirname(os.path.realpath(__file__)))}/fixer_config.yaml"
|
||||
)
|
||||
encoding_format_utf_8 = "utf-8"
|
||||
available_output_formats = ["csv", "json-asff", "json-ocsf", "html"]
|
||||
|
||||
|
||||
def get_default_mute_file_path(provider: str):
|
||||
@@ -85,7 +87,7 @@ def check_current_version():
|
||||
"https://api.github.com/repos/prowler-cloud/prowler/tags", timeout=1
|
||||
)
|
||||
latest_version = release_response.json()[0]["name"]
|
||||
if latest_version != prowler_version:
|
||||
if version.parse(latest_version) > version.parse(prowler_version):
|
||||
return f"{prowler_version_string} (latest is {latest_version}, upgrade for the latest features)"
|
||||
else:
|
||||
return (
|
||||
@@ -99,52 +101,84 @@ def check_current_version():
|
||||
|
||||
def load_and_validate_config_file(provider: str, config_file_path: str) -> dict:
|
||||
"""
|
||||
load_and_validate_config_file reads the Prowler config file in YAML format from the default location or the file passed with the --config-file flag
|
||||
Reads the Prowler config file in YAML format from the default location or the file passed with the --config-file flag.
|
||||
|
||||
Args:
|
||||
provider (str): The provider name (e.g., 'aws', 'gcp', 'azure', 'kubernetes').
|
||||
config_file_path (str): The path to the configuration file.
|
||||
|
||||
Returns:
|
||||
dict: The configuration dictionary for the specified provider.
|
||||
"""
|
||||
try:
|
||||
with open(config_file_path) as f:
|
||||
config = {}
|
||||
with open(config_file_path, "r", encoding=encoding_format_utf_8) as f:
|
||||
config_file = yaml.safe_load(f)
|
||||
|
||||
# Not to introduce a breaking change we have to allow the old format config file without any provider keys
|
||||
# and a new format with a key for each provider to include their configuration values within
|
||||
# Check if the new format is passed
|
||||
if (
|
||||
"aws" in config_file
|
||||
or "gcp" in config_file
|
||||
or "azure" in config_file
|
||||
or "kubernetes" in config_file
|
||||
):
|
||||
# Not to introduce a breaking change, allow the old format config file without any provider keys
|
||||
# and a new format with a key for each provider to include their configuration values within.
|
||||
if any(key in config_file for key in ["aws", "gcp", "azure", "kubernetes"]):
|
||||
config = config_file.get(provider, {})
|
||||
else:
|
||||
config = config_file if config_file else {}
|
||||
# Not to break Azure, K8s and GCP does not support neither use the old config format
|
||||
# To avoid breaking Azure, K8s and GCP, which neither support nor use the old config format
|
||||
if provider in ["azure", "gcp", "kubernetes"]:
|
||||
config = {}
|
||||
|
||||
return config
|
||||
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
except FileNotFoundError as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit(1)
|
||||
except yaml.YAMLError as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
except UnicodeDecodeError as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
|
||||
return {}
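After this refactor the loader returns only the provider's sub-dictionary when the new per-provider layout is used, and the whole file for the legacy flat layout (AWS only). A minimal usage sketch, assuming the module path `prowler.config.config`:
```python
# Hedged sketch: load the AWS section of a config file in the new layout.
from prowler.config.config import load_and_validate_config_file

aws_config = load_and_validate_config_file("aws", "config.yaml")
# Callers pick their own defaults for missing keys, e.g.:
days_threshold = aws_config.get("days_to_expire_threshold", 7)
```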
|
||||
|
||||
|
||||
def load_and_validate_fixer_config_file(
|
||||
provider: str, fixer_config_file_path: str
|
||||
) -> dict:
|
||||
"""
|
||||
load_and_validate_fixer_config_file reads the Prowler fixer config file in YAML format from the default location or the file passed with the --fixer-config flag
|
||||
Reads the Prowler fixer config file in YAML format from the default location or the file passed with the --fixer-config flag.
|
||||
|
||||
Args:
|
||||
provider (str): The provider name (e.g., 'aws', 'gcp', 'azure', 'kubernetes').
|
||||
fixer_config_file_path (str): The path to the fixer configuration file.
|
||||
|
||||
Returns:
|
||||
dict: The fixer configuration dictionary for the specified provider.
|
||||
"""
|
||||
try:
|
||||
with open(fixer_config_file_path) as f:
|
||||
with open(fixer_config_file_path, "r", encoding=encoding_format_utf_8) as f:
|
||||
fixer_config_file = yaml.safe_load(f)
|
||||
|
||||
return fixer_config_file.get(provider, {})
|
||||
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
except FileNotFoundError as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
sys.exit(1)
|
||||
except yaml.YAMLError as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
except UnicodeDecodeError as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
)
|
||||
|
||||
return {}
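The fixer loader follows the same pattern; a short sketch reusing `default_fixer_config_file_path` defined earlier in this diff (same assumed module path):
```python
# Hedged sketch: fetch the AWS section of the default fixer config.
from prowler.config.config import (
    default_fixer_config_file_path,
    load_and_validate_fixer_config_file,
)

aws_fixer_config = load_and_validate_fixer_config_file(
    "aws", default_fixer_config_file_path
)
```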
|
||||
|
||||
@@ -41,8 +41,24 @@ aws:
|
||||
[
|
||||
"amazon-elb"
|
||||
]
|
||||
# aws.ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports
|
||||
ec2_sg_high_risk_ports:
|
||||
[
|
||||
25,
|
||||
110,
|
||||
135,
|
||||
143,
|
||||
445,
|
||||
3000,
|
||||
4333,
|
||||
5000,
|
||||
5500,
|
||||
8080,
|
||||
8088,
|
||||
]
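This list feeds `ec2_securitygroup_allow_ingress_from_internet_to_high_risk_tcp_ports`; a rough sketch of the port-range test such a check needs (names here are illustrative, not Prowler's internals):
```python
# Illustrative sketch: does an ingress rule's port range cover a high-risk port?
HIGH_RISK_PORTS = {25, 110, 135, 143, 445, 3000, 4333, 5000, 5500, 8080, 8088}

def rule_covers_high_risk_port(from_port: int, to_port: int) -> bool:
    return any(from_port <= port <= to_port for port in HIGH_RISK_PORTS)

print(rule_covers_high_risk_port(8000, 9000))  # True: covers 8080 and 8088
print(rule_covers_high_risk_port(443, 443))    # False
```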
|
||||
|
||||
# AWS VPC Configuration (vpc_endpoint_connections_trust_boundaries, vpc_endpoint_services_allowed_principals_trust_boundaries)
|
||||
# AWS SSM Configuration (aws.ssm_documents_set_as_public)
|
||||
# Single account environment: No action required. The AWS account number will be automatically added by the checks.
|
||||
# Multi account environment: Any additional trusted account number should be added to the list, e.g.
|
||||
# trusted_account_ids : ["123456789012", "098765432109", "678901234567"]
|
||||
@@ -86,6 +102,8 @@ aws:
|
||||
"ruby2.5",
|
||||
"ruby2.7",
|
||||
]
|
||||
# aws.awslambda_function_vpc_is_in_multi_azs
|
||||
lambda_min_azs: 2
|
||||
|
||||
# AWS Organizations
|
||||
# aws.organizations_scp_check_deny_regions
|
||||
@@ -262,10 +280,60 @@ aws:
|
||||
"LookupEvents",
|
||||
"Search",
|
||||
]
|
||||
|
||||
# AWS RDS Configuration
|
||||
# aws.rds_instance_backup_enabled
|
||||
# Whether to check RDS instance replicas or not
|
||||
check_rds_instance_replicas: False
|
||||
|
||||
# AWS ACM Configuration
|
||||
# aws.acm_certificates_expiration_check
|
||||
days_to_expire_threshold: 7
|
||||
# aws.acm_certificates_rsa_key_length
|
||||
insecure_key_algorithms:
|
||||
[
|
||||
"RSA-1024",
|
||||
]
|
||||
|
||||
# AWS EKS Configuration
|
||||
# aws.eks_control_plane_logging_all_types_enabled
|
||||
# EKS control plane logging types that must be enabled
|
||||
eks_required_log_types:
|
||||
[
|
||||
"api",
|
||||
"audit",
|
||||
"authenticator",
|
||||
"controllerManager",
|
||||
"scheduler",
|
||||
]
|
||||
|
||||
# aws.eks_cluster_uses_a_supported_version
|
||||
# EKS clusters must be version 1.28 or higher
|
||||
eks_cluster_oldest_version_supported: "1.28"
|
||||
|
||||
# AWS CodeBuild Configuration
|
||||
# aws.codebuild_project_no_secrets_in_variables
|
||||
# CodeBuild sensitive variables that are excluded from the check
|
||||
excluded_sensitive_environment_variables:
|
||||
[
|
||||
|
||||
]
|
||||
|
||||
# AWS ELB Configuration
|
||||
# aws.elb_is_in_multiple_az
|
||||
# Minimum number of Availability Zones that a CLB must be in
|
||||
elb_min_azs: 2
|
||||
|
||||
# AWS ELBv2 Configuration
|
||||
# aws.elbv2_is_in_multiple_az
|
||||
# Minimum number of Availability Zones that an ELBv2 must be in
|
||||
elbv2_min_azs: 2
|
||||
|
||||
|
||||
# AWS Secrets Configuration
|
||||
# Patterns to ignore in the secrets checks
|
||||
secrets_ignore_patterns: []
|
||||
|
||||
# Azure Configuration
|
||||
azure:
|
||||
# Azure Network Configuration
|
||||
|
||||
prowler/exceptions/exceptions.py (new file, 53 lines added)
@@ -0,0 +1,53 @@
|
||||
class ProwlerException(Exception):
|
||||
"""Base exception for all Prowler SDK errors."""
|
||||
|
||||
ERROR_CODES = {
|
||||
(1901, "UnexpectedError"): {
|
||||
"message": "Unexpected error occurred.",
|
||||
"remediation": "Please review the error message and try again.",
|
||||
}
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self, code, provider=None, file=None, original_exception=None, error_info=None
|
||||
):
|
||||
"""
|
||||
Initialize the ProwlerException class.
|
||||
|
||||
Args:
|
||||
code (int): The error code.
|
||||
provider (str): The provider name.
|
||||
file (str): The file name.
|
||||
original_exception (Exception): The original exception.
|
||||
error_info (dict): The error information.
|
||||
|
||||
Example:
|
||||
A ProwlerException is raised with the following parameters and format:
|
||||
>>> original_exception = Exception("Error occurred.")
|
||||
>>> ProwlerException(1901, "AWS", "file.txt", original_exception)
|
||||
[1901] Unexpected error occurred. - Exception: Error occurred.
|
||||
"""
|
||||
self.code = code
|
||||
self.provider = provider
|
||||
self.file = file
|
||||
if error_info is None:
|
||||
error_info = self.ERROR_CODES.get((code, self.__class__.__name__))
|
||||
self.message = error_info.get("message")
|
||||
self.remediation = error_info.get("remediation")
|
||||
self.original_exception = original_exception
|
||||
# Format -> [code] message - original_exception
|
||||
if original_exception is None:
|
||||
super().__init__(f"[{self.code}] {self.message}")
|
||||
else:
|
||||
super().__init__(
|
||||
f"[{self.code}] {self.message} - {self.original_exception}"
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
"""Overriding the __str__ method"""
|
||||
return f"{self.__class__.__name__}[{self.code}]: {self.message} - {self.original_exception}"
|
||||
|
||||
|
||||
class UnexpectedError(ProwlerException):
|
||||
def __init__(self, provider, file, original_exception=None):
|
||||
super().__init__(1901, provider, file, original_exception)
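A brief usage sketch of the new hierarchy; the printed line follows the overridden `__str__` format above:
```python
# Hedged sketch: wrap an arbitrary failure in the new UnexpectedError.
from prowler.exceptions.exceptions import UnexpectedError

try:
    raise ValueError("bad input")
except ValueError as err:
    wrapped = UnexpectedError(provider="aws", file="config.yaml", original_exception=err)
    print(wrapped)  # UnexpectedError[1901]: Unexpected error occurred. - bad input
```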
|
||||
@@ -6,7 +6,6 @@ import re
|
||||
import shutil
|
||||
import sys
|
||||
import traceback
|
||||
from pkgutil import walk_packages
|
||||
from types import ModuleType
|
||||
from typing import Any
|
||||
|
||||
@@ -15,69 +14,15 @@ from colorama import Fore, Style
|
||||
|
||||
import prowler
|
||||
from prowler.config.config import orange_color
|
||||
from prowler.lib.check.compliance_models import load_compliance_framework
|
||||
from prowler.lib.check.custom_checks_metadata import update_check_metadata
|
||||
from prowler.lib.check.models import Check, load_check_metadata
|
||||
from prowler.lib.check.models import Check
|
||||
from prowler.lib.check.utils import recover_checks_from_provider
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.mutelist.mutelist import mutelist_findings
|
||||
from prowler.lib.outputs.outputs import report
|
||||
from prowler.lib.utils.utils import open_file, parse_json_file, print_boxes
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
|
||||
# Load all checks metadata
|
||||
def bulk_load_checks_metadata(provider: str) -> dict:
|
||||
bulk_check_metadata = {}
|
||||
checks = recover_checks_from_provider(provider)
|
||||
# Build list of check's metadata files
|
||||
for check_info in checks:
|
||||
# Build check path name
|
||||
check_name = check_info[0]
|
||||
check_path = check_info[1]
|
||||
# Ignore fixer files
|
||||
if check_name.endswith("_fixer"):
|
||||
continue
|
||||
# Append metadata file extension
|
||||
metadata_file = f"{check_path}/{check_name}.metadata.json"
|
||||
# Load metadata
|
||||
check_metadata = load_check_metadata(metadata_file)
|
||||
bulk_check_metadata[check_metadata.CheckID] = check_metadata
|
||||
|
||||
return bulk_check_metadata
|
||||
|
||||
|
||||
# Bulk load all compliance frameworks specification
|
||||
def bulk_load_compliance_frameworks(provider: str) -> dict:
|
||||
"""Bulk load all compliance frameworks specification into a dict"""
|
||||
try:
|
||||
bulk_compliance_frameworks = {}
|
||||
available_compliance_framework_modules = list_compliance_modules()
|
||||
for compliance_framework in available_compliance_framework_modules:
|
||||
if provider in compliance_framework.name:
|
||||
compliance_specification_dir_path = (
|
||||
f"{compliance_framework.module_finder.path}/{provider}"
|
||||
)
|
||||
|
||||
# for compliance_framework in available_compliance_framework_modules:
|
||||
for filename in os.listdir(compliance_specification_dir_path):
|
||||
file_path = os.path.join(
|
||||
compliance_specification_dir_path, filename
|
||||
)
|
||||
# Check if it is a file and its size is greater than 0
|
||||
if os.path.isfile(file_path) and os.stat(file_path).st_size > 0:
|
||||
# Open Compliance file in JSON
|
||||
# cis_v1.4_aws.json --> cis_v1.4_aws
|
||||
compliance_framework_name = filename.split(".json")[0]
|
||||
# Store the compliance info
|
||||
bulk_compliance_frameworks[compliance_framework_name] = (
|
||||
load_compliance_framework(file_path)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
|
||||
|
||||
return bulk_compliance_frameworks
|
||||
|
||||
|
||||
# Exclude checks to run
|
||||
def exclude_checks_to_run(checks_to_execute: set, excluded_checks: list) -> set:
|
||||
for check in excluded_checks:
|
||||
@@ -126,9 +71,10 @@ def parse_checks_from_file(input_file: str, provider: str) -> set:
|
||||
|
||||
|
||||
# Load checks from custom folder
|
||||
def parse_checks_from_folder(provider, input_folder: str) -> int:
|
||||
def parse_checks_from_folder(provider, input_folder: str) -> set:
|
||||
# TODO: move the AWS-specific code into the provider
|
||||
try:
|
||||
imported_checks = 0
|
||||
custom_checks = set()
|
||||
# Check if input folder is a S3 URI
|
||||
if provider.type == "aws" and re.search(
|
||||
"^s3://([^/]+)/(.*?([^/]+))/$", input_folder
|
||||
@@ -156,8 +102,8 @@ def parse_checks_from_folder(provider, input_folder: str) -> int:
|
||||
if os.path.exists(prowler_module):
|
||||
shutil.rmtree(prowler_module)
|
||||
shutil.copytree(check_module, prowler_module)
|
||||
imported_checks += 1
|
||||
return imported_checks
|
||||
custom_checks.add(check.name)
|
||||
return custom_checks
|
||||
except Exception as error:
|
||||
logger.critical(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}] -- {error}"
|
||||
@@ -373,100 +319,12 @@ def parse_checks_from_compliance_framework(
|
||||
return checks_to_execute
|
||||
|
||||
|
||||
def recover_checks_from_provider(
|
||||
provider: str, service: str = None, include_fixers: bool = False
|
||||
) -> list[tuple]:
|
||||
"""
|
||||
Recover all checks from the selected provider and service
|
||||
|
||||
Returns a list of tuples with the following format (check_name, check_path)
|
||||
"""
|
||||
try:
|
||||
checks = []
|
||||
modules = list_modules(provider, service)
|
||||
for module_name in modules:
|
||||
# Format: "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_module_name = module_name.name
|
||||
# We need to exclude common shared libraries in services
|
||||
if (
|
||||
check_module_name.count(".") == 6
|
||||
and "lib" not in check_module_name
|
||||
and (not check_module_name.endswith("_fixer") or include_fixers)
|
||||
):
|
||||
check_path = module_name.module_finder.path
|
||||
# Check name is the last part of the check_module_name
|
||||
check_name = check_module_name.split(".")[-1]
|
||||
check_info = (check_name, check_path)
|
||||
checks.append(check_info)
|
||||
except ModuleNotFoundError:
|
||||
logger.critical(f"Service {service} was not found for the {provider} provider.")
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
|
||||
sys.exit(1)
|
||||
else:
|
||||
return checks
|
||||
|
||||
|
||||
def list_compliance_modules():
|
||||
"""
|
||||
list_compliance_modules returns the available compliance frameworks and their paths
|
||||
"""
|
||||
# This module path requires the full path including "prowler."
|
||||
module_path = "prowler.compliance"
|
||||
return walk_packages(
|
||||
importlib.import_module(module_path).__path__,
|
||||
importlib.import_module(module_path).__name__ + ".",
|
||||
)
|
||||
|
||||
|
||||
# List all available modules in the selected provider and service
|
||||
def list_modules(provider: str, service: str):
|
||||
# This module path requires the full path including "prowler."
|
||||
module_path = f"prowler.providers.{provider}.services"
|
||||
if service:
|
||||
module_path += f".{service}"
|
||||
return walk_packages(
|
||||
importlib.import_module(module_path).__path__,
|
||||
importlib.import_module(module_path).__name__ + ".",
|
||||
)
|
||||
|
||||
|
||||
# Import an input check using its path
|
||||
def import_check(check_path: str) -> ModuleType:
|
||||
lib = importlib.import_module(f"{check_path}")
|
||||
return lib
|
||||
|
||||
|
||||
def run_check(check: Check, verbose: bool = False, only_logs: bool = False) -> list:
|
||||
"""
|
||||
Run the check and return the findings
|
||||
Args:
|
||||
check (Check): check class
|
||||
output_options (Any): output options
|
||||
Returns:
|
||||
list: list of findings
|
||||
"""
|
||||
findings = []
|
||||
if verbose:
|
||||
print(
|
||||
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity}]{Style.RESET_ALL}"
|
||||
)
|
||||
logger.debug(f"Executing check: {check.CheckID}")
|
||||
try:
|
||||
findings = check.execute()
|
||||
except Exception as error:
|
||||
if not only_logs:
|
||||
print(
|
||||
f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
|
||||
)
|
||||
logger.error(
|
||||
f"{check.CheckID} -- {error.__class__.__name__}[{traceback.extract_tb(error.__traceback__)[-1].lineno}]: {error}"
|
||||
)
|
||||
finally:
|
||||
return findings
|
||||
|
||||
|
||||
def run_fixer(check_findings: list) -> int:
|
||||
"""
|
||||
Run the fixer for the check if it exists and there are any FAIL findings
|
||||
@@ -548,6 +406,7 @@ def execute_checks(
|
||||
global_provider: Any,
|
||||
custom_checks_metadata: Any,
|
||||
config_file: str,
|
||||
output_options: Any,
|
||||
) -> list:
|
||||
# List to store all the check's findings
|
||||
all_findings = []
|
||||
@@ -583,22 +442,51 @@ def execute_checks(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
# Set verbose flag
|
||||
verbose = False
|
||||
if hasattr(output_options, "verbose"):
|
||||
verbose = output_options.verbose
|
||||
elif hasattr(output_options, "fixer"):
|
||||
verbose = output_options.fixer
|
||||
|
||||
# Execution with the --only-logs flag
|
||||
if global_provider.output_options.only_logs:
|
||||
if output_options.only_logs:
|
||||
for check_name in checks_to_execute:
|
||||
# Recover service from check name
|
||||
service = check_name.split("_")[0]
|
||||
try:
|
||||
try:
|
||||
# Import check module
|
||||
check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
|
||||
lib = import_check(check_module_path)
|
||||
# Recover functions from check
|
||||
check_to_execute = getattr(lib, check_name)
|
||||
check = check_to_execute()
|
||||
except ModuleNotFoundError:
|
||||
logger.error(
|
||||
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
|
||||
)
|
||||
continue
|
||||
if verbose:
|
||||
print(
|
||||
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity}]{Style.RESET_ALL}"
|
||||
)
|
||||
check_findings = execute(
|
||||
service,
|
||||
check_name,
|
||||
check,
|
||||
global_provider,
|
||||
services_executed,
|
||||
checks_executed,
|
||||
custom_checks_metadata,
|
||||
output_options,
|
||||
)
|
||||
report(check_findings, global_provider, output_options)
|
||||
all_findings.extend(check_findings)
|
||||
|
||||
# Update Audit Status
|
||||
services_executed.add(service)
|
||||
checks_executed.add(check_name)
|
||||
global_provider.audit_metadata = update_audit_metadata(
|
||||
global_provider.audit_metadata, services_executed, checks_executed
|
||||
)
|
||||
|
||||
# If the check does not exist in the provider or is from another provider
|
||||
except ModuleNotFoundError:
|
||||
logger.error(
|
||||
@@ -611,9 +499,9 @@ def execute_checks(
|
||||
else:
|
||||
# Prepare your messages
|
||||
messages = [f"Config File: {Fore.YELLOW}{config_file}{Style.RESET_ALL}"]
|
||||
if global_provider.mutelist_file_path:
|
||||
if global_provider.mutelist.mutelist_file_path:
|
||||
messages.append(
|
||||
f"Mutelist File: {Fore.YELLOW}{global_provider.mutelist_file_path}{Style.RESET_ALL}"
|
||||
f"Mutelist File: {Fore.YELLOW}{global_provider.mutelist.mutelist_file_path}{Style.RESET_ALL}"
|
||||
)
|
||||
if global_provider.type == "aws":
|
||||
messages.append(
|
||||
@@ -647,15 +535,39 @@ def execute_checks(
|
||||
f"-> Scanning {orange_color}{service}{Style.RESET_ALL} service"
|
||||
)
|
||||
try:
|
||||
try:
|
||||
# Import check module
|
||||
check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
|
||||
lib = import_check(check_module_path)
|
||||
# Recover functions from check
|
||||
check_to_execute = getattr(lib, check_name)
|
||||
check = check_to_execute()
|
||||
except ModuleNotFoundError:
|
||||
logger.error(
|
||||
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
|
||||
)
|
||||
continue
|
||||
if verbose:
|
||||
print(
|
||||
f"\nCheck ID: {check.CheckID} - {Fore.MAGENTA}{check.ServiceName}{Fore.YELLOW} [{check.Severity}]{Style.RESET_ALL}"
|
||||
)
|
||||
check_findings = execute(
|
||||
service,
|
||||
check_name,
|
||||
check,
|
||||
global_provider,
|
||||
custom_checks_metadata,
|
||||
output_options,
|
||||
)
|
||||
|
||||
report(check_findings, global_provider, output_options)
|
||||
|
||||
all_findings.extend(check_findings)
|
||||
services_executed.add(service)
|
||||
checks_executed.add(check_name)
|
||||
global_provider.audit_metadata = update_audit_metadata(
|
||||
global_provider.audit_metadata,
|
||||
services_executed,
|
||||
checks_executed,
|
||||
custom_checks_metadata,
|
||||
)
|
||||
all_findings.extend(check_findings)
|
||||
|
||||
# If the check does not exist in the provider or is from another provider
|
||||
except ModuleNotFoundError:
|
||||
@@ -670,77 +582,96 @@ def execute_checks(
|
||||
)
|
||||
bar()
|
||||
bar.title = f"-> {Fore.GREEN}Scan completed!{Style.RESET_ALL}"
|
||||
|
||||
# Custom report interface
|
||||
if os.environ.get("PROWLER_REPORT_LIB_PATH"):
|
||||
try:
|
||||
logger.info("Using custom report interface ...")
|
||||
lib = os.environ["PROWLER_REPORT_LIB_PATH"]
|
||||
outputs_module = importlib.import_module(lib)
|
||||
custom_report_interface = getattr(outputs_module, "report")
|
||||
|
||||
# TODO: review this call and see if we can remove the global_provider.output_options since it is contained in the global_provider
|
||||
custom_report_interface(check_findings, output_options, global_provider)
|
||||
except Exception:
|
||||
sys.exit(1)
|
||||
|
||||
return all_findings
|
||||
|
||||
|
||||
def execute(
|
||||
service: str,
|
||||
check_name: str,
|
||||
check: Check,
|
||||
global_provider: Any,
|
||||
services_executed: set,
|
||||
checks_executed: set,
|
||||
custom_checks_metadata: Any,
|
||||
output_options: Any = None,
|
||||
):
|
||||
try:
|
||||
# Import check module
|
||||
check_module_path = f"prowler.providers.{global_provider.type}.services.{service}.{check_name}.{check_name}"
|
||||
lib = import_check(check_module_path)
|
||||
# Recover functions from check
|
||||
check_to_execute = getattr(lib, check_name)
|
||||
check_class = check_to_execute()
|
||||
"""
|
||||
Execute the check and report the findings
|
||||
|
||||
Args:
|
||||
service (str): service name
|
||||
check_name (str): check name
|
||||
global_provider (Any): provider object
|
||||
custom_checks_metadata (Any): custom checks metadata
|
||||
output_options (Any): output options, depending on the provider
|
||||
|
||||
Returns:
|
||||
list: list of findings
|
||||
"""
|
||||
try:
|
||||
# Update check metadata to reflect that in the outputs
|
||||
if custom_checks_metadata and custom_checks_metadata["Checks"].get(
|
||||
check_class.CheckID
|
||||
check.CheckID
|
||||
):
|
||||
check_class = update_check_metadata(
|
||||
check_class, custom_checks_metadata["Checks"][check_class.CheckID]
|
||||
check = update_check_metadata(
|
||||
check, custom_checks_metadata["Checks"][check.CheckID]
|
||||
)
|
||||
|
||||
# Run check
|
||||
verbose = (
|
||||
global_provider.output_options.verbose
|
||||
or global_provider.output_options.fixer
|
||||
)
|
||||
check_findings = run_check(
|
||||
check_class, verbose, global_provider.output_options.only_logs
|
||||
)
|
||||
only_logs = False
|
||||
if hasattr(output_options, "only_logs"):
|
||||
only_logs = output_options.only_logs
|
||||
|
||||
# Update Audit Status
|
||||
services_executed.add(service)
|
||||
checks_executed.add(check_name)
|
||||
global_provider.audit_metadata = update_audit_metadata(
|
||||
global_provider.audit_metadata, services_executed, checks_executed
|
||||
)
|
||||
|
||||
# Mutelist findings
|
||||
if hasattr(global_provider, "mutelist") and global_provider.mutelist:
|
||||
check_findings = mutelist_findings(
|
||||
global_provider,
|
||||
check_findings,
|
||||
)
|
||||
|
||||
# Refactor(Outputs)
|
||||
# Report the check's findings
|
||||
report(check_findings, global_provider)
|
||||
|
||||
# Refactor(Outputs)
|
||||
if os.environ.get("PROWLER_REPORT_LIB_PATH"):
|
||||
try:
|
||||
logger.info("Using custom report interface ...")
|
||||
lib = os.environ["PROWLER_REPORT_LIB_PATH"]
|
||||
outputs_module = importlib.import_module(lib)
|
||||
custom_report_interface = getattr(outputs_module, "report")
|
||||
|
||||
# TODO: review this call and see if we can remove the global_provider.output_options since it is contained in the global_provider
|
||||
custom_report_interface(
|
||||
check_findings, global_provider.output_options, global_provider
|
||||
# Execute the check
|
||||
check_findings = []
|
||||
logger.debug(f"Executing check: {check.CheckID}")
|
||||
try:
|
||||
check_findings = check.execute()
|
||||
except Exception as error:
|
||||
if not only_logs:
|
||||
print(
|
||||
f"Something went wrong in {check.CheckID}, please use --log-level ERROR"
|
||||
)
|
||||
except Exception:
|
||||
sys.exit(1)
|
||||
logger.error(
|
||||
f"{check.CheckID} -- {error.__class__.__name__}[{traceback.extract_tb(error.__traceback__)[-1].lineno}]: {error}"
|
||||
)
|
||||
|
||||
# Exclude findings per status
|
||||
if hasattr(output_options, "status") and output_options.status:
|
||||
check_findings = [
|
||||
finding
|
||||
for finding in check_findings
|
||||
if finding.status in output_options.status
|
||||
]
|
||||
|
||||
# Before returning the findings, we need to apply the mute list logic
|
||||
if hasattr(global_provider, "mutelist") and global_provider.mutelist.mutelist:
|
||||
is_finding_muted_args = {}
|
||||
if global_provider.type == "aws":
|
||||
is_finding_muted_args["aws_account_id"] = (
|
||||
global_provider.identity.account
|
||||
)
|
||||
elif global_provider.type == "kubernetes":
|
||||
is_finding_muted_args["cluster"] = global_provider.identity.cluster
|
||||
|
||||
for finding in check_findings:
|
||||
is_finding_muted_args["finding"] = finding
|
||||
finding.muted = global_provider.mutelist.is_finding_muted(
|
||||
**is_finding_muted_args
|
||||
)
|
||||
|
||||
except ModuleNotFoundError:
|
||||
logger.error(
|
||||
f"Check '{check_name}' was not found for the {global_provider.type.upper()} provider"
|
||||
f"Check '{check.CheckID}' was not found for the {global_provider.type.upper()} provider"
|
||||
)
|
||||
check_findings = []
|
||||
except Exception as error:
|
||||
@@ -770,34 +701,3 @@ def update_audit_metadata(
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
|
||||
def recover_checks_from_service(service_list: list, provider: str) -> set:
|
||||
"""
|
||||
Recover all checks from the selected provider and service
|
||||
|
||||
Returns a set of checks from the given services
|
||||
"""
|
||||
try:
|
||||
checks = set()
|
||||
service_list = [
|
||||
"awslambda" if service == "lambda" else service for service in service_list
|
||||
]
|
||||
for service in service_list:
|
||||
service_checks = recover_checks_from_provider(provider, service)
|
||||
if not service_checks:
|
||||
logger.error(f"Service '{service}' does not have checks.")
|
||||
|
||||
else:
|
||||
for check in service_checks:
|
||||
# Recover check name and module name from import path
|
||||
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_name = check[0].split(".")[-1]
|
||||
# If the service is present in the group list passed as parameters
|
||||
# if service_name in group_list: checks_from_arn.add(check_name)
|
||||
checks.add(check_name)
|
||||
return checks
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
@@ -4,6 +4,8 @@ from prowler.config.config import valid_severities
|
||||
from prowler.lib.check.check import (
|
||||
parse_checks_from_compliance_framework,
|
||||
parse_checks_from_file,
|
||||
)
|
||||
from prowler.lib.check.utils import (
|
||||
recover_checks_from_provider,
|
||||
recover_checks_from_service,
|
||||
)
|
||||
|
||||
@@ -1,16 +1,21 @@
|
||||
import sys
|
||||
|
||||
from pydantic import parse_obj_as
|
||||
|
||||
from prowler.lib.check.compliance_models import Compliance_Base_Model
|
||||
from prowler.lib.check.models import Check_Metadata_Model
|
||||
from prowler.lib.check.compliance_models import Compliance
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
|
||||
def update_checks_metadata_with_compliance(
|
||||
bulk_compliance_frameworks: dict, bulk_checks_metadata: dict
|
||||
):
|
||||
"""Update the check metadata model with the compliance framework"""
|
||||
) -> dict:
|
||||
"""
|
||||
Update the check metadata model with the compliance framework
|
||||
Args:
|
||||
bulk_compliance_frameworks (dict): The compliance frameworks
|
||||
bulk_checks_metadata (dict): The checks metadata
|
||||
|
||||
Returns:
|
||||
dict: The checks metadata with the compliance frameworks
|
||||
"""
|
||||
try:
|
||||
for check in bulk_checks_metadata:
|
||||
check_compliance = []
|
||||
@@ -21,8 +26,8 @@ def update_checks_metadata_with_compliance(
|
||||
if check in requirement.Checks:
|
||||
# Include the requirement into the check's framework requirements
|
||||
compliance_requirements.append(requirement)
|
||||
# Create the Compliance_Model
|
||||
compliance = Compliance_Base_Model(
|
||||
# Create the Compliance
|
||||
compliance = Compliance(
|
||||
Framework=framework.Framework,
|
||||
Provider=framework.Provider,
|
||||
Version=framework.Version,
|
||||
@@ -33,53 +38,6 @@ def update_checks_metadata_with_compliance(
|
||||
check_compliance.append(compliance)
|
||||
# Save it into the check's metadata
|
||||
bulk_checks_metadata[check].Compliance = check_compliance
|
||||
|
||||
# Add requirements of Manual Controls
|
||||
for framework in bulk_compliance_frameworks.values():
|
||||
for requirement in framework.Requirements:
|
||||
compliance_requirements = []
|
||||
# Verify if requirement is Manual
|
||||
if not requirement.Checks:
|
||||
compliance_requirements.append(requirement)
|
||||
# Create the Compliance_Model
|
||||
compliance = Compliance_Base_Model(
|
||||
Framework=framework.Framework,
|
||||
Provider=framework.Provider,
|
||||
Version=framework.Version,
|
||||
Description=framework.Description,
|
||||
Requirements=compliance_requirements,
|
||||
)
|
||||
# Include the compliance framework for the check
|
||||
check_compliance.append(compliance)
|
||||
# Create metadata for Manual Control
|
||||
manual_check_metadata = {
|
||||
"Provider": framework.Provider.lower(),
|
||||
"CheckID": "manual_check",
|
||||
"CheckTitle": "Manual Check",
|
||||
"CheckType": [],
|
||||
"ServiceName": "",
|
||||
"SubServiceName": "",
|
||||
"ResourceIdTemplate": "",
|
||||
"Severity": "low",
|
||||
"ResourceType": "",
|
||||
"Description": "",
|
||||
"Risk": "",
|
||||
"RelatedUrl": "",
|
||||
"Remediation": {
|
||||
"Code": {"CLI": "", "NativeIaC": "", "Other": "", "Terraform": ""},
|
||||
"Recommendation": {"Text": "", "Url": ""},
|
||||
},
|
||||
"Categories": [],
|
||||
"Tags": {},
|
||||
"DependsOn": [],
|
||||
"RelatedTo": [],
|
||||
"Notes": "",
|
||||
}
|
||||
manual_check = parse_obj_as(Check_Metadata_Model, manual_check_metadata)
|
||||
# Save it into the check's metadata
|
||||
bulk_checks_metadata["manual_check"] = manual_check
|
||||
bulk_checks_metadata["manual_check"].Compliance = check_compliance
|
||||
|
||||
return bulk_checks_metadata
|
||||
except Exception as e:
|
||||
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import os
|
||||
import sys
|
||||
from enum import Enum
|
||||
from typing import Optional, Union
|
||||
|
||||
from pydantic import BaseModel, ValidationError, root_validator
|
||||
|
||||
from prowler.lib.check.utils import list_compliance_modules
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
|
||||
@@ -91,7 +93,6 @@ class CIS_Requirement_Attribute(BaseModel):
|
||||
AdditionalInformation: str
|
||||
DefaultValue: Optional[str]
|
||||
References: str
|
||||
DefaultValue: Optional[str]
|
||||
|
||||
|
||||
# Well Architected Requirement Attribute
|
||||
@@ -168,6 +169,19 @@ class Mitre_Requirement(BaseModel):
|
||||
Checks: list[str]
|
||||
|
||||
|
||||
# KISA-ISMS-P Requirement Attribute
|
||||
class KISA_ISMSP_Requirement_Attribute(BaseModel):
|
||||
"""KISA ISMS-P Requirement Attribute"""
|
||||
|
||||
Domain: str
|
||||
Subdomain: str
|
||||
Section: str
|
||||
AuditChecklist: Optional[list[str]]
|
||||
RelatedRegulations: Optional[list[str]]
|
||||
AuditEvidence: Optional[list[str]]
|
||||
NonComplianceCases: Optional[list[str]]
|
||||
|
||||
|
||||
# Base Compliance Model
|
||||
# TODO: move this to compliance folder
|
||||
class Compliance_Requirement(BaseModel):
|
||||
@@ -182,6 +196,7 @@ class Compliance_Requirement(BaseModel):
|
||||
ENS_Requirement_Attribute,
|
||||
ISO27001_2013_Requirement_Attribute,
|
||||
AWS_Well_Architected_Requirement_Attribute,
|
||||
KISA_ISMSP_Requirement_Attribute,
|
||||
# Generic_Compliance_Requirement_Attribute must be the last one since it is the fallback for generic compliance frameworks
|
||||
Generic_Compliance_Requirement_Attribute,
|
||||
]
|
||||
@@ -189,8 +204,8 @@ class Compliance_Requirement(BaseModel):
|
||||
Checks: list[str]
|
||||
|
||||
|
||||
class Compliance_Base_Model(BaseModel):
|
||||
"""Compliance_Base_Model holds the base model for every compliance framework"""
|
||||
class Compliance(BaseModel):
|
||||
"""Compliance holds the base model for every compliance framework"""
|
||||
|
||||
Framework: str
|
||||
Provider: str
|
||||
@@ -214,16 +229,137 @@ class Compliance_Base_Model(BaseModel):
|
||||
raise ValueError("Framework or Provider must not be empty")
|
||||
return values
|
||||
|
||||
@staticmethod
|
||||
def list(bulk_compliance_frameworks: dict, provider: str = None) -> list[str]:
|
||||
"""
|
||||
Returns a list of compliance frameworks from bulk compliance frameworks
|
||||
|
||||
Args:
|
||||
bulk_compliance_frameworks (dict): The bulk compliance frameworks
|
||||
provider (str): The provider name
|
||||
|
||||
Returns:
|
||||
list: The list of compliance frameworks
|
||||
"""
|
||||
if provider:
|
||||
compliance_frameworks = [
|
||||
compliance_framework
|
||||
for compliance_framework in bulk_compliance_frameworks.keys()
|
||||
if provider in compliance_framework
|
||||
]
|
||||
else:
|
||||
compliance_frameworks = [
|
||||
compliance_framework
|
||||
for compliance_framework in bulk_compliance_frameworks.keys()
|
||||
]
|
||||
|
||||
return compliance_frameworks
|
||||
|
||||
@staticmethod
|
||||
def get(
|
||||
bulk_compliance_frameworks: dict, compliance_framework_name: str
|
||||
) -> "Compliance":
|
||||
"""
|
||||
Returns a compliance framework from bulk compliance frameworks
|
||||
|
||||
Args:
|
||||
bulk_compliance_frameworks (dict): The bulk compliance frameworks
|
||||
compliance_framework_name (str): The compliance framework name
|
||||
|
||||
Returns:
|
||||
Compliance: The compliance framework
|
||||
"""
|
||||
return bulk_compliance_frameworks.get(compliance_framework_name, None)
|
||||
|
||||
@staticmethod
|
||||
def list_requirements(
|
||||
bulk_compliance_frameworks: dict, compliance_framework: str = None
|
||||
) -> list:
|
||||
"""
|
||||
Returns a list of compliance requirements from a compliance framework
|
||||
|
||||
Args:
|
||||
bulk_compliance_frameworks (dict): The bulk compliance frameworks
|
||||
compliance_framework (str): The compliance framework name
|
||||
|
||||
Returns:
|
||||
list: The list of compliance requirements for the provided compliance framework
|
||||
"""
|
||||
compliance_requirements = []
|
||||
|
||||
if bulk_compliance_frameworks and compliance_framework:
|
||||
compliance_requirements = [
|
||||
compliance_requirement.Id
|
||||
for compliance_requirement in bulk_compliance_frameworks.get(
|
||||
compliance_framework
|
||||
).Requirements
|
||||
]
|
||||
|
||||
return compliance_requirements
|
||||
|
||||
@staticmethod
|
||||
def get_requirement(
|
||||
bulk_compliance_frameworks: dict, compliance_framework: str, requirement_id: str
|
||||
) -> Union[Mitre_Requirement, Compliance_Requirement]:
|
||||
"""
|
||||
Returns a compliance requirement from a compliance framework
|
||||
|
||||
Args:
|
||||
bulk_compliance_frameworks (dict): The bulk compliance frameworks
|
||||
compliance_framework (str): The compliance framework name
|
||||
requirement_id (str): The compliance requirement ID
|
||||
|
||||
Returns:
|
||||
Mitre_Requirement | Compliance_Requirement: The compliance requirement
|
||||
"""
|
||||
requirement = None
|
||||
for compliance_requirement in bulk_compliance_frameworks.get(
|
||||
compliance_framework
|
||||
).Requirements:
|
||||
if compliance_requirement.Id == requirement_id:
|
||||
requirement = compliance_requirement
|
||||
break
|
||||
|
||||
return requirement
|
||||
|
||||
@staticmethod
|
||||
def get_bulk(provider: str) -> dict:
|
||||
"""Bulk load all compliance frameworks specification into a dict"""
|
||||
try:
|
||||
bulk_compliance_frameworks = {}
|
||||
available_compliance_framework_modules = list_compliance_modules()
|
||||
for compliance_framework in available_compliance_framework_modules:
|
||||
if provider in compliance_framework.name:
|
||||
compliance_specification_dir_path = (
|
||||
f"{compliance_framework.module_finder.path}/{provider}"
|
||||
)
|
||||
# for compliance_framework in available_compliance_framework_modules:
|
||||
for filename in os.listdir(compliance_specification_dir_path):
|
||||
file_path = os.path.join(
|
||||
compliance_specification_dir_path, filename
|
||||
)
|
||||
# Check if it is a file and its size is greater than 0
|
||||
if os.path.isfile(file_path) and os.stat(file_path).st_size > 0:
|
||||
# Open Compliance file in JSON
|
||||
# cis_v1.4_aws.json --> cis_v1.4_aws
|
||||
compliance_framework_name = filename.split(".json")[0]
|
||||
# Store the compliance info
|
||||
bulk_compliance_frameworks[compliance_framework_name] = (
|
||||
load_compliance_framework(file_path)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}] -- {e}")
|
||||
|
||||
return bulk_compliance_frameworks
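Taken together, these static helpers make `Compliance` the single entry point for framework data. A hedged sketch chaining them (the framework key follows the `framework_version_provider` convention used by the CLI):
```python
# Hedged sketch of the new Compliance helpers; the framework name is an example.
from prowler.lib.check.compliance_models import Compliance

bulk = Compliance.get_bulk("aws")
print(Compliance.list(bulk, provider="aws"))        # e.g. ['cis_3.0_aws', ...]
cis = Compliance.get(bulk, "cis_3.0_aws")
print(Compliance.list_requirements(bulk, "cis_3.0_aws")[:5])
```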
|
||||
|
||||
|
||||
# Testing Pending
|
||||
def load_compliance_framework(
|
||||
compliance_specification_file: str,
|
||||
) -> Compliance_Base_Model:
|
||||
) -> Compliance:
|
||||
"""load_compliance_framework loads and parse a Compliance Framework Specification"""
|
||||
try:
|
||||
compliance_framework = Compliance_Base_Model.parse_file(
|
||||
compliance_specification_file
|
||||
)
|
||||
compliance_framework = Compliance.parse_file(compliance_specification_file)
|
||||
except ValidationError as error:
|
||||
logger.critical(
|
||||
f"Compliance Framework Specification from {compliance_specification_file} is not valid: {error}"
|
||||
|
||||
@@ -7,11 +7,20 @@ from dataclasses import dataclass
|
||||
from pydantic import BaseModel, ValidationError, validator
|
||||
|
||||
from prowler.config.config import valid_severities
|
||||
from prowler.lib.check.utils import recover_checks_from_provider
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
|
||||
class Code(BaseModel):
|
||||
"""Check's remediation information using IaC like CloudFormation, Terraform or the native CLI"""
|
||||
"""
|
||||
Represents the remediation code using IaC like CloudFormation, Terraform or the native CLI.
|
||||
|
||||
Attributes:
|
||||
NativeIaC (str): The NativeIaC code.
|
||||
Terraform (str): The Terraform code.
|
||||
CLI (str): The CLI code.
|
||||
Other (str): Other code.
|
||||
"""
|
||||
|
||||
NativeIaC: str
|
||||
Terraform: str
|
||||
@@ -20,21 +29,61 @@ class Code(BaseModel):
|
||||
|
||||
|
||||
class Recommendation(BaseModel):
|
||||
"""Check's recommendation information"""
|
||||
"""
|
||||
Represents a recommendation.
|
||||
|
||||
Attributes:
|
||||
Text (str): The text of the recommendation.
|
||||
Url (str): The URL associated with the recommendation.
|
||||
"""
|
||||
|
||||
Text: str
|
||||
Url: str
|
||||
|
||||
|
||||
class Remediation(BaseModel):
|
||||
"""Check's remediation: Code and Recommendation"""
|
||||
"""
|
||||
Represents a remediation action for a specific check.
|
||||
|
||||
Attributes:
|
||||
Code (Code): The code associated with the remediation action.
|
||||
Recommendation (Recommendation): The recommendation for the remediation action.
|
||||
"""
|
||||
|
||||
Code: Code
|
||||
Recommendation: Recommendation
|
||||
|
||||
|
||||
class Check_Metadata_Model(BaseModel):
|
||||
"""Check Metadata Model"""
|
||||
class CheckMetadata(BaseModel):
|
||||
"""
|
||||
Model representing the metadata of a check.
|
||||
|
||||
Attributes:
|
||||
Provider (str): The provider of the check.
|
||||
CheckID (str): The ID of the check.
|
||||
CheckTitle (str): The title of the check.
|
||||
CheckType (list[str]): The type of the check.
|
||||
CheckAliases (list[str], optional): The aliases of the check. Defaults to an empty list.
|
||||
ServiceName (str): The name of the service.
|
||||
SubServiceName (str): The name of the sub-service.
|
||||
ResourceIdTemplate (str): The template for the resource ID.
|
||||
Severity (str): The severity of the check.
|
||||
ResourceType (str): The type of the resource.
|
||||
Description (str): The description of the check.
|
||||
Risk (str): The risk associated with the check.
|
||||
RelatedUrl (str): The URL related to the check.
|
||||
Remediation (Remediation): The remediation steps for the check.
|
||||
Categories (list[str]): The categories of the check.
|
||||
DependsOn (list[str]): The dependencies of the check.
|
||||
RelatedTo (list[str]): The related checks.
|
||||
Notes (str): Additional notes for the check.
|
||||
Compliance (list, optional): The compliance information for the check. Defaults to None.
|
||||
|
||||
Validators:
|
||||
valid_category(value): Validator function to validate the categories of the check.
|
||||
severity_to_lower(severity): Validator function to convert the severity to lowercase.
|
||||
valid_severity(severity): Validator function to validate the severity of the check.
|
||||
"""
|
||||
|
||||
Provider: str
|
||||
CheckID: str
|
||||
@@ -81,8 +130,36 @@ class Check_Metadata_Model(BaseModel):
|
||||
)
|
||||
return severity
|
||||
|
||||
@staticmethod
|
||||
def get_bulk(provider: str) -> dict[str, "CheckMetadata"]:
|
||||
"""
|
||||
Load the metadata of all checks for a given provider reading the check's metadata files.
|
||||
Args:
|
||||
provider (str): The name of the provider.
|
||||
Returns:
|
||||
dict[str, CheckMetadata]: A dictionary containing the metadata of all checks, with the CheckID as the key.
|
||||
"""
|
||||
|
||||
class Check(ABC, Check_Metadata_Model):
|
||||
bulk_check_metadata = {}
|
||||
checks = recover_checks_from_provider(provider)
|
||||
# Build list of check's metadata files
|
||||
for check_info in checks:
|
||||
# Build check path name
|
||||
check_name = check_info[0]
|
||||
check_path = check_info[1]
|
||||
# Ignore fixer files
|
||||
if check_name.endswith("_fixer"):
|
||||
continue
|
||||
# Append metadata file extension
|
||||
metadata_file = f"{check_path}/{check_name}.metadata.json"
|
||||
# Load metadata
|
||||
check_metadata = load_check_metadata(metadata_file)
|
||||
bulk_check_metadata[check_metadata.CheckID] = check_metadata
|
||||
|
||||
return bulk_check_metadata
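The former module-level `bulk_load_checks_metadata` now lives on the model as `CheckMetadata.get_bulk`; a minimal usage sketch:
```python
# Hedged sketch: bulk-load check metadata via the new static method.
from prowler.lib.check.models import CheckMetadata

metadata = CheckMetadata.get_bulk("aws")
print(len(metadata), "checks loaded")
sample = next(iter(metadata.values()))
print(sample.CheckID, sample.Severity)
```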
|
||||
|
||||
|
||||
class Check(ABC, CheckMetadata):
|
||||
"""Prowler Check"""
|
||||
|
||||
def __init__(self, **data):
|
||||
@@ -93,9 +170,11 @@ class Check(ABC, Check_Metadata_Model):
|
||||
+ ".metadata.json"
|
||||
)
|
||||
# Store it to validate them with Pydantic
|
||||
data = Check_Metadata_Model.parse_file(metadata_file).dict()
|
||||
data = CheckMetadata.parse_file(metadata_file).dict()
|
||||
# Calls parents init function
|
||||
super().__init__(**data)
|
||||
# TODO: verify that the CheckID is the same as the filename and classname
|
||||
# to mimic the test done at test_<provider>_checks_metadata_is_valid
|
||||
|
||||
def metadata(self) -> dict:
|
||||
"""Return the JSON representation of the check's metadata"""
|
||||
@@ -112,14 +191,14 @@ class Check_Report:
|
||||
|
||||
status: str
|
||||
status_extended: str
|
||||
check_metadata: Check_Metadata_Model
|
||||
check_metadata: CheckMetadata
|
||||
resource_details: str
|
||||
resource_tags: list
|
||||
muted: bool
|
||||
|
||||
def __init__(self, metadata):
|
||||
self.status = ""
|
||||
self.check_metadata = Check_Metadata_Model.parse_raw(metadata)
|
||||
self.check_metadata = CheckMetadata.parse_raw(metadata)
|
||||
self.status_extended = ""
|
||||
self.resource_details = ""
|
||||
self.resource_tags = []
|
||||
@@ -192,12 +271,22 @@ class Check_Report_Kubernetes(Check_Report):
|
||||
|
||||
|
||||
# Testing Pending
|
||||
def load_check_metadata(metadata_file: str) -> Check_Metadata_Model:
|
||||
"""load_check_metadata loads and parse a Check's metadata file"""
|
||||
def load_check_metadata(metadata_file: str) -> CheckMetadata:
|
||||
"""
|
||||
Load check metadata from a file.
|
||||
Args:
|
||||
metadata_file (str): The path to the metadata file.
|
||||
Returns:
|
||||
CheckMetadata: The loaded check metadata.
|
||||
Raises:
|
||||
ValidationError: If the metadata file is not valid.
|
||||
"""
|
||||
|
||||
try:
|
||||
check_metadata = Check_Metadata_Model.parse_file(metadata_file)
|
||||
check_metadata = CheckMetadata.parse_file(metadata_file)
|
||||
except ValidationError as error:
|
||||
logger.critical(f"Metadata from {metadata_file} is not valid: {error}")
|
||||
# TODO: remove this exit and raise an exception
|
||||
sys.exit(1)
|
||||
else:
|
||||
return check_metadata
|
||||
|
||||
prowler/lib/check/utils.py (new file, 95 lines added)
@@ -0,0 +1,95 @@
|
||||
import importlib
|
||||
import sys
|
||||
from pkgutil import walk_packages
|
||||
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
|
||||
def recover_checks_from_provider(
|
||||
provider: str, service: str = None, include_fixers: bool = False
|
||||
) -> list[tuple]:
|
||||
"""
|
||||
Recover all checks from the selected provider and service
|
||||
|
||||
Returns a list of tuples with the following format (check_name, check_path)
|
||||
"""
|
||||
try:
|
||||
checks = []
|
||||
modules = list_modules(provider, service)
|
||||
for module_name in modules:
|
||||
# Format: "prowler.providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_module_name = module_name.name
|
||||
# We need to exclude common shared libraries in services
|
||||
if (
|
||||
check_module_name.count(".") == 6
|
||||
and "lib" not in check_module_name
|
||||
and (not check_module_name.endswith("_fixer") or include_fixers)
|
||||
):
|
||||
check_path = module_name.module_finder.path
|
||||
# Check name is the last part of the check_module_name
|
||||
check_name = check_module_name.split(".")[-1]
|
||||
check_info = (check_name, check_path)
|
||||
checks.append(check_info)
|
||||
except ModuleNotFoundError:
|
||||
logger.critical(f"Service {service} was not found for the {provider} provider.")
|
||||
sys.exit(1)
|
||||
except Exception as e:
|
||||
logger.critical(f"{e.__class__.__name__}[{e.__traceback__.tb_lineno}]: {e}")
|
||||
sys.exit(1)
|
||||
else:
|
||||
return checks
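Relocated into the new `prowler/lib/check/utils.py`, the function keeps its `(check_name, check_path)` tuple contract; a quick sketch:
```python
# Hedged sketch: enumerate the EC2 checks through the relocated helper.
from prowler.lib.check.utils import recover_checks_from_provider

for check_name, check_path in recover_checks_from_provider("aws", service="ec2"):
    print(check_name, "->", check_path)
```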
|
||||
|
||||
|
||||
# List all available modules in the selected provider and service
|
||||
def list_modules(provider: str, service: str):
|
||||
# This module path requires the full path including "prowler."
|
||||
module_path = f"prowler.providers.{provider}.services"
|
||||
if service:
|
||||
module_path += f".{service}"
|
||||
return walk_packages(
|
||||
importlib.import_module(module_path).__path__,
|
||||
importlib.import_module(module_path).__name__ + ".",
|
||||
)
|
||||
|
||||
|
||||
def recover_checks_from_service(service_list: list, provider: str) -> set:
|
||||
"""
|
||||
Recover all checks from the selected provider and service
|
||||
|
||||
Returns a set of checks from the given services
|
||||
"""
|
||||
try:
|
||||
checks = set()
|
||||
service_list = [
|
||||
"awslambda" if service == "lambda" else service for service in service_list
|
||||
]
|
||||
for service in service_list:
|
||||
service_checks = recover_checks_from_provider(provider, service)
|
||||
if not service_checks:
|
||||
logger.error(f"Service '{service}' does not have checks.")
|
||||
|
||||
else:
|
||||
for check in service_checks:
|
||||
# Recover check name and module name from import path
|
||||
# Format: "providers.{provider}.services.{service}.{check_name}.{check_name}"
|
||||
check_name = check[0].split(".")[-1]
|
||||
# If the service is present in the group list passed as parameters
|
||||
# if service_name in group_list: checks_from_arn.add(check_name)
|
||||
checks.add(check_name)
|
||||
return checks
|
||||
except Exception as error:
|
||||
logger.error(
|
||||
f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
|
||||
|
||||
def list_compliance_modules():
|
||||
"""
|
||||
list_compliance_modules returns the available compliance frameworks and returns their path
|
||||
"""
|
||||
# This module path requires the full path including "prowler."
|
||||
module_path = "prowler.compliance"
|
||||
return walk_packages(
|
||||
importlib.import_module(module_path).__path__,
|
||||
importlib.import_module(module_path).__name__ + ".",
|
||||
)
|
||||
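To see why the `count(".") == 6` filter above identifies check modules, here is a minimal sketch (not part of the diff; it assumes Prowler is installed, and the s3 service is only an illustrative target):

```python
import importlib
from pkgutil import walk_packages

module_path = "prowler.providers.aws.services.s3"
for module_info in walk_packages(
    importlib.import_module(module_path).__path__,
    importlib.import_module(module_path).__name__ + ".",
):
    name = module_info.name
    # A check module has 7 dotted components, e.g.
    # prowler.providers.aws.services.s3.<check_name>.<check_name>,
    # while shared "lib" helpers and *_fixer modules are skipped.
    if name.count(".") == 6 and "lib" not in name and not name.endswith("_fixer"):
        print(name.split(".")[-1], module_info.module_finder.path)
```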
@@ -5,6 +5,7 @@ from argparse import RawTextHelpFormatter

from dashboard.lib.arguments.arguments import init_dashboard_parser
from prowler.config.config import (
    available_compliance_frameworks,
    available_output_formats,
    check_current_version,
    default_config_file_path,
    default_fixer_config_file_path,
@@ -147,7 +148,7 @@ Detailed documentation at https://docs.prowler.com
    nargs="+",
    help="Output modes, by default csv and json-ocsf are saved. When using AWS Security Hub integration, json-asff output is also saved.",
    default=["csv", "json-ocsf", "html"],
    choices=["csv", "json-asff", "json-ocsf", "html"],
    choices=available_output_formats,
)
common_outputs_parser.add_argument(
    "--output-filename",
@@ -262,7 +263,7 @@ Detailed documentation at https://docs.prowler.com
group.add_argument(
    "--compliance",
    nargs="+",
    help="Compliance Framework to check against for. The format should be the following: framework_version_provider (e.g.: ens_rd2022_aws)",
    help="Compliance Framework to check against for. The format should be the following: framework_version_provider (e.g.: cis_3.0_aws)",
    choices=available_compliance_frameworks,
)
group.add_argument(
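With `choices=available_compliance_frameworks`, argparse itself rejects unknown frameworks. A standalone sketch of that behavior (the framework list below is illustrative, not Prowler's real one):

```python
from argparse import ArgumentParser

available_compliance_frameworks = ["cis_3.0_aws", "ens_rd2022_aws"]  # illustrative
parser = ArgumentParser()
parser.add_argument(
    "--compliance", nargs="+", choices=available_compliance_frameworks
)
args = parser.parse_args(["--compliance", "cis_3.0_aws"])
assert args.compliance == ["cis_3.0_aws"]
# parser.parse_args(["--compliance", "not_a_framework"]) exits with an error
```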
@@ -1,373 +1,345 @@
import re
from typing import Any
from abc import ABC, abstractmethod

import yaml

from prowler.lib.logger import logger
from prowler.lib.mutelist.models import mutelist_schema
from prowler.lib.outputs.utils import unroll_tags


def get_mutelist_file_from_local_file(mutelist_path: str):
    try:
        with open(mutelist_path) as f:
            mutelist = yaml.safe_load(f)["Mutelist"]
        return mutelist
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return {}


def validate_mutelist(mutelist: dict) -> dict:
    try:
        mutelist = mutelist_schema.validate(mutelist)
        return mutelist
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- Mutelist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]"
        )
        return {}


def mutelist_findings(
    global_provider: Any,
    check_findings: list[Any],
) -> list[Any]:
    # Check if finding is muted
    for finding in check_findings:
        # TODO: Move this mapping to the execute_check function and pass that output to the mutelist and the report
        if global_provider.type == "aws":
            finding.muted = is_muted(
                global_provider.mutelist,
                global_provider.identity.account,
                finding.check_metadata.CheckID,
                finding.region,
                finding.resource_id,
                unroll_tags(finding.resource_tags),
            )
        elif global_provider.type == "azure":
            finding.muted = is_muted(
                global_provider.mutelist,
                finding.subscription,
                finding.check_metadata.CheckID,
                # TODO: add region to the findings when we add Azure Locations
                # finding.region,
                "",
                finding.resource_name,
                unroll_tags(finding.resource_tags),
            )
        elif global_provider.type == "gcp":
            finding.muted = is_muted(
                global_provider.mutelist,
                finding.project_id,
                finding.check_metadata.CheckID,
                finding.location,
                finding.resource_name,
                unroll_tags(finding.resource_tags),
            )
        elif global_provider.type == "kubernetes":
            finding.muted = is_muted(
                global_provider.mutelist,
                global_provider.identity.cluster,
                finding.check_metadata.CheckID,
                finding.namespace,
                finding.resource_name,
                unroll_tags(finding.resource_tags),
            )
    return check_findings


def is_muted(
    mutelist: dict,
    audited_account: str,
    check: str,
    finding_region: str,
    finding_resource: str,
    finding_tags,
) -> bool:
class Mutelist(ABC):
    """
    Check if the provided finding is muted for the audited account, check, region, resource and tags.
    Abstract base class for managing a mutelist.

    Args:
        mutelist (dict): Dictionary containing information about muted checks for different accounts.
        audited_account (str): The account being audited.
        check (str): The check to be evaluated for muting.
        finding_region (str): The region where the finding occurred.
        finding_resource (str): The resource related to the finding.
        finding_tags: The tags associated with the finding.
    Attributes:
        _mutelist (dict): Dictionary containing information about muted checks for different accounts.
        _mutelist_file_path (str): Path to the mutelist file.
        MUTELIST_KEY (str): Key used to access the mutelist in the mutelist file.

    Returns:
        bool: True if the finding is muted for the audited account, check, region, resource and tags, otherwise False.
    Methods:
        __init__: Initializes a Mutelist object.
        mutelist: Property that returns the mutelist dictionary.
        mutelist_file_path: Property that returns the mutelist file path.
        is_finding_muted: Abstract method to check if a finding is muted.
        get_mutelist_file_from_local_file: Retrieves the mutelist file from a local file.
        validate_mutelist: Validates the mutelist against a schema.
        is_muted: Checks if a finding is muted for the audited account, check, region, resource, and tags.
        is_muted_in_check: Checks if a check is muted.
        is_excepted: Checks if the account, region, resource, and tags are excepted based on the exceptions.
    """
    try:
        # By default is not muted
        is_finding_muted = False

        # We always check all the accounts present in the mutelist
        # if one mutes the finding we set the finding as muted
        for account in mutelist["Accounts"]:
            if account == audited_account or account == "*":
                if is_muted_in_check(
                    mutelist["Accounts"][account]["Checks"],
                    audited_account,
                    check,
                    finding_region,
                    finding_resource,
                    finding_tags,
    _mutelist: dict = {}
    _mutelist_file_path: str = None

    MUTELIST_KEY = "Mutelist"

    def __init__(
        self, mutelist_path: str = "", mutelist_content: dict = {}
    ) -> "Mutelist":
        if mutelist_path:
            self._mutelist_file_path = mutelist_path
            self.get_mutelist_file_from_local_file(mutelist_path)
        else:
            self._mutelist = mutelist_content

        if self._mutelist:
            self.validate_mutelist()

    @property
    def mutelist(self) -> dict:
        return self._mutelist

    @property
    def mutelist_file_path(self) -> dict:
        return self._mutelist_file_path

    @abstractmethod
    def is_finding_muted(self) -> bool:
        raise NotImplementedError

    def get_mutelist_file_from_local_file(self, mutelist_path: str):
        try:
            with open(mutelist_path) as f:
                self._mutelist = yaml.safe_load(f)[self.MUTELIST_KEY]
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )

    def validate_mutelist(self) -> bool:
        try:
            self._mutelist = mutelist_schema.validate(self._mutelist)
            return True
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- Mutelist YAML is malformed - {error}[{error.__traceback__.tb_lineno}]"
            )
            self._mutelist = {}
            return False

    def is_muted(
        self,
        audited_account: str,
        check: str,
        finding_region: str,
        finding_resource: str,
        finding_tags,
    ) -> bool:
        """
        Check if the provided finding is muted for the audited account, check, region, resource and tags.

        The Mutelist works in a way that each field is ANDed, so if a check is muted for an account, region, resource and tags, it will be muted.
        The exceptions are ORed, so if a check is excepted for an account, region, resource or tags, it will not be muted.
        The only particularity is the tags, which are ORed.

        So, for the following Mutelist:
        ```
        Mutelist:
          Accounts:
            '*':
              Checks:
                ec2_instance_detailed_monitoring_enabled:
                  Regions: ['*']
                  Resources:
                    - 'i-123456789'
                  Tags:
                    - 'Name=AdminInstance | Environment=Prod'
        ```
        The check `ec2_instance_detailed_monitoring_enabled` will be muted for all accounts and regions and for the resource_id 'i-123456789' with at least one of the tags 'Name=AdminInstance' or 'Environment=Prod'.

        Args:
            mutelist (dict): Dictionary containing information about muted checks for different accounts.
            audited_account (str): The account being audited.
            check (str): The check to be evaluated for muting.
            finding_region (str): The region where the finding occurred.
            finding_resource (str): The resource related to the finding.
            finding_tags: The tags associated with the finding.

        Returns:
            bool: True if the finding is muted for the audited account, check, region, resource and tags, otherwise False.
        """
        try:
            # By default is not muted
            is_finding_muted = False

            # We always check all the accounts present in the mutelist
            # if one mutes the finding we set the finding as muted
            for account in self._mutelist.get("Accounts", []):
                if account == audited_account or account == "*":
                    if self.is_muted_in_check(
                        self._mutelist["Accounts"][account]["Checks"],
                        audited_account,
                        check,
                        finding_region,
                        finding_resource,
                        finding_tags,
                    ):
                        is_finding_muted = True
                        break

            return is_finding_muted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False

    def is_muted_in_check(
        self,
        muted_checks,
        audited_account,
        check,
        finding_region,
        finding_resource,
        finding_tags,
    ) -> bool:
        """
        Check if the provided check is muted.

        Args:
            muted_checks (dict): Dictionary containing information about muted checks.
            audited_account (str): The account to be audited.
            check (str): The check to be evaluated for muting.
            finding_region (str): The region where the finding occurred.
            finding_resource (str): The resource related to the finding.
            finding_tags (str): The tags associated with the finding.

        Returns:
            bool: True if the check is muted, otherwise False.
        """
        try:
            # Default value is not muted
            is_check_muted = False

            for muted_check, muted_check_info in muted_checks.items():
                # map lambda to awslambda
                muted_check = re.sub("^lambda", "awslambda", muted_check)

                check_match = (
                    "*" == muted_check
                    or check == muted_check
                    or self.is_item_matched([muted_check], check)
                )

                # Check if the finding is excepted
                exceptions = muted_check_info.get("Exceptions")
                if (
                    self.is_excepted(
                        exceptions,
                        audited_account,
                        finding_region,
                        finding_resource,
                        finding_tags,
                    )
                    and check_match
                ):
                    is_finding_muted = True
                    # Break loop and return default value since is excepted
                    break

            return is_finding_muted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False
                muted_regions = muted_check_info.get("Regions")
                muted_resources = muted_check_info.get("Resources")
                muted_tags = muted_check_info.get("Tags", "*")
                # We need to set the muted_tags if None, "" or [], so the falsy helps
                if not muted_tags:
                    muted_tags = "*"
                # If there is a *, it affects to all checks
                if check_match:
                    muted_in_check = True
                    muted_in_region = self.is_item_matched(
                        muted_regions, finding_region
                    )
                    muted_in_resource = self.is_item_matched(
                        muted_resources, finding_resource
                    )
                    muted_in_tags = self.is_item_matched(
                        muted_tags, finding_tags, tag=True
                    )

                    # For a finding to be muted requires the following set to True:
                    # - muted_in_check -> True
                    # - muted_in_region -> True
                    # - muted_in_tags -> True
                    # - muted_in_resource -> True
                    # - excepted -> False

def is_muted_in_check(
    muted_checks,
    audited_account,
    check,
    finding_region,
    finding_resource,
    finding_tags,
) -> bool:
    """
    Check if the provided check is muted.
                    if (
                        muted_in_check
                        and muted_in_region
                        and muted_in_tags
                        and muted_in_resource
                    ):
                        is_check_muted = True

    Args:
        muted_checks (dict): Dictionary containing information about muted checks.
        audited_account (str): The account to be audited.
        check (str): The check to be evaluated for muting.
        finding_region (str): The region where the finding occurred.
        finding_resource (str): The resource related to the finding.
        finding_tags (str): The tags associated with the finding.

    Returns:
        bool: True if the check is muted, otherwise False.
    """
    try:
        # Default value is not muted
        is_check_muted = False

        for muted_check, muted_check_info in muted_checks.items():
            # map lambda to awslambda
            muted_check = re.sub("^lambda", "awslambda", muted_check)

            check_match = (
                "*" == muted_check
                or check == muted_check
                or re.search(muted_check, check)
            return is_check_muted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            # Check if the finding is excepted
            exceptions = muted_check_info.get("Exceptions")
            if (
                is_excepted(
                    exceptions,
                    audited_account,
                    finding_region,
                    finding_resource,
                    finding_tags,
                )
                and check_match
            ):
                # Break loop and return default value since is excepted
                break
            return False

            muted_regions = muted_check_info.get("Regions")
            muted_resources = muted_check_info.get("Resources")
            muted_tags = muted_check_info.get("Tags", "*")
            # We need to set the muted_tags if None, "" or [], so the falsy helps
            if not muted_tags:
                muted_tags = "*"
            # If there is a *, it affects to all checks
            if check_match:
                muted_in_check = True
                muted_in_region = is_muted_in_region(muted_regions, finding_region)
                muted_in_resource = is_muted_in_resource(
                    muted_resources, finding_resource
                )
                muted_in_tags = is_muted_in_tags(muted_tags, finding_tags)
    def is_excepted(
        self,
        exceptions,
        audited_account,
        finding_region,
        finding_resource,
        finding_tags,
    ) -> bool:
        """
        Check if the provided account, region, resource, and tags are excepted based on the exceptions dictionary.

            # For a finding to be muted requires the following set to True:
            # - muted_in_check -> True
            # - muted_in_region -> True
            # - muted_in_tags -> True
            # - muted_in_resource -> True
            # - excepted -> False
        Args:
            exceptions (dict): Dictionary containing exceptions for different attributes like Accounts, Regions, Resources, and Tags.
            audited_account (str): The account to be audited.
            finding_region (str): The region where the finding occurred.
            finding_resource (str): The resource related to the finding.
            finding_tags (str): The tags associated with the finding.

        Returns:
            bool: True if the account, region, resource, and tags are excepted based on the exceptions, otherwise False.
        """
        try:
            excepted = False
            is_account_excepted = False
            is_region_excepted = False
            is_resource_excepted = False
            is_tag_excepted = False
            if exceptions:
                excepted_accounts = exceptions.get("Accounts", [])
                is_account_excepted = self.is_item_matched(
                    excepted_accounts, audited_account
                )

                excepted_regions = exceptions.get("Regions", [])
                is_region_excepted = self.is_item_matched(
                    excepted_regions, finding_region
                )

                excepted_resources = exceptions.get("Resources", [])
                is_resource_excepted = self.is_item_matched(
                    excepted_resources, finding_resource
                )

                excepted_tags = exceptions.get("Tags", [])
                is_tag_excepted = self.is_item_matched(
                    excepted_tags, finding_tags, tag=True
                )

            if (
                muted_in_check
                and muted_in_region
                and muted_in_tags
                and muted_in_resource
                not is_account_excepted
                and not is_region_excepted
                and not is_resource_excepted
                and not is_tag_excepted
            ):
                is_check_muted = True

            return is_check_muted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False


def is_muted_in_region(
    mutelist_regions,
    finding_region,
) -> bool:
    """
    Check if the finding_region is present in the mutelist_regions.

    Args:
        mutelist_regions (list): List of regions in the mute list.
        finding_region (str): Region to check if it is muted.

    Returns:
        bool: True if the finding_region is muted in any of the mutelist_regions, otherwise False.
    """
    try:
        return __is_item_matched__(mutelist_regions, finding_region)
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def is_muted_in_tags(muted_tags, finding_tags) -> bool:
    """
    Check if any of the muted tags are present in the finding tags.

    Args:
        muted_tags (list): List of muted tags to be checked.
        finding_tags (str): String containing tags to search for muted tags.

    Returns:
        bool: True if any of the muted tags are present in the finding tags, otherwise False.
    """
    try:
        return __is_item_matched__(muted_tags, finding_tags)
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def is_muted_in_resource(muted_resources, finding_resource) -> bool:
    """
    Check if any of the muted_resources are present in the finding_resource.

    Args:
        muted_resources (list): List of muted resources to be checked.
        finding_resource (str): Resource to search for muted resources.

    Returns:
        bool: True if any of the muted_resources are present in the finding_resource, otherwise False.
    """
    try:
        return __is_item_matched__(muted_resources, finding_resource)

    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def is_excepted(
    exceptions,
    audited_account,
    finding_region,
    finding_resource,
    finding_tags,
) -> bool:
    """
    Check if the provided account, region, resource, and tags are excepted based on the exceptions dictionary.

    Args:
        exceptions (dict): Dictionary containing exceptions for different attributes like Accounts, Regions, Resources, and Tags.
        audited_account (str): The account to be audited.
        finding_region (str): The region where the finding occurred.
        finding_resource (str): The resource related to the finding.
        finding_tags (str): The tags associated with the finding.

    Returns:
        bool: True if the account, region, resource, and tags are excepted based on the exceptions, otherwise False.
    """
    try:
        excepted = False
        is_account_excepted = False
        is_region_excepted = False
        is_resource_excepted = False
        is_tag_excepted = False
        if exceptions:
            excepted_accounts = exceptions.get("Accounts", [])
            is_account_excepted = __is_item_matched__(
                excepted_accounts, audited_account
                excepted = False
            elif (
                (is_account_excepted or not excepted_accounts)
                and (is_region_excepted or not excepted_regions)
                and (is_resource_excepted or not excepted_resources)
                and (is_tag_excepted or not excepted_tags)
            ):
                excepted = True
            return excepted
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False

            excepted_regions = exceptions.get("Regions", [])
            is_region_excepted = __is_item_matched__(excepted_regions, finding_region)
    @staticmethod
    def is_item_matched(matched_items, finding_items, tag=False) -> bool:
        """
        Check if any of the items in matched_items are present in finding_items.

            excepted_resources = exceptions.get("Resources", [])
            is_resource_excepted = __is_item_matched__(
                excepted_resources, finding_resource
            )
        Args:
            matched_items (list): List of items to be matched.
            finding_items (str): String to search for matched items.
            tag (bool): If True the search will have a different logic due to the tags being ANDed or ORed:
                - Check of AND logic -> True if all the tags are present in the finding.
                - Check of OR logic -> True if any of the tags is present in the finding.

            excepted_tags = exceptions.get("Tags", [])
            is_tag_excepted = __is_item_matched__(excepted_tags, finding_tags)

        if (
            not is_account_excepted
            and not is_region_excepted
            and not is_resource_excepted
            and not is_tag_excepted
        ):
            excepted = False
        elif (
            (is_account_excepted or not excepted_accounts)
            and (is_region_excepted or not excepted_regions)
            and (is_resource_excepted or not excepted_resources)
            and (is_tag_excepted or not excepted_tags)
        ):
            excepted = True
        return excepted
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False


def __is_item_matched__(matched_items, finding_items):
    """
    Check if any of the items in matched_items are present in finding_items.

    Args:
        matched_items (list): List of items to be matched.
        finding_items (str): String to search for matched items.

    Returns:
        bool: True if any of the matched_items are present in finding_items, otherwise False.
    """
    try:
        is_item_matched = False
        if matched_items and (finding_items or finding_items == ""):
            for item in matched_items:
                if item.startswith("*"):
                    item = ".*" + item[1:]
                if re.search(item, finding_items):
        Returns:
            bool: True if any of the matched_items are present in finding_items, otherwise False.
        """
        try:
            is_item_matched = False
            if matched_items and (finding_items or finding_items == ""):
                if tag:
                    is_item_matched = True
                    break
        return is_item_matched
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
        )
        return False
                for item in matched_items:
                    if item.startswith("*"):
                        item = ".*" + item[1:]
                    if tag:
                        if not re.search(item, finding_items):
                            is_item_matched = False
                            break
                    else:
                        if re.search(item, finding_items):
                            is_item_matched = True
                            break
            return is_item_matched
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__} -- {error}[{error.__traceback__.tb_lineno}]"
            )
            return False
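A hedged usage sketch of the new `Mutelist` class (not part of the diff): `AWSMutelist` is a hypothetical minimal subclass, and the example assumes the content below passes `mutelist_schema` validation.

```python
class AWSMutelist(Mutelist):
    # Minimal concrete subclass; real providers ship their own implementation.
    def is_finding_muted(self) -> bool:
        raise NotImplementedError

mutelist = AWSMutelist(
    mutelist_content={
        "Accounts": {
            "*": {
                "Checks": {
                    "ec2_instance_detailed_monitoring_enabled": {
                        "Regions": ["*"],
                        "Resources": ["i-123456789"],
                        "Tags": ["Name=AdminInstance | Environment=Prod"],
                    }
                }
            }
        }
    }
)
# Check, region and resource all match (fields are ANDed) and at least one
# tag matches (tags are ORed), so the finding is muted.
assert mutelist.is_muted(
    "123456789012",
    "ec2_instance_detailed_monitoring_enabled",
    "eu-west-1",
    "i-123456789",
    "Name=AdminInstance | Environment=Prod",
)
```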
prowler/lib/outputs/asff/asff.py (new file, 405 lines)
@@ -0,0 +1,405 @@
from json import dump
from os import SEEK_SET
from typing import Optional

from pydantic import BaseModel, validator

from prowler.config.config import prowler_version, timestamp_utc
from prowler.lib.logger import logger
from prowler.lib.outputs.finding import Finding
from prowler.lib.outputs.output import Output
from prowler.lib.utils.utils import hash_sha512


class ASFF(Output):
    """
    ASFF class represents a transformation of findings into AWS Security Finding Format (ASFF).

    This class provides methods to transform a list of findings into the ASFF format required by AWS Security Hub. It includes operations such as generating unique identifiers, formatting timestamps, handling compliance frameworks, and ensuring the status values match the allowed values in ASFF.

    Attributes:
        - _data: A list to store the transformed findings.
        - _file_descriptor: A file descriptor to write to file.

    Methods:
        - transform(findings: list[Finding]) -> None: Transforms a list of findings into ASFF format.
        - batch_write_data_to_file() -> None: Writes the findings data to a file in JSON ASFF format.
        - generate_status(status: str, muted: bool = False) -> str: Generates the ASFF status based on the provided status and muted flag.

    References:
        - AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
        - AWS Security Finding Format Syntax: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html
    """

    def transform(self, findings: list[Finding]) -> None:
        """
        Transforms a list of findings into AWS Security Finding Format (ASFF).

        This method iterates over the list of findings provided as input and transforms each finding into the ASFF format required by AWS Security Hub. It performs several operations for each finding, including generating unique identifiers, formatting timestamps, handling compliance frameworks, and ensuring the status values match the allowed values in ASFF.

        Parameters:
            - findings (list[Finding]): A list of Finding objects representing the findings to be transformed.

        Returns:
            - None

        Notes:
            - The method skips findings with a status of "MANUAL" as it is not valid in SecurityHub.
            - It generates unique identifiers for each finding based on specific attributes.
            - It formats timestamps in the required ASFF format.
            - It handles compliance frameworks and associated standards for each finding.
            - It ensures that the finding status matches the allowed values in ASFF.

        References:
            - AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
            - AWS Security Finding Format Syntax: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html
        """
        try:
            for finding in findings:
                # MANUAL status is not valid in SecurityHub
                # https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
                if finding.status == "MANUAL":
                    continue
                timestamp = timestamp_utc.strftime("%Y-%m-%dT%H:%M:%SZ")

                associated_standards, compliance_summary = ASFF.format_compliance(
                    finding.compliance
                )

                # Ensures finding_status matches allowed values in ASFF
                finding_status = ASFF.generate_status(finding.status, finding.muted)
                self._data.append(
                    AWSSecurityFindingFormat(
                        # The following line cannot be changed because it is the format we use to generate unique findings for AWS Security Hub
                        # If changed some findings could be lost because the unique identifier will be different
                        Id=f"prowler-{finding.check_id}-{finding.account_uid}-{finding.region}-{hash_sha512(finding.resource_uid)}",
                        ProductArn=f"arn:{finding.partition}:securityhub:{finding.region}::product/prowler/prowler",
                        ProductFields=ProductFields(
                            ProwlerResourceName=finding.resource_uid,
                        ),
                        GeneratorId="prowler-" + finding.check_id,
                        AwsAccountId=finding.account_uid,
                        Types=(
                            finding.check_type.split(",")
                            if finding.check_type
                            else ["Software and Configuration Checks"]
                        ),
                        FirstObservedAt=timestamp,
                        UpdatedAt=timestamp,
                        CreatedAt=timestamp,
                        Severity=Severity(Label=finding.severity.value),
                        Title=finding.check_title,
                        Description=(
                            (finding.status_extended[:1000] + "...")
                            if len(finding.status_extended) > 1000
                            else finding.status_extended
                        ),
                        Resources=[
                            Resource(
                                Id=finding.resource_uid,
                                Type=finding.resource_type,
                                Partition=finding.partition,
                                Region=finding.region,
                                Tags=finding.resource_tags,
                            )
                        ],
                        Compliance=Compliance(
                            Status=finding_status,
                            AssociatedStandards=associated_standards,
                            RelatedRequirements=compliance_summary,
                        ),
                        Remediation=Remediation(
                            Recommendation=Recommendation(
                                Text=finding.remediation_recommendation_text,
                                Url=finding.remediation_recommendation_url,
                            )
                        ),
                    )
                )

        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )

    def batch_write_data_to_file(self) -> None:
        """
        Writes the findings data to a file in JSON ASFF format.

        This method iterates over the findings data stored in the '_data' attribute and writes it to the file descriptor '_file_descriptor' in JSON format. It starts by writing the JSON opening/header '[', then iterates over each finding, dumping it to the file with an indent of 4 spaces. After writing all findings, it writes the closing ']' to complete the JSON array structure. Finally, it closes the file descriptor.

        Returns:
            None
        """
        try:
            if (
                getattr(self, "_file_descriptor", None)
                and not self._file_descriptor.closed
                and self._data
            ):
                # Write JSON opening/header [
                self._file_descriptor.write("[")

                # Write findings
                for finding in self._data:
                    dump(
                        finding.dict(exclude_none=True),
                        self._file_descriptor,
                        indent=4,
                    )
                    self._file_descriptor.write(",")

                # Write footer/closing ]
                if self._file_descriptor.tell() > 0:
                    if self._file_descriptor.tell() != 1:
                        self._file_descriptor.seek(
                            self._file_descriptor.tell() - 1, SEEK_SET
                        )
                        self._file_descriptor.truncate()
                    self._file_descriptor.write("]")

                # Close file descriptor
                self._file_descriptor.close()
        except Exception as error:
            logger.error(
                f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
            )
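The trailing-comma handling in `batch_write_data_to_file` can be shown in isolation. A minimal sketch with an in-memory buffer (not Prowler code):

```python
from io import StringIO
from os import SEEK_SET

buf = StringIO()
buf.write("[")
for chunk in ('{"a": 1}', '{"b": 2}'):
    buf.write(chunk)
    buf.write(",")
# Seek back one character and truncate to drop the final comma,
# then close the JSON array, exactly as the method above does.
buf.seek(buf.tell() - 1, SEEK_SET)
buf.truncate()
buf.write("]")
assert buf.getvalue() == '[{"a": 1},{"b": 2}]'
```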
    @staticmethod
    def generate_status(status: str, muted: bool = False) -> str:
        """
        Generates the ASFF status based on the provided status and muted flag.

        Parameters:
            - status (str): The status of the finding.
            - muted (bool): Flag indicating if the finding is muted.

        Returns:
            - str: The ASFF status corresponding to the provided status and muted flag.

        References:
            - AWS Security Hub API Reference: https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
        """
        json_asff_status = ""
        if muted:
            # Per AWS Security Hub "MUTED" is not a valid status
            # https://docs.aws.amazon.com/securityhub/1.0/APIReference/API_Compliance.html
            json_asff_status = "WARNING"
        else:
            if status == "PASS":
                json_asff_status = "PASSED"
            elif status == "FAIL":
                json_asff_status = "FAILED"
            else:
                # MANUAL is set to NOT_AVAILABLE
                json_asff_status = "NOT_AVAILABLE"

        return json_asff_status
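Exercised directly, `generate_status` yields the following mapping:

```python
assert ASFF.generate_status("PASS") == "PASSED"
assert ASFF.generate_status("FAIL") == "FAILED"
assert ASFF.generate_status("MANUAL") == "NOT_AVAILABLE"
# Muted findings always map to WARNING, regardless of their status
assert ASFF.generate_status("FAIL", muted=True) == "WARNING"
```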
    @staticmethod
    def format_compliance(compliance: dict) -> tuple[list[dict], list[str]]:
        """
        Transforms a dictionary of compliance data into a tuple of associated standards and compliance summaries.

        Parameters:
            - compliance (dict): A dictionary containing compliance data where keys are standards and values are lists of compliance details.

        Returns:
            - tuple[list[dict], list[str]]: A tuple containing a list of associated standards (each as a dictionary with 'StandardsId') and a list of compliance summaries.

        Notes:
            - The method limits the number of associated standards to 20.
            - Each compliance summary is a concatenation of the standard key and its associated compliance details.
            - If the concatenated summary exceeds 64 characters, it is truncated to 63 characters.

        Example:
            format_compliance({"standard1": ["detail1", "detail2"], "standard2": ["detail3"]}) -> ([{"StandardsId": "standard1"}, {"StandardsId": "standard2"}], ["standard1 detail1 detail2", "standard2 detail3"])
        """
        compliance_summary = []
        associated_standards = []
        for key, value in compliance.items():
            if (
                len(associated_standards) < 20
            ):  # AssociatedStandards should NOT have more than 20 items
                associated_standards.append({"StandardsId": key})
            item = f"{key} {' '.join(value)}"
            if len(item) > 64:
                item = item[0:63]
            compliance_summary.append(item)
        return associated_standards, compliance_summary
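A quick sketch of the 64-character truncation described in the docstring (values are illustrative):

```python
standards, summary = ASFF.format_compliance({"cis_1.5_aws": ["2.1.1"] * 20})
assert standards == [{"StandardsId": "cis_1.5_aws"}]
# "cis_1.5_aws" plus 20 requirement IDs exceeds 64 characters, so the
# summary entry is cut down to 63 characters.
assert len(summary[0]) == 63
```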
class ProductFields(BaseModel):
    """
    Class representing the Product Fields of a finding in the AWS Security Finding Format.

    Attributes:
        - ProviderName (str): The name of the provider, default value is "Prowler".
        - ProviderVersion (str): The version of the provider, fetched from the prowler_version in config.py.
        - ProwlerResourceName (str): The name of the Prowler resource.
    """

    ProviderName: str = "Prowler"
    ProviderVersion: str = prowler_version
    ProwlerResourceName: str


class Severity(BaseModel):
    """
    Class representing the severity of a finding in the AWS Security Finding Format.

    Attributes:
        - Label (str): A string representing the severity label of the finding.

    This class is used to define the severity level of a finding in the AWS Security Finding Format.
    """

    Label: str

    @validator("Label", pre=True, always=True)
    def severity_uppercase(severity):
        return severity.upper()


class Resource(BaseModel):
    """
    Class representing a resource in the AWS Security Finding Format.

    Attributes:
        - Type (str): The type of the resource.
        - Id (str): The unique identifier of the resource.
        - Partition (str): The partition where the resource resides.
        - Region (str): The region where the resource is located.
        - Tags (Optional[dict]): Optional dictionary of tags associated with the resource.

    This class defines the structure of a resource within the AWS Security Finding Format. It includes attributes to specify the type, unique identifier, partition, region, and optional tags of the resource.
    """

    Type: str
    Id: str
    Partition: str
    Region: str
    Tags: Optional[dict]

    @validator("Tags", pre=True, always=True)
    def tags_cannot_be_empty_dict(tags):
        if not tags:
            return None
        return tags


class Compliance(BaseModel):
    """
    Class representing the compliance details of a finding in the AWS Security Finding Format.

    Attributes:
        - Status (str): The compliance status of the finding.
        - RelatedRequirements (list[str]): A list of related compliance requirements for the finding.
        - AssociatedStandards (list[dict]): A list of associated standards with the finding, where each item is a dictionary containing the 'StandardsId'.

    This class defines the structure of compliance information within the AWS Security Finding Format. It includes attributes to specify the compliance status, related requirements, and associated standards of a finding.
    """

    Status: str
    RelatedRequirements: list[str]
    AssociatedStandards: list[dict]

    @validator("Status", pre=True, always=True)
    def status(status):
        if status not in ["PASSED", "WARNING", "FAILED", "NOT_AVAILABLE"]:
            raise ValueError("must be one of PASSED, WARNING, FAILED or NOT_AVAILABLE")
        return status


class Recommendation(BaseModel):
    """
    Class representing a recommendation for remediation in the AWS Security Finding Format.

    Attributes:
        - Text (str): The text description of the recommendation.
        - Url (str): The URL link for additional information related to the recommendation.

    This class defines the structure of a recommendation within the AWS Security Finding Format. It includes attributes to specify the text description and URL link for further details regarding the recommendation.
    """

    Text: str = ""
    Url: str = ""

    @validator("Text", pre=True, always=True)
    def text_must_not_exceed_512_chars(text):
        text_validated = text
        if len(text) > 512:
            text_validated = text[:509] + "..."
        return text_validated

    @validator("Url", pre=True, always=True)
    def set_default_url_if_empty(url):
        default_url = "https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html"
        if url:
            default_url = url
        return default_url


class Remediation(BaseModel):
    """
    Class representing a remediation action in the AWS Security Finding Format.

    Attributes:
        - Recommendation (Recommendation): An instance of the Recommendation class providing details for remediation.

    This class defines the structure of a remediation action within the AWS Security Finding Format. It includes an attribute to specify the recommendation for remediation, which is an instance of the Recommendation class.
    """

    Recommendation: Recommendation


class AWSSecurityFindingFormat(BaseModel):
    """
    AWSSecurityFindingFormat generates a finding's output in JSON ASFF format: https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-findings-format-syntax.html

    Attributes:
        - SchemaVersion (str): The version of the ASFF schema being used, default value is "2018-10-08".
        - Id (str): The unique identifier of the finding.
        - ProductArn (str): The ARN of the product generating the finding.
        - RecordState (str): The state of the finding record, default value is "ACTIVE".
        - ProductFields (ProductFields): An instance of the ProductFields class representing the product fields of the finding.
        - GeneratorId (str): The ID of the generator.
        - AwsAccountId (str): The AWS account ID associated with the finding.
        - Types (list[str]): A list of types associated with the finding, default value is None.
        - FirstObservedAt (str): The timestamp when the finding was first observed.
        - UpdatedAt (str): The timestamp when the finding was last updated.
        - CreatedAt (str): The timestamp when the finding was created.
        - Severity (Severity): An instance of the Severity class representing the severity of the finding.
        - Title (str): The title of the finding.
        - Description (str): The description of the finding, truncated to 1024 characters if longer.
        - Resources (list[Resource]): A list of resources associated with the finding, default value is None.
        - Compliance (Compliance): An instance of the Compliance class representing the compliance details of the finding.
        - Remediation (Remediation): An instance of the Remediation class providing details for remediation.

    This class defines the structure of a finding in the AWS Security Finding Format, including various attributes such as schema version, identifiers, timestamps, severity, title, description, resources, compliance details, and remediation information.
    """

    SchemaVersion: str = "2018-10-08"
    Id: str
    ProductArn: str
    RecordState: str = "ACTIVE"
    ProductFields: ProductFields
    GeneratorId: str
    AwsAccountId: str
    Types: list[str] = None
    FirstObservedAt: str
    UpdatedAt: str
    CreatedAt: str
    Severity: Severity
    Title: str
    Description: str
    Resources: list[Resource] = None
    Compliance: Compliance
    Remediation: Remediation

    @validator("Description", pre=True, always=True)
    def description_must_not_exceed_1024_chars(description):
        description_validated = description
        if len(description) > 1024:
            description_validated = description[:1021] + "..."
        return description_validated
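A hedged construction sketch showing the validators above in action (all values are illustrative):

```python
finding = AWSSecurityFindingFormat(
    Id="prowler-example-finding",
    ProductArn="arn:aws:securityhub:eu-west-1::product/prowler/prowler",
    ProductFields=ProductFields(ProwlerResourceName="arn:aws:s3:::example-bucket"),
    GeneratorId="prowler-example_check",
    AwsAccountId="123456789012",
    FirstObservedAt="2024-01-01T00:00:00Z",
    UpdatedAt="2024-01-01T00:00:00Z",
    CreatedAt="2024-01-01T00:00:00Z",
    Severity=Severity(Label="high"),
    Title="Example check",
    Description="x" * 2000,
    Resources=[
        Resource(
            Id="arn:aws:s3:::example-bucket",
            Type="AwsS3Bucket",
            Partition="aws",
            Region="eu-west-1",
            Tags={},
        )
    ],
    Compliance=Compliance(
        Status="FAILED",
        RelatedRequirements=["CIS 2.1.1"],
        AssociatedStandards=[{"StandardsId": "CIS"}],
    ),
    Remediation=Remediation(Recommendation=Recommendation(Text="", Url="")),
)
assert finding.Severity.Label == "HIGH"   # severity_uppercase
assert len(finding.Description) == 1024   # truncated to 1021 chars plus "..."
assert finding.Resources[0].Tags is None  # empty tag dict becomes None
```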
@@ -2,7 +2,6 @@ from operator import attrgetter

from prowler.config.config import timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.common_models import FindingOutput
from prowler.lib.outputs.utils import unroll_list, unroll_tags
from prowler.lib.utils.utils import outputs_unix_timestamp

@@ -22,87 +21,6 @@ def get_provider_data_mapping(provider) -> dict:
    return data


def generate_provider_output(provider, finding, csv_data) -> FindingOutput:
    """
    generate_provider_output returns the provider's Finding output model
    """
    # TODO: we have to standardize this between the above mapping and the provider.get_output_mapping()
    try:
        if provider.type == "aws":
            # TODO: probably Organization UID is without the account id
            csv_data["auth_method"] = f"profile: {csv_data['auth_method']}"
            csv_data["resource_name"] = finding.resource_id
            csv_data["resource_uid"] = finding.resource_arn
            csv_data["region"] = finding.region

        elif provider.type == "azure":
            # TODO: we should show the authentication method used I think
            csv_data["auth_method"] = (
                f"{provider.identity.identity_type}: {provider.identity.identity_id}"
            )
            # Get the first tenant domain ID, just in case
            csv_data["account_organization_uid"] = csv_data["account_organization_uid"][
                0
            ]
            csv_data["account_uid"] = (
                csv_data["account_organization_uid"]
                if "Tenant:" in finding.subscription
                else provider.identity.subscriptions[finding.subscription]
            )
            csv_data["account_name"] = finding.subscription
            csv_data["resource_name"] = finding.resource_name
            csv_data["resource_uid"] = finding.resource_id
            csv_data["region"] = finding.location

        elif provider.type == "gcp":
            csv_data["auth_method"] = f"Principal: {csv_data['auth_method']}"
            csv_data["account_uid"] = provider.projects[finding.project_id].id
            csv_data["account_name"] = provider.projects[finding.project_id].name
            csv_data["account_tags"] = provider.projects[finding.project_id].labels
            csv_data["resource_name"] = finding.resource_name
            csv_data["resource_uid"] = finding.resource_id
            csv_data["region"] = finding.location

            if (
                provider.projects
                and finding.project_id in provider.projects
                and getattr(provider.projects[finding.project_id], "organization")
            ):
                csv_data["account_organization_uid"] = provider.projects[
                    finding.project_id
                ].organization.id
                # TODO: for now is None since we don't retrieve that data
                csv_data["account_organization"] = provider.projects[
                    finding.project_id
                ].organization.display_name

        elif provider.type == "kubernetes":
            if provider.identity.context == "In-Cluster":
                csv_data["auth_method"] = "in-cluster"
            else:
                csv_data["auth_method"] = "kubeconfig"
            csv_data["resource_name"] = finding.resource_name
            csv_data["resource_uid"] = finding.resource_id
            csv_data["account_name"] = f"context: {provider.identity.context}"
            csv_data["region"] = f"namespace: {finding.namespace}"

        # Finding Unique ID
        # TODO: move this to a function
        # TODO: in Azure, GCP and K8s there are findings without resource_name
        csv_data["finding_uid"] = (
            f"prowler-{provider.type}-{finding.check_metadata.CheckID}-{csv_data['account_uid']}-{csv_data['region']}-{csv_data['resource_name']}"
        )

        finding_output = FindingOutput(**csv_data)

    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )
    else:
        return finding_output


# TODO: add test for outputs_unix_timestamp
def fill_common_finding_data(finding: dict, unix_timestamp: bool) -> dict:
    finding_data = {
@@ -1,77 +0,0 @@
from datetime import datetime
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel

from prowler.config.config import prowler_version


class Status(str, Enum):
    PASS = "PASS"
    FAIL = "FAIL"
    MANUAL = "MANUAL"


class Severity(str, Enum):
    critical = "critical"
    high = "high"
    medium = "medium"
    low = "low"
    informational = "informational"


class FindingOutput(BaseModel):
    """
    FindingOutput generates a finding's output. It can be written to CSV or another format doing the mapping.

    This is the base finding output model for every provider.
    """

    auth_method: str
    timestamp: Union[int, datetime]
    account_uid: str
    # Optional since depends on permissions
    account_name: Optional[str]
    # Optional since depends on permissions
    account_email: Optional[str]
    # Optional since depends on permissions
    account_organization_uid: Optional[str]
    # Optional since depends on permissions
    account_organization_name: Optional[str]
    # Optional since depends on permissions
    account_tags: Optional[list[str]]
    finding_uid: str
    provider: str
    check_id: str
    check_title: str
    check_type: str
    status: Status
    status_extended: str
    muted: bool = False
    service_name: str
    subservice_name: str
    severity: Severity
    resource_type: str
    resource_uid: str
    resource_name: str
    resource_details: str
    resource_tags: str
    # Only present for AWS and Azure
    partition: Optional[str]
    region: str
    description: str
    risk: str
    related_url: str
    remediation_recommendation_text: str
    remediation_recommendation_url: str
    remediation_code_nativeiac: str
    remediation_code_terraform: str
    remediation_code_cli: str
    remediation_code_other: str
    compliance: dict
    categories: str
    depends_on: str
    related_to: str
    notes: str
    prowler_version: str = prowler_version
@@ -0,0 +1,97 @@
from prowler.lib.check.compliance_models import Compliance
from prowler.lib.outputs.compliance.aws_well_architected.models import (
    AWSWellArchitectedModel,
)
from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput
from prowler.lib.outputs.finding import Finding


class AWSWellArchitected(ComplianceOutput):
    """
    This class represents the AWS Well-Architected compliance output.

    Attributes:
        - _data (list): A list to store transformed data from findings.
        - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file.

    Methods:
        - transform: Transforms findings into AWS Well-Architected compliance format.
    """

    def transform(
        self,
        findings: list[Finding],
        compliance: Compliance,
        compliance_name: str,
    ) -> None:
        """
        Transforms a list of findings into AWS Well-Architected compliance format.

        Parameters:
            - findings (list): A list of findings.
            - compliance (Compliance): A compliance model.
            - compliance_name (str): The name of the compliance model.

        Returns:
            - None
        """
        for finding in findings:
            # Get the compliance requirements for the finding
            finding_requirements = finding.compliance.get(compliance_name, [])
            for requirement in compliance.Requirements:
                if requirement.Id in finding_requirements:
                    for attribute in requirement.Attributes:
                        compliance_row = AWSWellArchitectedModel(
                            Provider=finding.provider,
                            Description=compliance.Description,
                            AccountId=finding.account_uid,
                            Region=finding.region,
                            AssessmentDate=str(finding.timestamp),
                            Requirements_Id=requirement.Id,
                            Requirements_Description=requirement.Description,
                            Requirements_Attributes_Name=attribute.Name,
                            Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
                            Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
                            Requirements_Attributes_Section=attribute.Section,
                            Requirements_Attributes_SubSection=attribute.SubSection,
                            Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
                            Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
                            Requirements_Attributes_Description=attribute.Description,
                            Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
                            Status=finding.status,
                            StatusExtended=finding.status_extended,
                            ResourceId=finding.resource_uid,
                            ResourceName=finding.resource_name,
                            CheckId=finding.check_id,
                            Muted=finding.muted,
                        )
                        self._data.append(compliance_row)
        # Add manual requirements to the compliance output
        for requirement in compliance.Requirements:
            if not requirement.Checks:
                for attribute in requirement.Attributes:
                    compliance_row = AWSWellArchitectedModel(
                        Provider=compliance.Provider.lower(),
                        Description=compliance.Description,
                        AccountId="",
                        Region="",
                        AssessmentDate=str(finding.timestamp),
                        Requirements_Id=requirement.Id,
                        Requirements_Description=requirement.Description,
                        Requirements_Attributes_Name=attribute.Name,
                        Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
                        Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
                        Requirements_Attributes_Section=attribute.Section,
                        Requirements_Attributes_SubSection=attribute.SubSection,
                        Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
                        Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
                        Requirements_Attributes_Description=attribute.Description,
                        Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
                        Status="MANUAL",
                        StatusExtended="Manual check",
                        ResourceId="manual_check",
                        ResourceName="Manual check",
                        CheckId="manual",
                        Muted=False,
                    )
                    self._data.append(compliance_row)
@@ -0,0 +1,32 @@
from typing import Optional

from pydantic import BaseModel


class AWSWellArchitectedModel(BaseModel):
    """
    AWSWellArchitectedModel generates a finding's output in AWS Well-Architected Framework format.
    """

    Provider: str
    Description: str
    AccountId: str
    Region: str
    AssessmentDate: str
    Requirements_Id: str
    Requirements_Description: str
    Requirements_Attributes_Name: str
    Requirements_Attributes_WellArchitectedQuestionId: str
    Requirements_Attributes_WellArchitectedPracticeId: str
    Requirements_Attributes_Section: str
    Requirements_Attributes_SubSection: Optional[str]
    Requirements_Attributes_LevelOfRisk: str
    Requirements_Attributes_AssessmentMethod: str
    Requirements_Attributes_Description: str
    Requirements_Attributes_ImplementationGuidanceUrl: str
    Status: str
    StatusExtended: str
    ResourceId: str
    CheckId: str
    Muted: bool
    ResourceName: str
@@ -1,60 +0,0 @@
from csv import DictWriter

from prowler.config.config import timestamp
from prowler.lib.logger import logger
from prowler.lib.outputs.compliance.models import Check_Output_CSV_AWS_Well_Architected
from prowler.lib.outputs.csv.csv import generate_csv_fields
from prowler.lib.utils.utils import outputs_unix_timestamp


def write_compliance_row_aws_well_architected_framework(
    file_descriptors, finding, compliance, output_options, provider
):
    try:
        compliance_output = compliance.Framework
        if compliance.Version != "":
            compliance_output += "_" + compliance.Version
        if compliance.Provider != "":
            compliance_output += "_" + compliance.Provider
        compliance_output = compliance_output.lower().replace("-", "_")
        csv_header = generate_csv_fields(Check_Output_CSV_AWS_Well_Architected)
        csv_writer = DictWriter(
            file_descriptors[compliance_output],
            fieldnames=csv_header,
            delimiter=";",
        )
        for requirement in compliance.Requirements:
            requirement_description = requirement.Description
            requirement_id = requirement.Id
            for attribute in requirement.Attributes:
                compliance_row = Check_Output_CSV_AWS_Well_Architected(
                    Provider=finding.check_metadata.Provider,
                    Description=compliance.Description,
                    AccountId=provider.identity.account,
                    Region=finding.region,
                    AssessmentDate=outputs_unix_timestamp(
                        output_options.unix_timestamp, timestamp
                    ),
                    Requirements_Id=requirement_id,
                    Requirements_Description=requirement_description,
                    Requirements_Attributes_Name=attribute.Name,
                    Requirements_Attributes_WellArchitectedQuestionId=attribute.WellArchitectedQuestionId,
                    Requirements_Attributes_WellArchitectedPracticeId=attribute.WellArchitectedPracticeId,
                    Requirements_Attributes_Section=attribute.Section,
                    Requirements_Attributes_SubSection=attribute.SubSection,
                    Requirements_Attributes_LevelOfRisk=attribute.LevelOfRisk,
                    Requirements_Attributes_AssessmentMethod=attribute.AssessmentMethod,
                    Requirements_Attributes_Description=attribute.Description,
                    Requirements_Attributes_ImplementationGuidanceUrl=attribute.ImplementationGuidanceUrl,
                    Status=finding.status,
                    StatusExtended=finding.status_extended,
                    ResourceId=finding.resource_id,
                    CheckId=finding.check_metadata.CheckID,
                    Muted=finding.muted,
                )

                csv_writer.writerow(compliance_row.__dict__)
    except Exception as error:
        logger.error(
            f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
        )