Compare commits: 4.0.0...dev-memory

552 commits (000b5ec3d5 through cb41c8d15b)
.github/CODEOWNERS (6 changed lines)

@@ -1 +1,5 @@
* @prowler-cloud/prowler-oss
* @prowler-cloud/sdk @prowler-cloud/detection-and-remediation

# To protect a repository fully against unauthorized changes, you also need to define an owner for the CODEOWNERS file itself.
# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#codeowners-and-branch-protection
/.github/ @prowler-cloud/sdk

.github/ISSUE_TEMPLATE/bug_report.yml (3 changed lines)

@@ -1,6 +1,5 @@
name: 🐞 Bug Report
description: Create a report to help us improve
title: "[Bug]: "
labels: ["bug", "status/needs-triage"]

body:

@@ -27,7 +26,7 @@ body:
id: actual
attributes:
label: Actual Result with Screenshots or Logs
description: If applicable, add screenshots to help explain your problem. Also, you can add logs (anonymize them first!). Here a command that may help to share a log `prowler <your arguments> --log-level DEBUG --log-file $(date +%F)_debug.log` then attach here the log file.
description: If applicable, add screenshots to help explain your problem. Also, you can add logs (anonymize them first!). Here a command that may help to share a log `prowler <your arguments> --log-level ERROR --log-file $(date +%F)_error.log` then attach here the log file.
validations:
required: true
- type: dropdown

.github/ISSUE_TEMPLATE/feature-request.yml (3 changed lines)

@@ -1,8 +1,7 @@
name: 💡 Feature Request
name: 💡 Feature Request
description: Suggest an idea for this project
labels: ["feature-request", "status/needs-triage"]

body:
- type: textarea
id: Problem

.github/dependabot.yml (30 changed lines)

@@ -5,10 +5,11 @@
version: 2
updates:
- package-ecosystem: "pip" # See documentation for possible values
directory: "/" # Location of package manifests
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
interval: "daily"
open-pull-requests-limit: 10
target-branch: master
labels:
- "dependencies"

@@ -16,5 +17,26 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
interval: "daily"
open-pull-requests-limit: 10
target-branch: master

- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "dependencies"
- "pip"
- "v3"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
open-pull-requests-limit: 10
target-branch: v3
labels:
- "github_actions"
- "v3"
.github/labeler.yml (54 changed lines)

@@ -25,3 +25,57 @@ provider/kubernetes:
github_actions:
- changed-files:
- any-glob-to-any-file: ".github/workflows/*"

cli:
- changed-files:
- any-glob-to-any-file: "cli/**"

mutelist:
- changed-files:
- any-glob-to-any-file: "prowler/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "prowler/providers/kubernetes/lib/mutelist/**"
- any-glob-to-any-file: "tests/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/aws/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/azure/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/gcp/lib/mutelist/**"
- any-glob-to-any-file: "tests/providers/kubernetes/lib/mutelist/**"

integration/s3:
- changed-files:
- any-glob-to-any-file: "prowler/providers/aws/lib/s3/**"
- any-glob-to-any-file: "tests/providers/aws/lib/s3/**"

integration/slack:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/slack/**"
- any-glob-to-any-file: "tests/lib/outputs/slack/**"

integration/security-hub:
- changed-files:
- any-glob-to-any-file: "prowler/providers/aws/lib/security_hub/**"
- any-glob-to-any-file: "tests/providers/aws/lib/security_hub/**"
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
- any-glob-to-any-file: "tests/lib/outputs/asff/**"

output/html:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/html/**"
- any-glob-to-any-file: "tests/lib/outputs/html/**"

output/asff:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/asff/**"
- any-glob-to-any-file: "tests/lib/outputs/asff/**"

output/ocsf:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/ocsf/**"
- any-glob-to-any-file: "tests/lib/outputs/ocsf/**"

output/csv:
- changed-files:
- any-glob-to-any-file: "prowler/lib/outputs/csv/**"
- any-glob-to-any-file: "tests/lib/outputs/csv/**"

@@ -4,7 +4,7 @@ on:
pull_request:
branches:
- 'master'
- 'prowler-4.0-dev'
- 'v3'
paths:
- 'docs/**'
.github/workflows/build-lint-push-containers.yml (31 changed lines)

@@ -3,10 +3,8 @@ name: build-lint-push-containers
on:
push:
branches:
# TODO: update it for v3 and v4
# Add master after changing branches
- "v3"
- "v4"
- "master"
paths-ignore:
- ".github/**"
- "README.md"

@@ -36,13 +34,16 @@ env:
# TEMPORARY_TAG: temporary

# Python configuration
PYTHON_VERSION: 3.11
PYTHON_VERSION: 3.12

jobs:
# Build Prowler OSS container
container-build-push:
# needs: dockerfile-linter
runs-on: ubuntu-latest
outputs:
prowler_version_major: ${{ steps.get-prowler-version.outputs.PROWLER_VERSION_MAJOR }}
prowler_version: ${{ steps.update-prowler-version.outputs.PROWLER_VERSION }}
env:
POETRY_VIRTUALENVS_CREATE: "false"

@@ -61,21 +62,23 @@ jobs:
pipx inject poetry poetry-bumpversion

- name: Get Prowler version
id: get-prowler-version
run: |
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"

# Store prowler version major just for the release
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"
echo "PROWLER_VERSION_MAJOR=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION_MAJOR=${PROWLER_VERSION_MAJOR}" >> "${GITHUB_OUTPUT}"

case ${PROWLER_VERSION_MAJOR} in
3)
echo "LATEST=v3-latest" >> "${GITHUB_ENV}"
echo "LATEST_TAG=v3-latest" >> "${GITHUB_ENV}"
echo "STABLE_TAG=v3-stable" >> "${GITHUB_ENV}"
;;

4)
echo "LATEST=latest" >> "${GITHUB_ENV}"
echo "LATEST_TAG=latest" >> "${GITHUB_ENV}"
echo "STABLE_TAG=stable" >> "${GITHUB_ENV}"
;;

@@ -87,12 +90,14 @@ jobs:
esac

- name: Update Prowler version (release)
id: update-prowler-version
if: github.event_name == 'release'
run: |
PROWLER_VERSION="${{ github.event.release.tag_name }}"
poetry version "${PROWLER_VERSION}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_ENV}"
echo "PROWLER_VERSION=${PROWLER_VERSION}" >> "${GITHUB_OUTPUT}"

- name: Login to DockerHub
uses: docker/login-action@v3
with:

@@ -113,7 +118,7 @@ jobs:
- name: Build and push container image (latest)
if: github.event_name == 'push'
uses: docker/build-push-action@v5
uses: docker/build-push-action@v6
with:
push: true
tags: |

@@ -125,7 +130,7 @@ jobs:
- name: Build and push container image (release)
if: github.event_name == 'release'
uses: docker/build-push-action@v5
uses: docker/build-push-action@v6
with:
# Use local context to get changes
# https://github.com/docker/build-push-action#path-context

@@ -151,19 +156,19 @@ jobs:
echo "LATEST_COMMIT_HASH=${LATEST_COMMIT_HASH}" >> $GITHUB_ENV

- name: Dispatch event (latest)
if: github.event_name == 'push' && ${{ env. PROWLER_VERSION_MAJOR }} == '3'
if: github.event_name == 'push' && needs.container-build-push.outputs.prowler_version_major == '3'
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'
--data '{"event_type":"dispatch","client_payload":{"version":"v3-latest", "tag": "${{ env.LATEST_COMMIT_HASH }}"}}'

- name: Dispatch event (release)
if: github.event_name == 'release' && ${{ env. PROWLER_VERSION_MAJOR }} == '3'
if: github.event_name == 'release' && needs.container-build-push.outputs.prowler_version_major == '3'
run: |
curl https://api.github.com/repos/${{ secrets.DISPATCH_OWNER }}/${{ secrets.DISPATCH_REPO }}/dispatches \
-H "Accept: application/vnd.github+json" \
-H "Authorization: Bearer ${{ secrets.ACCESS_TOKEN }}" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ env.PROWLER_VERSION }}"}}'
--data '{"event_type":"dispatch","client_payload":{"version":"release", "tag":"${{ needs.container-build-push.outputs.prowler_version }}"}}'
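The version-detection step above combines `poetry version -s` with Bash parameter expansion to derive the major version that later selects the image tags. A minimal local sketch of the same logic, assuming Poetry is installed and the command is run from the repository root:

```console
PROWLER_VERSION="$(poetry version -s 2>/dev/null)"   # e.g. "4.0.0"
PROWLER_VERSION_MAJOR="${PROWLER_VERSION%%.*}"       # strip everything after the first dot -> "4"
echo "${PROWLER_VERSION_MAJOR}"
```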
.github/workflows/codeql.yml (4 changed lines)

@@ -13,10 +13,10 @@ name: "CodeQL"

on:
push:
branches: [ "master", "prowler-4.0-dev" ]
branches: [ "master", "v3" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master", "prowler-4.0-dev" ]
branches: [ "master", "v3" ]
schedule:
- cron: '00 12 * * *'

.github/workflows/find-secrets.yml (3 changed lines)

@@ -11,8 +11,9 @@ jobs:
with:
fetch-depth: 0
- name: TruffleHog OSS
uses: trufflesecurity/trufflehog@v3.71.2
uses: trufflesecurity/trufflehog@v3.80.2
with:
path: ./
base: ${{ github.event.repository.default_branch }}
head: HEAD
extra_args: --only-verified

.github/workflows/labeler.yml (2 changed lines)

@@ -4,7 +4,7 @@ on:
pull_request_target:
branches:
- "master"
- "prowler-4.0-dev"
- "v3"

jobs:
labeler:

.github/workflows/pull-request.yml (6 changed lines)

@@ -4,11 +4,11 @@ on:
push:
branches:
- "master"
- "prowler-4.0-dev"
- "v3"
pull_request:
branches:
- "master"
- "prowler-4.0-dev"
- "v3"
jobs:
build:
runs-on: ubuntu-latest

@@ -73,7 +73,7 @@ jobs:
- name: Safety
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
poetry run safety check
poetry run safety check --ignore 70612
- name: Vulture
if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true'
run: |
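The updated Safety step can be reproduced locally before opening a pull request; a sketch, assuming the project dependencies are already installed with Poetry:

```console
poetry run safety check --ignore 70612
```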
@@ -55,7 +55,7 @@ jobs:
token: ${{ secrets.PROWLER_ACCESS_TOKEN }}
commit-message: "feat(regions_update): Update regions for AWS services."
branch: "aws-services-regions-updated-${{ github.sha }}"
labels: "status/waiting-for-revision, severity/low, provider/aws"
labels: "status/waiting-for-revision, severity/low, provider/aws, backport-v3"
title: "chore(regions_update): Changes in regions for AWS services."
body: |
### Description

@@ -1,7 +1,7 @@
repos:
## GENERAL
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v4.6.0
hooks:
- id: check-merge-conflict
- id: check-yaml

@@ -15,7 +15,7 @@ repos:

## TOML
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.12.0
rev: v2.13.0
hooks:
- id: pretty-format-toml
args: [--autofix]

@@ -23,12 +23,13 @@ repos:

## BASH
- repo: https://github.com/koalaman/shellcheck-precommit
rev: v0.9.0
rev: v0.10.0
hooks:
- id: shellcheck
exclude: contrib
## PYTHON
- repo: https://github.com/myint/autoflake
rev: v2.2.1
rev: v2.3.1
hooks:
- id: autoflake
args:

@@ -45,7 +46,7 @@ repos:
args: ["--profile", "black"]

- repo: https://github.com/psf/black
rev: 24.1.1
rev: 24.4.2
hooks:
- id: black

@@ -57,14 +58,14 @@ repos:
args: ["--ignore=E266,W503,E203,E501,W605"]

- repo: https://github.com/python-poetry/poetry
rev: 1.7.0
rev: 1.8.0
hooks:
- id: poetry-check
- id: poetry-lock
args: ["--no-update"]

- repo: https://github.com/hadolint/hadolint
rev: v2.12.1-beta
rev: v2.13.0-beta
hooks:
- id: hadolint
args: ["--ignore=DL3013"]

@@ -96,7 +97,7 @@ repos:
- id: safety
name: safety
description: "Safety is a tool that checks your installed dependencies for known security vulnerabilities"
entry: bash -c 'safety check'
entry: bash -c 'safety check --ignore 70612'
language: system

- id: vulture

@@ -10,4 +10,4 @@

Want some swag as appreciation for your contribution?

# Prowler Developer Guide
https://docs.prowler.cloud/en/latest/tutorials/developer-guide/
https://docs.prowler.com/projects/prowler-open-source/en/latest/developer-guide/introduction/

@@ -1,4 +1,4 @@
FROM python:3.11-alpine
FROM python:3.12-alpine

LABEL maintainer="https://github.com/prowler-cloud/prowler"

@@ -15,7 +15,8 @@ USER prowler

# Copy necessary files
WORKDIR /home/prowler
COPY prowler/ /home/prowler/prowler/
COPY prowler/ /home/prowler/prowler/
COPY dashboard/ /home/prowler/dashboard/
COPY pyproject.toml /home/prowler
COPY README.md /home/prowler

@@ -26,6 +27,10 @@ ENV PATH="$HOME/.local/bin:$PATH"
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir .

# Remove deprecated dash dependencies
RUN pip uninstall dash-html-components -y && \
pip uninstall dash-core-components -y

# Remove Prowler directory and build files
USER 0
RUN rm -rf /home/prowler/prowler /home/prowler/pyproject.toml /home/prowler/README.md /home/prowler/build /home/prowler/prowler.egg-info
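The Dockerfile changes above move the base image to Python 3.12 and bundle the new dashboard. A hypothetical way to try the published image locally, assuming the image entrypoint is the `prowler` CLI (image name and tags are the ones listed in the README further down in this diff):

```console
docker pull toniblyx/prowler:latest
docker run --rm -ti toniblyx/prowler:latest aws --list-checks
```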
README.md (250 changed lines)

@@ -1,6 +1,6 @@
<p align="center">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png?raw=True#gh-light-mode-only" width="350" height="115">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png?raw=True#gh-dark-mode-only" width="350" height="115">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-black.png#gh-light-mode-only" width="50%" height="50%">
<img align="center" src="https://github.com/prowler-cloud/prowler/blob/master/docs/img/prowler-logo-white.png#gh-dark-mode-only" width="50%" height="50%">
</p>
<p align="center">
<b><i>Prowler SaaS </b> and <b>Prowler Open Source</b> are as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.

@@ -10,11 +10,10 @@
</p>

<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/3985464/3617e470-670c-47c9-9794-ce895ebdb627"></a>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img width="30" height="30" alt="Prowler community on Slack" src="https://github.com/prowler-cloud/prowler/assets/38561120/3c8b4ec5-6849-41a5-b5e1-52bbb94af73a"></a>
<br>
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog">Join our Prowler community!</a>
</p>

<hr>
<p align="center">
<a href="https://join.slack.com/t/prowler-workspace/shared_invite/zt-1hix76xsl-2uq222JIXrC7Q8It~9ZNog"><img alt="Slack Shield" src="https://img.shields.io/badge/slack-prowler-brightgreen.svg?logo=slack"></a>

@@ -38,28 +37,38 @@
<a href="https://twitter.com/prowlercloud"><img alt="Twitter" src="https://img.shields.io/twitter/follow/prowlercloud?style=social"></a>
</p>
<hr>
<p align="center">
<img align="center" src="/docs/img/prowler-cli-quick.gif" width="100%" height="100%">
</p>

# Description

`Prowler` is an Open Source security tool to perform AWS, GCP and Azure security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness.
**Prowler** is an Open Source security tool to perform AWS, Azure, Google Cloud and Kubernetes security best practices assessments, audits, incident response, continuous monitoring, hardening and forensics readiness, and also remediations! We have Prowler CLI (Command Line Interface) that we call Prowler Open Source and a service on top of it that we call <a href="https://prowler.com">Prowler SaaS</a>.

## Prowler CLI

```console
prowler <provider>
```


## Prowler Dashboard

```console
prowler dashboard
```


It contains hundreds of controls covering CIS, NIST 800, NIST CSF, CISA, RBI, FedRAMP, PCI-DSS, GDPR, HIPAA, FFIEC, SOC2, GXP, AWS Well-Architected Framework Security Pillar, AWS Foundational Technical Review (FTR), ENS (Spanish National Security Scheme) and your custom security frameworks.

| Provider | Checks | Services | [Compliance Frameworks](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/compliance/) | [Categories](https://docs.prowler.com/projects/prowler-open-source/en/latest/tutorials/misc/#categories) |
|---|---|---|---|---|
| AWS | 304 | 61 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 6 -> `prowler aws --list-categories` |
| GCP | 75 | 11 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 126 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| AWS | 385 | 67 -> `prowler aws --list-services` | 28 -> `prowler aws --list-compliance` | 7 -> `prowler aws --list-categories` |
| GCP | 77 | 13 -> `prowler gcp --list-services` | 1 -> `prowler gcp --list-compliance` | 2 -> `prowler gcp --list-categories`|
| Azure | 135 | 16 -> `prowler azure --list-services` | 2 -> `prowler azure --list-compliance` | 2 -> `prowler azure --list-categories` |
| Kubernetes | 83 | 7 -> `prowler kubernetes --list-services` | 1 -> `prowler kubernetes --list-compliance` | 7 -> `prowler kubernetes --list-categories` |

# 📖 Documentation

The full documentation can now be found at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)

## Looking for Prowler v2 documentation?
For Prowler v2 Documentation, please go to https://github.com/prowler-cloud/prowler/tree/2.12.1.

# ⚙️ Install
# 💻 Installation

## Pip package
Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-cloud/), thus can be installed using pip with Python >= 3.9, < 3.13:

@@ -68,22 +77,24 @@ Prowler is available as a project in [PyPI](https://pypi.org/project/prowler-clo
pip install prowler
prowler -v
```
More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)
>More details at [https://docs.prowler.com](https://docs.prowler.com/projects/prowler-open-source/en/latest/)

## Containers

The available versions of Prowler are the following:

- `latest`: in sync with master branch (bear in mind that it is not a stable version)
- `latest`: in sync with `master` branch (bear in mind that it is not a stable version)
- `v3-latest`: in sync with `v3` branch (bear in mind that it is not a stable version)
- `<x.y.z>` (release): you can find the releases [here](https://github.com/prowler-cloud/prowler/releases), those are stable releases.
- `stable`: this tag always point to the latest release.
- `v3-stable`: this tag always point to the latest release for v3.

The container images are available here:

- [DockerHub](https://hub.docker.com/r/toniblyx/prowler/tags)
- [AWS Public ECR](https://gallery.ecr.aws/prowler-cloud/prowler)

## From Github
## From GitHub

Python >= 3.9, < 3.13 is required with pip and poetry:

@@ -94,203 +105,32 @@ poetry shell
poetry install
python prowler.py -v
```

> If you want to clone Prowler from Windows, use `git config core.longpaths true` to allow long file paths.
# 📐✏️ High level architecture

You can run Prowler from your workstation, an EC2 instance, Fargate or any other container, Codebuild, CloudShell and Cloud9.
You can run Prowler from your workstation, a Kubernetes Job, a Google Compute Engine, an Azure VM, an EC2 instance, Fargate or any other container, CloudShell and many more.




# 📝 Requirements
# Deprecations from v3

Prowler has been written in Python using the [AWS SDK (Boto3)](https://boto3.amazonaws.com/v1/documentation/api/latest/index.html#), [Azure SDK](https://azure.github.io/azure-sdk-for-python/) and [GCP API Python Client](https://github.com/googleapis/google-api-python-client/).
## AWS
## General
- `Allowlist` now is called `Mutelist`.
- The `--quiet` option has been deprecated, now use the `--status` flag to select the finding's status you want to get from PASS, FAIL or MANUAL.
- All `INFO` finding's status has changed to `MANUAL`.
- The CSV output format is common for all the providers.

Since Prowler uses AWS Credentials under the hood, you can follow any authentication method as described [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-precedence).
Make sure you have properly configured your AWS-CLI with a valid Access Key and Region or declare AWS variables properly (or instance profile/role):

```console
aws configure
```

or

```console
export AWS_ACCESS_KEY_ID="ASXXXXXXX"
export AWS_SECRET_ACCESS_KEY="XXXXXXXXX"
export AWS_SESSION_TOKEN="XXXXXXXXX"
```

Those credentials must be associated to a user or role with proper permissions to do all checks. To make sure, add the following AWS managed policies to the user or role being used:

- `arn:aws:iam::aws:policy/SecurityAudit`
- `arn:aws:iam::aws:policy/job-function/ViewOnlyAccess`

> Moreover, some read-only additional permissions are needed for several checks, make sure you attach also the custom policy [prowler-additions-policy.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-additions-policy.json) to the role you are using.

> If you want Prowler to send findings to [AWS Security Hub](https://aws.amazon.com/security-hub), make sure you also attach the custom policy [prowler-security-hub.json](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-security-hub.json).

## Azure

Prowler for Azure supports the following authentication types:

- Service principal authentication by environment variables (Enterprise Application)
- Current az cli credentials stored
- Interactive browser authentication
- Managed identity authentication

### Service Principal authentication

To allow Prowler assume the service principal identity to start the scan, it is needed to configure the following environment variables:

```console
export AZURE_CLIENT_ID="XXXXXXXXX"
export AZURE_TENANT_ID="XXXXXXXXX"
export AZURE_CLIENT_SECRET="XXXXXXX"
```

If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
### AZ CLI / Browser / Managed Identity authentication

The other three cases do not need additional configuration, `--az-cli-auth` and `--managed-identity-auth` are automated options, `--browser-auth` needs the user to authenticate using the default browser to start the scan. Also `--browser-auth` needs the tenant id to be specified with `--tenant-id`.

### Permissions

To use each one, you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:

- **Azure Active Directory permissions**: Used to retrieve metadata from the identity assumed by Prowler and future AAD checks (not mandatory to have access to execute the tool)
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.


#### Azure Active Directory scope

Azure Active Directory (AAD) permissions required by the tool are the following:

- `Directory.Read.All`
- `Policy.Read.All`


#### Subscriptions scope

Regarding the subscription scope, Prowler by default scans all the subscriptions that is able to list, so it is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:

- `Security Reader`
- `Reader`


## Google Cloud Platform

Prowler will follow the same credentials search as [Google authentication libraries](https://cloud.google.com/docs/authentication/application-default-credentials#search_order):

1. [GOOGLE_APPLICATION_CREDENTIALS environment variable](https://cloud.google.com/docs/authentication/application-default-credentials#GAC)
2. [User credentials set up by using the Google Cloud CLI](https://cloud.google.com/docs/authentication/application-default-credentials#personal)
3. [The attached service account, returned by the metadata server](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa)

Those credentials must be associated to a user or service account with proper permissions to do all checks. To make sure, add the `Viewer` role to the member associated with the credentials.

> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.

# 💻 Basic Usage

To run prowler, you will need to specify the provider (e.g aws or azure):

```console
prowler <provider>
```



> Running the `prowler` command without options will use your environment variable credentials.

By default, prowler will generate a CSV, a JSON and a HTML report, however you can generate JSON-ASFF (only for AWS Security Hub) report with `-M` or `--output-modes`:

```console
prowler <provider> -M csv json json-asff html
```

The html report will be located in the `output` directory as the other files and it will look like:



You can use `-l`/`--list-checks` or `--list-services` to list all available checks or services within the provider.

```console
prowler <provider> --list-checks
prowler <provider> --list-services
```

For executing specific checks or services you can use options `-c`/`--checks` or `-s`/`--services`:

```console
prowler aws --checks s3_bucket_public_access
prowler aws --services s3 ec2
```

Also, checks and services can be excluded with options `-e`/`--excluded-checks` or `--excluded-services`:

```console
prowler aws --excluded-checks s3_bucket_public_access
prowler aws --excluded-services s3 ec2
```

You can always use `-h`/`--help` to access to the usage information and all the possible options:

```console
prowler -h
```

## Checks Configurations
Several Prowler's checks have user configurable variables that can be modified in a common **configuration file**.
This file can be found in the following path:
```
prowler/config/config.yaml
```
We have deprecated some of our outputs formats:
- The native JSON is replaced for the JSON [OCSF](https://schema.ocsf.io/) v1.1.0, common for all the providers.

## AWS
- Deprecate the AWS flag --sts-endpoint-region since we use AWS STS regional tokens.
- To send only FAILS to AWS Security Hub, now use either `--send-sh-only-fails` or `--security-hub --status FAIL`.

Use a custom AWS profile with `-p`/`--profile` and/or AWS regions which you want to audit with `-f`/`--filter-region`:

```console
prowler aws --profile custom-profile -f us-east-1 eu-south-2
```
> By default, `prowler` will scan all AWS regions.
# 📖 Documentation

## Azure

With Azure you need to specify which auth method is going to be used:

```console
prowler azure [--sp-env-auth, --az-cli-auth, --browser-auth, --managed-identity-auth]
```
> By default, `prowler` will scan all Azure subscriptions.

## Google Cloud Platform

Optionally, you can provide the location of an application credential JSON file with the following argument:

```console
prowler gcp --credentials-file path
```
> By default, `prowler` will scan all accessible GCP Projects, use flag `--project-ids` to specify the projects to be scanned.

## Kubernetes

For non in-cluster execution, you can provide the location of the KubeConfig file with the following argument:

```console
prowler kubernetes --kubeconfig-file path
```

For in-cluster execution, you can use the supplied yaml to run Prowler as a job:
```console
kubectl apply -f job.yaml
kubectl apply -f prowler-role.yaml
kubectl apply -f prowler-rolebinding.yaml
kubectl get pods --> prowler-XXXXX
kubectl logs prowler-XXXXX
```

> By default, `prowler` will scan all namespaces in your active Kubernetes context, use flag `--context` to specify the context to be scanned and `--namespaces` to specify the namespaces to be scanned.
Install, Usage, Tutorials and Developer Guide is at https://docs.prowler.com/

# 📃 License
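The flags documented in the README above can be combined in a single run; a hypothetical invocation built only from options shown there:

```console
prowler aws --profile custom-profile -f us-east-1 --status FAIL -M csv html
```

This would scan one region with a named AWS profile, keep only failed findings, and write CSV and HTML reports to the `output` directory.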
@@ -12,7 +12,7 @@ As an **AWS Partner** and we have passed the [AWS Foundation Technical Review (FT

## Reporting a Vulnerability

If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or ProwlerPro service, please submit the information by contacting to help@prowler.pro.
If you would like to report a vulnerability or have a security concern regarding Prowler Open Source or ProwlerPro service, please submit the information by contacting to https://support.prowler.com.

The information you share with ProwlerPro as part of this process is kept confidential within ProwlerPro. We will only share this information with a third party if the vulnerability you report is found to affect a third-party product, in which case we will share this information with the third-party product's author or manufacturer. Otherwise, we will only share this information as permitted by you.

@@ -14,4 +14,4 @@ cd ~ || exit
python3.9 -m pip install prowler-cloud
prowler -v
# Run Prowler
prowler
prowler aws

contrib/aws/cloudshell/cloudshell-installation.sh (new file, 8 lines)

@@ -0,0 +1,8 @@
#!/bin/bash

sudo bash
adduser prowler
su prowler
pip install prowler
cd /tmp
prowler aws

@@ -212,6 +212,7 @@ Resources:
- appstream:Describe*
- codeartifact:List*
- codebuild:BatchGet*
- cognito-idp:GetUserPoolMfaConfig
- ds:Get*
- ds:Describe*
- ds:List*

@@ -1,17 +0,0 @@
#!/bin/bash

# Install system dependencies
sudo yum -y install openssl-devel bzip2-devel libffi-devel gcc
# Upgrade to Python 3.9
cd /tmp && wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz
tar zxf Python-3.9.13.tgz
cd Python-3.9.13/ || exit
./configure --enable-optimizations
sudo make altinstall
python3.9 --version
# Install Prowler
cd ~ || exit
python3.9 -m pip install prowler-cloud
prowler -v
# Run Prowler
prowler
contrib/gcp/enable_apis_in_projects.sh (new executable file, 47 lines)

@@ -0,0 +1,47 @@
#!/bin/bash

# List of project IDs
PROJECT_IDS=(
"project-id-1"
"project-id-2"
"project-id-3"
# Add more project IDs as needed
)

# List of Prowler APIs to enable
APIS=(
"apikeys.googleapis.com"
"artifactregistry.googleapis.com"
"bigquery.googleapis.com"
"sqladmin.googleapis.com" # Cloud SQL
"storage.googleapis.com" # Cloud Storage
"compute.googleapis.com"
"dataproc.googleapis.com"
"dns.googleapis.com"
"containerregistry.googleapis.com" # GCR (Google Container Registry)
"container.googleapis.com" # GKE (Google Kubernetes Engine)
"iam.googleapis.com"
"cloudkms.googleapis.com" # KMS (Key Management Service)
"logging.googleapis.com"
)

# Function to enable APIs for a given project
enable_apis_for_project() {
local PROJECT_ID=$1

echo "Enabling APIs for project: ${PROJECT_ID}"

for API in "${APIS[@]}"; do
echo "Enabling API: $API for project: ${PROJECT_ID}"
if gcloud services enable "${API}" --project="${PROJECT_ID}"; then
echo "Successfully enabled API $API for project ${PROJECT_ID}."
else
echo "Failed to enable API $API for project ${PROJECT_ID}."
fi
done
}

# Loop over each project and enable the APIs
for PROJECT_ID in "${PROJECT_IDS[@]}"; do
enable_apis_for_project "${PROJECT_ID}"
done
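A possible way to use the script above, assuming `gcloud` is already authenticated against the target projects: edit the `PROJECT_IDS` array first, then run it from the repository root.

```console
chmod +x contrib/gcp/enable_apis_in_projects.sh
./contrib/gcp/enable_apis_in_projects.sh
```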
contrib/k8s/helm/.helmignore (new file, 23 lines)

@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

contrib/k8s/helm/Chart.yaml (new file, 24 lines)

@@ -0,0 +1,24 @@
apiVersion: v2
name: prowler
description: Prowler Security Tool Helm chart for Kubernetes

# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application

# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.1

# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"
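Before installing, the chart metadata and templates added in this change can be validated with Helm's built-in linter; a sketch, assuming the repository is checked out locally:

```console
helm lint contrib/k8s/helm
```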
contrib/k8s/helm/README.md (new file, 78 lines)

@@ -0,0 +1,78 @@
# prowler

  

Prowler Security Tool Helm chart for Kubernetes

# Prowler Helm Chart Deployment

This guide provides step-by-step instructions for deploying the Prowler Helm chart.

## Prerequisites

Before you begin, ensure you have the following:

1. A running Kubernetes cluster.
2. Helm installed on your local machine. If you don't have Helm installed, you can follow the [Helm installation guide](https://helm.sh/docs/intro/install/).
3. Proper access to your Kubernetes cluster (e.g., `kubectl` is configured and working).

## Deployment Steps

### 1. Clone the Repository

Clone the repository containing the Helm chart to your local machine.

```sh
git clone git@github.com:prowler-cloud/prowler.git
cd prowler/contrib/k8s/helm
```

### 2. Deploy the helm chart

```
helm install prowler .
```

### 3. Verify the deployment

```
helm status prowler
kubectl get all -n prowler-ns
```

### 4. Clean Up
To uninstall the Helm release and clean up the resources, run:

```helm uninstall prowler
kubectl delete namespace prowler-ns
```

## Values

| Key | Type | Default | Description |
|-----|------|---------|-------------|
| clusterRole.name | string | `"prowler-read-cluster"` | |
| clusterRoleBinding.name | string | `"prowler-read-cluster-binding"` | |
| configMap.name | string | `"prowler-hostpaths"` | |
| configMapData.etcCniNetd | string | `"/etc/cni/net.d"` | |
| configMapData.etcKubernetes | string | `"/etc/kubernetes"` | |
| configMapData.etcSystemd | string | `"/etc/systemd"` | |
| configMapData.libSystemd | string | `"/lib/systemd"` | |
| configMapData.optCniBin | string | `"/opt/cni/bin"` | |
| configMapData.usrBin | string | `"/usr/bin"` | |
| configMapData.varLibCni | string | `"/var/lib/cni"` | |
| configMapData.varLibEtcd | string | `"/var/lib/etcd"` | |
| configMapData.varLibKubeControllerManager | string | `"/var/lib/kube-controller-manager"` | |
| configMapData.varLibKubeScheduler | string | `"/var/lib/kube-scheduler"` | |
| configMapData.varLibKubelet | string | `"/var/lib/kubelet"` | |
| cronjob.hostPID | bool | `true` | |
| cronjob.name | string | `"prowler"` | |
| cronjob.schedule | string | `"0 0 * * *"` | |
| image.pullPolicy | string | `"Always"` | |
| image.repository | string | `"toniblyx/prowler"` | |
| image.tag | string | `"stable"` | |
| namespace.name | string | `"prowler"` | |
| serviceAccount.name | string | `"prowler"` | |

----------------------------------------------
Autogenerated from chart metadata using [helm-docs v1.11.3](https://github.com/norwoodj/helm-docs/releases/v1.11.3)
contrib/k8s/helm/templates/cluster-role.yaml (new file, 11 lines)

@@ -0,0 +1,11 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: {{ .Values.clusterRole.name }}
rules:
- apiGroups: [""]
resources: ["pods", "configmaps", "nodes", "namespaces"]
verbs: ["get", "list", "watch"]
- apiGroups: ["rbac.authorization.k8s.io"]
resources: ["clusterrolebindings", "rolebindings", "clusterroles", "roles"]
verbs: ["get", "list", "watch"]

contrib/k8s/helm/templates/cm.yaml (new file, 18 lines)

@@ -0,0 +1,18 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ .Values.configMap.name }}
namespace: {{ .Values.namespace.name }}
data:
varLibCni: "{{ .Values.configMap.data.varLibCni }}"
varLibEtcd: "{{ .Values.configMap.data.varLibEtcd }}"
varLibKubelet: "{{ .Values.configMap.data.varLibKubelet }}"
varLibKubeScheduler: "{{ .Values.configMap.data.varLibKubeScheduler }}"
varLibKubeControllerManager: "{{ .Values.configMap.data.varLibKubeControllerManager }}"
etcSystemd: "{{ .Values.configMap.data.etcSystemd }}"
libSystemd: "{{ .Values.configMap.data.libSystemd }}"
etcKubernetes: "{{ .Values.configMap.data.etcKubernetes }}"
usrBin: "{{ .Values.configMap.data.usrBin }}"
etcCniNetd: "{{ .Values.configMap.data.etcCniNetd }}"
optCniBin: "{{ .Values.configMap.data.optCniBin }}"
srvKubernetes: "{{ .Values.configMap.data.srvKubernetes }}"

contrib/k8s/helm/templates/job.yaml (new file, 42 lines)

@@ -0,0 +1,42 @@
apiVersion: batch/v1
kind: CronJob
metadata:
name: {{ .Values.cronjob.name }}
namespace: {{ .Values.namespace.name }}
spec:
schedule: "{{ .Values.cronjob.schedule }}"
jobTemplate:
spec:
template:
metadata:
labels:
app: prowler
spec:
serviceAccountName: {{ .Values.serviceAccount.name }}
containers:
- name: prowler
image: {{ .Values.image.repository }}:{{ .Values.image.tag }}
command: ["prowler"]
args: ["kubernetes", "-z", "-b"]
imagePullPolicy: {{ .Values.image.pullPolicy }}
volumeMounts:
{{- range $key, $value := .Values.configMap.data }}
{{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
{{- else }}
- name: {{ $key | lower }}
mountPath: {{ $value }}
readOnly: true
{{- end }}
{{- end }}
hostPID: {{ .Values.cronjob.hostPID }}
restartPolicy: Never
volumes:
{{- range $key, $value := .Values.configMap.data }}
{{- if and (eq $.Values.clusterType "gke") (eq $key "srvKubernetes") }}
{{- else }}
- name: {{ $key | lower }}
hostPath:
path: {{ $value }}
{{- end }}
{{- end }}

contrib/k8s/helm/templates/namespace.yaml (new file, 4 lines)

@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: {{ .Values.namespace.name }}

contrib/k8s/helm/templates/role-binding.yaml (new file, 12 lines)

@@ -0,0 +1,12 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: {{ .Values.clusterRoleBinding.name }}
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: {{ .Values.clusterRole.name }}
subjects:
- kind: ServiceAccount
name: {{ .Values.serviceAccount.name }}
namespace: {{ .Values.namespace.name }}

contrib/k8s/helm/templates/sa.yaml (new file, 5 lines)

@@ -0,0 +1,5 @@
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ .Values.serviceAccount.name }}
namespace: {{ .Values.namespace.name }}
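Once the templates above are rendered and applied, the read-only RBAC and the scheduled job can be sanity-checked with `kubectl`; a hypothetical verification using the default names from the `values.yaml` shown next:

```console
kubectl describe clusterrole prowler-read-cluster
kubectl auth can-i list pods --as=system:serviceaccount:prowler-ns:prowler-sa
kubectl get cronjob -n prowler-ns
```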
contrib/k8s/helm/values.yaml (new file, 40 lines)

@@ -0,0 +1,40 @@
namespace:
name: prowler-ns

cronjob:
name: prowler
schedule: "0 0 * * *"
hostPID: true

serviceAccount:
name: prowler-sa

image:
repository: toniblyx/prowler
tag: stable
pullPolicy: Always

clusterType:

configMap:
name: prowler-config
data:
varLibCni: "/var/lib/cni"
varLibEtcd: "/var/lib/etcd"
varLibKubelet: "/var/lib/kubelet"
varLibKubeScheduler: "/var/lib/kube-scheduler"
varLibKubeControllerManager: "/var/lib/kube-controller-manager"
etcSystemd: "/etc/systemd"
libSystemd: "/lib/systemd"
etcKubernetes: "/etc/kubernetes"
usrBin: "/usr/bin"
etcCniNetd: "/etc/cni/net.d"
optCniBin: "/opt/cni/bin"
srvKubernetes: "/srv/kubernetes"

clusterRole:
name: prowler-read-cluster

clusterRoleBinding:
name: prowler-read-cluster-binding
roleName: prowler-read-cluster
2
dashboard/__init__.py
Normal file
@@ -0,0 +1,2 @@
DASHBOARD_PORT = 11666
DASHBOARD_ARGS = {"debug": True, "port": DASHBOARD_PORT, "use_reloader": False}
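For context, a minimal, hypothetical sketch of how these two constants could be consumed. The app object and the `run(**DASHBOARD_ARGS)` forwarding are assumptions for illustration only, not the project's actual entry point.

```python
# Hypothetical launcher sketch -- names and wiring are illustrative only.
import dash
from dash import html

from dashboard import DASHBOARD_ARGS  # {"debug": True, "port": 11666, "use_reloader": False}

app = dash.Dash(__name__)
app.layout = html.Div("placeholder layout")

if __name__ == "__main__":
    # Dash forwards these keyword arguments to the underlying Flask development server.
    app.run(**DASHBOARD_ARGS)
```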
@@ -16,18 +16,18 @@ from prowler.lib.banner import print_banner
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
cli = sys.modules["flask.cli"]
|
||||
print_banner(verbose=False)
|
||||
print_banner()
|
||||
print(
|
||||
f"{Fore.GREEN}Loading all CSV files from the folder {folder_path_overview} ...\n{Style.RESET_ALL}"
|
||||
)
|
||||
cli.show_server_banner = lambda *x: click.echo(
|
||||
f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are a {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} customer and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
|
||||
f"{Fore.YELLOW}NOTE:{Style.RESET_ALL} If you are using {Fore.GREEN}{Style.BRIGHT}Prowler SaaS{Style.RESET_ALL} with the S3 integration or that integration \nfrom {Fore.CYAN}{Style.BRIGHT}Prowler Open Source{Style.RESET_ALL} and you want to use your data from your S3 bucket,\nrun: `{orange_color}aws s3 cp s3://<your-bucket>/output/csv ./output --recursive{Style.RESET_ALL}`\nand then run `prowler dashboard` again to load the new files."
|
||||
)
|
||||
|
||||
# Initialize the app - incorporate css
|
||||
dashboard = dash.Dash(
|
||||
__name__,
|
||||
external_stylesheets=[dbc.themes.DARKLY],
|
||||
external_stylesheets=[dbc.themes.FLATLY],
|
||||
use_pages=True,
|
||||
suppress_callback_exceptions=True,
|
||||
title="Prowler Dashboard",
|
||||
@@ -60,7 +60,9 @@ def generate_nav_links(current_path):
|
||||
link_content = html.Span(
|
||||
[
|
||||
html.Img(src=icon_url, className="w-5"),
|
||||
html.Span(page["name"], className="font-medium text-base leading-6"),
|
||||
html.Span(
|
||||
page["name"], className="font-medium text-base leading-6 text-white"
|
||||
),
|
||||
],
|
||||
className="flex justify-center lg:justify-normal items-center gap-x-3 py-2 px-3",
|
||||
)
|
||||
@@ -96,7 +98,8 @@ def generate_help_menu():
|
||||
[
|
||||
html.Img(src=link["icon"], className="w-5"),
|
||||
html.Span(
|
||||
link["title"], className="font-medium text-base leading-6"
|
||||
link["title"],
|
||||
className="font-medium text-base leading-6 text-white",
|
||||
),
|
||||
],
|
||||
className="flex items-center gap-x-3 py-2 px-3",
|
||||
@@ -160,7 +163,7 @@ def update_nav_bar(pathname):
|
||||
html.Img(src="assets/favicon.ico", className="w-5"),
|
||||
"Subscribe to prowler SaaS",
|
||||
],
|
||||
className="flex items-center gap-x-3",
|
||||
className="flex items-center gap-x-3 text-white",
|
||||
),
|
||||
],
|
||||
href="https://prowler.com/",
|
||||
|
||||
|
Before Width: | Height: | Size: 15 KiB After Width: | Height: | Size: 15 KiB |
1
dashboard/assets/images/icons/arrows.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" shape-rendering="geometricPrecision" text-rendering="geometricPrecision" image-rendering="optimizeQuality" fill-rule="evenodd" clip-rule="evenodd" viewBox="0 0 443 511.62"><path fill-rule="nonzero" d="M152.93 286.97c0 17.1-13.87 30.97-30.97 30.97-17.11 0-30.98-13.87-30.98-30.97v-177.4l-37.45 40.31c-11.63 12.5-31.19 13.2-43.68 1.57-12.49-11.62-13.19-31.18-1.57-43.68L99.33 9.79l2.06-1.94c12.69-11.35 32.2-10.26 43.55 2.43l91.05 101.47c11.35 12.69 10.26 32.2-2.43 43.55-12.68 11.36-32.19 10.27-43.55-2.42l-37.08-41.33v175.42zm236.24 71.77c11.35-12.69 30.86-13.78 43.55-2.43 12.69 11.36 13.78 30.87 2.42 43.56L344.1 501.34c-11.36 12.69-30.87 13.78-43.55 2.42l-2.02-1.97-91.09-97.95c-11.63-12.49-10.93-32.05 1.57-43.67 12.49-11.63 32.05-10.93 43.67 1.57l37.46 40.31V231.53c0-17.11 13.87-30.97 30.97-30.97s30.97 13.86 30.97 30.97v168.54l37.09-41.33z"/></svg>
|
||||
|
After Width: | Height: | Size: 896 B |
1
dashboard/assets/images/icons/dropdown.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg class="svg-icon" style="width: 1.001953125em; height: 1em;vertical-align: middle;fill: currentColor;overflow: hidden;" viewBox="0 0 1026 1024" version="1.1" xmlns="http://www.w3.org/2000/svg"><path d="M1013.7 90.8C997.8 75.5 972.4 76 957.1 92L510.9 557.1 73.2 90.8C58 74.7 32.7 73.9 16.6 89 0.5 104.1-0.3 129.4 14.8 145.5l466.6 497.1 1.5 1.5c0.2 0.2 0.4 0.4 0.7 0.6 0.3 0.3 0.6 0.5 0.9 0.8 0.3 0.3 0.6 0.5 0.9 0.7 0.2 0.2 0.4 0.4 0.7 0.6 0.3 0.2 0.6 0.5 0.9 0.7 0.2 0.2 0.5 0.4 0.7 0.5l0.9 0.6c0.3 0.2 0.5 0.4 0.8 0.5 0.3 0.2 0.6 0.3 0.9 0.5 0.3 0.2 0.6 0.3 0.9 0.5 0.3 0.2 0.5 0.3 0.8 0.4 0.3 0.2 0.6 0.3 1 0.5 0.3 0.1 0.5 0.3 0.8 0.4 0.3 0.2 0.7 0.3 1 0.5 0.2 0.1 0.5 0.2 0.7 0.3 0.4 0.2 0.7 0.3 1.1 0.4 0.2 0.1 0.5 0.2 0.7 0.3 0.4 0.1 0.8 0.3 1.2 0.4 0.2 0.1 0.5 0.1 0.7 0.2l1.2 0.3c0.2 0.1 0.4 0.1 0.7 0.2 0.4 0.1 0.8 0.2 1.3 0.3 0.2 0 0.4 0.1 0.6 0.1 0.4 0.1 0.9 0.2 1.3 0.2 0.2 0 0.4 0.1 0.6 0.1 0.5 0.1 0.9 0.1 1.4 0.2 0.2 0 0.4 0 0.6 0.1 0.5 0 1 0.1 1.5 0.1h4.6c0.5 0 1-0.1 1.5-0.1 0.2 0 0.4 0 0.5-0.1 0.5 0 0.9-0.1 1.4-0.2 0.2 0 0.4-0.1 0.6-0.1 0.4-0.1 0.9-0.1 1.3-0.2 0.2 0 0.4-0.1 0.6-0.1l1.2-0.3c0.2-0.1 0.4-0.1 0.7-0.2l1.2-0.3c0.2-0.1 0.5-0.1 0.7-0.2 0.4-0.1 0.8-0.2 1.1-0.4 0.2-0.1 0.5-0.2 0.7-0.3 0.4-0.1 0.7-0.3 1.1-0.4 0.3-0.1 0.5-0.2 0.8-0.3 0.3-0.1 0.7-0.3 1-0.5 0.3-0.1 0.5-0.2 0.8-0.4 0.3-0.2 0.6-0.3 0.9-0.5 0.3-0.1 0.6-0.3 0.8-0.4 0.3-0.2 0.6-0.3 0.8-0.5 0.3-0.2 0.6-0.3 0.9-0.5 0.3-0.2 0.5-0.3 0.8-0.5l0.9-0.6c0.2-0.2 0.4-0.3 0.7-0.5 0.3-0.2 0.6-0.5 1-0.7 0.2-0.1 0.4-0.3 0.6-0.5 0.3-0.3 0.7-0.5 1-0.8 0.2-0.1 0.3-0.3 0.5-0.5 0.5-0.5 1-0.9 1.5-1.4l0.9-0.9 475.4-495.6c15.3-15.7 14.7-41.1-1.2-56.3z" fill="#898989" /></svg>
|
||||
|
After Width: | Height: | Size: 1.6 KiB |
265
dashboard/assets/styles/dist/output.css
vendored
@@ -5,7 +5,7 @@
|
||||
/* Use this file to add custom styles using Tailwind's utility classes. */
|
||||
|
||||
/*
|
||||
! tailwindcss v3.4.1 | MIT License | https://tailwindcss.com */
|
||||
! tailwindcss v3.4.3 | MIT License | https://tailwindcss.com */
|
||||
|
||||
/*
|
||||
1. Prevent padding and border from affecting element width. (https://github.com/mozdevs/cssremedy/issues/4)
|
||||
@@ -216,6 +216,8 @@ textarea {
|
||||
/* 1 */
|
||||
line-height: inherit;
|
||||
/* 1 */
|
||||
letter-spacing: inherit;
|
||||
/* 1 */
|
||||
color: inherit;
|
||||
/* 1 */
|
||||
margin: 0;
|
||||
@@ -239,9 +241,9 @@ select {
|
||||
*/
|
||||
|
||||
button,
|
||||
[type='button'],
|
||||
[type='reset'],
|
||||
[type='submit'] {
|
||||
input:where([type='button']),
|
||||
input:where([type='reset']),
|
||||
input:where([type='submit']) {
|
||||
-webkit-appearance: button;
|
||||
/* 1 */
|
||||
background-color: transparent;
|
||||
@@ -497,6 +499,10 @@ video {
|
||||
--tw-backdrop-opacity: ;
|
||||
--tw-backdrop-saturate: ;
|
||||
--tw-backdrop-sepia: ;
|
||||
--tw-contain-size: ;
|
||||
--tw-contain-layout: ;
|
||||
--tw-contain-paint: ;
|
||||
--tw-contain-style: ;
|
||||
}
|
||||
|
||||
::backdrop {
|
||||
@@ -547,113 +553,18 @@ video {
|
||||
--tw-backdrop-opacity: ;
|
||||
--tw-backdrop-saturate: ;
|
||||
--tw-backdrop-sepia: ;
|
||||
--tw-contain-size: ;
|
||||
--tw-contain-layout: ;
|
||||
--tw-contain-paint: ;
|
||||
--tw-contain-style: ;
|
||||
}
|
||||
|
||||
.custom-grid {
|
||||
grid-template-columns: minmax(0, 16fr) repeat(11, minmax(0, 11fr));
|
||||
}
|
||||
|
||||
/* Styles for the table in the overview page */
|
||||
|
||||
.table-overview thead {
|
||||
display: table;
|
||||
width: 100%;
|
||||
table-layout: fixed;
|
||||
}
|
||||
|
||||
.table-overview tbody {
|
||||
-ms-overflow-style: none;
|
||||
/* IE and Edge */
|
||||
scrollbar-width: none;
|
||||
/* Firefox */
|
||||
}
|
||||
|
||||
.table-overview tbody tr {
|
||||
display: table;
|
||||
width: 100%;
|
||||
table-layout: fixed;
|
||||
}
|
||||
|
||||
/* Styles for thead */
|
||||
|
||||
.table-overview th {
|
||||
--tw-bg-opacity: 1;
|
||||
background-color: rgb(41 37 36 / var(--tw-bg-opacity));
|
||||
padding-top: 0.75rem;
|
||||
padding-bottom: 0.75rem;
|
||||
font-size: 0.875rem;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.table-overview td {
|
||||
--tw-bg-opacity: 1;
|
||||
background-color: rgb(255 255 255 / var(--tw-bg-opacity));
|
||||
padding-top: 0.5rem;
|
||||
padding-bottom: 0.5rem;
|
||||
font-size: 0.875rem;
|
||||
font-weight: 700;
|
||||
--tw-text-opacity: 1;
|
||||
color: rgb(41 37 36 / var(--tw-text-opacity));
|
||||
}
|
||||
|
||||
/* Check ID */
|
||||
|
||||
.table-overview td:nth-child(1),
|
||||
.table-overview th:nth-child(1) {
|
||||
width: 52%;
|
||||
}
|
||||
|
||||
/* Severity */
|
||||
|
||||
.table-overview td:nth-child(2),
|
||||
.table-overview th:nth-child(2) {
|
||||
width: 8%;
|
||||
text-transform: capitalize;
|
||||
}
|
||||
|
||||
/* Status */
|
||||
|
||||
.table-overview td:nth-child(3),
|
||||
.table-overview th:nth-child(3) {
|
||||
width: 7%;
|
||||
}
|
||||
|
||||
.table-overview td:nth-child(3) {
|
||||
font-weight: 700;
|
||||
--tw-text-opacity: 1;
|
||||
color: rgb(185 28 28 / var(--tw-text-opacity));
|
||||
}
|
||||
|
||||
/* Region */
|
||||
|
||||
.table-overview td:nth-child(4),
|
||||
.table-overview th:nth-child(4) {
|
||||
width: 9%;
|
||||
}
|
||||
|
||||
/* Service */
|
||||
|
||||
.table-overview td:nth-child(5),
|
||||
.table-overview th:nth-child(5) {
|
||||
width: 6%;
|
||||
}
|
||||
|
||||
/* Provider */
|
||||
|
||||
.table-overview td:nth-child(6),
|
||||
.table-overview th:nth-child(6) {
|
||||
width: 7%;
|
||||
}
|
||||
|
||||
/* Account ID */
|
||||
|
||||
.table-overview td:nth-child(7),
|
||||
.table-overview th:nth-child(7) {
|
||||
width: 11%;
|
||||
}
|
||||
|
||||
.visible {
|
||||
visibility: visible;
|
||||
.collapse {
|
||||
visibility: collapse;
|
||||
}
|
||||
|
||||
.relative {
|
||||
@@ -693,6 +604,10 @@ video {
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
.mb-0 {
|
||||
margin-bottom: 0px;
|
||||
}
|
||||
|
||||
.mb-2 {
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
@@ -717,6 +632,14 @@ video {
|
||||
margin-top: auto;
|
||||
}
|
||||
|
||||
.mb-\[30px\] {
|
||||
margin-bottom: 30px;
|
||||
}
|
||||
|
||||
.mt-\[30px\] {
|
||||
margin-top: 30px;
|
||||
}
|
||||
|
||||
.block {
|
||||
display: block;
|
||||
}
|
||||
@@ -733,14 +656,6 @@ video {
|
||||
display: inline-flex;
|
||||
}
|
||||
|
||||
.min-w-36 {
|
||||
min-width: 9rem;
|
||||
}
|
||||
|
||||
.min-w-44 {
|
||||
min-width: 11rem;
|
||||
}
|
||||
|
||||
.table {
|
||||
display: table;
|
||||
}
|
||||
@@ -761,6 +676,10 @@ video {
|
||||
max-height: 300px;
|
||||
}
|
||||
|
||||
.w-3 {
|
||||
width: 0.75rem;
|
||||
}
|
||||
|
||||
.w-5 {
|
||||
width: 1.25rem;
|
||||
}
|
||||
@@ -769,6 +688,50 @@ video {
|
||||
width: 2rem;
|
||||
}
|
||||
|
||||
.w-\[10\%\] {
|
||||
width: 10%;
|
||||
}
|
||||
|
||||
.w-\[10\.5\%\] {
|
||||
width: 10.5%;
|
||||
}
|
||||
|
||||
.w-\[11\%\] {
|
||||
width: 11%;
|
||||
}
|
||||
|
||||
.w-\[13\.5\%\] {
|
||||
width: 13.5%;
|
||||
}
|
||||
|
||||
.w-\[14\.5\%\] {
|
||||
width: 14.5%;
|
||||
}
|
||||
|
||||
.w-\[15\%\] {
|
||||
width: 15%;
|
||||
}
|
||||
|
||||
.w-\[36\%\] {
|
||||
width: 36%;
|
||||
}
|
||||
|
||||
.w-\[4\%\] {
|
||||
width: 4%;
|
||||
}
|
||||
|
||||
.w-\[40\.5\%\] {
|
||||
width: 40.5%;
|
||||
}
|
||||
|
||||
.w-\[9\%\] {
|
||||
width: 9%;
|
||||
}
|
||||
|
||||
.w-\[9\.5\%\] {
|
||||
width: 9.5%;
|
||||
}
|
||||
|
||||
.w-fit {
|
||||
width: -moz-fit-content;
|
||||
width: fit-content;
|
||||
@@ -778,6 +741,10 @@ video {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.min-w-36 {
|
||||
min-width: 9rem;
|
||||
}
|
||||
|
||||
.grid-cols-12 {
|
||||
grid-template-columns: repeat(12, minmax(0, 1fr));
|
||||
}
|
||||
@@ -895,30 +862,31 @@ video {
|
||||
}
|
||||
|
||||
.bg-gradient-failed {
|
||||
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #e67272 87.35%);
|
||||
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #EF4444 87.35%);
|
||||
}
|
||||
|
||||
.bg-gradient-passed {
|
||||
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #54d283 87.35%);
|
||||
}
|
||||
|
||||
.bg-gradient-muted {
|
||||
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #636c78 87.35%);
|
||||
}
|
||||
|
||||
.p-3 {
|
||||
padding: 0.75rem;
|
||||
background-image: linear-gradient(127.43deg, #F1F5F8 -177.68%, #4ADE80 87.35%);
|
||||
}
|
||||
|
||||
.p-2 {
|
||||
padding: 0.5rem;
|
||||
}
|
||||
|
||||
.p-3 {
|
||||
padding: 0.75rem;
|
||||
}
|
||||
|
||||
.px-10 {
|
||||
padding-left: 2.5rem;
|
||||
padding-right: 2.5rem;
|
||||
}
|
||||
|
||||
.px-2 {
|
||||
padding-left: 0.5rem;
|
||||
padding-right: 0.5rem;
|
||||
}
|
||||
|
||||
.px-3 {
|
||||
padding-left: 0.75rem;
|
||||
padding-right: 0.75rem;
|
||||
@@ -953,6 +921,10 @@ video {
|
||||
padding-bottom: 0.75rem;
|
||||
}
|
||||
|
||||
.pr-2 {
|
||||
padding-right: 0.5rem;
|
||||
}
|
||||
|
||||
.text-center {
|
||||
text-align: center;
|
||||
}
|
||||
@@ -1028,6 +1000,11 @@ video {
|
||||
color: rgb(41 37 36 / var(--tw-text-opacity));
|
||||
}
|
||||
|
||||
.text-white {
|
||||
--tw-text-opacity: 1;
|
||||
color: rgb(255 255 255 / var(--tw-text-opacity));
|
||||
}
|
||||
|
||||
.opacity-90 {
|
||||
opacity: 0.9;
|
||||
}
|
||||
@@ -1127,6 +1104,10 @@ video {
|
||||
color: rgb(41 37 36 / var(--tw-text-opacity));
|
||||
}
|
||||
|
||||
#_dash-app-content .accordion .accordion-collapse.collapse {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
#_dash-app-content .accordion .accordion-button:not(.collapsed) {
|
||||
--tw-bg-opacity: 1;
|
||||
background-color: rgb(231 229 228 / var(--tw-bg-opacity));
|
||||
@@ -1243,6 +1224,10 @@ video {
|
||||
width: auto;
|
||||
}
|
||||
|
||||
.overview-table .card .collapse {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
@media (min-width: 1536px) {
|
||||
.\32xl\:container {
|
||||
width: 100%;
|
||||
@@ -1385,3 +1370,37 @@ video {
|
||||
row-gap: 0px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 1536px) {
|
||||
.\32xl\:w-\[10\%\] {
|
||||
width: 10%;
|
||||
}
|
||||
|
||||
.\32xl\:w-\[12\.5\%\] {
|
||||
width: 12.5%;
|
||||
}
|
||||
|
||||
.\32xl\:w-\[14\%\] {
|
||||
width: 14%;
|
||||
}
|
||||
|
||||
.\32xl\:w-\[15\.5\%\] {
|
||||
width: 15.5%;
|
||||
}
|
||||
|
||||
.\32xl\:w-\[2\%\] {
|
||||
width: 2%;
|
||||
}
|
||||
|
||||
.\32xl\:w-\[48\%\] {
|
||||
width: 48%;
|
||||
}
|
||||
|
||||
.\32xl\:w-\[71\.5\%\] {
|
||||
width: 71.5%;
|
||||
}
|
||||
|
||||
.\32xl\:w-\[9\%\] {
|
||||
width: 9%;
|
||||
}
|
||||
}
|
||||
@@ -1535,7 +1535,7 @@ def get_section_container_iso(data, section_1, section_2):
|
||||
return html.Div(section_containers, className="compliance-data-layout")
|
||||
|
||||
|
||||
def get_section_containers_pci(data, section_1):
|
||||
def get_section_containers_format4(data, section_1):
|
||||
|
||||
data["STATUS"] = data["STATUS"].apply(map_status_to_icon)
|
||||
data[section_1] = data[section_1].astype(str)
|
||||
@@ -1654,9 +1654,13 @@ def get_section_containers_pci(data, section_1):
|
||||
)
|
||||
|
||||
graph_div_service = html.Div(graph_service, className="graph-section-req")
|
||||
if "REQUIREMENTS_NAME" not in specific_data.columns:
|
||||
title_internal = f"{service}"
|
||||
else:
|
||||
title_internal = f"{service} - {specific_data['REQUIREMENTS_NAME'].iloc[0]}"
|
||||
|
||||
internal_accordion_item = dbc.AccordionItem(
|
||||
title=service,
|
||||
title=title_internal,
|
||||
children=[html.Div([data_table], className="inner-accordion-content")],
|
||||
)
|
||||
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format1
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_ATTRIBUTES_SECTION",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format1(
|
||||
aux, "REQUIREMENTS_ATTRIBUTES_SECTION", "REQUIREMENTS_ID"
|
||||
)
|
||||
@@ -6,6 +6,13 @@ warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
# append the requirements_description to idgrupocontrol
|
||||
data["REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL"] = (
|
||||
data["REQUIREMENTS_ATTRIBUTES_IDGRUPOCONTROL"]
|
||||
+ " - "
|
||||
+ data["REQUIREMENTS_DESCRIPTION"]
|
||||
)
|
||||
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ATTRIBUTES_MARCO",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
from dashboard.common_methods import get_section_containers_format4
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
@@ -9,15 +9,13 @@ def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_SUBTECHNIQUES",
|
||||
"REQUIREMENTS_NAME",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
]
|
||||
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_SUBTECHNIQUES"
|
||||
)
|
||||
return get_section_containers_format4(aux, "REQUIREMENTS_ID")
|
||||
|
||||
21
dashboard/compliance/mitre_attack_azure.py
Normal file
@@ -0,0 +1,21 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format4
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_NAME",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_format4(aux, "REQUIREMENTS_ID")
|
||||
23
dashboard/compliance/mitre_attack_gcp.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_format2
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
|
||||
def get_table(data):
|
||||
aux = data[
|
||||
[
|
||||
"REQUIREMENTS_ID",
|
||||
"REQUIREMENTS_SUBTECHNIQUES",
|
||||
"CHECKID",
|
||||
"STATUS",
|
||||
"REGION",
|
||||
"ACCOUNTID",
|
||||
"RESOURCEID",
|
||||
]
|
||||
].copy()
|
||||
|
||||
return get_section_containers_format2(
|
||||
aux, "REQUIREMENTS_ID", "REQUIREMENTS_SUBTECHNIQUES"
|
||||
)
|
||||
@@ -1,6 +1,6 @@
|
||||
import warnings
|
||||
|
||||
from dashboard.common_methods import get_section_containers_pci
|
||||
from dashboard.common_methods import get_section_containers_format4
|
||||
|
||||
warnings.filterwarnings("ignore")
|
||||
|
||||
@@ -17,4 +17,4 @@ def get_table(data):
|
||||
]
|
||||
]
|
||||
|
||||
return get_section_containers_pci(aux, "REQUIREMENTS_ID")
|
||||
return get_section_containers_format4(aux, "REQUIREMENTS_ID")
|
||||
|
||||
@@ -6,6 +6,28 @@ fail_emoji = "❌"
info_emoji = "ℹ️"
manual_emoji = "✋🏽"

# Main colors
fail_color = "#e67272"
pass_color = "#54d283"
info_color = "#2684FF"
manual_color = "#636c78"

# Muted colors
muted_fail_color = "#fca903"
muted_pass_color = "#03fccf"
muted_manual_color = "#b33696"

# Severity colors
critical_color = "#951649"
high_color = "#e11d48"
medium_color = "#ee6f15"
low_color = "#fcf45d"
informational_color = "#3274d9"

# Folder output path
folder_path_overview = os.getcwd() + "/output"
folder_path_compliance = os.getcwd() + "/output/compliance"

encoding_format = "utf-8"
# Error action, it is recommended to use "ignore" or "replace"
error_action = "ignore"
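A small, hedged sketch of how the constants above can be used together; it mirrors the CSV-scanning pattern that appears later in this diff, with only the loop body trimmed.

```python
# Illustrative use of the config constants above (mirrors the compliance CSV scan below).
import csv
import glob
import os

from dashboard.config import encoding_format, error_action, folder_path_compliance

non_empty_csvs = []
for path in glob.glob(os.path.join(folder_path_compliance, "*.csv")):
    # errors=error_action ("ignore") skips undecodable bytes instead of raising.
    with open(path, newline="", encoding=encoding_format, errors=error_action) as handle:
        if sum(1 for _ in csv.reader(handle)) > 1:  # keep only files with data rows
            non_empty_csvs.append(path)
```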
5
dashboard/lib/arguments/arguments.py
Normal file
@@ -0,0 +1,5 @@
def init_dashboard_parser(self):
    """Init the Dashboard CLI parser"""
    # If we don't set `help="Dashboard"` this won't be rendered
    # We don't want the dashboard to inherit from the common providers parser since it's a different component
    self.subparsers.add_parser("dashboard")
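The snippet above only registers the subcommand. As a rough, self-contained illustration of why `add_parser("dashboard")` is enough for the CLI to recognize it (the real Prowler parser wires this through its own class), consider:

```python
# Stand-alone argparse sketch; the real CLI builds its parser inside a dedicated class.
import argparse

parser = argparse.ArgumentParser(prog="prowler")
subparsers = parser.add_subparsers(dest="provider")
subparsers.add_parser("dashboard")  # the same call made in init_dashboard_parser above

args = parser.parse_args(["dashboard"])
print(args.provider)  # -> "dashboard"
```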
@@ -78,7 +78,7 @@ def create_region_dropdown(regions: list) -> html.Div:
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Region:",
|
||||
"Region / Location / Namespace :",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
),
|
||||
dcc.Dropdown(
|
||||
@@ -104,7 +104,7 @@ def create_region_dropdown_compliance(regions: list) -> html.Div:
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Region:",
|
||||
"Region / Location / Namespace :",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
),
|
||||
dcc.Dropdown(
|
||||
@@ -130,7 +130,7 @@ def create_account_dropdown(accounts: list) -> html.Div:
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Account:",
|
||||
"Account / Subscription / Project / Cluster :",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
),
|
||||
dcc.Dropdown(
|
||||
@@ -156,7 +156,7 @@ def create_account_dropdown_compliance(accounts: list) -> html.Div:
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Account:",
|
||||
"Account / Subscription / Project / Cluster :",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
),
|
||||
dcc.Dropdown(
|
||||
@@ -193,3 +193,97 @@ def create_compliance_dropdown(compliance: list) -> html.Div:
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_severity_dropdown(severity: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the severity.
|
||||
Args:
|
||||
severity (list): List of severity.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the severity.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Severity:", className="text-prowler-stone-900 font-bold text-sm"
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="severity-filter",
|
||||
options=[{"label": i, "value": i} for i in severity],
|
||||
value=["All"],
|
||||
clearable=False,
|
||||
multi=True,
|
||||
style={"color": "#000000"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_service_dropdown(services: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the service.
|
||||
Args:
|
||||
services (list): List of services.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the service.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label(
|
||||
"Service:", className="text-prowler-stone-900 font-bold text-sm"
|
||||
),
|
||||
dcc.Dropdown(
|
||||
id="service-filter",
|
||||
options=[{"label": i, "value": i} for i in services],
|
||||
value=["All"],
|
||||
clearable=False,
|
||||
multi=True,
|
||||
style={"color": "#000000"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_status_dropdown(status: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the status.
|
||||
Args:
|
||||
status (list): List of status.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the status.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
html.Label("Status:", className="text-prowler-stone-900 font-bold text-sm"),
|
||||
dcc.Dropdown(
|
||||
id="status-filter",
|
||||
options=[{"label": i, "value": i} for i in status],
|
||||
value=["All"],
|
||||
clearable=False,
|
||||
multi=True,
|
||||
style={"color": "#000000"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def create_table_row_dropdown(table_rows: list) -> html.Div:
|
||||
"""
|
||||
Dropdown to select the number of rows in the table.
|
||||
Args:
|
||||
table_rows (list): List of number of rows.
|
||||
Returns:
|
||||
html.Div: Dropdown to select the number of rows in the table.
|
||||
"""
|
||||
return html.Div(
|
||||
[
|
||||
dcc.Dropdown(
|
||||
id="table-rows",
|
||||
options=[{"label": i, "value": i} for i in table_rows],
|
||||
value=table_rows[0],
|
||||
clearable=False,
|
||||
style={"color": "#000000", "margin-right": "10px"},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
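A hypothetical usage sketch of the dropdown factories above. The module path matches the import used elsewhere in this diff, while the option lists are placeholders (the dashboard derives them from the loaded CSVs).

```python
# Hypothetical composition of the dropdown helpers; option values are placeholders.
from dashboard.lib.dropdowns import (
    create_service_dropdown,
    create_severity_dropdown,
    create_status_dropdown,
    create_table_row_dropdown,
)

severity_dropdown = create_severity_dropdown(["All", "critical", "high", "medium", "low"])
service_dropdown = create_service_dropdown(["All", "ec2", "iam", "s3"])
status_dropdown = create_status_dropdown(["All", "PASS", "FAIL", "MANUAL"])
table_row_dropdown = create_table_row_dropdown([10, 25, 50, 100])
```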
@@ -5,7 +5,13 @@ def create_layout_overview(
|
||||
account_dropdown: html.Div,
|
||||
date_dropdown: html.Div,
|
||||
region_dropdown: html.Div,
|
||||
download_button: html.Button,
|
||||
download_button_csv: html.Button,
|
||||
download_button_xlsx: html.Button,
|
||||
severity_dropdown: html.Div,
|
||||
service_dropdown: html.Div,
|
||||
table_row_dropdown: html.Div,
|
||||
status_dropdown: html.Div,
|
||||
table_div_header: html.Div,
|
||||
) -> html.Div:
|
||||
"""
|
||||
Create the layout of the dashboard.
|
||||
@@ -35,20 +41,28 @@ def create_layout_overview(
|
||||
html.Div([account_dropdown], className=""),
|
||||
html.Div([region_dropdown], className=""),
|
||||
],
|
||||
className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-3 lg:gap-y-0",
|
||||
className="grid gap-x-4 mt-[30px] mb-[30px] sm:grid-cols-2 lg:grid-cols-3",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(className="flex", id="aws_card"),
|
||||
html.Div(className="flex", id="azure_card"),
|
||||
html.Div(className="flex", id="gcp_card"),
|
||||
html.Div(className="flex", id="k8s_card"),
|
||||
html.Div([severity_dropdown], className=""),
|
||||
html.Div([service_dropdown], className=""),
|
||||
html.Div([status_dropdown], className=""),
|
||||
],
|
||||
className="grid gap-x-4 gap-y-4 sm:grid-cols-2 lg:grid-cols-4 lg:gap-y-0",
|
||||
className="grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-3",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(className="flex", id="aws_card", n_clicks=0),
|
||||
html.Div(className="flex", id="azure_card", n_clicks=0),
|
||||
html.Div(className="flex", id="gcp_card", n_clicks=0),
|
||||
html.Div(className="flex", id="k8s_card", n_clicks=0),
|
||||
],
|
||||
className="grid gap-x-4 mb-[30px] sm:grid-cols-2 lg:grid-cols-4",
|
||||
),
|
||||
html.H4(
|
||||
"Count of Failed Findings by severity",
|
||||
className="text-prowler-stone-900 text-lg font-bold",
|
||||
"Count of Findings by severity",
|
||||
className="text-prowler-stone-900 text-lg font-bold mb-[30px]",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
@@ -65,22 +79,37 @@ def create_layout_overview(
|
||||
id="line_plot",
|
||||
),
|
||||
],
|
||||
className="grid gap-x-4 gap-y-4 grid-cols-12 lg:gap-y-0",
|
||||
className="grid gap-x-4 grid-cols-12 mb-[30px]",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.H4(
|
||||
"Top 25 Failed Findings by Severity",
|
||||
"Top Findings by Severity",
|
||||
className="text-prowler-stone-900 text-lg font-bold",
|
||||
),
|
||||
download_button,
|
||||
html.Div(
|
||||
[
|
||||
(
|
||||
html.Label(
|
||||
"Table Rows:",
|
||||
className="text-prowler-stone-900 font-bold text-sm",
|
||||
style={"margin-right": "10px"},
|
||||
)
|
||||
),
|
||||
table_row_dropdown,
|
||||
download_button_csv,
|
||||
download_button_xlsx,
|
||||
],
|
||||
className="flex justify-between items-center",
|
||||
),
|
||||
dcc.Download(id="download-data"),
|
||||
],
|
||||
className="flex justify-between items-center",
|
||||
),
|
||||
table_div_header,
|
||||
html.Div(id="table", className="grid"),
|
||||
],
|
||||
className="grid gap-x-8 gap-y-8 2xl:container mx-auto",
|
||||
className="grid gap-x-8 2xl:container mx-auto",
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -14,7 +14,15 @@ from dash import callback, dcc, html
|
||||
from dash.dependencies import Input, Output
|
||||
|
||||
# Config import
|
||||
from dashboard.config import folder_path_compliance
|
||||
from dashboard.config import (
|
||||
encoding_format,
|
||||
error_action,
|
||||
fail_color,
|
||||
folder_path_compliance,
|
||||
info_color,
|
||||
manual_color,
|
||||
pass_color,
|
||||
)
|
||||
from dashboard.lib.dropdowns import (
|
||||
create_account_dropdown_compliance,
|
||||
create_compliance_dropdown,
|
||||
@@ -22,6 +30,7 @@ from dashboard.lib.dropdowns import (
|
||||
create_region_dropdown_compliance,
|
||||
)
|
||||
from dashboard.lib.layouts import create_layout_compliance
|
||||
from prowler.lib.logger import logger
|
||||
|
||||
# Suppress warnings
|
||||
warnings.filterwarnings("ignore")
|
||||
@@ -31,11 +40,16 @@ warnings.filterwarnings("ignore")
|
||||
|
||||
csv_files = []
|
||||
for file in glob.glob(os.path.join(folder_path_compliance, "*.csv")):
|
||||
with open(file, "r", newline="") as csvfile:
|
||||
reader = csv.reader(csvfile)
|
||||
num_rows = sum(1 for row in reader)
|
||||
if num_rows > 1:
|
||||
csv_files.append(file)
|
||||
try:
|
||||
with open(
|
||||
file, "r", newline="", encoding=encoding_format, errors=error_action
|
||||
) as csvfile:
|
||||
reader = csv.reader(csvfile)
|
||||
num_rows = sum(1 for row in reader)
|
||||
if num_rows > 1:
|
||||
csv_files.append(file)
|
||||
except UnicodeDecodeError:
|
||||
logger.error(f"Error decoding file: {file}")
|
||||
|
||||
|
||||
def load_csv_files(csv_files):
|
||||
@@ -43,7 +57,7 @@ def load_csv_files(csv_files):
|
||||
dfs = []
|
||||
results = []
|
||||
for file in csv_files:
|
||||
df = pd.read_csv(file, sep=";", on_bad_lines="skip")
|
||||
df = pd.read_csv(file, sep=";", on_bad_lines="skip", encoding=encoding_format)
|
||||
if "CHECKID" in df.columns:
|
||||
dfs.append(df)
|
||||
result = file
|
||||
@@ -231,7 +245,9 @@ def display_data(
|
||||
"""Load CSV files into a single pandas DataFrame."""
|
||||
dfs = []
|
||||
for file in files:
|
||||
df = pd.read_csv(file, sep=";", on_bad_lines="skip")
|
||||
df = pd.read_csv(
|
||||
file, sep=";", on_bad_lines="skip", encoding=encoding_format
|
||||
)
|
||||
dfs.append(df.astype(str))
|
||||
return pd.concat(dfs, ignore_index=True)
|
||||
|
||||
@@ -256,10 +272,11 @@ def display_data(
|
||||
# Rename the column PROJECTID to ACCOUNTID for GCP
|
||||
if data.columns.str.contains("PROJECTID").any():
|
||||
data.rename(columns={"PROJECTID": "ACCOUNTID"}, inplace=True)
|
||||
|
||||
data["REGION"] = "-"
|
||||
# Rename the column SUBSCRIPTIONID to ACCOUNTID for Azure
|
||||
if data.columns.str.contains("SUBSCRIPTIONID").any():
|
||||
data.rename(columns={"SUBSCRIPTIONID": "ACCOUNTID"}, inplace=True)
|
||||
data["REGION"] = "-"
|
||||
# Handle v3 azure cis compliance
|
||||
if data.columns.str.contains("SUBSCRIPTION").any():
|
||||
data.rename(columns={"SUBSCRIPTION": "ACCOUNTID"}, inplace=True)
|
||||
@@ -426,6 +443,12 @@ def display_data(
|
||||
):
|
||||
pie_2 = get_bar_graph(df, "REQUIREMENTS_ATTRIBUTES_SERVICE")
|
||||
current_filter = "services"
|
||||
elif (
|
||||
"REQUIREMENTS_ID" in df.columns
|
||||
and not df["REQUIREMENTS_ID"].isnull().values.any()
|
||||
):
|
||||
pie_2 = get_bar_graph(df, "REQUIREMENTS_ID")
|
||||
current_filter = "techniques"
|
||||
else:
|
||||
fig = px.pie()
|
||||
fig.update_layout(
|
||||
@@ -451,7 +474,7 @@ def display_data(
|
||||
overall_status_result_graph = get_graph(pie_1, "Overall Status Result")
|
||||
|
||||
security_level_graph = get_graph(
|
||||
pie_2, f"Top 5 failed {current_filter} by findings"
|
||||
pie_2, f"Top 5 failed {current_filter} by requirements"
|
||||
)
|
||||
|
||||
return (
|
||||
@@ -505,7 +528,7 @@ def get_bar_graph(df, column_name):
|
||||
x="counts",
|
||||
y=colums,
|
||||
color="STATUS",
|
||||
color_discrete_map={"FAIL": "#e67272"},
|
||||
color_discrete_map={"FAIL": fail_color},
|
||||
orientation="h",
|
||||
)
|
||||
|
||||
@@ -530,11 +553,11 @@ def get_bar_graph(df, column_name):
|
||||
def get_pie(df):
|
||||
# Define custom colors
|
||||
color_mapping = {
|
||||
"FAIL": "#e67272",
|
||||
"PASS": "#54d283",
|
||||
"INFO": "#2684FF",
|
||||
"FAIL": fail_color,
|
||||
"PASS": pass_color,
|
||||
"INFO": info_color,
|
||||
"WARN": "#260000",
|
||||
"MANUAL": "#636c78",
|
||||
"MANUAL": manual_color,
|
||||
}
|
||||
|
||||
# Use the color_discrete_map parameter to map categories to custom colors
|
||||
|
||||
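To make the comment above concrete, here is a hedged, self-contained Plotly Express example of `color_discrete_map`; the DataFrame is synthetic and the hex values are the defaults defined in the dashboard config earlier in this diff.

```python
# Synthetic example of mapping STATUS categories to fixed colors with plotly express.
import pandas as pd
import plotly.express as px

df = pd.DataFrame({"STATUS": ["PASS", "PASS", "FAIL", "MANUAL"]})
counts = df["STATUS"].value_counts().reset_index()
counts.columns = ["STATUS", "count"]  # normalize column names across pandas versions

fig = px.pie(
    counts,
    names="STATUS",
    values="count",
    color="STATUS",
    color_discrete_map={"PASS": "#54d283", "FAIL": "#e67272", "MANUAL": "#636c78"},
)
```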
@@ -8,10 +8,6 @@
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
#_dash-app-content {
|
||||
@apply bg-prowler-stone-500;
|
||||
}
|
||||
|
||||
@layer components {
|
||||
.custom-grid {
|
||||
grid-template-columns: minmax(0, 16fr) repeat(11, minmax(0, 11fr));
|
||||
@@ -20,71 +16,22 @@
|
||||
.custom-grid-large {
|
||||
grid-template-columns: minmax(0, 10fr) repeat(11, minmax(0, 11fr));
|
||||
}
|
||||
}
|
||||
|
||||
/* Styles for the table in the overview page */
|
||||
.table-overview thead {
|
||||
display: table;
|
||||
width: 100%;
|
||||
table-layout: fixed;
|
||||
@layer utilities {
|
||||
/* Hide scrollbar for Chrome, Safari and Opera */
|
||||
.no-scrollbar::-webkit-scrollbar {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.table-overview tbody {
|
||||
/* Hide scrollbar for IE, Edge and Firefox */
|
||||
.no-scrollbar {
|
||||
-ms-overflow-style: none; /* IE and Edge */
|
||||
scrollbar-width: none; /* Firefox */
|
||||
}
|
||||
}
|
||||
|
||||
.table-overview tbody tr {
|
||||
display: table;
|
||||
width: 100%;
|
||||
table-layout: fixed;
|
||||
}
|
||||
/* Styles for thead */
|
||||
.table-overview th {
|
||||
@apply bg-prowler-stone-900 text-sm py-3 font-bold;
|
||||
}
|
||||
|
||||
.table-overview td {
|
||||
@apply text-prowler-stone-900 bg-prowler-white text-sm py-2 font-bold;
|
||||
}
|
||||
|
||||
/* Check ID */
|
||||
.table-overview td:nth-child(1),
|
||||
.table-overview th:nth-child(1) {
|
||||
@apply w-[52%];
|
||||
}
|
||||
/* Severity */
|
||||
.table-overview td:nth-child(2),
|
||||
.table-overview th:nth-child(2) {
|
||||
@apply w-[8%] capitalize;
|
||||
}
|
||||
/* Status */
|
||||
.table-overview td:nth-child(3),
|
||||
.table-overview th:nth-child(3) {
|
||||
@apply w-[7%];
|
||||
}
|
||||
.table-overview td:nth-child(3) {
|
||||
@apply font-bold text-prowler-error;
|
||||
}
|
||||
/* Region */
|
||||
.table-overview td:nth-child(4),
|
||||
.table-overview th:nth-child(4) {
|
||||
@apply w-[9%];
|
||||
}
|
||||
/* Service */
|
||||
.table-overview td:nth-child(5),
|
||||
.table-overview th:nth-child(5) {
|
||||
@apply w-[6%];
|
||||
}
|
||||
/* Provider */
|
||||
.table-overview td:nth-child(6),
|
||||
.table-overview th:nth-child(6) {
|
||||
@apply w-[7%];
|
||||
}
|
||||
/* Account ID */
|
||||
.table-overview td:nth-child(7),
|
||||
.table-overview th:nth-child(7) {
|
||||
@apply w-[11%];
|
||||
}
|
||||
#_dash-app-content {
|
||||
@apply bg-prowler-stone-500;
|
||||
}
|
||||
|
||||
/* Styles for the accordion in the compliance page */
|
||||
@@ -96,6 +43,10 @@
|
||||
@apply text-prowler-stone-900 bg-prowler-white rounded-lg;
|
||||
}
|
||||
|
||||
#_dash-app-content .accordion .accordion-collapse.collapse {
|
||||
@apply visible
|
||||
}
|
||||
|
||||
#_dash-app-content .accordion .accordion-button:not(.collapsed) {
|
||||
@apply text-prowler-stone-900 bg-prowler-stone-500;
|
||||
}
|
||||
@@ -166,14 +117,6 @@
|
||||
@apply absolute right-6 top-2 w-auto h-8 z-50;
|
||||
}
|
||||
|
||||
@layer utilities {
|
||||
/* Hide scrollbar for Chrome, Safari and Opera */
|
||||
.no-scrollbar::-webkit-scrollbar {
|
||||
display: none;
|
||||
}
|
||||
/* Hide scrollbar for IE, Edge and Firefox */
|
||||
.no-scrollbar {
|
||||
-ms-overflow-style: none; /* IE and Edge */
|
||||
scrollbar-width: none; /* Firefox */
|
||||
}
|
||||
}
|
||||
.overview-table .card .collapse {
|
||||
@apply visible
|
||||
}
|
||||
@@ -1,11 +1,9 @@
|
||||
/** @type {import('tailwindcss').Config} */
|
||||
module.exports = {
|
||||
content: [
|
||||
"./assets/**/*.{py,html,js}",
|
||||
"./components/**/*.{py,html,js}",
|
||||
"./pages/**/*.{py,html,js}",
|
||||
"./utils/**/*.{py,html,js}",
|
||||
"./app.py",
|
||||
"*.{py,html,js}",
|
||||
"./**/*.{py,html,js}",
|
||||
"./**/**/*.{py,html,js}",
|
||||
],
|
||||
theme: {
|
||||
extend: {
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
# Audit Info
|
||||
|
||||
In each Prowler provider we have a Python object called `audit_info` which is in charge of keeping the credentials, the configuration and the state of each audit, and it's passed to each service during the `__init__`.
|
||||
|
||||
- AWS: https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/audit_info/models.py#L34-L54
|
||||
- GCP: https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/audit_info/models.py#L7-L30
|
||||
- Azure: https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/audit_info/models.py#L17-L31
|
||||
|
||||
This `audit_info` object is shared during the Prowler execution and for that reason is important to mock it in each test to isolate them. See the [testing guide](./unit-testing.md) for more information.
|
||||
@@ -5,9 +5,15 @@ Here you can find how to create new checks for Prowler.
|
||||
**To create a check is required to have a Prowler provider service already created, so if the service is not present or the attribute you want to audit is not retrieved by the service, please refer to the [Service](./services.md) documentation.**
|
||||
|
||||
## Introduction
|
||||
|
||||
The checks are the fundamental piece of Prowler. A check is simply a piece of code that verifies whether something is configured according to cybersecurity best practices. The check then generates a finding with the result and includes the check's metadata to give the user more contextual information about the result, the risk and how to remediate it.
|
||||
|
||||
To create a new check for a supported Prowler provider, you will need to create a folder with the check name inside the specific service for the selected provider.
|
||||
|
||||
We are going to use the `ec2_ami_public` check form the `AWS` provider as an example. So the folder name will `prowler/providers/aws/services/ec2/ec2_ami_public` (following the format `prowler/providers/<provider>/services/<service>/<check_name>`), with the name of check following the pattern: `service_subservice/resource_action`.
|
||||
We are going to use the `ec2_ami_public` check from the `AWS` provider as an example. So the folder name will be `prowler/providers/aws/services/ec2/ec2_ami_public` (following the format `prowler/providers/<provider>/services/<service>/<check_name>`), with the name of check following the pattern: `service_subservice_resource_action`.
|
||||
|
||||
???+ note
|
||||
A subservice is a specific component of a service that is going to be audited. Sometimes it can be the shortened name of the class attribute that is accessed in the check.
|
||||
|
||||
Inside that folder, we need to create three files:
|
||||
|
||||
@@ -101,7 +107,7 @@ All the checks MUST fill the `report.status` and `report.status_extended` with t
|
||||
|
||||
- Status -- `report.status`
|
||||
- `PASS` --> If the check is passing against the configured value.
|
||||
- `FAIL` --> If the check is passing against the configured value.
|
||||
- `FAIL` --> If the check is failing against the configured value.
|
||||
- `MANUAL` --> This value cannot be used unless a manual operation is required in order to determine if the `report.status` is whether `PASS` or `FAIL`.
|
||||
- Status Extended -- `report.status_extended`
|
||||
- MUST end in a dot `.`
|
||||
@@ -111,9 +117,45 @@ All the checks MUST fill the `report.status` and `report.status_extended` with t
|
||||
|
||||
All the checks MUST fill the `report.region` with the following criteria:
|
||||
|
||||
- If the audited resource is regional use the `region` attribute within the resource object.
|
||||
- If the audited resource is regional use the `region` (the name changes depending on the provider: `location` in Azure and GCP and `namespace` in K8s) attribute within the resource object.
|
||||
- If the audited resource is global use the `service_client.region` within the service client object.
|
||||
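A hedged illustration of the two criteria above; `report`, `resource` and `service_client` are placeholders for the objects a real check works with, and the regional/global decision is shown as an explicit flag only for clarity.

```python
# Placeholder sketch of the region-filling rules; names are illustrative, not real APIs.
def fill_region(report, resource, service_client, resource_is_regional: bool) -> None:
    if resource_is_regional:
        # Regional resource: use the resource's own region/location/namespace attribute.
        report.region = resource.region
    else:
        # Global resource: fall back to the service client's region.
        report.region = service_client.region
```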
|
||||
### Check Severity
|
||||
|
||||
The severity of the checks are defined in the metadata file with the `Severity` field. The severity is always in lowercase and can be one of the following values:
|
||||
|
||||
- `critical`
|
||||
- `high`
|
||||
- `medium`
|
||||
- `low`
|
||||
- `informational`
|
||||
|
||||
You may need to change it in the check's code if the check has different scenarios that could change the severity. This can be done by using the `report.check_metadata.Severity` attribute:
|
||||
|
||||
```python
|
||||
if <valid for more than 6 months>:
|
||||
report.status = "PASS"
|
||||
report.check_metadata.Severity = "informational"
|
||||
report.status_extended = f"RDS Instance {db_instance.id} certificate has over 6 months of validity left."
|
||||
elif <valid for more than 3 months>:
|
||||
report.status = "PASS"
|
||||
report.check_metadata.Severity = "low"
|
||||
report.status_extended = f"RDS Instance {db_instance.id} certificate has between 3 and 6 months of validity."
|
||||
elif <valid for more than 1 month>:
|
||||
report.status = "FAIL"
|
||||
report.check_metadata.Severity = "medium"
|
||||
report.status_extended = f"RDS Instance {db_instance.id} certificate less than 3 months of validity."
|
||||
elif <valid for less than 1 month>:
|
||||
report.status = "FAIL"
|
||||
report.check_metadata.Severity = "high"
|
||||
report.status_extended = f"RDS Instance {db_instance.id} certificate less than 1 month of validity."
|
||||
else:
|
||||
report.status = "FAIL"
|
||||
report.check_metadata.Severity = "critical"
|
||||
report.status_extended = (
|
||||
f"RDS Instance {db_instance.id} certificate has expired."
|
||||
)
|
||||
```
|
||||
### Resource ID, Name and ARN
|
||||
All the checks MUST fill the `report.resource_id` and `report.resource_arn` with the following criteria:
|
||||
|
||||
@@ -140,7 +182,7 @@ All the checks MUST fill the `report.resource_id` and `report.resource_arn` with
|
||||
### Python Model
|
||||
The following is the Python model for the check's class.
|
||||
|
||||
As per August 5th 2023 the `Check_Metadata_Model` can be found [here](https://github.com/prowler-cloud/prowler/blob/master/prowler/lib/check/models.py#L59-L80).
|
||||
As per April 11th 2024 the `Check_Metadata_Model` can be found [here](https://github.com/prowler-cloud/prowler/blob/master/prowler/lib/check/models.py#L36-L82).
|
||||
|
||||
```python
|
||||
class Check(ABC, Check_Metadata_Model):
|
||||
@@ -243,11 +285,11 @@ Each Prowler check has metadata associated which is stored at the same level of
|
||||
# Code holds different methods to remediate the FAIL finding
|
||||
"Code": {
|
||||
# CLI holds the command in the provider native CLI to remediate it
|
||||
"CLI": "https://docs.bridgecrew.io/docs/public_8#cli-command",
|
||||
"CLI": "https://docs.prowler.com/checks/public_8#cli-command",
|
||||
# NativeIaC holds the native IaC code to remediate it, use "https://docs.bridgecrew.io/docs"
|
||||
"NativeIaC": "",
|
||||
# Other holds the other commands, scripts or code to remediate it, use "https://www.trendmicro.com/cloudoneconformity"
|
||||
"Other": "https://docs.bridgecrew.io/docs/public_8#aws-console",
|
||||
"Other": "https://docs.prowler.com/checks/public_8#aws-console",
|
||||
# Terraform holds the Terraform code to remediate it, use "https://docs.bridgecrew.io/docs"
|
||||
"Terraform": ""
|
||||
},
|
||||
@@ -277,7 +319,7 @@ Each Prowler check has metadata associated which is stored at the same level of
|
||||
For the Remediation Code we use the following knowledge base to fill it:
|
||||
|
||||
- Official documentation for the provider
|
||||
- https://docs.bridgecrew.io
|
||||
- https://docs.prowler.com/checks/checks-index
|
||||
- https://www.trendmicro.com/cloudoneconformity
|
||||
- https://github.com/cloudmatos/matos/tree/master/remediations
|
||||
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
# Debugging
|
||||
|
||||
Debugging in Prowler makes things easier!
|
||||
If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution. To do that, if you are using VSCode you can run the code using the integrated debugger. Please, refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
|
||||
If you are developing Prowler, it's possible that you will encounter some situations where you have to inspect the code in depth to fix some unexpected issues during the execution.
|
||||
|
||||
## VSCode
|
||||
|
||||
In VSCode you can run the code using the integrated debugger. Please, refer to this [documentation](https://code.visualstudio.com/docs/editor/debugging) for guidance about the debugger in VSCode.
|
||||
The following file is an example of the [debugging configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations) file that you can add to [Visual Studio Code](https://code.visualstudio.com/).
|
||||
|
||||
This file should be inside the *.vscode* folder and its name has to be *launch.json*:
|
||||
@@ -11,31 +15,62 @@ This file should inside the *.vscode* folder and its name has to be *launch.json
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python: Current File",
|
||||
"type": "python",
|
||||
"name": "Debug AWS Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"aws",
|
||||
"-f",
|
||||
"eu-west-1",
|
||||
"--service",
|
||||
"cloudwatch",
|
||||
"--log-level",
|
||||
"ERROR",
|
||||
"-p",
|
||||
"dev",
|
||||
"-c",
|
||||
"<check_name>"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": "Python: Debug Tests",
|
||||
"type": "python",
|
||||
"name": "Debug Azure Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "${file}",
|
||||
"purpose": [
|
||||
"debug-test"
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"azure",
|
||||
"--sp-env-auth",
|
||||
"--log-level",
|
||||
"ERROR",
|
||||
"-c",
|
||||
"<check_name>"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": "Debug GCP Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"gcp",
|
||||
"--log-level",
|
||||
"ERROR",
|
||||
"-c",
|
||||
"<check_name>"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": "Debug K8s Check",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "prowler.py",
|
||||
"args": [
|
||||
"kubernetes",
|
||||
"--log-level",
|
||||
"ERROR",
|
||||
"-c",
|
||||
"<check_name>"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": false
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
## Contribute with documentation
|
||||
|
||||
We use `mkdocs` to build this Prowler documentation site so you can easily contribute back with new docs or improving them.
|
||||
We use `mkdocs` to build this Prowler documentation site so you can easily contribute back with new docs or improving them. To install all necessary dependencies use `poetry install --with docs`.
|
||||
|
||||
1. Install `mkdocs` with your favorite package manager.
|
||||
2. Inside the `prowler` repository folder run `mkdocs serve` and point your browser to `http://localhost:8000` and you will see live changes to your local copy of this documentation site.
|
||||
|
||||
@@ -4,10 +4,14 @@ You can extend Prowler Open Source in many different ways, in most cases you wil
|
||||
|
||||
## Get the code and install all dependencies
|
||||
|
||||
First of all, you need a version of Python 3.9 or higher and also pip installed to be able to install all dependencies required. Once that is satisfied go a head and clone the repo:
|
||||
First of all, you need a version of Python 3.9 or higher and also `pip` installed to be able to install all dependencies required.
|
||||
|
||||
Then, to start working with the Prowler Github repository you need to fork it to be able to propose changes for new features, bug fixing, etc. To fork the Prowler repo please refer to [this guide](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo?tool=webui#forking-a-repository).
|
||||
|
||||
Once that is satisfied go ahead and clone your forked repo:
|
||||
|
||||
```
|
||||
git clone https://github.com/prowler-cloud/prowler
|
||||
git clone https://github.com/<your-github-user>/prowler
|
||||
cd prowler
|
||||
```
|
||||
For isolation and avoid conflicts with other environments, we recommend usage of `poetry`:
|
||||
@@ -44,6 +48,11 @@ Before we merge any of your pull requests we pass checks to the code, we use the
|
||||
|
||||
You can see all dependencies in file `pyproject.toml`.
|
||||
|
||||
Moreover, you would need to install [`TruffleHog`](https://github.com/trufflesecurity/trufflehog) on the latest version to check for secrets in the code. You can install it using the official installation guide [here](https://github.com/trufflesecurity/trufflehog?tab=readme-ov-file#floppy_disk-installation).
|
||||
|
||||
???+ note
|
||||
If you have any trouble when committing to the Prowler repository, add the `--no-verify` flag to the `git commit` command.
|
||||
|
||||
## Pull Request Checklist
|
||||
|
||||
If you create or review a PR in https://github.com/prowler-cloud/prowler please follow this checklist:
|
||||
|
||||
281
docs/developer-guide/provider.md
Normal file
@@ -0,0 +1,281 @@
|
||||
|
||||
# Create a new Provider for Prowler
|
||||
|
||||
Here you can find how to create a new Provider in Prowler to give support for making all security checks needed and make your cloud safer!
|
||||
|
||||
## Introduction
|
||||
|
||||
Providers are the foundation on which Prowler is built. A simple definition of a cloud provider could be "a third-party company that offers a platform where any IT resource you need is available at any time upon request". The most well-known cloud providers are Amazon Web Services, Azure from Microsoft and Google Cloud, which are already supported by Prowler.
|
||||
|
||||
To create a new provider that Prowler does not yet support and add your security checks, you must create a new folder to store all the related files within it (services, checks, etc.). It must be stored in the path `prowler/providers/<new_provider_name>/`.
|
||||
|
||||
Inside that folder, you MUST create the following files and folders:
|
||||
|
||||
- A `lib` folder: to store all extra functions.
|
||||
- A `services` folder: to store all [services](./services.md) to audit.
|
||||
- An empty `__init__.py`: to make Python treat this service folder as a package.
|
||||
- A `<new_provider_name>_provider.py`, containing all the provider's logic necessary to get authenticated in the provider, configurations and extra data useful for final report.
|
||||
- A `models.py`, containing all the models necessary for the new provider.
|
||||
|
||||
## Provider
|
||||
|
||||
The structure for Prowler's providers is set up in such a way that they can be utilized through a generic service specific to each provider. This is achieved by passing the required parameters to the constructor, which in turn initializes all the necessary session values.
|
||||
|
||||
### Base Class
|
||||
|
||||
All the providers in Prowler inherits from the same [base class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/common/provider.py). It is an [abstract base class](https://docs.python.org/3/library/abc.html) that defines the interface for all provider classes. The code of the class is the next:
|
||||
|
||||
```python title="Provider Base Class"
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any
|
||||
|
||||
class Provider(ABC):
|
||||
"""
|
||||
The Provider class is an abstract base class that defines the interface for all provider classes in the auditing system.
|
||||
|
||||
Attributes:
|
||||
type (property): The type of the provider.
|
||||
identity (property): The identity of the provider for auditing.
|
||||
session (property): The session of the provider for auditing.
|
||||
audit_config (property): The audit configuration of the provider.
|
||||
output_options (property): The output configuration of the provider for auditing.
|
||||
|
||||
Methods:
|
||||
print_credentials(): Displays the provider's credentials used for auditing in the command-line interface.
|
||||
setup_session(): Sets up the session for the provider.
|
||||
get_output_mapping(): Returns the output mapping between the provider and the generic model.
|
||||
validate_arguments(): Validates the arguments for the provider.
|
||||
get_checks_to_execute_by_audit_resources(): Returns a set of checks based on the input resources to scan.
|
||||
|
||||
Note:
|
||||
This is an abstract base class and should not be instantiated directly. Each provider should implement its own
|
||||
version of the Provider class by inheriting from this base class and implementing the required methods and properties.
|
||||
"""
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def type(self) -> str:
|
||||
"""
|
||||
type method stores the provider's type.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def identity(self) -> str:
|
||||
"""
|
||||
identity method stores the provider's identity to audit.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
def setup_session(self) -> Any:
|
||||
"""
|
||||
setup_session sets up the session for the provider.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def session(self) -> str:
|
||||
"""
|
||||
session method stores the provider's session to audit.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def audit_config(self) -> str:
|
||||
"""
|
||||
audit_config method stores the provider's audit configuration.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
def print_credentials(self) -> None:
|
||||
"""
|
||||
print_credentials is used to display in the CLI the provider's credentials used to audit.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def output_options(self) -> str:
|
||||
"""
|
||||
output_options method returns the provider's audit output configuration.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@output_options.setter
|
||||
@abstractmethod
|
||||
def output_options(self, value: str) -> Any:
|
||||
"""
|
||||
output_options.setter sets the provider's audit output configuration.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@abstractmethod
|
||||
def get_output_mapping(self) -> dict:
|
||||
"""
|
||||
get_output_mapping returns the output mapping between the provider and the generic model.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def validate_arguments(self) -> None:
|
||||
"""
|
||||
validate_arguments validates the arguments for the provider.
|
||||
|
||||
This method can be overridden in each provider if needed.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_checks_to_execute_by_audit_resources(self) -> set:
|
||||
"""
|
||||
get_checks_to_execute_by_audit_resources returns a set of checks based on the input resources to scan.
|
||||
|
||||
This is a fallback that returns None if the service has not implemented this function.
|
||||
"""
|
||||
return set()
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def mutelist(self):
|
||||
"""
|
||||
mutelist method returns the provider's mutelist.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
@mutelist.setter
|
||||
@abstractmethod
|
||||
def mutelist(self, path: str):
|
||||
"""
|
||||
mutelist.setter sets the provider's mutelist.
|
||||
|
||||
This method needs to be created in each provider.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
```
|
||||
|
||||
### Provider Class
|
||||
|
||||
Due to the complexity and differences of each provider, use the existing providers as a template for the implementation.
|
||||
|
||||
- [AWS](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/aws_provider.py)
|
||||
- [GCP](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/gcp_provider.py)
|
||||
- [Azure](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/azure_provider.py)
|
||||
- [Kubernetes](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/kubernetes/kubernetes_provider.py)
|
||||
|
||||
To facilitate understanding, here is pseudocode, with examples, of how the most basic provider could look.
|
||||
|
||||
```python title="Provider Example Class"
|
||||
|
||||
# Library imports to authenticate in the Provider
|
||||
|
||||
from prowler.config.config import load_and_validate_config_file
|
||||
from prowler.lib.logger import logger
|
||||
from prowler.lib.mutelist.mutelist import parse_mutelist_file
|
||||
from prowler.lib.utils.utils import print_boxes
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.<new_provider_name>.models import (
|
||||
# All providers models needed
|
||||
ProviderSessionModel,
|
||||
ProviderIdentityModel,
|
||||
ProviderOutputOptionsModel
|
||||
)
|
||||
|
||||
class NewProvider(Provider):
|
||||
# All properties of the class; some of these are abstract properties from the base class
|
||||
_type: str = "<provider_name>"
|
||||
_session: <ProviderSessionModel>
|
||||
_identity: <ProviderIdentityModel>
|
||||
_audit_config: dict
|
||||
_output_options: ProviderOutputOptionsModel
|
||||
_mutelist: dict
|
||||
audit_metadata: Audit_Metadata
|
||||
|
||||
def __init__(self, arguments):
|
||||
"""
|
||||
Initializes the NewProvider instance.
|
||||
Args:
|
||||
arguments (dict): A dictionary containing configuration arguments.
|
||||
"""
|
||||
logger.info("Setting <NewProviderName> provider ...")
|
||||
# First, get from the arguments what is necessary from the cloud account (subscriptions, projects or whatever the provider uses to group its services)
|
||||
|
||||
# Set the session with the method enforced by parent class
|
||||
self._session = self.setup_session(credentials_file)
|
||||
|
||||
# Set the identity, normally using the class provided by the provider's Python library
|
||||
self._identity = <ProviderIdentityModel>()
|
||||
|
||||
# Set the provider configuration
|
||||
self._audit_config = load_and_validate_config_file(
|
||||
self._type, arguments.config_file
|
||||
)
|
||||
|
||||
# All enforced properties by the parent class
|
||||
@property
|
||||
def identity(self):
|
||||
return self._identity
|
||||
|
||||
@property
|
||||
def session(self):
|
||||
return self._session
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
return self._type
|
||||
|
||||
@property
|
||||
def audit_config(self):
|
||||
return self._audit_config
|
||||
|
||||
@property
|
||||
def output_options(self):
|
||||
return self._output_options
|
||||
|
||||
def setup_session(self, <all_needed_for_auth>):
|
||||
"""
|
||||
Sets up the Provider session.
|
||||
|
||||
Args:
|
||||
<all_needed_for_auth>: all the arguments needed to set up the session
|
||||
|
||||
Returns:
|
||||
Credentials necessary to communicate with the provider.
|
||||
"""
|
||||
pass
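# A possible sketch of what this method could do; <ProviderSDKSession> is an
# illustrative placeholder, not a real library class:
#     try:
#         return <ProviderSDKSession>(credentials_file=credentials_file)
#     except Exception as error:
#         logger.critical(
#             f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
#         )
#         raise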
|
||||
|
||||
"""
|
||||
This method is enforced by the parent class and is used to print all relevant
|
||||
information during the Prowler execution as a header of the run.
|
||||
Normally the account ID, user name and similar details are displayed in color using the colorama module (Fore).
|
||||
"""
|
||||
def print_credentials(self):
|
||||
pass
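# A possible sketch; print_boxes is imported above, Fore/Style would come from
# colorama, and the identity attributes shown here are illustrative:
#     report_lines = [
#         f"Identity: {Fore.YELLOW}{self._identity.name}{Style.RESET_ALL}",
#     ]
#     print_boxes(report_lines, "Using the <provider_name> credentials below:")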
|
||||
|
||||
|
||||
|
||||
```
|
||||
@@ -4,33 +4,36 @@ Here you can find how to create a new service, or to complement an existing one,
|
||||
|
||||
## Introduction
|
||||
|
||||
To create a new service, you will need to create a folder inside the specific provider, i.e. `prowler/providers/<provider>/services/<service>/`.
|
||||
In Prowler, a service is basically a solution that is offered by a cloud provider i.e. [ec2](https://aws.amazon.com/ec2/). Essentially it is a class that stores all the necessary stuff that we will need later in the checks to audit some aspects of our Cloud account.
|
||||
|
||||
To create a new service, you will need to create a folder inside the specific provider, i.e. `prowler/providers/<provider>/services/<new_service_name>/`.
|
||||
|
||||
Inside that folder, you MUST create the following files:
|
||||
|
||||
- An empty `__init__.py`: to make Python treat this service folder as a package.
|
||||
- A `<service>_service.py`, containing all the service's logic and API calls.
|
||||
- A `<service>_client_.py`, containing the initialization of the service's class we have just created so the checks's checks can use it.
|
||||
- A `<new_service_name>_service.py`, containing all the service's logic and API calls.
|
||||
- A `<new_service_name>_client.py`, containing the initialization of the service's class we have just created so the service's checks can use it.
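For reference, the final layout of a hypothetical `<new_service_name>` folder would look like this:

```
prowler/providers/<provider>/services/<new_service_name>/
├── __init__.py
├── <new_service_name>_service.py
└── <new_service_name>_client.py
```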
|
||||
|
||||
## Service
|
||||
|
||||
Prowler's service structure is the following, and it is initialised just by importing the service client in a check.
|
||||
|
||||
## Service Base Class
|
||||
### Service Base Class
|
||||
|
||||
All of Prowler's provider services inherit from a base class that depends on the provider used.
|
||||
|
||||
- [AWS Service Base Class](https://github.com/prowler-cloud/prowler/blob/22f8855ad7dad2e976dabff78611b643e234beaf/prowler/providers/aws/lib/service/service.py)
|
||||
- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/22f8855ad7dad2e976dabff78611b643e234beaf/prowler/providers/gcp/lib/service/service.py)
|
||||
- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/22f8855ad7dad2e976dabff78611b643e234beaf/prowler/providers/azure/lib/service/service.py)
|
||||
- [AWS Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/aws/lib/service/service.py)
|
||||
- [GCP Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/gcp/lib/service/service.py)
|
||||
- [Azure Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/azure/lib/service/service.py)
|
||||
- [Kubernetes Service Base Class](https://github.com/prowler-cloud/prowler/blob/master/prowler/providers/kubernetes/lib/service/service.py)
|
||||
|
||||
Each class is used to initialize the credentials and the API clients to be used in the service. If any threading is used, it must be coded there.
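As an illustration only, and not the actual code of any of the base classes linked above, a minimal sketch of such a parent class could look like the following; the attribute names and the thread-pool size are assumptions:

```python title="Service Base Class sketch"
from concurrent.futures import ThreadPoolExecutor
from typing import Any, Callable, Iterable


class ServiceParentClass:
    def __init__(self, service: str, provider: Any):
        # Store the service name and reuse the provider's session and configuration
        self.service = service
        self.session = provider.session
        self.audit_config = provider.audit_config

    def __threading_call__(self, call: Callable, iterator: Iterable) -> None:
        # Run the given call over every item (e.g. regions or projects) in parallel
        with ThreadPoolExecutor(max_workers=10) as executor:
            executor.map(call, iterator)
```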
|
||||
|
||||
## Service Class
|
||||
### Service Class
|
||||
|
||||
Due to the complexity and differencies of each provider API we are going to use an example service to guide you in how can it be created.
|
||||
Due to the complexity and differences of each provider API, we are going to use an example service to guide you through how it can be created.
|
||||
|
||||
The following is the `<service>_service.py` file:
|
||||
The following is the `<new_service_name>_service.py` file:
|
||||
|
||||
```python title="Service Class"
|
||||
from datetime import datetime
|
||||
@@ -55,12 +58,12 @@ from prowler.providers.<provider>.lib.service.service import ServiceParentClass
|
||||
# Create a class for the Service
|
||||
################## <Service>
|
||||
class <Service>(ServiceParentClass):
|
||||
def __init__(self, audit_info):
|
||||
def __init__(self, provider):
|
||||
# Call Service Parent Class __init__
|
||||
# We use the __class__.__name__ to get it automatically
|
||||
# from the Service Class name but you can pass a custom
|
||||
# string if the provider's API service name is different
|
||||
super().__init__(__class__.__name__, audit_info)
|
||||
super().__init__(__class__.__name__, provider)
|
||||
|
||||
# Create an empty dictionary of items to be gathered,
|
||||
# using the unique ID as the dictionary key
|
||||
@@ -175,10 +178,12 @@ class <Service>(ServiceParentClass):
|
||||
f"{<item>.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
|
||||
)
|
||||
```
|
||||
???+note
|
||||
To avoid false findings, when Prowler can't retrieve the items because of an Access Denied or similar error, we set that item's value to `None`.
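A hedged sketch of that pattern inside a service, where `get_<item>_policy` and the attribute names are placeholders rather than a real API:

```python title="Setting a value to None on Access Denied (sketch)"
try:
    item.policy = self.client.get_<item>_policy(ItemId=item.id)["Policy"]
except Exception as error:
    logger.error(
        f"{item.region} -- {error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}"
    )
    item.policy = None
```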
|
||||
|
||||
### Service Models
|
||||
#### Service Models
|
||||
|
||||
For each class object we need to model we use the Pydantic's [BaseModel](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel) to take advantage of the data validation.
|
||||
Service models are classes used in the service to define everything we need to store in each object extracted from the API calls. We use Pydantic's [BaseModel](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel) to take advantage of its data validation.
|
||||
|
||||
```python title="Service Model"
|
||||
# In each service class we have to create some classes using
|
||||
@@ -202,7 +207,7 @@ class <Item>(BaseModel):
|
||||
tags: Optional[list]
|
||||
"""<Items>[].tags"""
|
||||
```
|
||||
### Service Objects
|
||||
#### Service Objects
|
||||
In the service each group of resources should be created as a Python [dictionary](https://docs.python.org/3/tutorial/datastructures.html#dictionaries). This is because we are performing lookups all the time and the Python dictionary lookup has [O(1) complexity](https://en.wikipedia.org/wiki/Big_O_notation#Orders_of_common_functions).
|
||||
|
||||
We MUST set as the dictionary key a unique ID, like the resource Unique ID or ARN.
|
||||
@@ -213,17 +218,17 @@ self.vpcs = {}
|
||||
self.vpcs["vpc-01234567890abcdef"] = VPC_Object_Class()
|
||||
```
|
||||
|
||||
## Service Client
|
||||
### Service Client
|
||||
|
||||
Each Prowler service requires a service client to use the service in the checks.
|
||||
|
||||
The following is the `<service>_client.py` containing the initialization of the service's class we have just created so the service's checks can use them:
|
||||
The following is the `<new_service_name>_client.py` containing the initialization of the service's class we have just created so the service's checks can use them:
|
||||
|
||||
```python
|
||||
from prowler.providers.<provider>.lib.audit_info.audit_info import audit_info
|
||||
from prowler.providers.<provider>.services.<service>.<service>_service import <Service>
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.<provider>.services.<new_service_name>.<new_service_name>_service import <Service>
|
||||
|
||||
<service>_client = <Service>(audit_info)
|
||||
<new_service_name>_client = <Service>(Provider.get_global_provider())
|
||||
```
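A check then only needs to import that client to use the collected data:

```python
from prowler.providers.<provider>.services.<new_service_name>.<new_service_name>_client import (
    <new_service_name>_client,
)
```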
|
||||
|
||||
## Permissions
|
||||
|
||||
@@ -62,50 +62,6 @@ For the AWS provider we have ways to test a Prowler check based on the following
|
||||
|
||||
In the following section we are going to explain all of the above scenarios with examples. The main difference between those scenarios comes from if the [Moto](https://github.com/getmoto/moto) library covers the AWS API calls made by the service. You can check the covered API calls [here](https://github.com/getmoto/moto/blob/master/IMPLEMENTATION_COVERAGE.md).
|
||||
|
||||
An important point for the AWS testing is that in each check we MUST have a unique `audit_info` which is the key object during the AWS execution to isolate the test execution.
|
||||
|
||||
Check the [Audit Info](./audit-info.md) section to get more details.
|
||||
|
||||
```python
|
||||
# We need to import the AWS_Audit_Info and the Audit_Metadata
|
||||
# to set the audit_info to call AWS APIs
|
||||
from prowler.providers.aws.lib.audit_info.models import AWS_Audit_Info
|
||||
from prowler.providers.common.models import Audit_Metadata
|
||||
|
||||
AWS_ACCOUNT_NUMBER = "123456789012"
|
||||
|
||||
def set_mocked_audit_info(self):
|
||||
audit_info = AWS_Audit_Info(
|
||||
session_config=None,
|
||||
original_session=None,
|
||||
audit_session=session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
audit_config=None,
|
||||
audited_account=AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn=f"arn:aws:iam::{AWS_ACCOUNT_NUMBER}:root",
|
||||
audited_user_id=None,
|
||||
audited_partition="aws",
|
||||
audited_identity_arn=None,
|
||||
profile=None,
|
||||
profile_region=None,
|
||||
credentials=None,
|
||||
assumed_role_info=None,
|
||||
audited_regions=["us-east-1", "eu-west-1"],
|
||||
organizations_metadata=None,
|
||||
audit_resources=None,
|
||||
mfa_enabled=False,
|
||||
audit_metadata=Audit_Metadata(
|
||||
services_scanned=0,
|
||||
expected_checks=[],
|
||||
completed_checks=0,
|
||||
audit_progress=0,
|
||||
),
|
||||
)
|
||||
|
||||
return audit_info
|
||||
```
|
||||
### Checks
|
||||
|
||||
For the AWS tests examples we are going to use the tests for the `iam_password_policy_uppercase` check.
|
||||
@@ -148,29 +104,29 @@ class Test_iam_password_policy_uppercase:
|
||||
# policy we want to set to False the RequireUppercaseCharacters
|
||||
iam_client.update_account_password_policy(RequireUppercaseCharacters=False)
|
||||
|
||||
# We set a mocked audit_info for AWS not to share the same audit state
|
||||
# between checks
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
# The aws_provider is mocked using set_mocked_aws_provider to use it as the return of the get_global_provider method.
|
||||
# this mocked provider is defined in fixtures
|
||||
aws_provider = set_mocked_aws_provider([AWS_REGION_US_EAST_1])
|
||||
|
||||
# The Prowler service import MUST be made within the decorated
|
||||
# code not to make real API calls to the AWS service.
|
||||
from prowler.providers.aws.services.iam.iam_service import IAM
|
||||
|
||||
# Prowler for AWS uses a shared object called `current_audit_info` where it stores
|
||||
# the audit's state, credentials and configuration.
|
||||
# Prowler for AWS uses a shared object called aws_provider where it stores
|
||||
# the info related to the provider
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
new=current_audit_info,
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=aws_provider,
|
||||
),
|
||||
# We have to mock also the iam_client from the check to enforce that the iam_client used is the one
|
||||
# created within this check because patch != import, and if you execute tests in parallel some objects
|
||||
# can be already initialised hence the check won't be isolated
|
||||
mock.patch(
|
||||
"prowler.providers.aws.services.iam.iam_password_policy_uppercase.iam_password_policy_uppercase.iam_client",
|
||||
new=IAM(current_audit_info),
|
||||
new=IAM(aws_provider),
|
||||
):
|
||||
# We import the check within the two mocks not to initialise the iam_client with some shared information from
|
||||
# the current_audit_info or the IAM service.
|
||||
# the aws_provider or the IAM service.
|
||||
from prowler.providers.aws.services.iam.iam_password_policy_uppercase.iam_password_policy_uppercase import (
|
||||
iam_password_policy_uppercase,
|
||||
)
|
||||
@@ -235,10 +191,6 @@ class Test_iam_password_policy_uppercase:
|
||||
expiration=True,
|
||||
)
|
||||
|
||||
# We set a mocked audit_info for AWS not to share the same audit state
|
||||
# between checks
|
||||
current_audit_info = self.set_mocked_audit_info()
|
||||
|
||||
# In this scenario we also have to mock the IAM service and the iam_client from the check to enforce that the iam_client used is the one created within this check because patch != import, and if you execute tests in parallel some objects can be already initialised, hence the check won't be isolated.
|
||||
# In this case we don't use the Moto decorator, we use the mocked IAM client for both objects
|
||||
with mock.patch(
|
||||
@@ -249,7 +201,7 @@ class Test_iam_password_policy_uppercase:
|
||||
new=mocked_iam_client,
|
||||
):
|
||||
# We import the check within the two mocks not to initialise the iam_client with some shared information from
|
||||
# the current_audit_info or the IAM service.
|
||||
# the aws_provider or the IAM service.
|
||||
from prowler.providers.aws.services.iam.iam_password_policy_uppercase.iam_password_policy_uppercase import (
|
||||
iam_password_policy_uppercase,
|
||||
)
|
||||
@@ -333,19 +285,48 @@ Note that this does not use Moto, to keep it simple, but if you use any `moto`-d
|
||||
|
||||
#### Mocking more than one service
|
||||
|
||||
Since we are mocking the provider, it can be customized by setting multiple attributes on it:
|
||||
```python
|
||||
def set_mocked_aws_provider(
|
||||
audited_regions: list[str] = [],
|
||||
audited_account: str = AWS_ACCOUNT_NUMBER,
|
||||
audited_account_arn: str = AWS_ACCOUNT_ARN,
|
||||
audited_partition: str = AWS_COMMERCIAL_PARTITION,
|
||||
expected_checks: list[str] = [],
|
||||
profile_region: str = None,
|
||||
audit_config: dict = {},
|
||||
fixer_config: dict = {},
|
||||
scan_unused_services: bool = True,
|
||||
audit_session: session.Session = session.Session(
|
||||
profile_name=None,
|
||||
botocore_session=None,
|
||||
),
|
||||
original_session: session.Session = None,
|
||||
enabled_regions: set = None,
|
||||
arguments: Namespace = Namespace(),
|
||||
create_default_organization: bool = True,
|
||||
) -> AwsProvider:
|
||||
```
|
||||
|
||||
If the test you are creating belongs to a check that uses more than one provider service, you should mock each of the services used. For example, the check `cloudtrail_logs_s3_bucket_access_logging_enabled` requires the CloudTrail and the S3 clients, hence the service's mock part of the test will be as follows:
|
||||
|
||||
|
||||
```python
|
||||
with mock.patch(
|
||||
"prowler.providers.aws.lib.audit_info.audit_info.current_audit_info",
|
||||
new=mock_audit_info,
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_aws_provider(
|
||||
[AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1]
|
||||
),
|
||||
), mock.patch(
|
||||
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_client",
|
||||
new=Cloudtrail(mock_audit_info),
|
||||
new=Cloudtrail(
|
||||
set_mocked_aws_provider([AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1])
|
||||
),
|
||||
), mock.patch(
|
||||
"prowler.providers.aws.services.cloudtrail.cloudtrail_logs_s3_bucket_access_logging_enabled.cloudtrail_logs_s3_bucket_access_logging_enabled.s3_client",
|
||||
new=S3(mock_audit_info),
|
||||
new=S3(
|
||||
set_mocked_aws_provider([AWS_REGION_US_EAST_1, AWS_REGION_EU_WEST_1])
|
||||
),
|
||||
):
|
||||
```
|
||||
|
||||
@@ -363,10 +344,10 @@ from prowler.providers.<provider>.services.<service>.<service>_client import <se
|
||||
```
|
||||
2. `<service>_client.py`:
|
||||
```python
|
||||
from prowler.providers.<provider>.lib.audit_info.audit_info import audit_info
|
||||
from prowler.providers.common.provider import Provider
|
||||
from prowler.providers.<provider>.services.<service>.<service>_service import <SERVICE>
|
||||
|
||||
<service>_client = <SERVICE>(audit_info)
|
||||
<service>_client = <SERVICE>(Provider.get_global_provider())
|
||||
```
|
||||
|
||||
Due to the above import path, patching the following objects is not equivalent: if you run a bunch of tests, in parallel or not, some clients can already be instantiated by another check, and your test execution would then be using another test's service instance:
|
||||
@@ -384,19 +365,20 @@ A useful read about this topic can be found in the following article: https://st
|
||||
|
||||
Mocking a service client using the following code ...
|
||||
|
||||
Once the needed attributes are set for the mocked provider, you can use the mocked provider:
|
||||
```python title="Mocking the service_client"
|
||||
with mock.patch(
|
||||
"prowler.providers.<provider>.lib.audit_info.audit_info.audit_info",
|
||||
new=audit_info,
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
new=set_mocked_aws_provider([<region>]),
|
||||
), mock.patch(
|
||||
"prowler.providers.<provider>.services.<service>.<check>.<check>.<service>_client",
|
||||
new=<SERVICE>(audit_info),
|
||||
new=<SERVICE>(set_mocked_aws_provider([<region>])),
|
||||
):
|
||||
```
|
||||
will cause the service to be initialised twice:
|
||||
|
||||
1. When the `<SERVICE>(audit_info)` is mocked out using `mock.patch` to have the object ready for the patching.
|
||||
2. At the `<service>_client.py` when we are patching it since the `mock.patch` needs to go to that object an initialise it, hence the `<SERVICE>(audit_info)` will be called again.
|
||||
1. When the `<SERVICE>(set_mocked_aws_provider([<region>]))` is mocked out using `mock.patch` to have the object ready for the patching.
|
||||
2. At `<service>_client.py` when we are patching it, since `mock.patch` needs to go to that object and initialise it, hence `<SERVICE>(set_mocked_aws_provider([<region>]))` will be called again.
|
||||
|
||||
Then, when we import the `<service>_client.py` at `<check>.py`, since we are mocking where the object is used, Python will use the mocked one.
|
||||
|
||||
@@ -408,24 +390,24 @@ Mocking a service client using the following code ...
|
||||
|
||||
```python title="Mocking the service and the service_client"
|
||||
with mock.patch(
|
||||
"prowler.providers.<provider>.lib.audit_info.audit_info.audit_info",
|
||||
new=audit_info,
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
new=set_mocked_aws_provider([<region>]),
|
||||
), mock.patch(
|
||||
"prowler.providers.<provider>.services.<service>.<SERVICE>",
|
||||
new=<SERVICE>(audit_info),
|
||||
new=<SERVICE>(set_mocked_aws_provider([<region>])),
|
||||
) as service_client, mock.patch(
|
||||
"prowler.providers.<provider>.services.<service>.<service>_client.<service>_client",
|
||||
new=service_client,
|
||||
):
|
||||
```
|
||||
will cause that the service will be initialised once, just when the `<SERVICE>(audit_info)` is mocked out using `mock.patch`.
|
||||
will cause the service to be initialised only once, when `<SERVICE>(set_mocked_aws_provider([<region>]))` is mocked out using `mock.patch`.
|
||||
|
||||
Then, at the check level, when Python tries to import the client with `from prowler.providers.<provider>.services.<service>.<service>_client`, since it is already mocked out, the execution will continue using the `service_client` without getting into `<service>_client.py`.
|
||||
|
||||
|
||||
### Services
|
||||
|
||||
For testing the AWS services we have to follow the same logic as with the AWS checks, we have to check if the AWS API calls made by the service are covered by Moto and we have to test the service `__init__` to verifiy that the information is being correctly retrieved.
|
||||
For testing the AWS services we have to follow the same logic as with the AWS checks, we have to check if the AWS API calls made by the service are covered by Moto and we have to test the service `__init__` to verify that the information is being correctly retrieved.
|
||||
|
||||
The service tests could act as *Integration Tests* since we test how the service retrieves the information from the provider, but since Moto or the custom mock objects mock those calls, these tests fall into *Unit Tests*.
|
||||
|
||||
@@ -437,79 +419,208 @@ Please refer to the [AWS checks tests](./unit-testing.md#checks) for more inform
|
||||
|
||||
For the GCP Provider we don't have any library to mock out the API calls we use. So in this scenario we inject the objects in the service client using [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock).
|
||||
|
||||
The following code shows how to use MagicMock to create the service objects for a GCP check test.
|
||||
The following code shows how to use MagicMock to create the service objects for a GCP check test. It is a real example adapted for informative purposes.
|
||||
|
||||
```python
|
||||
# We need to import the unittest.mock to allow us to patch some objects
|
||||
# not to use shared ones between test, hence to isolate the test
|
||||
from re import search
|
||||
from unittest import mock
|
||||
|
||||
# GCP Constants
|
||||
GCP_PROJECT_ID = "123456789012"
|
||||
# Import some constant values needed in every check
|
||||
from tests.providers.gcp.gcp_fixtures import GCP_PROJECT_ID, set_mocked_gcp_provider
|
||||
|
||||
# We are going to create a test for the compute_firewall_rdp_access_from_the_internet_allowed check
|
||||
class Test_compute_firewall_rdp_access_from_the_internet_allowed:
|
||||
# We are going to create a test for the compute_project_os_login_enabled check
|
||||
class Test_compute_project_os_login_enabled:
|
||||
|
||||
# We name the tests with test_<service>_<check_name>_<test_action>
|
||||
def test_compute_compute_firewall_rdp_access_from_the_internet_allowed_one_compliant_rule_with_valid_port(self):
|
||||
def test_one_compliant_project(self):
|
||||
# Import the service resource model to create the mocked object
|
||||
from prowler.providers.gcp.services.compute.compute_service import Project
|
||||
# Create the custom Project object to be tested
|
||||
project = Project(
|
||||
id=GCP_PROJECT_ID,
|
||||
enable_oslogin=True,
|
||||
)
|
||||
# Mocked client with MagicMock
|
||||
compute_client = mock.MagicMock
|
||||
|
||||
# Assign GCP client configuration
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.region = "global"
|
||||
compute_client.projects = [project]
|
||||
|
||||
# Import the service resource model to create the mocked object
|
||||
from prowler.providers.gcp.services.compute.compute_service import Firewall
|
||||
|
||||
# Create the custom Firewall object to be tested
|
||||
firewall = Firewall(
|
||||
name="test",
|
||||
id="1234567890",
|
||||
source_ranges=["0.0.0.0/0"],
|
||||
direction="INGRESS",
|
||||
allowed_rules=[{"IPProtocol": "tcp", "ports": ["443"]}],
|
||||
project_id=GCP_PROJECT_ID,
|
||||
)
|
||||
compute_client.firewalls = [firewall]
|
||||
|
||||
# In this scenario we have to mock also the Compute service and the compute_client from the check to enforce that the compute_client used is the one created within this check because patch != import, and if you execute tests in parallel some objects can be already initialised hence the check won't be isolated.
|
||||
# In this case we don't use the Moto decorator, we use the mocked Compute client for both objects
|
||||
# In this scenario we have to mock the compute_client from the check to enforce that the compute_client used is the one created above
|
||||
# We also mock the return value of the get_global_provider function to return our mocked GCP provider defined in the fixtures
|
||||
with mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_service.Compute",
|
||||
new=defender_client,
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
), mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_client.compute_client",
|
||||
new=defender_client,
|
||||
"prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
|
||||
# We import the check within the two mocks not to initialise the iam_client with some shared information from
|
||||
# the current_audit_info or the Compute service.
|
||||
from prowler.providers.gcp.services.compute.compute_firewall_rdp_access_from_the_internet_allowed.compute_firewall_rdp_access_from_the_internet_allowed import (
|
||||
compute_firewall_rdp_access_from_the_internet_allowed,
|
||||
# We import the check within the two mocks
|
||||
from prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled import (
|
||||
compute_project_os_login_enabled,
|
||||
)
|
||||
|
||||
# Once imported, we only need to instantiate the check's class
|
||||
check = compute_firewall_rdp_access_from_the_internet_allowed()
|
||||
|
||||
check = compute_project_os_login_enabled()
|
||||
# And then, call the execute() function to run the check
|
||||
# against the IAM client we've set up.
|
||||
# against the Compute client we've set up.
|
||||
result = check.execute()
|
||||
|
||||
# Last but not least, we need to assert all the fields
|
||||
# from the check's results
|
||||
# Assert the expected results
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert result[0].status_extended == f"Firewall {firewall.name} does not expose port 3389 (RDP) to the internet."
|
||||
assert result[0].resource_name = firewall.name
|
||||
assert result[0].resource_id == firewall.id
|
||||
assert result[0].project_id = GCP_PROJECT_ID
|
||||
assert result[0].location = compute_client.region
|
||||
assert search(
|
||||
f"Project {project.id} has OS Login enabled",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == project.id
|
||||
assert result[0].location == "global"
|
||||
assert result[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
# Complementary test to make more coverage for different scenarios
|
||||
def test_one_non_compliant_project(self):
|
||||
from prowler.providers.gcp.services.compute.compute_service import Project
|
||||
|
||||
project = Project(
|
||||
id=GCP_PROJECT_ID,
|
||||
enable_oslogin=False,
|
||||
)
|
||||
|
||||
compute_client = mock.MagicMock
|
||||
compute_client.project_ids = [GCP_PROJECT_ID]
|
||||
compute_client.projects = [project]
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_gcp_provider(),
|
||||
), mock.patch(
|
||||
"prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled.compute_client",
|
||||
new=compute_client,
|
||||
):
|
||||
from prowler.providers.gcp.services.compute.compute_project_os_login_enabled.compute_project_os_login_enabled import (
|
||||
compute_project_os_login_enabled,
|
||||
)
|
||||
|
||||
check = compute_project_os_login_enabled()
|
||||
result = check.execute()
|
||||
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert search(
|
||||
f"Project {project.id} does not have OS Login enabled",
|
||||
result[0].status_extended,
|
||||
)
|
||||
assert result[0].resource_id == project.id
|
||||
assert result[0].location == "global"
|
||||
assert result[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
```
|
||||
|
||||
### Services
|
||||
|
||||
Coming soon ...
|
||||
For testing Google Cloud services, we have to follow the same logic as with the Google Cloud checks. We still mock all API calls, but in this case, every API call used to set up an attribute is defined in the [fixtures file](https://github.com/prowler-cloud/prowler/blob/master/tests/providers/gcp/gcp_fixtures.py) in the `mock_api_client` function. Remember that EVERY method of a service must be tested.
|
||||
|
||||
The following code shows a real example of a testing class, but it has more comments than usual for educational purposes.
|
||||
|
||||
```python title="BigQuery Service Test"
|
||||
# We need to import the unittest.mock.patch to allow us to patch some objects
|
||||
# not to use shared ones between test, hence to isolate the test
|
||||
from unittest.mock import patch
|
||||
# Import the class needed from the service file
|
||||
from prowler.providers.gcp.services.bigquery.bigquery_service import BigQuery
|
||||
# Necessary constants and functions from the fixtures file
|
||||
from tests.providers.gcp.gcp_fixtures import (
|
||||
GCP_PROJECT_ID,
|
||||
mock_api_client,
|
||||
mock_is_api_active,
|
||||
set_mocked_gcp_provider,
|
||||
)
|
||||
|
||||
|
||||
class TestBigQueryService:
|
||||
# Only method needed to test full service
|
||||
def test_service(self):
|
||||
# In this case we are mocking the __is_api_active__ to ensure our mocked project is used
|
||||
# and the whole client so that our mocked API calls are used
|
||||
with patch(
|
||||
"prowler.providers.gcp.lib.service.service.GCPService.__is_api_active__",
|
||||
new=mock_is_api_active,
|
||||
), patch(
|
||||
"prowler.providers.gcp.lib.service.service.GCPService.__generate_client__",
|
||||
new=mock_api_client,
|
||||
):
|
||||
# Instantiate an object of class with the mocked provider
|
||||
bigquery_client = BigQuery(
|
||||
set_mocked_gcp_provider(project_ids=[GCP_PROJECT_ID])
|
||||
)
|
||||
# Check that all attributes of the tested class are well set up according to the API calls mocked in the GCP fixtures file
|
||||
assert bigquery_client.service == "bigquery"
|
||||
assert bigquery_client.project_ids == [GCP_PROJECT_ID]
|
||||
|
||||
assert len(bigquery_client.datasets) == 2
|
||||
|
||||
assert bigquery_client.datasets[0].name == "unique_dataset1_name"
|
||||
assert bigquery_client.datasets[0].id.__class__.__name__ == "str"
|
||||
assert bigquery_client.datasets[0].region == "US"
|
||||
assert bigquery_client.datasets[0].cmk_encryption
|
||||
assert bigquery_client.datasets[0].public
|
||||
assert bigquery_client.datasets[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
assert bigquery_client.datasets[1].name == "unique_dataset2_name"
|
||||
assert bigquery_client.datasets[1].id.__class__.__name__ == "str"
|
||||
assert bigquery_client.datasets[1].region == "EU"
|
||||
assert not bigquery_client.datasets[1].cmk_encryption
|
||||
assert not bigquery_client.datasets[1].public
|
||||
assert bigquery_client.datasets[1].project_id == GCP_PROJECT_ID
|
||||
|
||||
assert len(bigquery_client.tables) == 2
|
||||
|
||||
assert bigquery_client.tables[0].name == "unique_table1_name"
|
||||
assert bigquery_client.tables[0].id.__class__.__name__ == "str"
|
||||
assert bigquery_client.tables[0].region == "US"
|
||||
assert bigquery_client.tables[0].cmk_encryption
|
||||
assert bigquery_client.tables[0].project_id == GCP_PROJECT_ID
|
||||
|
||||
assert bigquery_client.tables[1].name == "unique_table2_name"
|
||||
assert bigquery_client.tables[1].id.__class__.__name__ == "str"
|
||||
assert bigquery_client.tables[1].region == "US"
|
||||
assert not bigquery_client.tables[1].cmk_encryption
|
||||
assert bigquery_client.tables[1].project_id == GCP_PROJECT_ID
|
||||
```
|
||||
Since it can be confusing where all these values come from, here is an example to make it clearer. First we need to check
|
||||
which API call is used to obtain the datasets. If we check the service, in this case the call is
|
||||
`self.client.datasets().list(projectId=project_id)`.
|
||||
|
||||
Now, in the fixtures file we have to mock this call in our `MagicMock` client inside the `mock_api_client` function. The best way to mock it
|
||||
is to follow the existing format: add one function that receives the client to be modified, whose name must follow the pattern
|
||||
`mock_api_<endpoint>_calls` (*endpoint* refers to the first attribute accessed after *client*).
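A minimal skeleton of one of these helpers, using the BigQuery datasets endpoint as the example (the body is illustrative; the real helpers live in `gcp_fixtures.py`):

```python
from unittest.mock import MagicMock


def mock_api_dataset_calls(client: MagicMock) -> MagicMock:
    # Set the return value for every `client.datasets()...` call the service makes
    client.datasets().list().execute.return_value = {"datasets": []}
    return client
```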
|
||||
|
||||
In the BigQuery example the function is called `mock_api_dataset_calls`, and inside this function we find an assignment to
|
||||
be used in the `__get_datasets__` method of the BigQuery class:
|
||||
|
||||
```python
|
||||
# Mocking datasets
|
||||
dataset1_id = str(uuid4())
|
||||
dataset2_id = str(uuid4())
|
||||
|
||||
client.datasets().list().execute.return_value = {
|
||||
"datasets": [
|
||||
{
|
||||
"datasetReference": {
|
||||
"datasetId": "unique_dataset1_name",
|
||||
"projectId": GCP_PROJECT_ID,
|
||||
},
|
||||
"id": dataset1_id,
|
||||
"location": "US",
|
||||
},
|
||||
{
|
||||
"datasetReference": {
|
||||
"datasetId": "unique_dataset2_name",
|
||||
"projectId": GCP_PROJECT_ID,
|
||||
},
|
||||
"id": dataset2_id,
|
||||
"location": "EU",
|
||||
},
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Azure
|
||||
|
||||
@@ -517,80 +628,186 @@ Coming soon ...
|
||||
|
||||
For the Azure Provider we don't have any library to mock out the API calls we use. So in this scenario we inject the objects in the service client using [MagicMock](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.MagicMock).
|
||||
|
||||
The following code shows how to use MagicMock to create the service objects for a Azure check test.
|
||||
The following code shows how to use MagicMock to create the service objects for an Azure check test. It is a real example adapted for informative purposes.
|
||||
|
||||
```python
|
||||
```python title="app_ensure_http_is_redirected_to_https_test.py"
|
||||
# We need to import the unittest.mock to allow us to patch some objects
|
||||
# not to use shared ones between test, hence to isolate the test
|
||||
from unittest import mock
|
||||
|
||||
from uuid import uuid4
|
||||
|
||||
# Azure Constants
|
||||
from tests.providers.azure.azure_fixtures import AZURE_SUBSCRIPTION
|
||||
# Import some constant values needed in almost every check
|
||||
from tests.providers.azure.azure_fixtures import (
|
||||
AZURE_SUBSCRIPTION_ID,
|
||||
set_mocked_azure_provider,
|
||||
)
|
||||
|
||||
|
||||
|
||||
# We are going to create a test for the Test_defender_ensure_defender_for_arm_is_on check
|
||||
class Test_defender_ensure_defender_for_arm_is_on:
|
||||
# We are going to create a test for the app_ensure_http_is_redirected_to_https check
|
||||
class Test_app_ensure_http_is_redirected_to_https:
|
||||
|
||||
# We name the tests with test_<service>_<check_name>_<test_action>
|
||||
def test_defender_defender_ensure_defender_for_arm_is_on_arm_pricing_tier_not_standard(self):
|
||||
resource_id = str(uuid4())
|
||||
|
||||
def test_app_http_to_https_disabled(self):
|
||||
resource_id = f"/subscriptions/{uuid4()}"
|
||||
# Mocked client with MagicMock
|
||||
defender_client = mock.MagicMock
|
||||
app_client = mock.MagicMock
|
||||
|
||||
# Import the service resource model to create the mocked object
|
||||
from prowler.providers.azure.services.defender.defender_service import Defender_Pricing
|
||||
|
||||
# Create the custom Defender object to be tested
|
||||
defender_client.pricings = {
|
||||
AZURE_SUBSCRIPTION: {
|
||||
"Arm": Defender_Pricing(
|
||||
resource_id=resource_id,
|
||||
pricing_tier="Not Standard",
|
||||
free_trial_remaining_time=0,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
# In this scenario we have to mock also the Defender service and the defender_client from the check to enforce that the defender_client used is the one created within this check because patch != import, and if you execute tests in parallel some objects can be already initialised hence the check won't be isolated.
|
||||
# In this case we don't use the Moto decorator, we use the mocked Defender client for both objects
|
||||
# In this scenario we have to mock the app_client from the check to enforce that the app_client used is the one created above
|
||||
# We also mock the return value of the get_global_provider function to return our mocked Azure provider defined in the fixtures
|
||||
with mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_service.Defender",
|
||||
new=defender_client,
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
), mock.patch(
|
||||
"prowler.providers.azure.services.defender.defender_client.defender_client",
|
||||
new=defender_client,
|
||||
"prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https.app_client",
|
||||
new=app_client,
|
||||
):
|
||||
|
||||
# We import the check within the two mocks not to initialise the iam_client with some shared information from
|
||||
# the current_audit_info or the Defender service.
|
||||
from prowler.providers.azure.services.defender.defender_ensure_defender_for_arm_is_on.defender_ensure_defender_for_arm_is_on import (
|
||||
defender_ensure_defender_for_arm_is_on,
|
||||
# We import the check within the two mocks
|
||||
from prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https import (
|
||||
app_ensure_http_is_redirected_to_https,
|
||||
)
|
||||
# Import the service resource model to create the mocked object
|
||||
from prowler.providers.azure.services.app.app_service import WebApp
|
||||
|
||||
# Create the custom App object to be tested
|
||||
app_client.apps = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
"app_id-1": WebApp(
|
||||
resource_id=resource_id,
|
||||
auth_enabled=True,
|
||||
configurations=mock.MagicMock(),
|
||||
client_cert_mode="Ignore",
|
||||
https_only=False,
|
||||
identity=None,
|
||||
location="West Europe",
|
||||
)
|
||||
}
|
||||
}
|
||||
# Once imported, we only need to instantiate the check's class
|
||||
check = defender_ensure_defender_for_arm_is_on()
|
||||
|
||||
check = app_ensure_http_is_redirected_to_https()
|
||||
# And then, call the execute() function to run the check
|
||||
# against the IAM client we've set up.
|
||||
# against the App client we've set up.
|
||||
result = check.execute()
|
||||
|
||||
# Last but not least, we need to assert all the fields
|
||||
# from the check's results
|
||||
# Assert the expected results
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "FAIL"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"Defender plan Defender for ARM from subscription {AZURE_SUBSCRIPTION} is set to OFF (pricing tier not standard)"
|
||||
== f"HTTP is not redirected to HTTPS for app 'app_id-1' in subscription '{AZURE_SUBSCRIPTION_ID}'."
|
||||
)
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION
|
||||
assert result[0].resource_name == "Defender plan ARM"
|
||||
assert result[0].resource_name == "app_id-1"
|
||||
assert result[0].resource_id == resource_id
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].location == "West Europe"
|
||||
|
||||
# Complementary test to make more coverage for different scenarios
|
||||
def test_app_http_to_https_enabled(self):
|
||||
resource_id = f"/subscriptions/{uuid4()}"
|
||||
app_client = mock.MagicMock
|
||||
|
||||
with mock.patch(
|
||||
"prowler.providers.common.provider.Provider.get_global_provider",
|
||||
return_value=set_mocked_azure_provider(),
|
||||
), mock.patch(
|
||||
"prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https.app_client",
|
||||
new=app_client,
|
||||
):
|
||||
from prowler.providers.azure.services.app.app_ensure_http_is_redirected_to_https.app_ensure_http_is_redirected_to_https import (
|
||||
app_ensure_http_is_redirected_to_https,
|
||||
)
|
||||
from prowler.providers.azure.services.app.app_service import WebApp
|
||||
|
||||
app_client.apps = {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
"app_id-1": WebApp(
|
||||
resource_id=resource_id,
|
||||
auth_enabled=True,
|
||||
configurations=mock.MagicMock(),
|
||||
client_cert_mode="Ignore",
|
||||
https_only=True,
|
||||
identity=None,
|
||||
location="West Europe",
|
||||
)
|
||||
}
|
||||
}
|
||||
check = app_ensure_http_is_redirected_to_https()
|
||||
result = check.execute()
|
||||
assert len(result) == 1
|
||||
assert result[0].status == "PASS"
|
||||
assert (
|
||||
result[0].status_extended
|
||||
== f"HTTP is redirected to HTTPS for app 'app_id-1' in subscription '{AZURE_SUBSCRIPTION_ID}'."
|
||||
)
|
||||
assert result[0].resource_name == "app_id-1"
|
||||
assert result[0].resource_id == resource_id
|
||||
assert result[0].subscription == AZURE_SUBSCRIPTION_ID
|
||||
assert result[0].location == "West Europe"
|
||||
|
||||
```
|
||||
|
||||
### Services
|
||||
|
||||
Coming soon ...
|
||||
For testing Azure services, we have to follow the same logic as with the Azure checks. We still mock all the API calls, but in this case, every method that uses an API call to set up an attribute is mocked with the [patch](https://docs.python.org/3/library/unittest.mock.html#unittest.mock.patch) decorator at the beginning of the class. Remember that every method of a service MUST be tested.
|
||||
|
||||
The following code shows a real example of a testing class, but it has more comments than usual for educational purposes.
|
||||
|
||||
```python title="AppInsights Service Test"
|
||||
# We need to import the unittest.mock.patch to allow us to patch some objects
|
||||
# not to use shared ones between test, hence to isolate the test
|
||||
from unittest.mock import patch
|
||||
# Import the models needed from the service file
|
||||
from prowler.providers.azure.services.appinsights.appinsights_service import (
|
||||
AppInsights,
|
||||
Component,
|
||||
)
|
||||
# Import some constant values needed in almost every check
|
||||
from tests.providers.azure.azure_fixtures import (
|
||||
AZURE_SUBSCRIPTION_ID,
|
||||
set_mocked_azure_provider,
|
||||
)
|
||||
|
||||
# Function to mock the service method __get_components__; its task is to return a possible value that the real function could return
|
||||
def mock_appinsights_get_components(_):
|
||||
return {
|
||||
AZURE_SUBSCRIPTION_ID: {
|
||||
"app_id-1": Component(
|
||||
resource_id="/subscriptions/resource_id",
|
||||
resource_name="AppInsightsTest",
|
||||
location="westeurope",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
# Patch decorator to use the mocked function instead of the function with the real API call
|
||||
@patch(
|
||||
"prowler.providers.azure.services.appinsights.appinsights_service.AppInsights.__get_components__",
|
||||
new=mock_appinsights_get_components,
|
||||
)
|
||||
class Test_AppInsights_Service:
|
||||
# Mandatory test for every service: this method tests that the client instance is correct
|
||||
def test__get_client__(self):
|
||||
app_insights = AppInsights(set_mocked_azure_provider())
|
||||
assert (
|
||||
app_insights.clients[AZURE_SUBSCRIPTION_ID].__class__.__name__
|
||||
== "ApplicationInsightsManagementClient"
|
||||
)
|
||||
# Second typical method, which tests that subscriptions is defined inside the client object
|
||||
def test__get_subscriptions__(self):
|
||||
app_insights = AppInsights(set_mocked_azure_provider())
|
||||
assert app_insights.subscriptions.__class__.__name__ == "dict"
|
||||
# Test for the __get_components__ function; this client uses the mocked function
|
||||
def test__get_components__(self):
|
||||
appinsights = AppInsights(set_mocked_azure_provider())
|
||||
assert len(appinsights.components) == 1
|
||||
assert (
|
||||
appinsights.components[AZURE_SUBSCRIPTION_ID]["app_id-1"].resource_id
|
||||
== "/subscriptions/resource_id"
|
||||
)
|
||||
assert (
|
||||
appinsights.components[AZURE_SUBSCRIPTION_ID]["app_id-1"].resource_name
|
||||
== "AppInsightsTest"
|
||||
)
|
||||
assert (
|
||||
appinsights.components[AZURE_SUBSCRIPTION_ID]["app_id-1"].location
|
||||
== "westeurope"
|
||||
)
|
||||
```
|
||||
|
||||
BIN
docs/favicon.ico
|
Before Width: | Height: | Size: 1.2 KiB After Width: | Height: | Size: 15 KiB |
@@ -40,10 +40,10 @@ If your IAM entity enforces MFA you can use `--mfa` and Prowler will ask you to
|
||||
|
||||
Prowler for Azure supports the following authentication types:
|
||||
|
||||
- Service principal authentication by environment variables (Enterprise Application)
|
||||
- [Service principal application](https://learn.microsoft.com/en-us/entra/identity-platform/app-objects-and-service-principals?tabs=browser#service-principal-object) by environment variables (recommended)
|
||||
- Current az cli credentials stored
|
||||
- Interactive browser authentication
|
||||
- Managed identity authentication
|
||||
- [Managed identity](https://learn.microsoft.com/en-us/entra/identity/managed-identities-azure-resources/overview) authentication
|
||||
|
||||
### Service Principal authentication
|
||||
|
||||
@@ -56,6 +56,8 @@ export AZURE_CLIENT_SECRET="XXXXXXX"
|
||||
```
|
||||
|
||||
If you try to execute Prowler with the `--sp-env-auth` flag and those variables are empty or not exported, the execution is going to fail.
|
||||
Follow the instructions in the [Create Prowler Service Principal](../tutorials/azure/create-prowler-service-principal.md) section to create a service principal.
|
||||
|
||||
### AZ CLI / Browser / Managed Identity authentication
|
||||
|
||||
The other three cases do not need additional configuration: `--az-cli-auth` and `--managed-identity-auth` are automated options. To use `--browser-auth` the user needs to authenticate against Azure using the default browser to start the scan, and `--tenant-id` is also required.
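For instance, these would be illustrative invocations (adjust the tenant ID to your environment):

```console
prowler azure --az-cli-auth
prowler azure --managed-identity-auth
prowler azure --browser-auth --tenant-id "00000000-0000-0000-0000-000000000000"
```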
|
||||
@@ -64,55 +66,22 @@ The other three cases does not need additional configuration, `--az-cli-auth` an
|
||||
|
||||
To use each one you need to pass the proper flag to the execution. Prowler for Azure handles two types of permission scopes, which are:
|
||||
|
||||
- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler (not mandatory to have access to execute the tool).
|
||||
- **Subscription scope permissions**: Required to launch the checks against your resources, mandatory to launch the tool.
|
||||
|
||||
|
||||
#### Microsoft Entra ID scope
|
||||
|
||||
Microsoft Entra ID (AAD earlier) permissions required by the tool are the following:
|
||||
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `UserAuthenticationMethod.Read.All`
|
||||
|
||||
The best way to assign it is through the Azure web console:
|
||||
|
||||
1. Access to Microsoft Entra ID
|
||||
2. In the left menu bar, go to "App registrations"
|
||||
3. Once there, in the menu bar click on "+ New registration" to register a new application
|
||||
4. Fill the "Name, select the "Supported account types" and click on "Register. You will be redirected to the applications page.
|
||||

|
||||
4. Select the new application
|
||||
5. In the left menu bar, select "API permissions"
|
||||
6. Then click on "+ Add a permission" and select "Microsoft Graph"
|
||||
7. Once in the "Microsoft Graph" view, select "Application permissions"
|
||||
8. Finally, search for "Directory", "Policy" and "UserAuthenticationMethod" select the following permissions:
|
||||
- **Microsoft Entra ID permissions**: Used to retrieve metadata from the identity assumed by Prowler and for specific Entra checks (not mandatory in order to execute the tool). The permissions required by the tool are the following:
|
||||
- `Directory.Read.All`
|
||||
- `Policy.Read.All`
|
||||
- `UserAuthenticationMethod.Read.All`
|
||||

|
||||
- **Subscription scope permissions**: Required to launch the checks against your resources and mandatory to run the tool. The following built-in RBAC roles must be added, per subscription, to the entity that is going to be assumed by the tool:
|
||||
- `Reader`
|
||||
- `ProwlerRole` (custom role defined in [prowler-azure-custom-role](https://github.com/prowler-cloud/prowler/blob/master/permissions/prowler-azure-custom-role.json))
|
||||
|
||||
To assign the permissions, follow the instructions in the [Microsoft Entra ID permissions](../tutorials/azure/create-prowler-service-principal.md#assigning-the-proper-permissions) section and the [Azure subscriptions permissions](../tutorials/azure/subscriptions.md#assigning-proper-permissions) section, respectively.
|
||||
|
||||
#### Subscriptions scope
|
||||
#### Checks that require ProwlerRole
|
||||
|
||||
Regarding the subscription scope, Prowler by default scans all the subscriptions that is able to list, so it is required to add the following RBAC builtin roles per subscription to the entity that is going to be assumed by the tool:
|
||||
The following checks require the `ProwlerRole` custom role to be executed. If you want to run them, make sure you have assigned the role to the identity that Prowler will assume:
|
||||
|
||||
- `Security Reader`
|
||||
- `Reader`
|
||||
|
||||
To assign this roles, follow the instructions:
|
||||
|
||||
1. Access your subscription, then select your subscription.
|
||||
2. Select "Access control (IAM)".
|
||||
3. In the overview, select "Roles"
|
||||

|
||||
4. Click on "+ Add" and select "Add role assignment"
|
||||
5. In the search bar, type `Security Reader`, select it and click on "Next"
|
||||
6. In the Members tab, click on "+ Select members" and add the members you want to assign this role.
|
||||
7. Click on "Review + assign" to apply the new role.
|
||||
|
||||
*Repeat these steps for `Reader` role*
|
||||
- `app_function_access_keys_configured`
|
||||
- `app_function_ftps_deployment_disabled`
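If you only want to run these checks, you can filter the execution by check name; the flag below is an assumption based on recent Prowler versions, so verify it against `prowler azure --help`:

```console
prowler azure --checks app_function_access_keys_configured app_function_ftps_deployment_disabled
```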
|
||||
|
||||
## Google Cloud
|
||||
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 240.29 285.79"><defs><style>.cls-1{fill:url(#linear-gradient);}.cls-2{fill:#71be44;}</style><linearGradient id="linear-gradient" x1="157.45" y1="97.85" x2="211.7" y2="97.85" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#5a9b37"/><stop offset="1" stop-color="#71be44"/></linearGradient></defs><circle class="cls-1" cx="148.2" cy="97.85" r="67.45"/><path class="cls-2" d="M66.28,30.4H148.2a0,0,0,0,1,0,0V185.35a81.93,81.93,0,0,1-81.93,81.93h0a0,0,0,0,1,0,0V30.4A0,0,0,0,1,66.28,30.4Z"/></svg>
|
||||
|
Before Width: | Height: | Size: 635 B |
|
Before Width: | Height: | Size: 8.7 KiB |
BIN
docs/img/add-reader-role.gif
Normal file
|
After Width: | Height: | Size: 1.4 MiB |
BIN
docs/img/add-sub-to-management-group.gif
Normal file
|
After Width: | Height: | Size: 357 KiB |
|
Before Width: | Height: | Size: 283 KiB After Width: | Height: | Size: 351 KiB |
BIN
docs/img/create-management-group.gif
Normal file
|
After Width: | Height: | Size: 688 KiB |
BIN
docs/img/dashboard.png
Normal file
|
After Width: | Height: | Size: 746 KiB |
|
Before Width: | Height: | Size: 848 KiB After Width: | Height: | Size: 258 KiB |
|
Before Width: | Height: | Size: 631 KiB |
|
Before Width: | Height: | Size: 348 KiB |
BIN
docs/img/prowler-cli-quick.gif
Normal file
|
After Width: | Height: | Size: 552 KiB |
|
Before Width: | Height: | Size: 11 KiB |
|
Before Width: | Height: | Size: 9.2 KiB After Width: | Height: | Size: 21 KiB |
|
Before Width: | Height: | Size: 11 KiB After Width: | Height: | Size: 22 KiB |